+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build relwithdebinfo -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.lplzmd7qPM --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-relwithdebinfo
Configuring dependencies for platform tools
[2 ymakes processing] [8132/8132 modules configured] [2261/4529 modules rendered]
[2 ymakes processing] [8132/8132 modules configured] [4468/4529 modules rendered]
[2 ymakes processing] [8132/8132 modules configured] [4529/4529 modules rendered]
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [8138/8138 modules configured] [4529/4529 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES
| 1.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap
| 5.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a
| 5.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a
| 5.7%| PREPARE $(VCS)
| 5.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a
| 5.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a
| 5.9%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a
| 6.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a
| 8.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/host/libcore-kqp-host.a
| 8.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a
| 8.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a
| 8.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a
|12.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a
|12.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a
|13.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a
|13.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a
|13.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a
|14.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a
|14.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a
|14.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a
|14.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a
|14.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a
|14.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a
|15.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a
|15.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a
|15.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a
|15.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a
|15.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a
|15.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a
|15.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a
|15.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a
|15.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a
|15.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a
|15.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a
|15.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a
|15.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a
|16.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a
|16.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a
|16.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a
|16.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a
|16.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a
|16.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a
|16.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a
|16.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a
|16.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a
|16.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a
|16.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a
|16.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a
|16.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a
|16.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a
|17.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a
|17.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a
|17.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a
|17.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a
|17.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a
|17.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a
|17.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a
|17.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a
|17.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a
|18.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a
|18.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a
|18.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/procfs/libyt-library-procfs.a
|18.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_wide_read.cpp
|18.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a
|18.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a
|18.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a
|18.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a
|18.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a
|18.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a
|18.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a
|18.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a
|19.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a
|19.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a
|19.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a
|19.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a
|20.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a
|20.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a
|20.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a
|21.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a
|21.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a
|21.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a
|21.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/libessentials-parser-common.a
|22.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a
|22.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a
|22.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a
|22.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a
|22.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a
|22.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a
|23.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a
|23.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a
|23.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a
|23.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a
|23.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a
|23.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.global.a
|23.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a
|23.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a
|23.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a
|24.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a
|24.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a
|24.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a
|24.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a
|24.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a
|24.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a
|24.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a
|24.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a
|25.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a
|25.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a
|25.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a
|25.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a
|25.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/histogram/libessentials-core-histogram.a
|23.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a
|23.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a
|23.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a
|23.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a
|24.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a
|24.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/langver/libessentials-core-langver.a
|24.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a
|24.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a
|24.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a
|24.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a
|24.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/proto/libfile_storage-http_download-proto.a
|24.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a
|24.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a
|24.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a
|24.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a
|25.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a
|25.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a
|25.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a
|25.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a
|26.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a
|25.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a
|26.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a
|26.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a
|26.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a
|26.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a
|27.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a
|27.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a
|27.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a
|27.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a
|27.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a
|27.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a
|27.6%| PREPARE $(YMAKE_PYTHON3-4256832079)
|27.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/libyt-client-arrow.a
|27.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a
|27.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a
|28.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a
|28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a
|28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a
|28.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a
|28.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/fbs/libclient-arrow-fbs.a
|28.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a
|28.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a
|28.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a
|28.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a
|28.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a
|29.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a
|29.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a
|29.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/langver/libessentials-public-langver.a
|29.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a
|29.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a
|29.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a
|29.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a
|29.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a
|29.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a
|29.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a
|30.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a
|30.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a
|30.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a
|30.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a
|30.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/common/libproviders-solomon-common.a
|30.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a
|30.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a
|30.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a
|30.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a
|30.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a
|30.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a
|31.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a
|31.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a
|31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a
|31.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a
|31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a
|31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a
|31.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a
|32.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a
|32.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a
|31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a
|31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a
|32.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a
|32.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a
|32.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a
|32.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a
|32.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a
|32.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a
|33.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a
|33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a
|33.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a
|33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a
|33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a
|33.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a
|33.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a
|33.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a
|33.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a
|33.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a
|33.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a
|34.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a
|34.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a
|34.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a
|33.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a
|33.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a
|33.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a
|34.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a
|34.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a
|34.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a
|34.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a
|34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a
|34.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a
|34.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a
|34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a
|34.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a
|35.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a
|35.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a
|35.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/formats/libyt-client-formats.a
|35.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a
|35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a
|35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a
|35.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a
|35.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3_ansi/libv1-lexer-antlr3_ansi.a
|35.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a
|35.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a
|35.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a
|36.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a
|36.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a
|35.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a
|36.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a
|36.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a
|36.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a
|36.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a
|36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a
|36.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a
|36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a
|37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a
|37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a
|37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a
|37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a
|37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a
|37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signals/libydb-library-signals.a
|37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a
|37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a
|37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a
|37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a
|37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a
|38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a
|38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a
|38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a
|37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a
|37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a
|37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a
|37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/accessservice/libclient-nc_private-accessservice.a
|38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a
|38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a
|38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a
|38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a
|38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a
|38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a
|38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a
|38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a
|38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a
|38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a
|38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a
|38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a
|38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a
|38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a
|38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a
|38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a
|39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a
|39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a
|39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3/libv1-lexer-antlr3.a
|39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a
|39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a
|39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a
|39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a
|39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a
|39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a
|39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a
|39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a
|39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a
|39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a
|39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a
|39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a
|40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a
|39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a
|40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a
|40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a
|40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a
|40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a
|40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a
|40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a
|40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a
|40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a
|40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a
|40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a
|40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a
|40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a
|40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a
|41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a
|41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a
|41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a
|41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a
|41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a
|41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a
|41.4%| PREPARE $(LLD_ROOT-3808007503)
|41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a
|41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a
|42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a
|42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a
|42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a
|42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a
|42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a
|42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a
|42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a
|42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a
|42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a
|43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a
|43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a
|43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a
|43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a
|43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a
|43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a
|43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a
|43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a
|43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a
|43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a
|43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a
|43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a
|43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a
|44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a
|44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a
|44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a
|44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a
|44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a
|44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a
|44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a
|44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a
|44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a
|44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a
|44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a
|44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a
|44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a
|44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/libcore-persqueue-codecs.a
|44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a
|44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a
|44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a
|44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a
|44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a
|44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a
|45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a
|45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a
|45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a
|45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a
|45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a
|45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a
|45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a
|45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a
|45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a
|45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a
|45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a
|45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a
|45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a
|45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/partition_key_range/libcore-persqueue-partition_key_range.a
|45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a
|45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/config/libcore-persqueue-config.a
|46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a
|46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a
|46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/purecalc/libcore-persqueue-purecalc.a
|46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a
|46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a
|46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a
|46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a
|46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a
|46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a
|46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a
|46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a
|46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a
|46.8%| PREPARE $(PYTHON)
|46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a
|46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a
|46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a
|46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a
|46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a
|46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a
|47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a
|46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a
|46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a
|47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a
|47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a
|47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a
|47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a
|47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a
|47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a
|47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a
|47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.global.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a
|48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a
|48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a
|48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a
|48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a
|48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a
|48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a
|48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a
|49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a
|49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a
|49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a
|49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a
|49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a
|49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/tz/libcpp-type_info-tz.a
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/libydb-core-transfer.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a
|49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a
|49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a
|50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a
|50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a
|50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a
|50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a
|49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a
|50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a
|50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a
|50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a
|50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a
|50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a
|50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a
|50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a
|50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a
|50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a
|50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a
|50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a
|50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a
|50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a
|50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a
|50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a
|50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a
|50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a
|50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a
|50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a
|50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a
|50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a
|50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a
|50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a
|50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a
|50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a
|50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a
|50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a
|51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a
|51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a
|51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a
|51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a
|51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a
|51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a
|51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a
|51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a
|51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/config/liblibrary-cpp-config.a
|51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a
|51.7%| PREPARE $(CLANG_FORMAT-1286082657)
|51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/profiling/libabseil-cpp-absl-profiling.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a
|51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a
|51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a
|51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a
|51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a
|51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a
|51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a
|51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a
|51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a
|51.9%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/common_test.context
|52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a
|52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a
|52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a
|52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lua/libcontrib-libs-lua.a
|52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a
|52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a
|52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a
|52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a
|52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a
|52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a
|52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a
|51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a
|51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a
|52.0%| [CF] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp
|52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a
|52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a
|52.0%| [CF] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp
|52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a
|52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a
|52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a
|52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a
|52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a
|52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a
|52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a
|52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a
|52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a
|52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a
|52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a
|52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a
|52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a
|52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a
|52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a
|52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a
|52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a
|52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a
|52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a
|52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/common/liblibs-brotli-common.a
|52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/dec/liblibs-brotli-dec.a
|52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a
|52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a
|52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/enc/liblibs-brotli-enc.a
|52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a
|52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a
|52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a
|52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a
|52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a
|52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a
|52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a
|52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a
|52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a
|52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a
|52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a
|52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a
|52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a
|52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a
|52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a
|52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a
|52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a
|52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a
|52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a
|52.5%| PREPARE $(FLAKE8_LINTER-sbr:6561765464)
|52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a
|52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a
|52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a
|52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a
|52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a
|52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a
|52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a
|52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a
|52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a
|52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a
|52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a
|52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a
|52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libssh2/libcontrib-libs-libssh2.a
|52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a
|52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a
|52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a
|52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a
|52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a
|52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a
|52.6%| PREPARE $(FLAKE8_PY3-715603131)
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a
|52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a
|52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a
|52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a
|52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a
|52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a
|52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a
|52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a
|52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a
|52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a
|52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a
|52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a
|52.7%| [ld] {default-linux-x86_64, relwithdebinfo} $(B)/tools/flake8_linter/flake8_linter
|52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a
|52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a
|52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a
|52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a
|52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a
|52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a
|52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a
|52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a
|52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a
|52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a
|52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a
|52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a
|52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a
|52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a
|52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a
|52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a
|52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a
|52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a
|52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a
|52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a
|52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a
|52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a
|53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a
|52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a
|52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a
|52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a
|52.9%| [AR] {BAZEL_DOWNLOAD}
$(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/status/libabseil-cpp-absl-status.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |53.2%| PREPARE $(TEST_TOOL_HOST-sbr:8689590287) |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a 
|53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |53.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |53.6%| 
[AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lua/liblibrary-cpp-lua.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/insert_table/libcolumnshard-engines-insert_table.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/tcmalloc/libcpp-malloc-tcmalloc.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |53.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/libreader-sys_view-granules.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |54.1%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |54.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/proto/libbackup-common-proto.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3_ansi/libv1-proto_parser-antlr3_ansi.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3/libv1-proto_parser-antlr3.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/insert_table/libcolumnshard-normalizer-insert_table.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |54.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/base/libydb-core-base.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/libydb-core-control.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/column_converters/libyt-library-column_converters.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/yql_parser/libydb_cli-common-yql_parser.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |54.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/rows/libformats-arrow-rows.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a |54.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a 
|55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/batch/libkqp-common-batch.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/formats/libyt-library-formats.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/service/libtx-limiter-service.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/usage/libtx-limiter-usage.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/common/libimpl-ydb_internal-common.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_endpoints/libclient-impl-ydb_endpoints.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/kqp_session_common/libimpl-ydb_internal-kqp_session_common.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/db_driver_state/libimpl-ydb_internal-db_driver_state.a |53.6%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole.cpp |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/make_request/libimpl-ydb_internal-make_request.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/grpc_connections/libimpl-ydb_internal-grpc_connections.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/logger/libimpl-ydb_internal-logger.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/retry/libimpl-ydb_internal-retry.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/plain_status/libimpl-ydb_internal-plain_status.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_stats/libclient-impl-ydb_stats.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/value_helpers/libimpl-ydb_internal-value_helpers.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/session_pool/libimpl-ydb_internal-session_pool.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/thread_pool/libimpl-ydb_internal-thread_pool.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |54.8%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |54.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_write_constraint.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/signals/libyt-library-signals.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |55.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/skiff_ext/libyt-library-skiff_ext.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |55.5%| 
[AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |55.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tz_types/libyt-library-tz_types.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/global_plugins/libydb-library-global_plugins.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yajl/libcontrib-libs-yajl.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/cache/liblibrary-login-cache.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |55.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/random/libabseil-cpp-tstring-y_absl-random.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/counter_time_keeper/liblibrary-persqueue-counter_time_keeper.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a 
|55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/http-parser/libcontrib-restricted-http-parser.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/config/libydb-services-config.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signal_backtrace/libydb-library-signal_backtrace.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |56.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.global.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/libyql-dq-comp_nodes.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a 
|56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a 
|56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cancellation/libcpp-threading-cancellation.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |56.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/bridge/libblobstorage-dsproxy-bridge.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |56.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/libcore-control-lib.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |57.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/discovery/libydb-core-discovery.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/antlr4/libparser-common-antlr4.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a 
|57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |57.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |57.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |57.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateways_utils/libproviders-common-gateways_utils.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1/libproto_ast-gen-v1.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/dictionary_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi/libproto_ast-gen-v1_ansi.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/locks_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/query_compiler/kqp_mkql_compiler.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/commitoffset_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/breakpad/src/client/linux/libsrc-client-linux.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_produce_actor.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/liblibs-breakpad-src.a |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/flat_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_actor.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_v1.pb.{h, cc} |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |58.0%| PREPARE $(JDK17-472926544) |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |58.0%| PREPARE $(JDK_DEFAULT-472926544) |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/kqp_constant_folding_transformer.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4-c3/libcontrib-libs-antlr4-c3.a |58.1%| PREPARE $(WITH_JDK-sbr:7832760150) |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |58.1%| PREPARE $(WITH_JDK17-sbr:7832760150) |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.{pb.h ... 
grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/lwtrace.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split/libproto_ast-gen-v1_proto_split.a |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/kqp_statistics_transformer.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... grpc.pb.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.{pb.h ... grpc.pb.h} |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... grpc.pb.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.{pb.h ... 
grpc.pb.h} |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |58.4%| PREPARE $(CLANG-1922233694) |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/jemalloc/libcontrib-libs-jemalloc.a |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |58.1%| PREPARE $(CLANG-874354456) |58.1%| PREPARE $(CLANG18-1866954364) |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_effects.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/attributes_md5_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/message_delay_stats_ut.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/metering_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/sha256_ut.cpp |58.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svnversion.cpp |58.2%| PREPARE $(GDB) |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/infly_ut.cpp |58.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp |58.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svn_interface.c |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |58.0%| [BI] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/buildinfo_data.h |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |58.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/build_info/build_info_static.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/clickbench/ydb-tests-functional-clickbench |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_transformer.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |57.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_rules.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/libpy3contrib-python-websocket-client.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_operator.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |58.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_convert_to_physical.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.global.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_compiler/kqp_query_compiler.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.global.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.global.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/ut/ut_utils.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/topic_data_ut.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |58.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |58.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.{pb.h ... grpc.pb.h} |58.3%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/expr_nodes/dqs_expr_nodes.{gen.h ... defs.inl.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.{pb.h ... grpc.pb.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_phy_finalize.cpp |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.{pb.h ... grpc.pb.h} |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.{pb.h ... 
grpc.pb.h} |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/private.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |58.3%| PREPARE $(CLANG16-1380963495) |58.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |57.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |57.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |57.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |57.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |57.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.a |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |57.1%| PREPARE $(BLACK_LINTER-sbr:8415400280) |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/color/libinteractive-highlight-color.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/complete/libcommands-interactive-complete.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/libcommands-interactive-highlight.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |56.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/objcopy_774cbd1f10ee287899289ecb3f.o |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a >> conftest.py::flake8 [GOOD] >> docker_wrapper_test.py::flake8 [GOOD] |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/jemalloc/libcpp-malloc-jemalloc.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |57.5%| 
[ld] {default-linux-x86_64, relwithdebinfo} $(B)/tools/black_linter/black_linter |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/flake8 >> docker_wrapper_test.py::flake8 [GOOD] |57.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime_with_service_name.py::flake8 [GOOD] >> select_positive_with_service_name.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 >> test.py::flake8 [GOOD] |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |57.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part6/ydb-tests-fq-yt-kqp_yt_file-part6 |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |57.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpy3cpython-symbols-python.global.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/liblibrary-cpp-lfalloc.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/libcomplete-name-service.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/core/libv1-complete-core.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/libname-object-simple.a |57.5%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/libcomplete-name-object.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/libsql-v1-complete.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/text/libv1-complete-text.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/schema/libname-service-schema.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_ansi_antlr4/libantlr_ast-gen-v1_ansi_antlr4.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/union/libname-service-union.a |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/syntax/libv1-complete-syntax.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure_ansi/libv1-lexer-antlr4_pure_ansi.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/regex/libv1-lexer-regex.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure/libv1-lexer-antlr4_pure.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_data_erasure/ut_data_erasure.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/global/libcomplete-analysis-global.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.a |57.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/antlr4/libv1-complete-antlr4.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_antlr4/libantlr_ast-gen-v1_antlr4.a |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |57.5%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/yql_expr_nodes.{gen.h ... 
defs.inl.h} |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_kql.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp >> test_workload.py::flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/tests/flake8 >> test_workload.py::flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... 
grpc.pb.h} |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots >> test_sql_streaming.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> conftest.py::black [GOOD] >> test_clickhouse.py::black [GOOD] >> test_greenplum.py::black [GOOD] >> test_join.py::black [GOOD] >> test_mysql.py::black [GOOD] >> test_postgresql.py::black [GOOD] >> test_ydb.py::black [GOOD] |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/flake8 >> test_sql_streaming.py::flake8 [GOOD] |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |57.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |57.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part0/ydb-tests-fq-yt-kqp_yt_file-part0 |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut >> __main__.py::flake8 [GOOD] >> parser.py::flake8 [GOOD] >> conftest.py::black [GOOD] >> test_join.py::black [GOOD] |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/analytics/black >> test_ydb.py::black [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp >> test_tpcds.py::flake8 [GOOD] >> test_tpch_spilling.py::flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/streaming/black >> test_join.py::black [GOOD] |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |57.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/docs/generator/flake8 >> parser.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> test_workload.py::flake8 [GOOD] |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/large/flake8 >> test_tpch_spilling.py::flake8 [GOOD] |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |57.1%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |56.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/flake8 >> test_workload.py::flake8 [GOOD] |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |57.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part14/ydb-tests-fq-yt-kqp_yt_file-part14 |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/large_results/kqp_scriptexec_results_ut.cpp >> test_copy_table.py::flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_switchable_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_clock_pro_ut.cpp >> test_commit.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |56.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/flake8 >> test_copy_table.py::flake8 [GOOD] |57.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part5/ydb-tests-fq-yt-kqp_yt_file-part5 |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp >> test_pdisk_format_info.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] >> test_self_heal.py::flake8 [GOOD] >> test_tablet_channel_migration.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw >> test_parametrized_queries.py::flake8 [GOOD] >> test_dump_restore.py::flake8 [GOOD] >> test_large_import.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> scenario.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_case.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/datashard_ut_data_cleanup.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/flake8 >> test_parametrized_queries.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp >> test_async_replication.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_large.cpp |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/flake8 >> test_dump_restore.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/flake8 >> test_tablet_channel_migration.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/flake8 >> test_async_replication.py::flake8 [GOOD] |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/s3_import/large/flake8 >> test_large_import.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> tablet_scheme_tests.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_query.py::flake8 [GOOD] >> test_s3.py::flake8 [GOOD] |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/join/flake8 >> test_case.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_insert_table.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp >> test_vector_index.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_query_cache.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 >> test.py::flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp >> column_table_helper.py::flake8 [GOOD] >> range_allocator.py::flake8 [GOOD] >> s3_client.py::flake8 [GOOD] >> thread_helper.py::flake8 [GOOD] >> time_histogram.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_client.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_auditlog.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp 
|57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_logs_engine.cpp |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/lib/flake8 >> test_s3.py::flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp >> test_base.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] >> test_sql_logic.py::flake8 [GOOD] >> test_stream_query.py::flake8 [GOOD] >> hive_matchers.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] >> test_kill_tablets.py::flake8 [GOOD] >> test_example.py::flake8 [GOOD] |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/flake8 >> test_vector_index.py::flake8 [GOOD] |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 >> test.py::flake8 [GOOD] |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/common/flake8 >> ydb_client.py::flake8 [GOOD] |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp >> test_ttl.py::flake8 [GOOD] >> test_secondary_index.py::flake8 [GOOD] |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/flake8 >> test_stream_query.py::flake8 [GOOD] |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/flake8 >> test_auditlog.py::flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/example/flake8 >> test_example.py::flake8 [GOOD] |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 >> test.py::flake8 [GOOD] |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp >> test_mixed.py::flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/flake8 >> test_secondary_index.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp >> base.py::flake8 [GOOD] >> test_tpch_import.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/ttl/flake8 >> 
test_ttl.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/mixedpy/flake8 >> test_mixed.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp >> test_account_actions.py::flake8 [GOOD] >> test_acl.py::flake8 [GOOD] >> test_counters.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] >> test_ping.py::flake8 [GOOD] >> test_queue_attributes_validation.py::flake8 [GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_queue_tags.py::flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp >> test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] >> overlapping_portions.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/s3_import/flake8 >> test_tpch_import.py::flake8 [GOOD] |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/oom/flake8 >> overlapping_portions.py::flake8 [GOOD] |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp >> test.py::flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/breakpad/libydb-library-breakpad.global.a |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 >> test.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |57.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_data_cleanup.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp >> test.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_connection.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_proxy.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pgwire.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/main.cpp |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 >> test.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/range_treap_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp >> conftest.py::flake8 [GOOD] >> test_stats_mode.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test_serializable.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a >> test.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 >> test.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/large_serializable/flake8 >> test_serializable.py::flake8 [GOOD] |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp >> test_actorsystem.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 >> test.py::flake8 [GOOD] |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a >> test_cms_erasure.py::flake8 [GOOD] >> test_cms_restart.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> test_session_pool.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_serverless.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD] >> test_select.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache_actor.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp >> test_insert_restarts.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_unknown_data_source.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_encryption.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> select_positive_with_schema.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_partitioning.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 >> test.py::flake8 
[GOOD] |57.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD] |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/streaming/flake8 >> test_join.py::flake8 [GOOD] |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/log/tests/flake8 >> test_workload.py::flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/flake8 >> test_select.py::flake8 [GOOD] |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/transfer/tests/flake8 >> test_workload.py::flake8 [GOOD] |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/tests/flake8 >> test.py::flake8 [GOOD] |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 >> test.py::flake8 [GOOD] |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD] |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> test.py::flake8 [GOOD] |57.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/partitioning/flake8 >> test_partitioning.py::flake8 [GOOD] |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/objcopy_9f29b589555ed64086e5eadccf.o >> test_clickbench.py::flake8 [GOOD] >> test_diff_processing.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> test_sql.py::flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp >> test.py::flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] >> test_alter_ops.py::flake8 [GOOD] >> test_copy_ops.py::flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/flake8 >> test_tpch.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 >> test.py::flake8 [GOOD] |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] >> test_update_script_tables.py::flake8 [GOOD] |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 >> test.py::flake8 
[GOOD] |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] >> alter_compression.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libpy3api-service-protos.global.a |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/flake8 >> base.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 >> test.py::flake8 [GOOD] |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libpy3providers-s3-proto.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a >> test.py::flake8 [GOOD] |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/include/libclient-ydb_topic-include.a |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 >> test.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.global.a >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 >> test.py::flake8 [GOOD] |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 >> test.py::flake8 [GOOD] >> test_kqprun_recipe.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_schemeshard_limits.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp >> test_cp_ic.py::flake8 [GOOD] >> test_dispatch.py::flake8 [GOOD] >> test_retry.py::flake8 [GOOD] >> test_retry_high_rate.py::flake8 [GOOD] >> test_log_scenario.py::flake8 [GOOD] >> zip_bomb.py::flake8 [GOOD] >> test_quoting.py::flake8 [GOOD] |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 >> test.py::flake8 [GOOD] |57.3%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 >> test.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] >> test_disk.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp >> test_quota_exhaustion.py::flake8 [GOOD] >> common.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_rename.py::flake8 [GOOD] >> test_ttl.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/flake8 >> test_retry_high_rate.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/flake8 >> zip_bomb.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/flake8 >> test_tablet.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp >> test.py::flake8 [GOOD] >> ydb-tests-tools-pq_read-test::import_test [GOOD] |57.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/flake8 >> test_quota_exhaustion.py::flake8 [GOOD] |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp >> test_common.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] >> test_alloc_default.py::flake8 [GOOD] >> test_dc_local.py::flake8 [GOOD] >> test_result_limits.py::flake8 [GOOD] >> test_scheduling.py::flake8 [GOOD] |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/pq_read/test/import_test >> ydb-tests-tools-pq_read-test::import_test [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/flake8 >> test.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp >> test_liveness_wardens.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/flake8 >> 
test_yandex_cloud_queue_counters.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp >> test_break.py::flake8 [GOOD] >> kikimr_config.py::flake8 [GOOD] >> ydb-tests-library-ut::import_test [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_ut.cpp |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD] >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/import_test >> ydb-tests-library-ut::import_test [GOOD] |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/flake8 >> kikimr_config.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/minidumps/flake8 >> test_break.py::flake8 [GOOD] |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] |57.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/k8s_api/libpy3tools-cfg-k8s_api.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/cms/cms_ut.cpp |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... 
grpc.pb.h} |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/spec_patch.cpp |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/udf_resolver.pb.{h, cc} >> conftest.py::flake8 [GOOD] >> test_auth_system_views.py::flake8 [GOOD] >> test_create_users.py::flake8 [GOOD] >> test_create_users_strict_acl_checks.py::flake8 [GOOD] >> test_db_counters.py::flake8 [GOOD] >> test_dynamic_tenants.py::flake8 [GOOD] >> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD] >> test_storage_config.py::flake8 [GOOD] >> test_system_views.py::flake8 [GOOD] >> test_tenants.py::flake8 [GOOD] >> test_user_administration.py::flake8 [GOOD] >> test_users_groups_with_acl.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/flake8 >> test_users_groups_with_acl.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_inserts.py::flake8 [GOOD] >> test_kv.py::flake8 [GOOD] |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a >> test_leader_start_inflight.py::flake8 [GOOD] >> test_config_migration.py::flake8 [GOOD] >> test_config_with_metadata.py::flake8 [GOOD] >> test_configuration_version.py::flake8 [GOOD] >> test_distconf.py::flake8 [GOOD] >> test_generate_dynamic_config.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/solomon/flake8 >> test.py::flake8 [GOOD] |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/flake8 >> test_kv.py::flake8 [GOOD] |57.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests >> conftest.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_ctas.py::flake8 [GOOD] >> test_yt_reading.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_ydb_backup.py::flake8 [GOOD] >> test_ydb_flame_graph.py::flake8 [GOOD] >> test_ydb_impex.py::flake8 [GOOD] >> test_ydb_recursive_remove.py::flake8 [GOOD] >> test_ydb_scheme.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD] |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/flake8 >> test_generate_dynamic_config.py::flake8 [GOOD] |57.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD] >> test_multinode_cluster.py::flake8 [GOOD] >> test_recompiles_requests.py::flake8 [GOOD] >> test_compatibility.py::flake8 [GOOD] >> test_example.py::flake8 [GOOD] >> test_export_s3.py::flake8 [GOOD] >> test_followers.py::flake8 [GOOD] >> test_rolling.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] >> test_stress.py::flake8 [GOOD] >> udf/test_datetime2.py::flake8 [GOOD] >> udf/test_digest.py::flake8 [GOOD] >> test_cte.py::flake8 [GOOD] |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_import/flake8 >> test_yt_reading.py::flake8 [GOOD] |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |57.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/compatibility/flake8 >> udf/test_digest.py::flake8 [GOOD] |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/flake8 >> test_recompiles_requests.py::flake8 [GOOD] |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql_simple_file/libproviders-common-mkql_simple_file.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/kqp/plan2svg/flake8 >> test_cte.py::flake8 [GOOD] |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a >> test_restarts.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_clickhouse.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp >> test_greenplum.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 >> test.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a >> test_mysql.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp >> test_postgresql.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp >> test_ydb.py::flake8 [GOOD] |57.4%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp >> test_clickbench.py::flake8 [GOOD] >> test_external.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> test_bulkupserts_tpch.py::flake8 [GOOD] >> test_insert_delete_duplicate_records.py::flake8 [GOOD] >> test_insertinto_selectfrom.py::flake8 [GOOD] >> test_tiering.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> test_bad_syntax.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_big_state.py::flake8 [GOOD] >> test_continue_mode.py::flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> test_disposition.py::flake8 [GOOD] >> test_eval.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp >> test_kill_pq_bill.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp >> test_mem_alloc.py::flake8 [GOOD] |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a >> test_metrics_cleanup.py::flake8 [GOOD] |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a >> test_split_merge.py::flake8 [GOOD] >> test_pq_read_write.py::flake8 [GOOD] |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a >> test_public_metrics.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp >> test_read_rules_deletion.py::flake8 [GOOD] |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/analytics/flake8 >> test_ydb.py::flake8 [GOOD] >> allure_utils.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> test_recovery.py::flake8 [GOOD] >> test_recovery_match_recognize.py::flake8 [GOOD] >> test_recovery_mz.py::flake8 [GOOD] >> results_processor.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> ydb_cli.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_restart_query.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> ydb_cluster.py::flake8 [GOOD] >> test_row_dispatcher.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test_select_1.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_select_limit.py::flake8 [GOOD] >> test_select_limit_db_id.py::flake8 [GOOD] >> test_select_timings.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] >> test_yq_streaming.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/load/flake8 >> test_tpch.py::flake8 [GOOD] |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/large/flake8 >> test_workload_manager.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |57.5%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp >> test.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 >> test.py::flake8 [GOOD] |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/lib/flake8 >> ydb_cluster.py::flake8 [GOOD] |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 >> test.py::flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD] |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD] |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/flake8 >> test_split_merge.py::flake8 [GOOD] |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> test.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/host/kqp_transform.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> data_correctness.py::flake8 [GOOD] >> data_migration_when_alter_ttl.py::flake8 [GOOD] >> tier_delete.py::flake8 [GOOD] >> ttl_delete_s3.py::flake8 [GOOD] >> ttl_unavailable_s3.py::flake8 [GOOD] >> unstable_connection.py::flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/api/etc_client.cpp |57.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |57.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 >> test.py::flake8 [GOOD] |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h} |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} >> conftest.py::flake8 [GOOD] >> test_alter_compression.py::flake8 [GOOD] >> test_alter_tiering.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_read_update_write_load.py::flake8 [GOOD] >> test_scheme_load.py::flake8 [GOOD] >> test_simple.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_http_api.py::flake8 [GOOD] |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |57.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/flake8 >> unstable_connection.py::flake8 [GOOD] |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... grpc.pb.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |57.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/flake8 >> test_simple.py::flake8 [GOOD] |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... grpc.pb.h} |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... grpc.pb.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/main.cpp |57.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD] |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp >> test_fifo_messaging.py::flake8 [GOOD] >> test_generic_messaging.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... 
grpc.pb.h} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.{pb.h ... grpc.pb.h} |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut >> ydb-tests-datashard-partitioning::import_test [GOOD] >> test_dml.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... grpc.pb.h} |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... grpc.pb.h} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} >> test_s3.py::flake8 [GOOD] |57.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD] |57.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/flake8 >> test_dml.py::flake8 [GOOD] |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_common.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |57.8%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/expr_nodes/dq_expr_nodes.{gen.h ... 
defs.inl.h} |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |57.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/partitioning/import_test >> ydb-tests-datashard-partitioning::import_test [GOOD] |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |57.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |57.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/flake8 >> test_s3.py::flake8 [GOOD] |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/yql_mount.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/host/kqp_explain_prepared.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview_reboots/ut_sysview_reboots.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/host/kqp_host.cpp |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/objcopy_4f055c289b3de8f2a1e827ae5c.o |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_phase.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |57.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/api/transaction.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/host/kqp_gateway_proxy.cpp |57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/direct_read_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |58.0%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/kqp_expr_nodes.{gen.h ... defs.inl.h} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/dq_effects.pb.{h, cc} |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... 
grpc.pb.h} >> ydb-tests-datashard-split_merge::import_test [GOOD] |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/blob_range.pb.{h, cc} |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |58.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/import_test >> ydb-tests-datashard-split_merge::import_test [GOOD] |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/fields.pb.{h, cc} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |58.0%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.{gen.h ... defs.inl.h} |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/target_cluster_injecting_channel.cpp |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.{pb.h ... 
grpc.pb.h} >> ydb-tests-functional-clickbench::import_test [GOOD] |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |58.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/import_test >> ydb-tests-functional-clickbench::import_test [GOOD] |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_build_txs.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/host/kqp_translate.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp >> ydb-tests-functional-tpc-large::import_test [GOOD] |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/retry_config.pb.{h, cc} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/connector.pb.{h, cc} |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/connector.{pb.h ... 
grpc.pb.h} |58.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/large/import_test >> ydb-tests-functional-tpc-large::import_test [GOOD] >> conftest.py::flake8 [GOOD] |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_build_phy_query.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/kqp_query_plan.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_runner.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut >> s3_helpers.py::flake8 [GOOD] >> test_bindings_0.py::flake8 [GOOD] >> test_bindings_1.py::flake8 [GOOD] >> test_compressions.py::flake8 [GOOD] >> test_early_finish.py::flake8 [GOOD] >> test_explicit_partitioning_0.py::flake8 [GOOD] >> test_explicit_partitioning_1.py::flake8 [GOOD] >> test_format_setting.py::flake8 [GOOD] >> test_formats.py::flake8 [GOOD] >> test_inflight.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_push_down.py::flake8 [GOOD] >> test_s3_0.py::flake8 [GOOD] >> test_s3_1.py::flake8 [GOOD] >> test_size_limit.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] >> test_streaming_join.py::flake8 [GOOD] >> test_test_connection.py::flake8 [GOOD] >> test_validation.py::flake8 [GOOD] >> test_ydb_over_fq.py::flake8 [GOOD] >> test_yq_v2.py::flake8 [GOOD] |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |58.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD] |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |58.1%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |58.1%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.{pb.h ... grpc.pb.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/pgwire/pgwire |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... grpc.pb.h} |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/kqp_query_blocks_transformer.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/kqp_opt.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... grpc.pb.h} |58.1%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/yql_res_expr_nodes.{gen.h ... defs.inl.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... 
grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/task_controller.pb.{h, cc} |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... 
grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/kqp_column_statistics_requester.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_partition_reader.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp >> ydb-tests-functional-autoconfig::import_test [GOOD] |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/host/kqp_statement_rewrite.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |57.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/import_test >> ydb-tests-functional-autoconfig::import_test [GOOD] |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... 
grpc.pb.h} >> ydb-tests-postgres_integrations-go-libpq::import_test [GOOD] |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/tools/sql2yql/sql2yql |57.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... grpc.pb.h} |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/import_test >> ydb-tests-postgres_integrations-go-libpq::import_test [GOOD] |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/check/libv1-lexer-check.a |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/essentials/tools/sql2yql/sql2yql.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |56.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} >> ydb-tests-fq-yds::import_test [GOOD] |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |56.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |56.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... 
grpc.pb.h} >> ydb-tests-functional-ydb_cli::import_test [GOOD] |56.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/import_test >> ydb-tests-fq-yds::import_test [GOOD] |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/portion_info.pb.{h, cc} |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |56.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.{pb.h ... grpc.pb.h} |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp >> ydb-tests-stress-olap_workload-tests::import_test [GOOD] |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute_scheduler_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_prefixed_vector_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/tests/import_test >> ydb-tests-stress-olap_workload-tests::import_test [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |56.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/import_test >> ydb-tests-functional-ydb_cli::import_test [GOOD] |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_vector_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_named_expressions_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |56.9%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut.cpp >> ydb-tests-functional-sqs-with_quotas::import_test [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/import_test >> ydb-tests-functional-sqs-with_quotas::import_test [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |56.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/objcopy_484246668d943fbae3b476ec7d.o |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |56.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |56.8%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |56.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part12/ydb-tests-fq-yt-kqp_yt_file-part12 |56.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/plan2svg/ydb-tests-functional-kqp-plan2svg |55.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt >> ydb-tests-stress-log-tests::import_test [GOOD] |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |55.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/log/tests/import_test >> ydb-tests-stress-log-tests::import_test [GOOD] |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |55.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_find_split_key.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |55.9%| 
[AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp >> ydb-tests-stress-oltp_workload-tests::import_test [GOOD] |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |56.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/tests/import_test >> ydb-tests-stress-oltp_workload-tests::import_test [GOOD] |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/cache_eviction_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/list_objects_in_s3_export_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/backup_path_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/fetch_request_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |56.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/encrypted_backup_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/list_all_topics_ut.cpp |56.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/objcopy_1d0482d354dc270d18e7123281.o |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/microseconds_sliding_window_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/quota_tracker_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/type_codecs_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor_composite/ut/ut_simple.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview/ut_sysview.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp >> ydb-tests-functional-audit::import_test [GOOD] |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |56.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |56.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... 
grpc.pb.h} |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_phy_check.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} >> ydb-tests-functional-hive::import_test [GOOD] |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |56.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/import_test >> ydb-tests-functional-audit::import_test [GOOD] |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |56.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/import_test >> ydb-tests-functional-hive::import_test [GOOD] >> ydb-tests-functional-scheme_tests::import_test [GOOD] |56.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |56.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_cluster_discovery.pb.{h, cc} |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |56.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/import_test >> ydb-tests-functional-scheme_tests::import_test [GOOD] |56.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |56.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... 
grpc.pb.h} |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp >> ydb-tests-functional-kqp-plan2svg::import_test [GOOD] |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |56.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/kqp/plan2svg/import_test >> ydb-tests-functional-kqp-plan2svg::import_test [GOOD] |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part4/ydb-tests-fq-yt-kqp_yt_file-part4 |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |56.3%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... grpc.pb.h} |56.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... 
grpc.pb.h} |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |56.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/trace_ut.cpp |56.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/objcopy_1406195445f45d950dda89fcd8.o |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp 
|57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_login_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/olap/high_load/read_update_write.cpp |63.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |63.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |64.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |65.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |66.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |66.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut >> ydb-tests-functional-encryption::import_test [GOOD] |66.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |67.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |67.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |67.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |67.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/import_test >> ydb-tests-functional-encryption::import_test [GOOD] |67.4%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |70.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |70.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |70.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |70.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |71.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |72.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |73.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |73.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |73.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |74.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |74.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |74.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |75.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |75.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |75.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |75.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |76.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |76.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |76.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |77.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |77.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part8/ydb-tests-fq-yt-kqp_yt_file-part8 |78.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |78.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_type_ann.cpp |78.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |78.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |78.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut >> ydb-tests-functional-restarts::import_test [GOOD] |78.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |79.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |79.2%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/restarts/import_test >> ydb-tests-functional-restarts::import_test [GOOD] |79.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydb/ydb |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |81.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |81.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |82.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |82.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |82.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |82.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |82.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |82.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |82.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |82.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |82.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |82.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |82.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |82.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |82.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |82.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |82.1%| [LD] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |82.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |82.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |82.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |82.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |82.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |82.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |82.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |82.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |82.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |82.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |81.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |81.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |81.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |81.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |81.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |81.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |81.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/yt/yt/client/libyt-yt-client.a |81.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |81.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |81.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/minidumps/ydb-tests-functional-minidumps |81.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |81.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |81.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |81.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/config/bsconfig_ut.cpp |81.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |81.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |81.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |81.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/objcopy_5fddfa8f171a3216cad65e02ab.o |81.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |81.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |81.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/statistics/database/ut/ut_database.cpp |81.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |81.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |81.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |81.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |81.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |81.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |81.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tools/sql2yql/sql2yql |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |81.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |81.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |81.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |81.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |81.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |81.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |81.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |81.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |81.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/ut/ydb-services-config-ut |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |80.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp >> ydb-tests-fq-mem_alloc::import_test [GOOD] |80.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/mem_alloc/import_test >> ydb-tests-fq-mem_alloc::import_test [GOOD] |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |80.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |79.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |80.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |80.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |80.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |79.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ut_data_erasure_reboots.cpp |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |79.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |79.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |79.3%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |79.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part10/ydb-tests-fq-yt-kqp_yt_file-part10 |79.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |79.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |78.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |78.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |78.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |78.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part2/ydb-tests-fq-yt-kqp_yt_file-part2 |78.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |78.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |78.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |78.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |78.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |78.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |77.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |78.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |77.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |76.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |76.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore |76.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |76.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |75.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |75.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |75.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large >> ydb-tests-functional-minidumps::import_test [GOOD] |75.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |75.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/minidumps/import_test >> ydb-tests-functional-minidumps::import_test [GOOD] >> ydb-tests-fq-plans::import_test [GOOD] |74.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/ydb-tests-compatibility |74.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/large_serializable/ydb-tests-functional-large_serializable |74.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/plans/import_test >> ydb-tests-fq-plans::import_test [GOOD] |73.5%| [LD] {BAZEL_DOWNLOAD} $(B)/library/recipes/docker_compose/docker_compose |73.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part16/ydb-tests-fq-yt-kqp_yt_file-part16 >> ydb-tests-fq-restarts::import_test [GOOD] |73.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part18/ydb-tests-fq-yt-kqp_yt_file-part18 |73.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/import_test >> ydb-tests-fq-restarts::import_test [GOOD] |71.7%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl >> ydb-tests-functional-wardens::import_test [GOOD] |71.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/functional/ydb-core-transfer-ut-functional |71.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/wardens/import_test >> ydb-tests-functional-wardens::import_test [GOOD] |71.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part13/ydb-tests-fq-yt-kqp_yt_file-part13 >> kqprun_recipe::import_test [GOOD] |70.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |69.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/recipe/import_test >> kqprun_recipe::import_test [GOOD] |69.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session >> ydb-tests-functional-query_cache::import_test [GOOD] |68.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/import_test >> ydb-tests-functional-query_cache::import_test [GOOD] >> ydb-tests-fq-multi_plane::import_test [GOOD] |68.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator_grpc/solomon_recipe_grpc |68.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part15/ydb-tests-fq-yt-kqp_yt_file-part15 >> ydb-tests-functional-config::import_test [GOOD] |68.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/import_test >> ydb-tests-fq-multi_plane::import_test [GOOD] |68.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/import_test >> ydb-tests-functional-config::import_test [GOOD] >> ydb-tests-functional-sqs-large::import_test [GOOD] >> ydb-tests-stress-kv-tests::import_test [GOOD] >> ydb-tests-functional-large_serializable::import_test [GOOD] |67.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/libpy3olap-docs-generator.global.a |67.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/import_test >> ydb-tests-functional-sqs-large::import_test [GOOD] |67.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part17/ydb-tests-fq-yt-kqp_yt_file-part17 >> ydb-tests-compatibility::import_test [GOOD] |67.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/import_test >> ydb-tests-stress-kv-tests::import_test [GOOD] |67.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/large_serializable/import_test >> ydb-tests-functional-large_serializable::import_test [GOOD] |67.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/compatibility/import_test >> ydb-tests-compatibility::import_test [GOOD] |67.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |66.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/large/ydb-tests-olap-s3_import-large |66.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import |66.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |65.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |65.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |65.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard >> ydb-tests-datashard-dump_restore::import_test [GOOD] |64.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/import_test >> ydb-tests-datashard-dump_restore::import_test [GOOD] |64.7%| [LD] {BAZEL_DOWNLOAD} 
|64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert
|64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests
>> ydb-tests-fq-yt-kqp_yt_import::import_test [GOOD]
|64.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_import/import_test >> ydb-tests-fq-yt-kqp_yt_import::import_test [GOOD]
>> ydb-tests-olap-s3_import-large::import_test [GOOD]
|64.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/s3_import/large/import_test >> ydb-tests-olap-s3_import-large::import_test [GOOD]
|63.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part3/ydb-tests-fq-yt-kqp_yt_file-part3
>> ydb-tests-datashard-ttl::import_test [GOOD]
|63.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/ttl/import_test >> ydb-tests-datashard-ttl::import_test [GOOD]
|62.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std
|62.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example
|62.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium
>> ydb-tests-functional-limits::import_test [GOOD]
>> ydb-tests-fq-common::import_test [GOOD]
|62.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical
|62.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests
|62.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/import_test >> ydb-tests-functional-limits::import_test [GOOD]
|62.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/common/import_test >> ydb-tests-fq-common::import_test [GOOD]
|61.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/generator
|61.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read
>> ydb-tests-functional-serializable::import_test [GOOD]
|61.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serializable/import_test >> ydb-tests-functional-serializable::import_test [GOOD]
|61.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style
|61.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api
>> ydb-tests-functional-tenants::import_test [GOOD]
|61.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests
|61.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/import_test >> ydb-tests-functional-tenants::import_test [GOOD]
|60.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import
|60.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution
>> ydb-tests-tools-nemesis-ut::import_test [GOOD]
>> ydb-tests-functional-scheme_shard::import_test [GOOD]
|60.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename
|60.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/import_test >> ydb-tests-functional-scheme_shard::import_test [GOOD]
|60.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/import_test >> ydb-tests-tools-nemesis-ut::import_test [GOOD]
>> ydb-tests-stress-transfer-tests::import_test [GOOD]
|60.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/transfer/tests/import_test >> ydb-tests-stress-transfer-tests::import_test [GOOD]
|60.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo.cpp
>> generator::import_test [GOOD]
|60.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part9/ydb-tests-fq-yt-kqp_yt_file-part9
|60.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo.cpp
|60.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/docs/generator/import_test >> generator::import_test [GOOD]
|60.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part7/ydb-tests-fq-yt-kqp_yt_file-part7
|59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table
|59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes
>> ydb-tests-tools-kqprun-tests::import_test [GOOD]
|59.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so
|59.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so
|59.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom
|59.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless
|59.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.so
|59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/tests/import_test >> ydb-tests-tools-kqprun-tests::import_test [GOOD]
>> functional-sqs-merge_split_common_table-std::import_test [GOOD]
|58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/ydb-tests-olap-load
|58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/std/import_test >> functional-sqs-merge_split_common_table-std::import_test [GOOD]
>> ydb-tests-functional-tpc-medium::import_test [GOOD]
|58.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_rules.cpp
|58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/import_test >> ydb-tests-functional-tpc-medium::import_test [GOOD]
|58.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql
>> ydb-tests-functional-canonical::import_test [GOOD]
|58.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/import_test >> ydb-tests-functional-canonical::import_test [GOOD]
|58.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_rules.cpp
|58.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_transformer.cpp
>> ydb-core-viewer-tests::import_test [GOOD]
|58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas
|58.2%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake
|58.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/tests/import_test >> ydb-core-viewer-tests::import_test [GOOD]
|58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so
|58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part1/ydb-tests-fq-yt-kqp_yt_file-part1
|58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so
|58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so
|57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize
|58.0%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a
>> ydb-tests-fq-http_api::import_test [GOOD]
|57.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so
|57.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication
|57.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_transformer.cpp
|57.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so
|57.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so
>> ydb-tests-functional-rename::import_test [GOOD]
|56.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/import_test >> ydb-tests-fq-http_api::import_test [GOOD]
|56.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so
|56.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so
|56.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so
|56.5%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql
|56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/vector/libvector_udf.so
|56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so
>> ydb-tests-functional-script_execution::import_test [GOOD]
>> ydb-tests-olap-s3_import::import_test [GOOD]
|56.1%| RESOURCE $(sbr:4966407557)
|56.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so
|56.1%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb
|56.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so
|56.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so
>> ydb-tests-olap-load::import_test [GOOD]
|55.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so
|55.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/import_test >> ydb-tests-functional-rename::import_test [GOOD]
|55.6%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql
|55.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so
|55.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so
|55.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/import_test >> ydb-tests-functional-script_execution::import_test [GOOD]
|55.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/load/import_test >> ydb-tests-olap-load::import_test [GOOD]
|55.2%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql
|55.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/s3_import/import_test >> ydb-tests-olap-s3_import::import_test [GOOD]
|55.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so
|55.2%| COMPACTING CACHE 17.9GiB
|55.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so
|55.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so
|55.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud
|55.2%| [TS] {RESULT} ydb/tests/functional/large_serializable/import_test
|55.2%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8
|55.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/rbo/kqp_operator.cpp
|55.2%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8
|55.3%| RESOURCE $(sbr:770480022)
|55.3%| [TS] {RESULT} ydb/tests/functional/audit/flake8
|55.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/flake8
|55.3%| [TS] {RESULT} ydb/tests/stress/kv/tests/import_test
|55.3%| [TS] {RESULT} ydb/tests/functional/canonical/import_test
|55.3%| [TS] {RESULT} ydb/tests/functional/minidumps/flake8
|55.3%| [TS] {RESULT} ydb/tests/functional/config/flake8
|55.3%| [TS] {RESULT} ydb/tests/functional/cms/flake8
|55.3%| [TS] {RESULT} ydb/tests/library/ut/flake8
|55.3%| [TS] {RESULT} ydb/tests/functional/rename/flake8
|55.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/transfer
|55.3%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose
|55.3%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose
|55.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so
|55.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage
|55.3%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/{recipes-docker_compose-bin.final.pkg.fake ... library/recipes/docker_compose/bin/docker-compose}
|55.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so
|55.3%| [TS] {RESULT} ydb/tests/functional/large_serializable/flake8
|55.3%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/import_test
|55.4%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8
|55.4%| [TS] {RESULT} ydb/tests/stress/log/tests/flake8
|55.4%| [TS] {RESULT} ydb/tests/fq/streaming_optimize/flake8
|55.4%| [TS] {RESULT} ydb/tests/olap/scenario/flake8
|55.4%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/import_test
|55.4%| [TS] {RESULT} ydb/tests/fq/plans/flake8
|55.4%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8
|55.4%| [TS] {RESULT} ydb/tests/datashard/dump_restore/flake8
|55.4%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8
|55.4%| [TS] {RESULT} ydb/tests/functional/autoconfig/import_test
|55.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part11/ydb-tests-fq-yt-kqp_yt_file-part11
>> ydb-tests-fq-streaming_optimize::import_test [GOOD]
|55.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/import_test >> ydb-tests-fq-streaming_optimize::import_test [GOOD]
|55.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging
|55.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp
|55.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider
|55.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_operator.cpp
|55.4%| [TS] {RESULT} ydb/tests/datashard/secondary_index/flake8
|55.4%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8
|55.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario
|55.4%| [TS] {RESULT} ydb/tests/functional/suite_tests/flake8
|55.4%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/import_test
|55.5%| [TS] {RESULT} ydb/tests/fq/common/flake8
|55.5%| [TS] {RESULT} ydb/tests/functional/clickbench/flake8
|55.5%| [TS] {RESULT} ydb/tests/functional/serverless/flake8
|55.5%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/flake8
|55.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/flake8
|55.5%| [TS] {RESULT} ydb/tests/olap/data_quotas/flake8
|55.5%| [TS] {RESULT} ydb/tests/fq/generic/analytics/black
|55.5%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/import_test
|55.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/ydb-tests-sql
|55.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so
|55.5%| [TS] {RESULT} ydb/tests/olap/lib/flake8
|55.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so
|55.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests
|55.5%| [TS] {RESULT} ydb/tests/fq/common/import_test
|55.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole.cpp
>> ydb-tests-olap-oom::import_test [GOOD]
|55.5%| [TS] {RESULT} ydb/tests/stress/mixedpy/flake8
|55.5%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/import_test
|55.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/flake8
|55.5%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8
|55.5%| [TS] {RESULT} ydb/tests/fq/plans/import_test
|55.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests
|55.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/flake8
|55.6%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/flake8
|55.6%| [TS] {RESULT} ydb/tests/fq/streaming_optimize/import_test
>> ydb-tests-functional-serverless::import_test [GOOD]
|55.6%| [TS] {RESULT} ydb/tests/datashard/partitioning/import_test
|55.6%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8
|55.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff
|55.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/oom/import_test >> ydb-tests-olap-oom::import_test [GOOD]
|55.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part19/ydb-tests-fq-yt-kqp_yt_file-part19
|55.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch
|55.6%| [TS] {RESULT} ydb/tests/datashard/split_merge/import_test
|55.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/ydb-tests-datashard-vector_index
|55.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/import_test >> ydb-tests-functional-serverless::import_test [GOOD]
|55.6%| [TS] {RESULT} ydb/tests/olap/s3_import/flake8
|55.6%| [TS] {RESULT} ydb/tests/functional/scheme_tests/import_test
|55.6%| [TS] {RESULT} ydb/tests/olap/s3_import/large/flake8
|55.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so
|55.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/flake8
|55.6%| [TS] {RESULT} ydb/tests/example/flake8
|55.6%| [TS] {RESULT} ydb/tests/olap/s3_import/import_test
|55.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/flake8
|55.7%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8
|55.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/flake8
|55.7%| [TS] {RESULT} ydb/tests/fq/mem_alloc/import_test
|55.7%| [TS] {RESULT} ydb/tests/functional/encryption/import_test
|55.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole.cpp
|55.7%| [TS] {RESULT} ydb/tests/compatibility/flake8
|55.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/python/python3_small/libpython3_udf.so
|55.7%| [TS] {RESULT} ydb/tests/datashard/copy_table/flake8
|55.7%| [TS] {RESULT} ydb/tests/olap/oom/flake8
|55.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so
|55.7%| [TS] {RESULT} ydb/tests/functional/encryption/flake8
|55.7%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8
>> ydb-tests-functional-postgresql::import_test [GOOD]
|55.7%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8
|55.7%| [TS] {RESULT} ydb/tests/functional/serverless/import_test
|55.7%| [TS] {RESULT} ydb/tests/fq/restarts/flake8
|55.7%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8
|55.7%| [TS] {RESULT} ydb/tests/fq/generic/analytics/flake8
|55.7%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test
|55.8%| [TS] {RESULT} ydb/tests/datashard/dump_restore/import_test
|55.8%| [TS] {RESULT} ydb/tests/olap/oom/import_test
|55.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/ydb-tests-example
|55.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/flake8
|55.8%| [TS] {RESULT} ydb/tests/fq/yds/import_test
|55.8%| [TS] {RESULT} ydb/tests/stress/transfer/tests/flake8
|55.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/import_test >> ydb-tests-functional-postgresql::import_test [GOOD]
|55.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/flake8
|55.8%| [TS] {RESULT} ydb/tests/fq/s3/flake8
|55.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/flake8
|55.8%| [TS] {RESULT} ydb/tests/fq/generic/streaming/flake8
|55.8%| [TS] {RESULT} ydb/tests/fq/generic/streaming/black
|55.8%| [TS] {RESULT} ydb/tests/functional/config/import_test
|55.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/flake8
|55.8%| [TS] {RESULT} ydb/tests/library/ut/import_test
|55.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so
|55.8%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8
|55.8%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/flake8
|55.8%| [TS] {RESULT} ydb/tests/olap/load/import_test
|55.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication
|55.9%| [TS] {RESULT} ydb/tests/datashard/ttl/import_test
|55.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3
|55.9%| [TS] {RESULT} ydb/tests/functional/script_execution/import_test
|55.9%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8
|55.9%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/import_test
|55.9%| [TS] {RESULT} ydb/core/viewer/tests/flake8
|55.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/flake8
|55.9%| [TS] {RESULT} ydb/tests/stress/transfer/tests/import_test
|55.9%| [TS] {RESULT} ydb/tests/olap/docs/generator/import_test
|55.9%| [TS] {RESULT} ydb/tests/functional/postgresql/import_test
|55.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8
|55.9%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8
|55.9%| [TS] {RESULT} ydb/tests/sql/lib/flake8
|55.9%| [TS] {RESULT} ydb/tests/datashard/async_replication/flake8
|55.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_build_txs.cpp
|55.9%| [TS] {RESULT} ydb/tests/functional/tpc/medium/flake8
|55.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/flake8
|55.9%| [TS] {RESULT} ydb/tests/functional/canonical/flake8
|55.9%| [TS] {RESULT} ydb/tests/functional/tenants/import_test
|56.0%| [TS] {RESULT} ydb/tests/datashard/parametrized_queries/flake8
|56.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/flake8
|56.0%| [TS] {RESULT} ydb/tests/datashard/vector_index/flake8
|56.0%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8
|56.0%| [TS] {RESULT} ydb/tests/functional/hive/flake8
|56.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select
|56.0%| [TS] {RESULT} ydb/tests/functional/serializable/import_test
|56.0%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8
|56.0%| [TS] {RESULT} ydb/tests/functional/rename/import_test
|56.0%| [TS] {RESULT} ydb/tests/functional/tpc/medium/import_test
|56.0%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8
>> ydb-tests-olap-data_quotas::import_test [GOOD]
|56.0%| [TS] {RESULT} ydb/tests/functional/limits/import_test
|56.0%| [TS] {RESULT} ydb/tests/olap/flake8
|56.0%| [TS] {RESULT} ydb/tests/functional/wardens/import_test
|56.0%| [TS] {RESULT} ydb/tests/functional/restarts/import_test
|56.0%| [TS] {RESULT} ydb/tests/olap/s3_import/large/import_test
|56.0%| [TS] {RESULT} ydb/tests/stress/kv/tests/flake8
|56.1%| [TS] {RESULT} ydb/tests/functional/wardens/flake8
|56.1%| [TS] {RESULT} ydb/tests/stress/log/tests/import_test
|56.1%| [TS] {RESULT} ydb/tests/functional/limits/flake8
|56.1%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8
|56.1%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so}
|56.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/import_test >> ydb-tests-olap-data_quotas::import_test [GOOD]
|56.1%| [TS] {RESULT} ydb/tests/functional/ttl/flake8
|56.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_build_txs.cpp
|56.1%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/flake8
|56.1%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8
|56.1%| [TS] {RESULT} ydb/tests/functional/tenants/flake8
|56.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/ydb-tests-functional-api
|56.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl
|56.1%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8
|56.1%| [TS] {RESULT} ydb/tests/functional/audit/import_test
|56.1%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/import_test
|56.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/flake8
|56.1%| [TS] {RESULT} ydb/tests/fq/http_api/flake8
|56.1%| [TS] {RESULT} ydb/tests/fq/restarts/import_test
|56.2%| [TS] {RESULT} ydb/tests/olap/docs/generator/flake8
|56.2%| [TS] {RESULT} ydb/tests/functional/serializable/flake8
|56.2%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8
|56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ydb-tests-olap
|56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/public-sdk-cpp-tests-integration-sessions_pool
|56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms
|56.2%| [TS] {RESULT} ydb/tests/sql/large/flake8
|56.2%| [TS] {RESULT} ydb/tests/olap/data_quotas/import_test
|56.2%| [TS] {RESULT} ydb/tests/functional/sqs/large/import_test
|56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3
|56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy
|56.2%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8
|56.2%| [TS] {RESULT} ydb/tests/datashard/dml/flake8
|56.2%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8
|56.2%| [TS] {RESULT} ydb/tests/functional/autoconfig/flake8
|56.2%| [TS] {RESULT} ydb/tests/compatibility/import_test
|56.2%| [TS] {RESULT} ydb/tests/functional/scheme_shard/import_test
|56.2%| [TS] {RESULT} ydb/tests/functional/hive/import_test
|56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/topic/ydb-public-sdk-cpp-tests-integration-topic
|56.2%| [TS] {RESULT} ydb/tests/fq/multi_plane/import_test
|56.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/import_test
|56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/ydb-public-sdk-cpp-tests-integration-sessions
|56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/secondary_index/ydb-tests-datashard-secondary_index
|56.3%| [TS] {RESULT} ydb/tests/fq/solomon/flake8
|56.3%| [TS] {RESULT} ydb/tests/sql/flake8
|56.3%| [TS] {RESULT} ydb/tests/functional/ydb_cli/import_test
|56.3%| [TS] {RESULT} ydb/tests/functional/restarts/flake8
|56.3%| [TS] {RESULT} ydb/tests/functional/minidumps/import_test
|56.3%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/flake8
|56.3%| [TS] {RESULT} ydb/tests/functional/kqp/plan2svg/import_test
|56.3%| [TS] {RESULT} ydb/tests/functional/kqp/plan2svg/flake8
|56.3%| [TS] {RESULT} ydb/tests/tools/pq_read/test/flake8
|56.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/flake8
|56.3%| [TS] {RESULT} ydb/tests/tools/pq_read/test/import_test
|56.3%| [TS] {RESULT} ydb/tests/fq/http_api/import_test
|56.3%| [TS] {RESULT} ydb/tests/datashard/select/flake8
|56.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/flake8
|56.3%| [TS] {RESULT} ydb/tests/functional/tpc/large/flake8
|56.3%| [TS] {RESULT} ydb/core/viewer/tests/import_test
|56.3%| [TS] {RESULT} ydb/tests/fq/yds/flake8
|56.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/flake8
|56.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/flake8
|56.4%| [TS] {RESULT} ydb/tests/datashard/split_merge/flake8
|56.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/flake8
|56.4%| [TS] {RESULT} ydb/tests/datashard/s3/flake8
|56.4%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/join/flake8
|56.4%| [TS] {RESULT} ydb/tests/olap/load/flake8
|56.4%| [TS] {RESULT} ydb/tests/functional/tpc/large/import_test
|56.4%| [TS] {RESULT} ydb/tests/functional/clickbench/import_test
>> ydb-tests-datashard-copy_table::import_test [GOOD]
|56.4%| [TS] {RESULT} ydb/tests/functional/api/flake8
|56.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression
|56.5%| [TS] {RESULT} ydb/tests/datashard/ttl/flake8
|56.5%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8
|56.5%| [TS] {RESULT} ydb/tests/olap/common/flake8
|56.6%| [TS] {RESULT} ydb/tests/datashard/partitioning/flake8
|56.6%| [TS] {RESULT} ydb/tests/functional/query_cache/import_test
|56.6%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/import_test
|56.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/flake8
|56.6%| [TS] {RESULT} ydb/tests/olap/column_family/compression/flake8
|56.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/import_test >> ydb-tests-datashard-copy_table::import_test [GOOD]
|56.8%| [TS] {RESULT} ydb/tests/datashard/copy_table/import_test
|56.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries
|56.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a
|56.8%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a
|56.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml
|56.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a
|56.8%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a
|56.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a
|56.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common
|56.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode
|56.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering
|56.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/ydb-tests-sql-large
|56.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a
>> ydb-tests-functional-sqs-cloud::import_test [GOOD]
|56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/import_test >> ydb-tests-functional-sqs-cloud::import_test [GOOD]
|56.9%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/import_test
>> ydb-tests-functional-blobstorage::import_test [GOOD]
|56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/import_test >> ydb-tests-functional-blobstorage::import_test [GOOD]
|56.9%| [TS] {RESULT} ydb/tests/functional/blobstorage/import_test
>> ydb-tests-stress-simple_queue-tests::import_test [GOOD]
>> ydb-tests-functional-sqs-messaging::import_test [GOOD]
>> ydb-tests-functional-suite_tests::import_test [GOOD]
|56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/tests/import_test >> ydb-tests-stress-simple_queue-tests::import_test [GOOD]
|56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/import_test >> ydb-tests-functional-sqs-messaging::import_test [GOOD]
|56.9%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/import_test
|56.9%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/import_test
|56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/import_test >> ydb-tests-functional-suite_tests::import_test [GOOD]
|56.9%| [TS] {RESULT} ydb/tests/functional/suite_tests/import_test
>> ydb-tests-example::import_test [GOOD]
|56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/example/import_test >> ydb-tests-example::import_test [GOOD]
|57.0%| [TS] {RESULT} ydb/tests/example/import_test
|56.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_runner.cpp
|57.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_runner.cpp
>> ydb-tests-functional-ttl::import_test [GOOD]
|57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/import_test >> ydb-tests-functional-ttl::import_test [GOOD]
|57.0%| [TS] {RESULT} ydb/tests/functional/ttl/import_test
>> ydb-tests-functional-api::import_test [GOOD]
>> ydb-tests-functional-cms::import_test [GOOD]
|57.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/query_compiler/kqp_query_compiler.cpp
|57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/import_test >> ydb-tests-functional-api::import_test [GOOD]
|57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/import_test >> ydb-tests-functional-cms::import_test [GOOD]
|57.0%| [TS] {RESULT} ydb/tests/functional/cms/import_test
|57.0%| [TS] {RESULT} ydb/tests/functional/api/import_test
|57.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a
|57.0%| [AR] {RESULT} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a
|57.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_compiler/kqp_query_compiler.cpp
|57.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a
>> ydb-tests-stress-mixedpy::import_test [GOOD]
|57.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/rbo/kqp_convert_to_physical.cpp
|57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/mixedpy/import_test >> ydb-tests-stress-mixedpy::import_test [GOOD]
>> ydb-tests-olap::import_test [GOOD]
|57.0%| [TS] {RESULT} ydb/tests/stress/mixedpy/import_test
|57.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_convert_to_physical.cpp
|57.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a
|57.1%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a
|57.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a
|57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/import_test >> ydb-tests-olap::import_test [GOOD]
|57.1%| [TS] {RESULT} ydb/tests/olap/import_test
>> ydb-tests-olap-column_family-compression::import_test [GOOD]
>> ydb-tests-functional-sqs-common::import_test [GOOD]
>> ydb-tests-functional-sqs-multinode::import_test [GOOD]
>> ydb-tests-olap-scenario::import_test [GOOD]
|57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/import_test >> ydb-tests-olap-column_family-compression::import_test [GOOD]
|57.1%| [TS] {RESULT} ydb/tests/olap/column_family/compression/import_test
|57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/import_test >> ydb-tests-functional-sqs-common::import_test [GOOD]
|57.1%| [TS] {RESULT} ydb/tests/functional/sqs/common/import_test
|57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/import_test >> ydb-tests-functional-sqs-multinode::import_test [GOOD]
>> ydb-tests-sql::import_test [GOOD]
|57.1%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/import_test
|57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/import_test >> ydb-tests-olap-scenario::import_test [GOOD]
|57.1%| [TS] {RESULT} ydb/tests/olap/scenario/import_test
>> ydb-tests-olap-ttl_tiering::import_test [GOOD]
|57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/import_test >> ydb-tests-sql::import_test [GOOD]
|57.1%| [TS] {RESULT} ydb/tests/sql/import_test
|57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/import_test >> ydb-tests-olap-ttl_tiering::import_test [GOOD]
|57.2%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/import_test
>> ydb-tests-datashard-vector_index::import_test [GOOD]
|57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/import_test >> ydb-tests-datashard-vector_index::import_test [GOOD]
|57.2%| [TS] {RESULT} ydb/tests/datashard/vector_index/import_test
|57.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/pgwire/pgwire
|57.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire
|57.2%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire
>> ydb-tests-datashard-async_replication::import_test [GOOD]
|57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/import_test >> ydb-tests-datashard-async_replication::import_test [GOOD]
|57.2%| [TS] {RESULT} ydb/tests/datashard/async_replication/import_test
|57.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_type_ann.cpp
>> ydb-tests-fq-s3::import_test [GOOD]
|57.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_type_ann.cpp
|57.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/host/libcore-kqp-host.a
>> ydb-tests-datashard-secondary_index::import_test [GOOD]
>> ydb-tests-datashard-select::import_test [GOOD]
|57.2%| [AR] {RESULT} $(B)/ydb/core/kqp/host/libcore-kqp-host.a
|57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/import_test >> ydb-tests-fq-s3::import_test [GOOD]
|57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/import_test >> ydb-tests-datashard-secondary_index::import_test [GOOD]
|57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/import_test >> ydb-tests-datashard-select::import_test [GOOD]
>> ydb-tests-datashard-s3::import_test [GOOD]
|57.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a
|57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/import_test >> ydb-tests-datashard-s3::import_test [GOOD]
>> ydb-tests-datashard-parametrized_queries::import_test [GOOD]
>> ydb-tests-datashard-dml::import_test [GOOD]
|57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/import_test >> ydb-tests-datashard-parametrized_queries::import_test [GOOD]
>> ydb-tests-sql-large::import_test [GOOD]
|57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/import_test >> ydb-tests-datashard-dml::import_test [GOOD]
|57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/large/import_test >> ydb-tests-sql-large::import_test [GOOD]
|57.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator
|57.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator
|57.3%| [TS] {RESULT} ydb/tests/fq/s3/import_test
|57.3%| [TS] {RESULT} ydb/tests/datashard/secondary_index/import_test
|57.3%| [TS] {RESULT} ydb/tests/datashard/select/import_test
|57.3%| [TS] {RESULT} ydb/tests/datashard/s3/import_test
|57.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut
|57.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut
|57.3%| [TS] {RESULT} ydb/tests/datashard/parametrized_queries/import_test
|57.3%| [TS] {RESULT} ydb/tests/datashard/dml/import_test
|57.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator
|57.3%| [TS] {RESULT} ydb/tests/sql/large/import_test
|57.3%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut
>> TRUCalculatorTests::TestReadTable [GOOD]
>> TRUCalculatorTests::TestBulkUpsert [GOOD]
>> MetadataConversion::MakeAuthTest [GOOD]
>> MetadataConversion::ConvertingExternalSourceMetadata [GOOD]
|57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> TRUCalculatorTests::TestBulkUpsert [GOOD]
|57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/gateway/ut/gtest >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD]
|57.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring
|57.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring
|57.4%| [TS] {RESULT} ydb/core/tx/schemeshard/ut_ru_calculator/unittest
|57.4%| [TS] {RESULT} ydb/core/kqp/gateway/ut/gtest
|57.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring
|57.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut
|57.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut
|57.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut
|57.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber
|57.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber
|57.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber
|57.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut
|57.4%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut
|57.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut
>> TMonitoringTests::ValidActorId
>> TMonitoringTests::InvalidActorId
>> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource
|57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest
|57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest
|57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest
>> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource [GOOD]
>> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel [GOOD]
>> TCheckpointCoordinatorTests::ShouldAllSnapshots [GOOD]
>> TCheckpointCoordinatorTests::Should2Increments1Snapshot [GOOD]
>> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD]
|57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest
|57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest
|57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest
|57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest
>> TSubscriberCombinationsTest::CombinationsRootDomain
>> TSubscriberTest::StrongNotificationAfterCommit
>> TSubscriberTest::NotifyUpdate
>> TSubscriberTest::SyncPartial
>> TSubscriberTest::ReconnectOnFailure
>> TMonitoringTests::InvalidActorId [GOOD]
>> TSubscriberTest::NotifyDelete
>> TSubscriberTest::Sync
>> TSubscriberCombinationsTest::MigratedPathRecreation
>> TSubscriberTest::InvalidNotification
|57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest
>> TSubscriberTest::SyncWithOutdatedReplica
>> TMonitoringTests::ValidActorId [GOOD]
|57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest
|57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest
|57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpointing/ut/unittest >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD]
Test command err:
2025-05-29T15:21:27.963097Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:72: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT
2025-05-29T15:21:27.963155Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:113: [my-graph-id.42] Send TEvRegisterCoordinatorRequest
Waiting for TEvRegisterCoordinatorRequest (storage)
2025-05-29T15:21:27.963191Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:130: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues:
2025-05-29T15:21:27.963196Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:139: [my-graph-id.42] Successfully registered in storage
2025-05-29T15:21:27.963200Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:140: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s)
2025-05-29T15:21:27.963214Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:148: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0
Waiting for TEvGetCheckpointsMetadataRequest (storage)
2025-05-29T15:21:27.964025Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:185: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse
2025-05-29T15:21:27.964033Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:211: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint
2025-05-29T15:21:27.964038Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:348: [my-graph-id.42] [42:1] Registering new checkpoint in storage
Waiting for TEvCreateCheckpointRequest (storage)
2025-05-29T15:21:27.964729Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:388: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse
2025-05-29T15:21:27.964739Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:434: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s)
2025-05-29T15:21:27.964747Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:440: [my-graph-id.42] [42:1] Send TEvRun to all actors
Waiting for TEvInjectCheckpointBarrier (ingress)
2025-05-29T15:21:27.964768Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:461: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100
2025-05-29T15:21:27.964774Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:471: [my-graph-id.42] [42:1] Task state saved, need 2 more acks
2025-05-29T15:21:27.964780Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:461: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100
2025-05-29T15:21:27.964785Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:471: [my-graph-id.42] [42:1] Task state saved, need 1 more acks
2025-05-29T15:21:27.964790Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:461: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100
2025-05-29T15:21:27.964794Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:471: [my-graph-id.42] [42:1] Task state saved, need 0 more acks
2025-05-29T15:21:27.964799Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:484: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit'
Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage)
2025-05-29T15:21:27.964811Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:496: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse
2025-05-29T15:21:27.964816Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:511: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states
Waiting for TEvCommitChanges (ingress)
Waiting for TEvCommitChanges (egress)
2025-05-29T15:21:27.964840Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:527: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1
2025-05-29T15:21:27.964847Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:536: [my-graph-id.42] [42:1] State committed [1:6:2053], need 1 more acks
2025-05-29T15:21:27.964852Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:527: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3
2025-05-29T15:21:27.964857Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:536: [my-graph-id.42] [42:1] State committed [1:8:2055], need 0 more acks
2025-05-29T15:21:27.964861Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:538: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed'
Waiting for TEvCompleteCheckpointRequest (storage)
2025-05-29T15:21:27.964872Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:549: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse
2025-05-29T15:21:27.964879Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:1] Checkpoint completed
2025-05-29T15:21:27.976066Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:72: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT
2025-05-29T15:21:27.976107Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:113: [my-graph-id.42] Send TEvRegisterCoordinatorRequest
Waiting for TEvRegisterCoordinatorRequest (storage)
2025-05-29T15:21:27.976133Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:130: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues:
2025-05-29T15:21:27.976138Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:139: [my-graph-id.42] Successfully registered in storage
2025-05-29T15:21:27.976142Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:140: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s)
2025-05-29T15:21:27.976152Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:148: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0
Waiting for TEvGetCheckpointsMetadataRequest (storage)
2025-05-29T15:21:27.976185Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:185: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse
2025-05-29T15:21:27.976190Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:211: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint
2025-05-29T15:21:27.976195Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:348: [my-graph-id.42] [42:1] Registering new checkpoint in storage
Waiting for TEvCreateCheckpointRequest (storage)
2025-05-29T15:21:27.976223Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:388: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse
2025-05-29T15:21:27.976228Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:434: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s)
2025-05-29T15:21:27.976234Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:440: [my-graph-id.42] [42:1] Send TEvRun to all actors
Waiting for TEvInjectCheckpointBarrier (ingress)
2025-05-29T15:21:27.976252Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:461: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100
2025-05-29T15:21:27.976258Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:471: [my-graph-id.42] [42:1] Task state saved, need 2 more acks
2025-05-29T15:21:27.976265Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:461: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100
2025-05-29T15:21:27.976269Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:471: [my-graph-id.42] [42:1] Task state saved, need 1 more acks
2025-05-29T15:21:27.976273Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:461: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100
2025-05-29T15:21:27.976277Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:471: [my-graph-id.42] [42:1] Task state saved, need 0 more acks
2025-05-29T15:21:27.976280Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:484: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit'
Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage)
2025-05-29T15:21:27.976290Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:496: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse
2025-05-29T15:21:27.976294Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:511: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states
Waiting for TEvCommitChanges (ingress)
Waiting for TEvCommitChanges (egress)
2025-05-29T15:21:27.976316Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:527: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1
2025-05-29T15:21:27.976322Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:536: [my-graph-id.42] [42:1] State committed [2:6:2053], need 1 more acks
2025-05-29T15:21:27.976327Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:527: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3
2025-05-29T15:21:27.976332Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:536: [my-graph-id.42] [42:1] State committed [2:8:2055], need 0 more acks
2025-05-29T15:21:27.976336Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:538: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed'
Waiting for TEvCompleteCheckpointRequest (storage)
2025-05-29T15:21:27.976346Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:549: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse
2025-05-29T15:21:27.976351Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:1] Checkpoint completed
2025-05-29T15:21:27.987041Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:72: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT
2025-05-29T15:21:27.987082Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:113: [my-graph-id.42] Send TEvRegisterCoordinatorRequest
Waiting for TEvRegisterCoordinatorRequest (storage)
2025-05-29T15:21:27.987109Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:130: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues:
2025-05-29T15:21:27.987114Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:139: [my-graph-id.42] Successfully registered in storage
2025-05-29T15:21:27.987119Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:140: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s)
2025-05-29T15:21:27.987128Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:148: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0
Waiting for TEvGetCheckpointsMetadataRequest (storage)
2025-05-29T15:21:27.987163Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:185: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse
2025-05-29T15:21:27.987168Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkp ... Registering new checkpoint in storage
Waiting for TEvCreateCheckpointRequest (storage)
2025-05-29T15:21:27.998065Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:388: [my-graph-id.42] [42:3] Got TEvCreateCheckpointResponse
2025-05-29T15:21:27.998067Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:434: [my-graph-id.42] [42:3] Checkpoint successfully created, going to inject barriers to 1 actor(s)
Waiting for TEvInjectCheckpointBarrier (ingress)
2025-05-29T15:21:27.998072Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:461: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100
2025-05-29T15:21:27.998075Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:471: [my-graph-id.42] [42:3] Task state saved, need 2 more acks
2025-05-29T15:21:27.998077Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:461: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100
2025-05-29T15:21:27.998079Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:471: [my-graph-id.42] [42:3] Task state saved, need 1 more acks
2025-05-29T15:21:27.998082Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:461: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100
2025-05-29T15:21:27.998084Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:471: [my-graph-id.42] [42:3] Task state saved, need 0 more acks
2025-05-29T15:21:27.998086Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:484: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'PendingCommit'
Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage)
2025-05-29T15:21:27.998090Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:496: [my-graph-id.42] [42:3] Got TEvSetCheckpointPendingCommitStatusResponse
2025-05-29T15:21:27.998092Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:511: [my-graph-id.42] [42:3] Checkpoint status changed to 'PendingCommit', committing states
Waiting for TEvCommitChanges (ingress)
Waiting for TEvCommitChanges (egress)
2025-05-29T15:21:27.998099Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:527: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 1
2025-05-29T15:21:27.998102Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:536: [my-graph-id.42] [42:3] State committed [4:6:2053], need 1 more acks
2025-05-29T15:21:27.998104Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:527: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 3
2025-05-29T15:21:27.998108Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:536: [my-graph-id.42] [42:3] State committed [4:8:2055], need 0 more acks
2025-05-29T15:21:27.998110Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:538: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'Completed'
Waiting for TEvCompleteCheckpointRequest (storage)
2025-05-29T15:21:27.998114Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:549: [my-graph-id.42] [42:3] Got TEvCompleteCheckpointResponse
2025-05-29T15:21:27.998116Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:3] Checkpoint completed
2025-05-29T15:21:27.998118Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:372: [my-graph-id.42] Got TEvScheduleCheckpointing
2025-05-29T15:21:27.998121Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:348: [my-graph-id.42] [42:4] Registering new checkpoint in storage
Waiting for TEvCreateCheckpointRequest (storage)
2025-05-29T15:21:27.998125Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:388: [my-graph-id.42] [42:4] Got TEvCreateCheckpointResponse
2025-05-29T15:21:27.998127Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:434: [my-graph-id.42] [42:4] Checkpoint successfully created, going to inject barriers to 1 actor(s)
Waiting for TEvInjectCheckpointBarrier (ingress)
2025-05-29T15:21:27.998132Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:461: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100
2025-05-29T15:21:27.998135Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:471: [my-graph-id.42] [42:4] Task state saved, need 2 more acks
2025-05-29T15:21:27.998137Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:461: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100
2025-05-29T15:21:27.998139Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:471: [my-graph-id.42] [42:4] Task state saved, need 1 more acks
2025-05-29T15:21:27.998142Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:461: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100
2025-05-29T15:21:27.998144Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:471: [my-graph-id.42] [42:4] Task state saved, need 0 more acks
2025-05-29T15:21:27.998146Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:484: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'PendingCommit'
Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage)
2025-05-29T15:21:27.998150Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:496: [my-graph-id.42] [42:4] Got TEvSetCheckpointPendingCommitStatusResponse
2025-05-29T15:21:27.998152Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:511: [my-graph-id.42] [42:4] Checkpoint status changed to 'PendingCommit', committing states
Waiting for TEvCommitChanges (ingress)
Waiting for TEvCommitChanges (egress)
2025-05-29T15:21:27.998159Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:527: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 1
2025-05-29T15:21:27.998162Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:536: [my-graph-id.42] [42:4] State committed [4:6:2053], need 1 more acks
2025-05-29T15:21:27.998164Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:527: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 3
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:527: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 3 2025-05-29T15:21:27.998166Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:536: [my-graph-id.42] [42:4] State committed [4:8:2055], need 0 more acks 2025-05-29T15:21:27.998170Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:538: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-05-29T15:21:27.998174Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:549: [my-graph-id.42] [42:4] Got TEvCompleteCheckpointResponse 2025-05-29T15:21:27.998176Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:4] Checkpoint completed 2025-05-29T15:21:28.006307Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:72: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-05-29T15:21:28.006337Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:113: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-05-29T15:21:28.006356Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:130: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-05-29T15:21:28.006359Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:139: [my-graph-id.42] Successfully registered in storage 2025-05-29T15:21:28.006362Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:140: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-05-29T15:21:28.006367Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:148: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-05-29T15:21:28.006389Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:185: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-05-29T15:21:28.006392Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:211: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-05-29T15:21:28.006395Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:348: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-05-29T15:21:28.006415Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:388: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-05-29T15:21:28.006418Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:434: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-05-29T15:21:28.006421Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:440: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-05-29T15:21:28.006435Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:461: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-05-29T15:21:28.006438Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:471: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-05-29T15:21:28.006441Z node 5 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:461: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2025-05-29T15:21:28.006443Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:474: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2025-05-29T15:21:28.006446Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:461: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2025-05-29T15:21:28.006452Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:474: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2025-05-29T15:21:28.006454Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:479: [my-graph-id.42] [42:1] Got all acks for aborted checkpoint, aborting in storage Waiting for TEvAbortCheckpointRequest (storage) 2025-05-29T15:21:28.006461Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:575: [my-graph-id.42] [42:1] Got TEvAbortCheckpointResponse 2025-05-29T15:21:28.006463Z node 5 :STREAMS_CHECKPOINT_COORDINATOR WARN: checkpoint_coordinator.cpp:581: [my-graph-id.42] [42:1] Checkpoint aborted 2025-05-29T15:21:28.006467Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:372: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-05-29T15:21:28.006470Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:348: [my-graph-id.42] [42:2] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-05-29T15:21:28.006474Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:388: [my-graph-id.42] [42:2] Got TEvCreateCheckpointResponse 2025-05-29T15:21:28.006477Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:434: [my-graph-id.42] [42:2] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] >> TSubscriberTest::SyncPartial [GOOD] >> TSubscriberTest::Sync [GOOD] >> TSubscriberTest::ReconnectOnFailure [GOOD] >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] >> TSubscriberCombinationsTest::MigratedPathRecreation [GOOD] >> TSubscriberTest::Boot >> TSubscriberTest::NotifyUpdate [GOOD] >> TSubscriberTest::NotifyDelete [GOOD] >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] >> TSubscriberTest::InvalidNotification [GOOD] |57.5%| [TA] 
$(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |57.5%| [TA] $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Sync [GOOD] Test command err: 2025-05-29T15:21:28.083104Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:21:28.083374Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-05-29T15:21:28.083385Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2025-05-29T15:21:28.083391Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2025-05-29T15:21:28.083398Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2066] 2025-05-29T15:21:28.083404Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2025-05-29T15:21:28.083412Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.083429Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2066] 2025-05-29T15:21:28.083434Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.083446Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:871: [main][1:35:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-05-29T15:21:28.083456Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2066], cookie# 1 2025-05-29T15:21:28.083461Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2025-05-29T15:21:28.083466Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2025-05-29T15:21:28.083472Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:3:2050], cookie# 1 2025-05-29T15:21:28.083476Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-05-29T15:21:28.083479Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-05-29T15:21:28.083485Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:36:2066], cookie# 1 2025-05-29T15:21:28.083490Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:35:2066][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-05-29T15:21:28.083495Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:37:2066], cookie# 1 2025-05-29T15:21:28.083498Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:35:2066][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-05-29T15:21:28.083503Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:38:2066], cookie# 1 2025-05-29T15:21:28.083506Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:35:2066][path] Unexpected sync response: sender# [1:38:2066], cookie# 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncPartial [GOOD] Test command err: 2025-05-29T15:21:28.084478Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:21:28.084723Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-05-29T15:21:28.084734Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-05-29T15:21:28.084739Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-05-29T15:21:28.084746Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-05-29T15:21:28.084758Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-05-29T15:21:28.084764Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.084772Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# 
[1:37:2065] 2025-05-29T15:21:28.084776Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.084796Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:871: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-05-29T15:21:28.084811Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2065], cookie# 1 2025-05-29T15:21:28.084818Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 1 2025-05-29T15:21:28.084823Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 1 2025-05-29T15:21:28.084829Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-05-29T15:21:28.084833Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-05-29T15:21:28.084839Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 1 2025-05-29T15:21:28.084844Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:34:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 0, faulires# 1 2025-05-29T15:21:28.084847Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-05-29T15:21:28.084850Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.084855Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:36:2065], cookie# 1 2025-05-29T15:21:28.084858Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:34:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 1 2025-05-29T15:21:28.084861Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:37:2065], cookie# 1 2025-05-29T15:21:28.084864Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:34:2065][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 1, partial# 0 2025-05-29T15:21:28.084873Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:871: [main][1:34:2065][path] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 2 2025-05-29T15:21:28.084881Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 2 2025-05-29T15:21:28.084884Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:34:2065][path] Sync is in progress: cookie# 2, size# 3, half# 1, successes# 0, faulires# 1 2025-05-29T15:21:28.084887Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 2 2025-05-29T15:21:28.084891Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 2 2025-05-29T15:21:28.084896Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 2 2025-05-29T15:21:28.084900Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:36:2065], cookie# 2 2025-05-29T15:21:28.084903Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:948: [main][1:34:2065][path] Sync is done: cookie# 2, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-05-29T15:21:28.084907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-05-29T15:21:28.084910Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.084914Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:37:2065], cookie# 2 2025-05-29T15:21:28.084916Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:34:2065][path] Unexpected sync response: sender# [1:37:2065], cookie# 2 2025-05-29T15:21:28.084922Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:871: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 3 2025-05-29T15:21:28.084929Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 3 2025-05-29T15:21:28.084933Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:34:2065][path] Sync is in progress: cookie# 3, size# 3, half# 1, successes# 0, faulires# 1 2025-05-29T15:21:28.084936Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:36:2065], cookie# 3 2025-05-29T15:21:28.084939Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:948: [main][1:34:2065][path] Sync is done: cookie# 3, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-05-29T15:21:28.084942Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:381: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 3 2025-05-29T15:21:28.084948Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:37:2065], cookie# 3 2025-05-29T15:21:28.084950Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:34:2065][path] Unexpected sync response: sender# [1:37:2065], cookie# 3 2025-05-29T15:21:28.084954Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-05-29T15:21:28.084957Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::ReconnectOnFailure [GOOD] Test command err: 2025-05-29T15:21:28.084799Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][2:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:21:28.085122Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][2:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-05-29T15:21:28.085132Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][2:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-05-29T15:21:28.085138Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][2:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-05-29T15:21:28.085156Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-05-29T15:21:28.085163Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-05-29T15:21:28.085168Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][2:34:2065][path] Set up state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.085178Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-05-29T15:21:28.085182Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.085229Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-05-29T15:21:28.085233Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: 
AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.085238Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-05-29T15:21:28.085241Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.085244Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-05-29T15:21:28.085247Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.095515Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][2:45:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-05-29T15:21:28.095549Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-05-29T15:21:28.095565Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.095588Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][2:46:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-05-29T15:21:28.095595Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][2:47:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-05-29T15:21:28.095604Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-05-29T15:21:28.095610Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.095622Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-05-29T15:21:28.095628Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.095726Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][2:45:2065][path] 
Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-05-29T15:21:28.095737Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:35:2065] 2025-05-29T15:21:28.095745Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:842: [main][2:34:2065][path] Update to strong state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyUpdate [GOOD] Test command err: 2025-05-29T15:21:28.090914Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:21:28.091239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-05-29T15:21:28.091256Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-05-29T15:21:28.091262Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-05-29T15:21:28.091272Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-05-29T15:21:28.091288Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-05-29T15:21:28.091296Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.091305Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-05-29T15:21:28.091311Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.091401Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-05-29T15:21:28.091409Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:35:2065] 2025-05-29T15:21:28.091415Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:842: [main][1:34:2065][path] Update to strong state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new 
state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] Test command err: 2025-05-29T15:21:28.145263Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:21:28.145642Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:3:2050] 2025-05-29T15:21:28.145660Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:6:2053] 2025-05-29T15:21:28.145667Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:9:2056] 2025-05-29T15:21:28.145678Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:36:2066] 2025-05-29T15:21:28.145686Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:37:2066] 2025-05-29T15:21:28.145697Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.145725Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:38:2066] 2025-05-29T15:21:28.145734Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.145752Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:871: [main][1:35:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-05-29T15:21:28.145766Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2066], cookie# 1 2025-05-29T15:21:28.145774Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2025-05-29T15:21:28.145780Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 
2025-05-29T15:21:28.145788Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:3:2050], cookie# 1 2025-05-29T15:21:28.145793Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-05-29T15:21:28.145797Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-05-29T15:21:28.145805Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:36:2066], cookie# 1 2025-05-29T15:21:28.145813Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:35:2066][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-05-29T15:21:28.145819Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:37:2066], cookie# 1 2025-05-29T15:21:28.145823Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:35:2066][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-05-29T15:21:28.145829Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:38:2066], cookie# 1 2025-05-29T15:21:28.145833Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:35:2066][path] Unexpected sync response: sender# [1:38:2066], cookie# 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] Test command err: 2025-05-29T15:21:28.113221Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:21:28.113494Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-05-29T15:21:28.113505Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-05-29T15:21:28.113510Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-05-29T15:21:28.113517Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-05-29T15:21:28.113530Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-05-29T15:21:28.113537Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.113544Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle 
NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-05-29T15:21:28.113549Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.113599Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-05-29T15:21:28.113605Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-05-29T15:21:28.113609Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:842: [main][1:34:2065][path] Update to strong state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.113622Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-05-29T15:21:28.113628Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-05-29T15:21:28.113631Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyDelete [GOOD] Test command err: 2025-05-29T15:21:28.150771Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:21:28.151043Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-05-29T15:21:28.151054Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2025-05-29T15:21:28.151059Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2025-05-29T15:21:28.151066Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2066] 2025-05-29T15:21:28.151072Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2025-05-29T15:21:28.151079Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 
Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.151096Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2066] 2025-05-29T15:21:28.151101Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.151166Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:3:2050] 2025-05-29T15:21:28.151173Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:6:2053] 2025-05-29T15:21:28.151182Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:9:2056] 2025-05-29T15:21:28.151190Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:36:2066] 2025-05-29T15:21:28.151195Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:842: [main][1:35:2066][path] Path was updated to new version: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.151199Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:37:2066] 2025-05-29T15:21:28.151205Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.151209Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:38:2066] 2025-05-29T15:21:28.151213Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 
18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } >> TSubscriberTest::Boot [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::InvalidNotification [GOOD] Test command err: 2025-05-29T15:21:28.199114Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:21:28.199412Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-05-29T15:21:28.199424Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-05-29T15:21:28.199430Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-05-29T15:21:28.199437Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-05-29T15:21:28.199450Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-05-29T15:21:28.199456Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.199463Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-05-29T15:21:28.199469Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.199498Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:33:2064] 2025-05-29T15:21:28.199502Z node 1 :SCHEME_BOARD_SUBSCRIBER ERROR: subscriber.cpp:811: [main][1:34:2065][path] Suspicious NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:33:2064] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Boot [GOOD] Test command err: 2025-05-29T15:21:28.115793Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-05-29T15:21:28.115810Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:3:2050] Successful handshake: owner# 800, generation# 1 2025-05-29T15:21:28.115837Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-05-29T15:21:28.115842Z node 1 :SCHEME_BOARD_REPLICA NOTICE: 
replica.cpp:985: [1:3:2050] Commit generation: owner# 800, generation# 1 2025-05-29T15:21:28.115849Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:35:2066] 2025-05-29T15:21:28.115853Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:6:2053] Successful handshake: owner# 900, generation# 1 2025-05-29T15:21:28.115881Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:35:2066] 2025-05-29T15:21:28.115884Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [1:6:2053] Commit generation: owner# 900, generation# 1 2025-05-29T15:21:28.115894Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:37:2068][/root/db/dir_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:21:28.115961Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:41:2068] 2025-05-29T15:21:28.115968Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [1:3:2050] Upsert description: path# /root/db/dir_inside 2025-05-29T15:21:28.115994Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:3:2050] Subscribe: subscriber# [1:41:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-05-29T15:21:28.116020Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:42:2068] 2025-05-29T15:21:28.116024Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [1:6:2053] Upsert description: path# /root/db/dir_inside 2025-05-29T15:21:28.116028Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:6:2053] Subscribe: subscriber# [1:42:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-05-29T15:21:28.116037Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:43:2068] 2025-05-29T15:21:28.116040Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [1:9:2056] Upsert description: path# /root/db/dir_inside 2025-05-29T15:21:28.116045Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:9:2056] Subscribe: subscriber# [1:43:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-05-29T15:21:28.116056Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:41:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:3:2050] 2025-05-29T15:21:28.116063Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2068] 2025-05-29T15:21:28.116070Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:42:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:6:2053] 2025-05-29T15:21:28.116075Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:42:2068] 2025-05-29T15:21:28.116080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:43:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:9:2056] 
2025-05-29T15:21:28.116087Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2068] 2025-05-29T15:21:28.116097Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:38:2068] 2025-05-29T15:21:28.116115Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:39:2068] 2025-05-29T15:21:28.116124Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:37:2068][/root/db/dir_inside] Set up state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.116132Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:40:2068] 2025-05-29T15:21:28.116140Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:37:2068][/root/db/dir_inside] Ignore empty state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2025-05-29T15:21:28.116195Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:34:2065], cookie# 0, event size# 118 2025-05-29T15:21:28.116203Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [1:3:2050] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2025-05-29T15:21:28.117201Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [1:3:2050] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-05-29T15:21:28.117258Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:41:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:3:2050] 2025-05-29T15:21:28.117267Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:41:2068] 2025-05-29T15:21:28.117276Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:38:2068] 2025-05-29T15:21:28.117286Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:842: [main][1:37:2068][/root/db/dir_inside] Update to strong state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() < argsRight.GetSuperId() =========== 
!argsRight.IsDeletion 2025-05-29T15:21:28.117325Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:35:2066], cookie# 0, event size# 117 2025-05-29T15:21:28.117330Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [1:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2025-05-29T15:21:28.117338Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [1:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-05-29T15:21:28.117352Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:42:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:6:2053] 2025-05-29T15:21:28.117358Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:42:2068] 2025-05-29T15:21:28.117366Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:39:2068] 2025-05-29T15:21:28.117373Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:842: [main][1:37:2068][/root/db/dir_inside] Path was updated to new version: owner# [1:36:2067], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 900, LocalPathId: 11], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.522321Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][3:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:21:28.522412Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][3:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2025-05-29T15:21:28.522421Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][3:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2025-05-29T15:21:28.522426Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][3:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2025-05-29T15:21:28.522434Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:35:2065] 2025-05-29T15:21:28.522445Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:36:2065] 2025-05-29T15:21:28.522451Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][3:34:2065][path] Set up state: owner# [3:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.522458Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:807: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:37:2065] 2025-05-29T15:21:28.522463Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][3:34:2065][path] Ignore empty state: owner# [3:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |57.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |57.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |57.6%| [TM] {RESULT} ydb/core/fq/libs/checkpointing/ut/unittest |57.6%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |57.6%| [TA] {RESULT} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |57.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |57.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |57.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |57.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |57.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |57.6%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |57.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |57.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |57.7%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |57.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |57.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |57.7%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut >> KqpScanArrowInChanels::AllTypesColumns >> KqpScanArrowInChanels::AggregateNoColumn >> KqpScanArrowFormat::AllTypesColumns >> KqpOlapAggregations::Aggregation_Some_NullGroupBy >> KqpScanArrowFormat::SingleKey >> KqpScanArrowFormat::AggregateCountStar >> KqpOlapWrite::TestRemoveTableBeforeIndexation >> KqpOlap::PredicatePushdown_Datetime_QS >> KqpOlapStats::AddRowsTableStandalone >> KqpOlapOptimizer::SpecialSliceToOneLayer >> KqpOlapAggregations::JsonDoc_Exists >> KqpOlapAggregations::AggregationCountPushdown >> KqpOlapAggregations::Json_GetValue_ToInt >> KqpOlapSysView::StatsSysViewRanges >> KqpOlap::OlapRead_UsesScanOnJoin >> KqpOlapJson::RestoreJsonArrayVariants >> KqpOlapJson::EmptyVariants >> KqpOlapAggregations::Aggregation_NoPushdownOnDisabledEmitAggApply >> KqpOlapLocks::TwoQueriesWithRestartTablet >> KqpOlap::SimpleQueryOlapStats >> KqpOlapAggregations::Json_GetValue_ToString >> KqpOlapJson::BrokenJsonWriting >> KqpOlapAggregations::Aggregation_Some_Null >> KqpOlap::PredicatePushdown_DifferentLvlOfFilters >> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns-isStream >> KqpOlapIndexes::TablesInStore >> KqpOlapAggregations::BlockGenericWithDistinct >> KqpOlap::PredicatePushdown >> KqpOlapAggregations::Aggregation_Sum_GroupByNullMix >> KqpOlap::OlapRead_GenericQuery >> KqpOlapLocks::DeleteAbsentMultipleShards+Reboot >> 
KqpOlapTiering::LoadTtlSettings >> KqpOlapAggregations::Aggregation_ResultL_FilterL_OrderL_Limit2 >> KqpOlapSysView::StatsSysViewBytesColumnActualization >> KqpOlapAggregations::JsonDoc_GetValue_ToInt >> KqpOlapAggregations::BlockGenericSelectAll >> KqpDecimalColumnShard::TestSimpleQueries >> KqpOlap::OlapUpsertImmediate >> KqpOlapAggregations::JsonDoc_GetValue >> KqpOlapDictionary::EmptyStringVariants >> KqpOlapStats::AddRowsSomeTablesInTableStore >> KqpOlap::SingleShardRead >> KqpOlapAggregations::Filter_NotAllUsedFieldsInResultSet >> KqpOlap::CheckEarlyFilterOnEmptySelect >> KqpDecimalColumnShard::TestFilterEqual >> KqpOlap::BlockChannelScalar >> KqpOlap::SimpleQueryOlap |57.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |57.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |57.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> KqpOlapSparsed::SwitchingStandalone >> KqpOlapBlobsSharing::SplitEmpty >> KqpOlap::ScanQueryOltpAndOlap >> KqpOlapAggregations::Aggregation |57.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |57.7%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |57.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw >> KqpOlapAggregations::JsonDoc_Exists [GOOD] >> KqpOlapIndexes::TablesInStore [GOOD] >> KqpOlap::PredicatePushdown_DifferentLvlOfFilters [GOOD] >> KqpOlap::PredicatePushdown_Datetime_SQ >> KqpOlapAggregations::BlockGenericSelectAll [GOOD] >> KqpOlapAggregations::Filter_NotAllUsedFieldsInResultSet [GOOD] >> KqpOlapAggregations::Json_GetValue_ToInt [GOOD] >> KqpOlap::SimpleQueryOlap [GOOD] >> KqpOlap::SimpleQueryOlapMeta >> KqpOlapAggregations::JsonDoc_GetValue_ToInt [GOOD] >> KqpOlapAggregations::Aggregation_Some_Null [GOOD] >> KqpOlap::OlapRead_UsesScanOnJoin [GOOD] >> KqpOlap::OlapRead_UsesScanOnJoinWithDataShardTable >> KqpOlapAggregations::Json_GetValue_ToString [GOOD] >> KqpOlapAggregations::BlockGenericWithDistinct [GOOD] >> KqpOlap::SimpleQueryOlapStats [GOOD] >> KqpOlap::SimpleRangeOlap ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::JsonDoc_Exists [GOOD] Test command err: Trying to start YDB, gRPC: 12251, MsgBus: 20170 2025-05-29T15:21:33.423010Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888158532627084:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.427281Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026f9/r3tmp/tmplJyH4W/pdisk_1.dat 2025-05-29T15:21:33.612327Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888158532626920:2079] 1748532093408810 != 1748532093408813 2025-05-29T15:21:33.612472Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12251, node 1 2025-05-29T15:21:33.627289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.627300Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from 
file: (empty maybe) 2025-05-29T15:21:33.627302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.627331Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:33.631158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.631178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.631700Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20170 TClient is connected to server localhost:20170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.841537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:21:33.847126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.854534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:33.877449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158532627620:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.878815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158532627620:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.878861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158532627620:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.878880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158532627620:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.878898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158532627620:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.878930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158532627620:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.878948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158532627620:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.878967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158532627620:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.878991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158532627620:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.879014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158532627620:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.879035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888158532627620:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.879063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158532627620:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.884074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158532627621:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.884396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158532627621:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.884440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158532627621:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.884457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158532627621:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.884478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158532627621:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.884495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158532627621:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.884510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158532627621:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.884526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158532627621:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.884544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158532627621:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.884562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158532627621:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.884579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888158532627621:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.884595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158532627621:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.888798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888158532627622:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.888818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888158532627622:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.888855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888158532627622:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.888872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888158532627622:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.888892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888158532627622:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fl ... 
:TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:33.894991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.894997Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:33.895006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:33.895010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:33.895055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:33.895062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:33.924077Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:33.924973Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:33.925813Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:33.927036Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_EXISTS(jsonval, "$.col1"), JSON_EXISTS(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_EXISTS(jsondoc, "$.col1") AND id = 6; 2025-05-29T15:21:34.004560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888162827595174:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.004593Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.004713Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888162827595209:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.005413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.007945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:21:34.008022Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888162827595211:2419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:34.079474Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888162827595262:2482] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:34.312906Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532094057, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_EXISTS(jsonval, "$.col1"), JSON_EXISTS(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_EXISTS(jsondoc, "$.col1") AND id = 6; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Text":"$.col1"},"Column":{"Id":7}}},{"Assign":{"Function":{"KernelName":"JsonExists","KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Function":{"Id":23,"Arguments":[{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Text":"$.col1"},"Column":{"Id":7}}},{"Assign":{"Function":{"KernelName":"JsonExists","KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Function":{"Id":23,"Arguments":[{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No 
estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1318) '('"_id" '"f3460a01-c9b4645-1cd2f946-dc1cfe3a") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $17 (Int32 '"6")) (let $18 (Just $17)) (let $19 (Int32 '1)) (let $20 '($18 $19)) (let $21 (If (== $17 (Int32 '2147483647)) $20 '((+ $18 $19) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($20 $21)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 (OptionalType (DataType 'Bool))) (let $10 (DqPhyStage '() (lambda '() (block '( (let $22 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $23 '('"id" '"jsondoc" '"jsonval")) (let $24 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $25 (Utf8 '"$.col1")) (let $26 (Bool 'false)) (let $27 (KqpWideReadOlapTableRanges $22 %kqp%tx_result_binding_0_0 $23 '() $24 (lambda '($28) (block '( (let $29 '('?? (KqpOlapJsonExists '"jsondoc" $25) $26)) (return (KqpOlapFilter $28 $29)) ))))) (return (FromFlow (NarrowMap $27 (lambda '($30 $31 $32) (block '( (let $33 '($9)) (let $34 (ResourceType '"JsonNode")) (let $35 (OptionalType $34)) (let $36 '((ResourceType '"JsonPath"))) (let $37 (DataType 'Utf8)) (let $38 (DictType $37 $34)) (let $39 '($38)) (let $40 (CallableType '() $33 '($35) $36 $39 $33)) (let $41 '('('"strict"))) (let $42 (Udf '"Json2.SqlExists" (Void) (VoidType) '"" $40 (VoidType) '"" $41)) (let $43 (IfPresent $32 (lambda '($53) (block '( (let $54 '((DataType 'Json) '"" '1)) (let $55 (CallableType '() '($34) $54)) (let $56 (Udf '"Json2.Parse" (Void) (VoidType) '"" $55 (VoidType) '"" '())) (return (Just (Apply $56 $53))) ))) (Nothing $35))) (let $44 (CallableType '() $36 '($37))) (let $45 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $44 (VoidType) '"" '())) (let $46 (Apply $45 $25)) (let $47 (Dict $38)) (let $48 (Just $26)) (let $49 (Apply $42 $43 $46 $47 $48)) (let $50 (CallableType '() $33 '((OptionalType (DataType 'JsonDocument))) $36 $39 $33)) (let $51 (Udf '"Json2.JsonDocumentSqlExists" (Void) (VoidType) '"" $50 (VoidType) '"" $41)) (let $52 (Apply $51 $31 $46 $47 $48)) (return (AsStruct '('"column1" $49) '('"column2" $52) '('"id" $30))) )))))) ))) '('('"_logical_id" '1388) '('"_id" '"331cfe1-9fe1ab32-49f707ed-6809671e")))) (let $11 (DqCnUnionAll (TDqOutput $10 '0))) (let $12 (DqPhyStage '($11) (lambda '($57) $57) '('('"_logical_id" '1824) '('"_id" '"7a146ed0-41fcd8dd-c450aa10-498d4221")))) (let $13 '('"id" '"column1" '"column2")) (let $14 (DqCnResult (TDqOutput $12 '0) $13)) (let $15 (KqpTxResultBinding $8 '0 '0)) (let $16 (KqpPhysicalTx '($10 $12) '($14) '('($5 $15)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $16) 
'((KqpTxResultBinding (ListType (StructType '('"column1" $9) '('"column2" $9) '('"id" $6))) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlap::OlapRead_GenericQuery [GOOD] >> KqpOlap::OlapRead_ScanQuery ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::TablesInStore [GOOD] Test command err: Trying to start YDB, gRPC: 15306, MsgBus: 24423 2025-05-29T15:21:33.648477Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888159294572471:2197];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.648673Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026b4/r3tmp/tmpUc5Itj/pdisk_1.dat 2025-05-29T15:21:33.769750Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888159294572313:2079] 1748532093646033 != 1748532093646036 2025-05-29T15:21:33.771868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.771894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.772937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:33.774937Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15306, node 1 2025-05-29T15:21:33.804690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.804699Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.804701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.804736Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24423 TClient is connected to server localhost:24423 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:21:33.942633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.950928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.964340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:33.987117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159294573014:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.987202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159294573014:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.987281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159294573014:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.987304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159294573014:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.987335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159294573014:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.987357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159294573014:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.987376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159294573014:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.987400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159294573014:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.987420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159294573014:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.987444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888159294573014:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.987463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159294573014:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.987484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159294573014:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.001818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159294573015:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.001851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159294573015:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.001920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159294573015:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.001948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159294573015:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.001970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159294573015:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.001992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159294573015:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.002010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159294573015:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.002029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159294573015:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.002049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159294573015:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.002068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888159294573015:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.002087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159294573015:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.002105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159294573015:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.011508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159294573016:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.011540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159294573016:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.011613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159294573016:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.011634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159294573016:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.011655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159294573016:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fl ... 
Chunks; 2025-05-29T15:21:34.022270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:34.022274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.022280Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:34.022284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:34.022289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:34.022334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:34.022338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:34.027619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:34.027637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:34.027650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:34.027656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:34.027677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:34.027683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:34.027694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:34.027701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:34.027712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:34.027718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:34.027725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:34.027731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:34.027756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:34.027764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:34.027784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:34.027790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:34.027803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:34.027808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.027817Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:34.027823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:34.027828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:34.027949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:34.027953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:34.048592Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: 
tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.048679Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.048742Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.048948Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.056655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-05-29T15:21:34.066469Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:34.067514Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:34.068878Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:34.399383Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163589540601:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.399417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.453497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.457018Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:21:34.457193Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:21:34.457329Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:21:34.457505Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:21:34.482720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163589540651:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.482763Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.487398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.491734Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:21:34.493886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5981: Got TEvUpdateTabletsObjectReply for unknown txId 281474976715661 at schemeshard 72057594046644480 >> KqpOlapAggregations::JsonDoc_GetValue [GOOD] >> KqpOlapAggregations::AggregationCountPushdown [GOOD] >> KqpOlapAggregations::AggregationCountGroupByPushdown ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::BlockGenericSelectAll [GOOD] Test command err: Trying to start YDB, gRPC: 30239, MsgBus: 16308 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026ba/r3tmp/tmpkeffd0/pdisk_1.dat 2025-05-29T15:21:33.878998Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:21:33.946809Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888158822438048:2079] 1748532093758497 != 1748532093758500 2025-05-29T15:21:33.959026Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30239, node 1 2025-05-29T15:21:33.959469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.959486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.971029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:33.975244Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.975253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.975255Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.975290Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16308 TClient is connected to server localhost:16308 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.129002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.138333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.139912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.169051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163117406036:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.169114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163117406036:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.169175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163117406036:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.169194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163117406036:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.169223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163117406036:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.169246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163117406036:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.169265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888163117406036:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.169289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163117406036:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.169308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163117406036:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.169329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163117406036:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.169347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163117406036:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.169366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163117406036:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.173799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163117406037:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.173813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163117406037:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.173853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163117406037:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.173872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163117406037:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.173892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163117406037:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.173909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163117406037:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.173927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888163117406037:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.173957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163117406037:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.173977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163117406037:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.173996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163117406037:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.174014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163117406037:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.174031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163117406037:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.190493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163117406034:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.194776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163117406034:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.194857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163117406034:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.194879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163117406034:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.194900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163117406034:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.194930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163117406034:231 ... 
25-05-29T15:21:34.204331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:34.204350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:34.204354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:34.204370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:34.204373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:34.204384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:34.204388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.204394Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:34.204400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:34.204404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:34.204463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:34.204466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:34.217967Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; 2025-05-29T15:21:34.218974Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; 2025-05-29T15:21:34.219833Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; 2025-05-29T15:21:34.220700Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: 
tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, resource_id, level FROM `/Root/tableWithNulls` WHERE level != 5 OR level IS NULL ORDER BY id, resource_id, level; 2025-05-29T15:21:34.408116Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163117406321:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.408138Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.408264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163117406333:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.408998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.411639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-05-29T15:21:34.411704Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888163117406335:2419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-05-29T15:21:34.471774Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888163117406386:2484] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, resource_id, level FROM `/Root/tableWithNulls` WHERE level != 5 OR level IS NULL ORDER BY id, resource_id, level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"[row.id,row.resource_id,row.level]","Name":"Sort"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","Predicate":"level != 5","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":12,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Function":{"Id":7,"Arguments":[{"Id":3}]},"Column":{"Id":11}}},{"Assign":{"Function":{"Id":23,"Arguments":[{"Id":11}]},"Column":{"Id":12}}},{"Assign":{"Function":{"YqlOperationId":1,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":10},{"Id":12}]},"Column":{"Id":13}}},{"Filter":{"Predicate":{"Id":13}}},{"Projection":{"Columns":[{"Id":1},{"Id":3},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Sort-Filter-TableFullScan"}],"Node Type":"Merge","SortColumns":["id (Asc)","resource_id (Asc)","level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level","resource_id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/tableWithNulls","E-Rows":"No 
estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":12,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Function":{"Id":7,"Arguments":[{"Id":3}]},"Column":{"Id":11}}},{"Assign":{"Function":{"Id":23,"Arguments":[{"Id":11}]},"Column":{"Id":12}}},{"Assign":{"Function":{"YqlOperationId":1,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":10},{"Id":12}]},"Column":{"Id":13}}},{"Filter":{"Predicate":{"Id":13}}},{"Projection":{"Columns":[{"Id":1},{"Id":3},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level != 5","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"SortBy":"[row.id,row.resource_id,row.level]","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DataType 'Int32)) (let $2 (StructType '('"id" $1) '('"level" (OptionalType $1)) '('"resource_id" (OptionalType (DataType 'Utf8))))) (let $3 '('('"_logical_id" '920) '('"_id" '"4a6ecbf0-276272c2-176d444b-fa36db95") '('"_wide_channels" $2))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $10 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $11 '('"id" '"level" '"resource_id")) (let $12 (KqpBlockReadOlapTableRanges $10 (Void) $11 '() '() (lambda '($18) (block '( (let $19 '('neq '"level" (Int32 '"5"))) (let $20 '('?? 
$19 (Bool 'false))) (return (KqpOlapFilter $18 (KqpOlapOr $20 '('empty '"level")))) ))))) (let $13 (lambda '($21 $22 $23 $24) (block '( (let $25 (BlockAsStruct '('"id" $21) '('"level" $22) '('"resource_id" $23))) (return $25 $24) )))) (let $14 (Bool 'true)) (let $15 '($14 $14 $14)) (let $16 (Sort (NarrowMap (ToFlow (WideFromBlocks (FromFlow (WideMap $12 $13)))) (lambda '($26) $26)) $15 (lambda '($27) '((Member $27 '"id") (Member $27 '"resource_id") (Member $27 '"level"))))) (let $17 (lambda '($28) (Member $28 '"id") (Member $28 '"level") (Member $28 '"resource_id"))) (return (FromFlow (ExpandMap $16 $17))) ))) $3)) (let $5 '('('"0" '"Asc") '('"2" '"Asc") '('1 '"Asc"))) (let $6 (DqCnMerge (TDqOutput $4 '"0") $5)) (let $7 (DqPhyStage '($6) (lambda '($29) (FromFlow (NarrowMap (ToFlow $29) (lambda '($30 $31 $32) (AsStruct '('"id" $30) '('"level" $31) '('"resource_id" $32)))))) '('('"_logical_id" '932) '('"_id" '"7eb01d21-687e0791-8d6e417d-3a19066a")))) (let $8 '('"id" '"resource_id" '"level")) (let $9 (DqCnResult (TDqOutput $7 '"0") $8)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($4 $7) '($9) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $2) '"0" '"0")) '('('"type" '"query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_GetValue_ToInt [GOOD] Test command err: Trying to start YDB, gRPC: 10616, MsgBus: 20544 2025-05-29T15:21:33.516140Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888156959140502:2220];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.560849Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002686/r3tmp/tmpx8BTVX/pdisk_1.dat 2025-05-29T15:21:33.586515Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.588366Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888156959140295:2079] 1748532093502737 != 1748532093502740 TServer::EnableGrpc on GrpcPort 10616, node 1 2025-05-29T15:21:33.602813Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.602823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.602825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.602858Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:33.655171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.655191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.658983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20544 TClient is connected to server localhost:20544 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.763338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.767316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.911026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:33.930524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156959140990:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.930597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156959140990:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.930642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156959140990:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.930666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156959140990:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.930686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156959140990:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.930716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156959140990:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.938866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888156959140990:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.938950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156959140990:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.938981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156959140990:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.939002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156959140990:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.939026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156959140990:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.939047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156959140990:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.959603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156959140991:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.959624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156959140991:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.959691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156959140991:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.959710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156959140991:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.959727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156959140991:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.959747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156959140991:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.959765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;self_id=[1:7509888156959140991:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.959784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156959140991:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.959801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156959140991:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.959820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156959140991:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.959839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156959140991:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.959856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156959140991:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.976004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156959140998:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.976026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156959140998:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.976092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156959140998:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.976116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156959140998:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.976143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156959140998:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fl ... 
log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:34.031384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:34.031500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:34.031504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:34.068963Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.069784Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.070552Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.083719Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.obj.obj_col2_int" RETURNING Int), JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.obj.obj_col2_int" RETURNING Int) = 16 AND id = 1; 2025-05-29T15:21:34.352127Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888161254108556:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.352148Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.352362Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888161254108583:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.353157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.355963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:21:34.356034Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888161254108585:2419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:34.427481Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888161254108636:2483] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:34.648870Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532094407, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.obj.obj_col2_int" RETURNING Int), JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.obj.obj_col2_int" RETURNING Int) = 16 AND id = 1; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapJsonValue == 16","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Text":"$.obj.obj_col2_int"},"Column":{"Id":7}}},{"Assign":{"Function":{"KernelName":"JsonValue","KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Int32":16},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":11}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":10},{"Id":11}]},"Column":{"Id":12}}},{"Filter":{"Predicate":{"Id":12}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [1, 1]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","Path":"\/Root\/tableWithNulls","E-Rows":"No 
estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Text":"$.obj.obj_col2_int"},"Column":{"Id":7}}},{"Assign":{"Function":{"KernelName":"JsonValue","KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Int32":16},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":11}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":10},{"Id":11}]},"Column":{"Id":12}}},{"Filter":{"Predicate":{"Id":12}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapJsonValue == 16","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1356) '('"_id" '"d91a6c4f-d9c41c62-96fa8013-84535fd1") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $17 (Int32 '1)) (let $18 (Just $17)) (let $19 '($18 $17)) (let $20 (If (== $17 (Int32 '2147483647)) $19 '((+ $18 $17) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($19 $20)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (OptionalType $6)) (let $8 (TupleType $7 $6)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DqPhyStage '() (lambda '() (block '( (let $21 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $22 '('"id" '"jsondoc" '"jsonval")) (let $23 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $24 (Utf8 '"$.obj.obj_col2_int")) (let $25 (KqpWideReadOlapTableRanges $21 %kqp%tx_result_binding_0_0 $22 '() $23 (lambda '($26) (block '( (let $27 (KqpOlapJsonValue '"jsonval" $24 $6)) (let $28 '('eq $27 (Int32 '"16"))) (let $29 '('?? 
$28 (Bool 'false))) (return (KqpOlapFilter $26 $29)) ))))) (return (FromFlow (NarrowMap $25 (lambda '($30 $31 $32) (block '( (let $33 '((VariantType (TupleType (TupleType (DataType 'Uint8) (DataType 'String)) (OptionalType (DataType 'Double)))))) (let $34 (ResourceType '"JsonNode")) (let $35 (OptionalType $34)) (let $36 '((ResourceType '"JsonPath"))) (let $37 (DataType 'Utf8)) (let $38 (DictType $37 $34)) (let $39 '($38)) (let $40 (CallableType '() $33 '($35) $36 $39)) (let $41 '('('"strict"))) (let $42 (Udf '"Json2.SqlValueNumber" (Void) (VoidType) '"" $40 (VoidType) '"" $41)) (let $43 (IfPresent $32 (lambda '($57) (block '( (let $58 '((DataType 'Json) '"" '1)) (let $59 (CallableType '() '($34) $58)) (let $60 (Udf '"Json2.Parse" (Void) (VoidType) '"" $59 (VoidType) '"" '())) (return (Just (Apply $60 $57))) ))) (Nothing $35))) (let $44 (CallableType '() $36 '($37))) (let $45 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $44 (VoidType) '"" '())) (let $46 (Apply $45 $24)) (let $47 (Dict $38)) (let $48 (Apply $42 $43 $46 $47)) (let $49 (Nothing $7)) (let $50 (lambda '($61) $49)) (let $51 (lambda '($62) (If (Exists $62) (SafeCast $62 $7) $49))) (let $52 (Visit $48 '0 $50 '1 $51)) (let $53 (CallableType '() $33 '((OptionalType (DataType 'JsonDocument))) $36 $39)) (let $54 (Udf '"Json2.JsonDocumentSqlValueNumber" (Void) (VoidType) '"" $53 (VoidType) '"" $41)) (let $55 (Apply $54 $31 $46 $47)) (let $56 (Visit $55 '0 $50 '1 $51)) (return (AsStruct '('"column1" $52) '('"column2" $56) '('"id" $30))) )))))) ))) '('('"_logical_id" '1427) '('"_id" '"69bfb08c-2659b2a5-a115ed85-e5f03ff4")))) (let $11 (DqCnUnionAll (TDqOutput $10 '0))) (let $12 (DqPhyStage '($11) (lambda '($63) $63) '('('"_logical_id" '1866) '('"_id" '"71c9c511-174ddf35-b6793988-8c2d8e0f")))) (let $13 '('"id" '"column1" '"column2")) (let $14 (DqCnResult (TDqOutput $12 '0) $13)) (let $15 (KqpTxResultBinding $9 '0 '0)) (let $16 (KqpPhysicalTx '($10 $12) '($14) '('($5 $15)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $16) '((KqpTxResultBinding (ListType (StructType '('"column1" $7) '('"column2" $7) '('"id" $6))) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_Null [GOOD] Test command err: Trying to start YDB, gRPC: 28785, MsgBus: 31451 2025-05-29T15:21:33.522817Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888156005365651:2196];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.522929Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026f5/r3tmp/tmpKlWGqd/pdisk_1.dat 2025-05-29T15:21:33.687065Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.687897Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888156005365494:2079] 1748532093518633 != 1748532093518636 2025-05-29T15:21:33.711763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.711789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.719093Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28785, node 1 2025-05-29T15:21:33.738693Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.738708Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.738710Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.738767Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31451 TClient is connected to server localhost:31451 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.855673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:21:33.863284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.867714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:33.893696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156005366201:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.912542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156005366201:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.912652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156005366201:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.912674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156005366201:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.912695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156005366201:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.912714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156005366201:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.912750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156005366201:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.912769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156005366201:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.912789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156005366201:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.912808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156005366201:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.912832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888156005366201:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.912860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156005366201:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.917623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156005366203:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.917652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156005366203:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.917708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156005366203:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.917736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156005366203:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.917757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156005366203:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.917778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156005366203:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.917797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156005366203:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.917816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156005366203:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.917844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156005366203:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.917866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156005366203:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.917886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888156005366203:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.917906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156005366203:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.939449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156005366204:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.939482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156005366204:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.939570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156005366204:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.939591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156005366204:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.939617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156005366204:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fl ... 
33.958338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:33.958360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:33.958365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:33.958385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:33.958390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:33.958402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:33.958406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.958411Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:33.958416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:33.958420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:33.958502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:33.958506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:33.974204Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:33.975095Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:33.975975Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:33.976793Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: 
tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SOME(level) FROM `/Root/tableWithNulls` WHERE id > 5 2025-05-29T15:21:34.307212Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888160300333780:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.307240Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.307582Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888160300333792:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.308547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.311628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:21:34.311694Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888160300333794:2419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:34.391526Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888160300333845:2486] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:34.672246Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532094365, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SOME(level) FROM `/Root/tableWithNulls` WHERE id > 5 JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Final"},{"Inputs":[{"InternalOperatorId":2}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate-Limit-Aggregate"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 
'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (declare %kqp%tx_result_binding_1_0 (ListType (StructType '('"column0" (OptionalType (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '743) '('"_id" '"8fc2357e-a859141e-5ea533da-aead6b3c") $3)) (let $5 (DqPhyStage '() (lambda '() (block '( (let $27 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) $27) '((Nothing $2) $27))))))))))) ))) $4)) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 (KqpPhysicalTx '($5) '($6) '() '('('"type" '"compute")))) (let $8 '"%kqp%tx_result_binding_0_0") (let $9 (TupleType $2 $1)) (let $10 (TupleType (ListType (TupleType $9 $9)))) (let $11 '('('"_logical_id" '801) '('"_id" '"84a2ca96-843efaed-60cee116-b54ecbbd") '('"_wide_channels" (StructType '('_yql_agg_0 $2))))) (let $12 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $29 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $30 (KqpWideReadOlapTableRanges $28 %kqp%tx_result_binding_0_0 '('"level") '() $29 (lambda '($31) (TKqpOlapAgg $31 '('('_yql_agg_0 'some '"level")) '())))) (return (FromFlow $30)) ))) $11)) (let $13 (DqCnUnionAll (TDqOutput $12 '0))) (let $14 (DqPhyStage '($13) (lambda '($32) (block '( (let $33 (Bool 'false)) (let $34 (WideCondense1 (ToFlow $32) (lambda '($36) $36) (lambda '($37 $38) $33) (lambda '($39 $40) (Coalesce $40 $39)))) (let $35 (Condense (NarrowMap (Take $34 (Uint64 '1)) (lambda '($41) (AsStruct '('Some0 $41)))) (Nothing (OptionalType (StructType '('Some0 $2)))) (lambda '($42 $43) $33) (lambda '($44 $45) (Just $44)))) (return (FromFlow (Map $35 (lambda '($46) (AsList (AsStruct '('"column0" (Member $46 'Some0)))))))) ))) '('('"_logical_id" '1335) '('"_id" '"fd39fdb9-ff7bfa71-2b5ddb4e-e31a401a")))) (let $15 (DqCnValue (TDqOutput $14 '0))) (let $16 (KqpTxResultBinding $10 '0 '0)) (let $17 '('('"type" '"scan"))) (let $18 (KqpPhysicalTx '($12 $14) '($15) '('($8 $16)) $17)) (let $19 '"%kqp%tx_result_binding_1_0") (let $20 (ListType (StructType '('"column0" $2)))) (let $21 '('('"_logical_id" '1431) '('"_id" '"baa2abab-628a1ba0-47fd205f-7538a3c4") $3)) (let $22 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_1_0)) $21)) (let $23 (DqCnResult (TDqOutput $22 '0) '('"column0"))) (let $24 (KqpTxResultBinding $20 '1 '0)) (let $25 (KqpPhysicalTx '($22) '($23) '('($19 $24)) $17)) (let $26 '($7 $18 $25)) (return (KqpPhysicalQuery $26 '((KqpTxResultBinding $20 '"2" '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::BlockGenericWithDistinct [GOOD] Test command err: Trying to start YDB, gRPC: 27008, MsgBus: 15144 2025-05-29T15:21:33.766754Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888156500007875:2218];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026b2/r3tmp/tmpRKi1XQ/pdisk_1.dat 2025-05-29T15:21:33.827231Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:33.868414Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888156500007671:2079] 1748532093761995 != 1748532093761998 2025-05-29T15:21:33.877948Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27008, node 1 2025-05-29T15:21:33.904300Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.904313Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.904315Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.904349Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:33.929814Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.929845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.932502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15144 TClient is connected to server localhost:15144 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.074258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:21:34.079182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.091272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.112909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160794975658:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.118829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160794975658:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.118880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160794975658:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.118906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160794975658:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.118929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160794975658:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.118958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160794975658:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.118981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160794975658:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.119000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160794975658:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.119027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160794975658:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.119050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160794975658:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.119079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888160794975658:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.119101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160794975658:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.127944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160794975665:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.127976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160794975665:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.128046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160794975665:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.128073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160794975665:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.128099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160794975665:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.128121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160794975665:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.128150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160794975665:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.128172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160794975665:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.128194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160794975665:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.128227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160794975665:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.128248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888160794975665:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.128268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160794975665:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.134121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160794975657:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.139762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160794975657:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.139877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160794975657:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.139917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160794975657:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.139946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160794975657:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fl ... 
ewrite=0; 2025-05-29T15:21:34.147659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:34.147668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:34.147717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:34.147720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:34.162903Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.163917Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.164836Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.175429Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(DISTINCT id) FROM `/Root/tableWithNulls` WHERE level = 5 AND Cast(id AS String) = "5"; 2025-05-29T15:21:34.364955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888160794975937:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.364994Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.365119Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888160794975957:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.365968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.368854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:21:34.368911Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888160794975959:2419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:34.468200Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888160794976010:2482] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(DISTINCT id) FROM `/Root/tableWithNulls` WHERE level = 5 AND Cast(id AS String) = "5"; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"GroupBy":"item.id","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","Predicate":"level == 5 AND id == \"5\"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Function":{"KernelName":"","KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":1}]},"Column":{"Id":11}}},{"Assign":{"Function":{"YqlOperationId":0,"KernelIdx":3,"FunctionType":2,"Arguments":[{"Id":10},{"Id":11}]},"Column":{"Id":12}}},{"Filter":{"Predicate":{"Id":12}}},{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"Aggregate-Filter-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate-Aggregate"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Final"},{"Inputs":[{"InternalOperatorId":2}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":4}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate-Limit-Aggregate"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/tableWithNulls","E-Rows":"No 
estimate","Table":"tableWithNulls","ReadColumns":["id","level"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Function":{"KernelName":"","KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":1}]},"Column":{"Id":11}}},{"Assign":{"Function":{"YqlOperationId":0,"KernelIdx":3,"FunctionType":2,"Arguments":[{"Id":10},{"Id":11}]},"Column":{"Id":12}}},{"Filter":{"Predicate":{"Id":12}}},{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 5 AND id == \"5\"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"GroupBy":"item.id","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"id\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (Bool 'false)) (let $2 (DataType 'Int32)) (let $3 '('"id" $2)) (let $4 '('('"_logical_id" '1112) '('"_id" '"96c064e9-1e502626-a41cb503-3489a553") '('"_wide_channels" (StructType $3)))) (let $5 (DqPhyStage '() (lambda '() (block '( (let $15 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $16 (KqpBlockReadOlapTableRanges $15 (Void) '('"id" '"level") '() '() (lambda '($18) (block '( (let $19 '('eq '"level" (Int32 '"5"))) (let $20 '('?? 
$19 $1)) (let $21 (KqpOlapApply (lambda '($22) (== (SafeCast $22 (DataType 'String)) (String '"5"))) '((KqpOlapApplyColumnArg (StructType $3 '('"level" (OptionalType $2))) '"id")) '"")) (return (TKqpOlapExtractMembers (KqpOlapFilter $18 (KqpOlapAnd $20 $21)) '('"id"))) ))))) (let $17 (lambda '($23 $24) (BlockAsStruct '('"id" $23)) $24)) (return (FromFlow (WideCombiner (ToFlow (WideFromBlocks (FromFlow (WideMap $16 $17)))) '-1073741824 (lambda '($25) (Member $25 '"id")) (lambda '($26 $27) $26) (lambda '($28 $29 $30) $30) (lambda '($31 $32) $32)))) ))) $4)) (let $6 (DqCnHashShuffle (TDqOutput $5 '0) '('0))) (let $7 (Uint64 '1)) (let $8 (DataType 'Uint64)) (let $9 '('('"_logical_id" '1817) '('"_id" '"9791057a-4fc7a82c-5849a7f3-4214182e") '('"_wide_channels" (StructType '('_yql_agg_0 (OptionalType $8)))))) (let $10 (DqPhyStage '($6) (lambda '($33) (block '( (let $34 (lambda '($38) $38)) (let $35 (lambda '($39 $40))) (let $36 (WideCombiner (ToFlow $33) '"" $34 $35 $35 $34)) (let $37 (Condense1 (NarrowMap $36 (lambda '($41) (AsStruct '('"id" $41)))) (lambda '($42) $7) (lambda '($43 $44) $1) (lambda '($45 $46) (Inc $46)))) (return (FromFlow (ExpandMap $37 (lambda '($47) (Just $47))))) ))) $9)) (let $11 (DqCnUnionAll (TDqOutput $10 '0))) (let $12 (DqPhyStage '($11) (lambda '($48) (block '( (let $49 (WideCondense1 (ToFlow $48) (lambda '($51) $51) (lambda '($52 $53) $1) (lambda '($54 $55) (IfPresent $54 (lambda '($56) (IfPresent $55 (lambda '($57) (Just (AggrAdd $56 $57))) $54)) $55)))) (let $50 (Condense (NarrowMap (Take $49 $7) (lambda '($58) (AsStruct '('Count0 (Unwrap $58))))) (Nothing (OptionalType (StructType '('Count0 $8)))) (lambda '($59 $60) $1) (lambda '($61 $62) (Just $61)))) (return (FromFlow (Map $50 (lambda '($63) (AsStruct '('"column0" (Coalesce (Member $63 'Count0) (Uint64 '0)))))))) ))) '('('"_logical_id" '2552) '('"_id" '"e6df288c-dcb35ceb-7a61fb3a-741ec58a")))) (let $13 '($5 $10 $12)) (let $14 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (return (KqpPhysicalQuery '((KqpPhysicalTx $13 '($14) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType (StructType '('"column0" $8))) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Filter_NotAllUsedFieldsInResultSet [GOOD] Test command err: Trying to start YDB, gRPC: 8682, MsgBus: 16966 2025-05-29T15:21:33.741751Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888157074352324:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026c5/r3tmp/tmp9pUpKe/pdisk_1.dat 2025-05-29T15:21:33.797363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:33.850673Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.854155Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888157074352115:2079] 1748532093723426 != 1748532093723429 TServer::EnableGrpc on GrpcPort 8682, node 1 2025-05-29T15:21:33.875402Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.875414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize 
from file: (empty maybe) 2025-05-29T15:21:33.875416Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.875456Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:33.892965Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.892989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.894214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16966 TClient is connected to server localhost:16966 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:21:34.044099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.051038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.056202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.077168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161369320104:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.079015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161369320104:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.079097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161369320104:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.079124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888161369320104:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.079146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161369320104:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.079164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161369320104:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.079183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161369320104:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.079199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161369320104:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.079227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161369320104:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.079243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161369320104:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.079260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161369320104:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.079283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161369320104:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.090194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161369320106:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.090223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161369320106:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.090286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161369320106:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.090305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888161369320106:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.090325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161369320106:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.090344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161369320106:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.090362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161369320106:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.090382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161369320106:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.090402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161369320106:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.090419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161369320106:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.090438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161369320106:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.090456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161369320106:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.095461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161369320118:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.095483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161369320118:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.095540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161369320118:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.095561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;self_id=[1:7509888161369320118:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.095587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161369320118:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;flin ... 075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:34.108574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:34.108585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:34.108596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:34.108607Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:34.108612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:34.108618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:34.108624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:34.108655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:34.108663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:34.108684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:34.108692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:34.108705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:34.108710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.108719Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:34.108726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:34.108732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:34.108802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:34.108806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:34.131576Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.132476Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.133303Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.134107Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; 2025-05-29T15:21:34.347474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888161369320367:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.347532Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.347689Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888161369320403:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.348456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.351478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:21:34.351575Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888161369320405:2419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:34.447561Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888161369320456:2483] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:34.743441Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532094400, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"level == 5","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level","resource_id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 5","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let 
$1 '('"id" '"resource_id")) (let $2 (DqPhyStage '() (lambda '() (block '( (let $6 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $7 '('"id" '"level" '"resource_id")) (let $8 (KqpWideReadOlapTableRanges $6 (Void) $7 '() '() (lambda '($9) (block '( (let $10 '('eq '"level" (Int32 '"5"))) (let $11 '('?? $10 (Bool 'false))) (return (TKqpOlapExtractMembers (KqpOlapFilter $9 $11) $1)) ))))) (return (FromFlow (NarrowMap $8 (lambda '($12 $13) (AsStruct '('"id" $12) '('"resource_id" $13)))))) ))) '('('"_logical_id" '586) '('"_id" '"15b1510b-7fcc5f98-ee4ca8f5-66e66b50")))) (let $3 (DqCnUnionAll (TDqOutput $2 '"0"))) (let $4 (DqPhyStage '($3) (lambda '($14) $14) '('('"_logical_id" '754) '('"_id" '"6abc7fd5-ecbe0a51-b3e54cb4-aa3bc1c6")))) (let $5 (DqCnResult (TDqOutput $4 '"0") $1)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($2 $4) '($5) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType (StructType '('"id" (DataType 'Int32)) '('"resource_id" (OptionalType (DataType 'Utf8))))) '"0" '"0")) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::JsonDoc_GetValue_ToInt [GOOD] Test command err: Trying to start YDB, gRPC: 9268, MsgBus: 22260 2025-05-29T15:21:33.717005Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888159111229972:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.717342Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026e9/r3tmp/tmpZPfvAo/pdisk_1.dat 2025-05-29T15:21:33.885555Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888159111229953:2079] 1748532093716567 != 1748532093716570 2025-05-29T15:21:33.885654Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9268, node 1 2025-05-29T15:21:33.901484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.901503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.905513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:33.922923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.922936Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.922938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.922973Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22260 TClient is connected to server localhost:22260 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.107613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.116512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.126108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.141802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163406197945:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.141865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163406197945:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.141922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163406197945:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.141942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163406197945:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.141965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163406197945:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.141987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163406197945:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.142006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888163406197945:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.142026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163406197945:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.142050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163406197945:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.142070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163406197945:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.142088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163406197945:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.142107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163406197945:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.146475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163406197946:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.146489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163406197946:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.146524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163406197946:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.146544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163406197946:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.146563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163406197946:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.146585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163406197946:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.146603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888163406197946:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.146622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163406197946:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.146643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163406197946:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.146661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163406197946:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.146680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163406197946:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.146698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163406197946:2315];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.156198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163406197951:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.156226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163406197951:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.156271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163406197951:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.156295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163406197951:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.156316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163406197951:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;flin ... 
ecute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:34.163407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:34.163478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:34.163481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:34.201934Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.202880Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.203750Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.204564Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.obj.obj_col2_int"), JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) = 16 AND id = 6; 2025-05-29T15:21:34.344484Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163406198206:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.344505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.344650Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163406198241:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.345490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.347564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:21:34.347637Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888163406198243:2419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:34.443415Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888163406198294:2482] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:34.720633Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532094400, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.obj.obj_col2_int"), JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) = 16 AND id = 6; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapJsonValue == 16","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Text":"$.obj.obj_col2_int"},"Column":{"Id":7}}},{"Assign":{"Function":{"KernelName":"JsonValue","KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Int32":16},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":11}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":10},{"Id":11}]},"Column":{"Id":12}}},{"Filter":{"Predicate":{"Id":12}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Path":"\/Root\/tableWithNulls","E-Rows":"No 
estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Text":"$.obj.obj_col2_int"},"Column":{"Id":7}}},{"Assign":{"Function":{"KernelName":"JsonValue","KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Int32":16},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":11}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":10},{"Id":11}]},"Column":{"Id":12}}},{"Filter":{"Predicate":{"Id":12}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapJsonValue == 16","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1354) '('"_id" '"de2219c6-6d064200-d2fdf07c-992e3760") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $19 (Int32 '"6")) (let $20 (Just $19)) (let $21 (Int32 '1)) (let $22 '($20 $21)) (let $23 (If (== $19 (Int32 '2147483647)) $22 '((+ $20 $21) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($22 $23)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (OptionalType $6)) (let $8 (TupleType $7 $6)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Utf8)) (let $11 (OptionalType $10)) (let $12 (DqPhyStage '() (lambda '() (block '( (let $24 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $25 '('"id" '"jsondoc" '"jsonval")) (let $26 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $27 (Utf8 '"$.obj.obj_col2_int")) (let $28 (KqpWideReadOlapTableRanges $24 %kqp%tx_result_binding_0_0 $25 '() $26 (lambda '($29) (block '( (let $30 (KqpOlapJsonValue '"jsondoc" $27 $6)) (let $31 '('eq $30 (Int32 '"16"))) (let $32 '('?? 
$31 (Bool 'false))) (return (KqpOlapFilter $29 $32)) ))))) (return (FromFlow (NarrowMap $28 (lambda '($33 $34 $35) (block '( (let $36 (TupleType (DataType 'Uint8) (DataType 'String))) (let $37 (ResourceType '"JsonNode")) (let $38 (OptionalType $37)) (let $39 '((ResourceType '"JsonPath"))) (let $40 (DictType $10 $37)) (let $41 '($40)) (let $42 (CallableType '() '((VariantType (TupleType $36 $11))) '($38) $39 $41)) (let $43 '('('"strict"))) (let $44 (Udf '"Json2.SqlValueConvertToUtf8" (Void) (VoidType) '"" $42 (VoidType) '"" $43)) (let $45 (IfPresent $35 (lambda '($57) (block '( (let $58 '((DataType 'Json) '"" '1)) (let $59 (CallableType '() '($37) $58)) (let $60 (Udf '"Json2.Parse" (Void) (VoidType) '"" $59 (VoidType) '"" '())) (return (Just (Apply $60 $57))) ))) (Nothing $38))) (let $46 (CallableType '() $39 '($10))) (let $47 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $46 (VoidType) '"" '())) (let $48 (Apply $47 $27)) (let $49 (Dict $40)) (let $50 (Apply $44 $45 $48 $49)) (let $51 (Visit $50 '0 (lambda '($61) (Nothing $11)) '1 (lambda '($62) $62))) (let $52 (CallableType '() '((VariantType (TupleType $36 (OptionalType (DataType 'Double))))) '((OptionalType (DataType 'JsonDocument))) $39 $41)) (let $53 (Udf '"Json2.JsonDocumentSqlValueNumber" (Void) (VoidType) '"" $52 (VoidType) '"" $43)) (let $54 (Apply $53 $34 $48 $49)) (let $55 (Nothing $7)) (let $56 (Visit $54 '0 (lambda '($63) $55) '1 (lambda '($64) (If (Exists $64) (SafeCast $64 $7) $55)))) (return (AsStruct '('"column1" $51) '('"column2" $56) '('"id" $33))) )))))) ))) '('('"_logical_id" '1425) '('"_id" '"3252ce0b-9a83bf12-3caa4ea1-8f2c8184")))) (let $13 (DqCnUnionAll (TDqOutput $12 '0))) (let $14 (DqPhyStage '($13) (lambda '($65) $65) '('('"_logical_id" '1864) '('"_id" '"9212de1a-66d5920-586b40cd-8aaa393b")))) (let $15 '('"id" '"column1" '"column2")) (let $16 (DqCnResult (TDqOutput $14 '0) $15)) (let $17 (KqpTxResultBinding $9 '0 '0)) (let $18 (KqpPhysicalTx '($12 $14) '($16) '('($5 $17)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $18) '((KqpTxResultBinding (ListType (StructType '('"column1" $11) '('"column2" $7) '('"id" $6))) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_GetValue_ToString [GOOD] Test command err: Trying to start YDB, gRPC: 6035, MsgBus: 19124 2025-05-29T15:21:33.646868Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888158217362870:2196];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.647020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00267f/r3tmp/tmpn7N0fb/pdisk_1.dat 2025-05-29T15:21:33.803557Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.805915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.805928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.806163Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888158217362713:2079] 1748532093641012 != 1748532093641015 
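[Annotation] The JsonDoc_GetValue_ToInt plan and AST above show two distinct JSON evaluation paths: inside the columnshard, the pushed-down comparison uses the KqpOlapJsonValue kernel ((let $30 (KqpOlapJsonValue '"jsondoc" $27 $6)) feeding KqpOlapFilter), while the projected columns are computed on the compute side with the Json2 UDFs visible in the AST (Json2.CompilePath, Json2.Parse, and Json2.SqlValueConvertToUtf8 for the plain JSON column; Json2.JsonDocumentSqlValueNumber plus SafeCast for the RETURNING Int case). The originating query, as captured in the REQUEST blocks of that test, was:

    --!syntax_v1
    PRAGMA Kikimr.OptUseFinalizeByKey;
    SELECT
        id,
        JSON_VALUE(jsonval, "$.obj.obj_col2_int"),                -- default RETURNING: Utf8 (Json2.SqlValueConvertToUtf8)
        JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int)   -- Json2.JsonDocumentSqlValueNumber + SafeCast to Int32
    FROM `/Root/tableWithNulls`
    WHERE JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) = 16 AND id = 6;
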
2025-05-29T15:21:33.815374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6035, node 1 2025-05-29T15:21:33.847286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.847299Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.847301Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.847334Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19124 TClient is connected to server localhost:19124 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.015979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:21:34.020052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.026454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.048082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162512330699:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.055303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162512330699:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.055397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162512330699:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.055419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162512330699:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.055438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162512330699:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.055456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162512330699:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.055473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162512330699:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.055499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162512330699:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.055520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162512330699:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.055543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162512330699:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.055563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888162512330699:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.055582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162512330699:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.068399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888162512330700:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.068453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888162512330700:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.069194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888162512330700:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.069358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888162512330700:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.069386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888162512330700:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.069408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888162512330700:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.069427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888162512330700:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.069447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888162512330700:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.069481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888162512330700:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.069498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888162512330700:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.069517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888162512330700:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.069536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888162512330700:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.074035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888162512330701:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.089584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888162512330701:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.089670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888162512330701:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.089693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888162512330701:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.089715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888162512330701:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;flin ... 
0;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:34.111299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:34.111395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:34.111399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:34.144176Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; 2025-05-29T15:21:34.145113Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; 2025-05-29T15:21:34.145964Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; 2025-05-29T15:21:34.157191Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1" RETURNING String), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.col1" RETURNING String) = "val1" AND id = 1; 2025-05-29T15:21:34.524472Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888162512330959:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.524500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.524619Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888162512330995:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.525346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.527987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-05-29T15:21:34.528081Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888162512330997:2419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-05-29T15:21:34.610917Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888162512331048:2483] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:34.801212Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532094582, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1" RETURNING String), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.col1" RETURNING String) = "val1" AND id = 1; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapJsonValue == \"val1\"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Text":"$.col1"},"Column":{"Id":7}}},{"Assign":{"Function":{"KernelName":"JsonValue","KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":11}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":10},{"Id":11}]},"Column":{"Id":12}}},{"Filter":{"Predicate":{"Id":12}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [1, 1]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","Path":"\/Root\/tableWithNulls","E-Rows":"No 
estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Text":"$.col1"},"Column":{"Id":7}}},{"Assign":{"Function":{"KernelName":"JsonValue","KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":11}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":10},{"Id":11}]},"Column":{"Id":12}}},{"Filter":{"Predicate":{"Id":12}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapJsonValue == \"val1\"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1354) '('"_id" '"3a30a0d1-b26695b4-ff26da56-af57925c") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $20 (Int32 '1)) (let $21 (Just $20)) (let $22 '($21 $20)) (let $23 (If (== $20 (Int32 '2147483647)) $22 '((+ $21 $20) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($22 $23)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 (DataType 'String)) (let $10 (DataType 'Utf8)) (let $11 (OptionalType $10)) (let $12 (OptionalType $9)) (let $13 (DqPhyStage '() (lambda '() (block '( (let $24 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $25 '('"id" '"jsondoc" '"jsonval")) (let $26 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $27 (Utf8 '"$.col1")) (let $28 (KqpWideReadOlapTableRanges $24 %kqp%tx_result_binding_0_0 $25 '() $26 (lambda '($29) (block '( (let $30 (KqpOlapJsonValue '"jsonval" $27 $9)) (let $31 '('eq $30 (String '"val1"))) (let $32 '('?? 
$31 (Bool 'false))) (return (KqpOlapFilter $29 $32)) ))))) (return (FromFlow (NarrowMap $28 (lambda '($33 $34 $35) (block '( (let $36 '((VariantType (TupleType (TupleType (DataType 'Uint8) $9) $11)))) (let $37 (ResourceType '"JsonNode")) (let $38 (OptionalType $37)) (let $39 '((ResourceType '"JsonPath"))) (let $40 (DictType $10 $37)) (let $41 '($40)) (let $42 (CallableType '() $36 '($38) $39 $41)) (let $43 '('('"strict"))) (let $44 (Udf '"Json2.SqlValueUtf8" (Void) (VoidType) '"" $42 (VoidType) '"" $43)) (let $45 (IfPresent $35 (lambda '($57) (block '( (let $58 '((DataType 'Json) '"" '1)) (let $59 (CallableType '() '($37) $58)) (let $60 (Udf '"Json2.Parse" (Void) (VoidType) '"" $59 (VoidType) '"" '())) (return (Just (Apply $60 $57))) ))) (Nothing $38))) (let $46 (CallableType '() $39 '($10))) (let $47 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $46 (VoidType) '"" '())) (let $48 (Apply $47 $27)) (let $49 (Dict $40)) (let $50 (Apply $44 $45 $48 $49)) (let $51 (Nothing $12)) (let $52 (Visit $50 '0 (lambda '($61) $51) '1 (lambda '($62) (block '( (let $63 (IfPresent $62 (lambda '($64) (Just (SafeCast $64 $9))) $51)) (return (If (Exists $62) $63 $51)) ))))) (let $53 (CallableType '() $36 '((OptionalType (DataType 'JsonDocument))) $39 $41)) (let $54 (Udf '"Json2.JsonDocumentSqlValueConvertToUtf8" (Void) (VoidType) '"" $53 (VoidType) '"" $43)) (let $55 (Apply $54 $34 $48 $49)) (let $56 (Visit $55 '0 (lambda '($65) (Nothing $11)) '1 (lambda '($66) $66))) (return (AsStruct '('"column1" $52) '('"column2" $56) '('"id" $33))) )))))) ))) '('('"_logical_id" '1425) '('"_id" '"964520a2-18e76a03-853b9aa7-220323c8")))) (let $14 (DqCnUnionAll (TDqOutput $13 '0))) (let $15 (DqPhyStage '($14) (lambda '($67) $67) '('('"_logical_id" '1864) '('"_id" '"f785c9a0-e30d4f47-da837c7f-7a44a082")))) (let $16 '('"id" '"column1" '"column2")) (let $17 (DqCnResult (TDqOutput $15 '0) $16)) (let $18 (KqpTxResultBinding $8 '0 '0)) (let $19 (KqpPhysicalTx '($13 $15) '($17) '('($5 $18)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $19) '((KqpTxResultBinding (ListType (StructType '('"column1" $12) '('"column2" $11) '('"id" $6))) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::Aggregation_Some_NullGroupBy [GOOD] >> KqpOlapStats::AddRowsTableStandalone [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::JsonDoc_GetValue [GOOD] Test command err: Trying to start YDB, gRPC: 4048, MsgBus: 9179 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026df/r3tmp/tmpxo7YGj/pdisk_1.dat 2025-05-29T15:21:34.015326Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:21:34.130598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:34.130636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:34.136585Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888155481498535:2079] 1748532093830663 != 1748532093830666 2025-05-29T15:21:34.142951Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:34.143554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting 
-> Connected TServer::EnableGrpc on GrpcPort 4048, node 1 2025-05-29T15:21:34.167312Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:34.167322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:34.167324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:34.167360Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9179 TClient is connected to server localhost:9179 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.314918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
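[Annotation] The Json_GetValue_ToString case that finished above exercises the converse typing: its AST resolves JSON_VALUE(jsonval, .. RETURNING String) through Json2.SqlValueUtf8 followed by SafeCast to String, and the untyped JSON_VALUE(jsondoc, ..) through Json2.JsonDocumentSqlValueConvertToUtf8, again with the comparison pushed down as a KqpOlapJsonValue/KqpOlapFilter pair over the point range id [1, 1]. The captured REQUEST was:

    --!syntax_v1
    PRAGMA Kikimr.OptUseFinalizeByKey;
    SELECT
        id,
        JSON_VALUE(jsonval, "$.col1" RETURNING String),  -- Json2.SqlValueUtf8 + SafeCast to String
        JSON_VALUE(jsondoc, "$.col1")                    -- Json2.JsonDocumentSqlValueConvertToUtf8 (Utf8 result)
    FROM `/Root/tableWithNulls`
    WHERE JSON_VALUE(jsonval, "$.col1" RETURNING String) = "val1" AND id = 1;
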
2025-05-29T15:21:34.327036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.348022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.386546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159776466534:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.386917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159776466534:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.390908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159776466534:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.390949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159776466534:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.390973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159776466534:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.390992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159776466534:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.391010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159776466534:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.391027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159776466534:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.391050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159776466534:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.391070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159776466534:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.391088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888159776466534:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.391116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159776466534:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.402490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159776466535:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.402509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159776466535:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.402563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159776466535:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.402576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159776466535:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.402594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159776466535:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.402607Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159776466535:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.402617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159776466535:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.402628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159776466535:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.402640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159776466535:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.402650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159776466535:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.402662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888159776466535:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.402673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159776466535:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.405237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888159776466536:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.405247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888159776466536:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.405272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888159776466536:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.405289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888159776466536:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.405306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888159776466536:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.405318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888159776466536:2316];ta ... 
T: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:34.412185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:34.412190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:34.412242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:34.412249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:34.420517Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; 2025-05-29T15:21:34.421486Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; 2025-05-29T15:21:34.422324Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; 2025-05-29T15:21:34.431516Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.col1") = "val1" AND id = 6; 2025-05-29T15:21:34.697073Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888159776466792:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.697108Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.697265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888159776466828:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.698253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.701262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-05-29T15:21:34.701334Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888159776466830:2419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-05-29T15:21:34.765113Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888159776466881:2486] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:34.933859Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532094750, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.col1") = "val1" AND id = 6; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapJsonValue == \"val1\"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Text":"$.col1"},"Column":{"Id":7}}},{"Assign":{"Function":{"KernelName":"JsonValue","KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":11}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":10},{"Id":11}]},"Column":{"Id":12}}},{"Filter":{"Predicate":{"Id":12}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Path":"\/Root\/tableWithNulls","E-Rows":"No 
estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Text":"$.col1"},"Column":{"Id":7}}},{"Assign":{"Function":{"KernelName":"JsonValue","KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":11}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":10},{"Id":11}]},"Column":{"Id":12}}},{"Filter":{"Predicate":{"Id":12}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapJsonValue == \"val1\"","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1350) '('"_id" '"91e19fa8-a974ebee-1fb6c13a-1d51a407") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $18 (Int32 '"6")) (let $19 (Just $18)) (let $20 (Int32 '1)) (let $21 '($19 $20)) (let $22 (If (== $18 (Int32 '2147483647)) $21 '((+ $19 $20) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($21 $22)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 (DataType 'Utf8)) (let $10 (OptionalType $9)) (let $11 (DqPhyStage '() (lambda '() (block '( (let $23 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $24 '('"id" '"jsondoc" '"jsonval")) (let $25 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $26 (Utf8 '"$.col1")) (let $27 (KqpWideReadOlapTableRanges $23 %kqp%tx_result_binding_0_0 $24 '() $25 (lambda '($28) (block '( (let $29 (KqpOlapJsonValue '"jsondoc" $26 $9)) (let $30 '('eq $29 (String '"val1"))) (let $31 '('?? 
$30 (Bool 'false))) (return (KqpOlapFilter $28 $31)) ))))) (return (FromFlow (NarrowMap $27 (lambda '($32 $33 $34) (block '( (let $35 '((VariantType (TupleType (TupleType (DataType 'Uint8) (DataType 'String)) $10)))) (let $36 (ResourceType '"JsonNode")) (let $37 (OptionalType $36)) (let $38 '((ResourceType '"JsonPath"))) (let $39 (DictType $9 $36)) (let $40 '($39)) (let $41 (CallableType '() $35 '($37) $38 $40)) (let $42 '('('"strict"))) (let $43 (Udf '"Json2.SqlValueConvertToUtf8" (Void) (VoidType) '"" $41 (VoidType) '"" $42)) (let $44 (IfPresent $34 (lambda '($57) (block '( (let $58 '((DataType 'Json) '"" '1)) (let $59 (CallableType '() '($36) $58)) (let $60 (Udf '"Json2.Parse" (Void) (VoidType) '"" $59 (VoidType) '"" '())) (return (Just (Apply $60 $57))) ))) (Nothing $37))) (let $45 (CallableType '() $38 '($9))) (let $46 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $45 (VoidType) '"" '())) (let $47 (Apply $46 $26)) (let $48 (Dict $39)) (let $49 (Apply $43 $44 $47 $48)) (let $50 (lambda '($61) (Nothing $10))) (let $51 (lambda '($62) $62)) (let $52 (Visit $49 '0 $50 '1 $51)) (let $53 (CallableType '() $35 '((OptionalType (DataType 'JsonDocument))) $38 $40)) (let $54 (Udf '"Json2.JsonDocumentSqlValueConvertToUtf8" (Void) (VoidType) '"" $53 (VoidType) '"" $42)) (let $55 (Apply $54 $33 $47 $48)) (let $56 (Visit $55 '0 $50 '1 $51)) (return (AsStruct '('"column1" $52) '('"column2" $56) '('"id" $32))) )))))) ))) '('('"_logical_id" '1421) '('"_id" '"5df88937-91a3c37f-c186e63b-aca273b2")))) (let $12 (DqCnUnionAll (TDqOutput $11 '0))) (let $13 (DqPhyStage '($12) (lambda '($63) $63) '('('"_logical_id" '1862) '('"_id" '"58b8566d-a6670edb-ab1696de-d020047e")))) (let $14 '('"id" '"column1" '"column2")) (let $15 (DqCnResult (TDqOutput $13 '0) $14)) (let $16 (KqpTxResultBinding $8 '0 '0)) (let $17 (KqpPhysicalTx '($11 $13) '($15) '('($5 $16)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $17) '((KqpTxResultBinding (ListType (StructType '('"column1" $10) '('"column2" $10) '('"id" $6))) '1 '0)) '('('"type" '"scan_query")))) ) |57.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut >> KqpOlap::SimpleRangeOlap [GOOD] >> KqpOlap::OlapRead_UsesScanOnJoinWithDataShardTable [GOOD] |57.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |57.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut >> KqpOlap::SimpleQueryOlapMeta [GOOD] >> KqpDecimalColumnShard::TestFilterEqual [GOOD] >> KqpDecimalColumnShard::TestFilterCompare >> KqpOlapAggregations::Aggregation_Sum_GroupByNullMix [GOOD] >> KqpOlap::OlapRead_ScanQuery [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_NullGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 61396, MsgBus: 5183 2025-05-29T15:21:33.222688Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888156688478941:2197];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025e3/r3tmp/tmpehYYyy/pdisk_1.dat 2025-05-29T15:21:33.226437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:33.327500Z node 1 :IMPORT 
WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.330819Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888156688478784:2079] 1748532093215296 != 1748532093215299 TServer::EnableGrpc on GrpcPort 61396, node 1 2025-05-29T15:21:33.358933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.358945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.358947Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.358985Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5183 2025-05-29T15:21:33.391063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.391117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.391802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5183 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.491928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:21:33.495253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.500026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:33.522101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156688479483:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.522179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156688479483:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.522244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156688479483:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.522266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156688479483:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.522294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156688479483:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.522319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156688479483:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.522340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156688479483:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.522363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156688479483:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.522386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156688479483:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.522424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156688479483:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.522449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888156688479483:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.522470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888156688479483:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.532821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156688479476:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.532852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156688479476:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.532939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156688479476:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.532959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156688479476:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.532986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156688479476:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.533009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156688479476:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.533027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156688479476:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.533047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156688479476:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.533082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156688479476:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.533104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156688479476:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.533125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;self_id=[1:7509888156688479476:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.533145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156688479476:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.538648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156688479482:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.538676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156688479482:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.541552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156688479482:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.541647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156688479482:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.541672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156688479482:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline ... 
p:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:33.550863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:33.550883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:33.550887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:33.550906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:33.550911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:33.550924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:33.550928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.550934Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:33.550939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:33.550943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:33.550997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:33.551000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:33.570125Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:33.571178Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:33.572057Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 
2025-05-29T15:21:33.573001Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SOME(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; 2025-05-29T15:21:33.884638Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888156688479747:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.884677Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.886509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888156688479774:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.887597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:33.890877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:21:33.890962Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888156688479776:2419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:33.959971Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888156688479827:2483] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SOME(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; 2025-05-29T15:21:35.169256Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532094000, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.id","Name":"Sort"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Sort-Aggregate"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle (KeyColumns: [\"id\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 
(DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '892) '('"_id" '"31270344-1c6d7a2-8694e1e6-25565811") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $23 (Int32 '1)) (let $24 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"6")) $23) $24))) (RangeCreate (AsList '($24 '((Just (Int32 '"7")) $23)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"id" $1)) (let $11 '('('"_logical_id" '951) '('"_id" '"48cd8671-b2f9ec43-fd2c0abd-d513cdc0") '('"_wide_channels" (StructType '('_yql_agg_0 $2) $10)))) (let $12 (DqPhyStage '() (lambda '() (block '( (let $25 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $26 '('"id")) (let $27 '('('"UsedKeyColumns" $26) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $28 (KqpWideReadOlapTableRanges $25 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $27 (lambda '($29) (TKqpOlapAgg $29 '('('_yql_agg_0 'some '"level")) $26)))) (return (FromFlow $28)) ))) $11)) (let $13 (DqCnHashShuffle (TDqOutput $12 '0) '('1))) (let $14 (StructType '('"column1" $2) $10)) (let $15 '('('"_logical_id" '1374) '('"_id" '"5daf5264-e06e893b-dd75b481-c2ecedcd") '('"_wide_channels" $14))) (let $16 (DqPhyStage '($13) (lambda '($30) (block '( (let $31 (lambda '($42 $43) $43 $42)) (let $32 (WideCombiner (ToFlow $30) '"" (lambda '($33 $34) $34) (lambda '($35 $36 $37) $36) (lambda '($38 $39 $40 $41) (Coalesce $41 $39)) $31)) (return (FromFlow (WideSort $32 '('('1 (Bool 'true)))))) ))) $15)) (let $17 (DqCnMerge (TDqOutput $16 '0) '('('1 '"Asc")))) (let $18 (DqPhyStage '($17) (lambda '($44) (FromFlow (NarrowMap (ToFlow $44) (lambda '($45 $46) (AsStruct '('"column1" $45) '('"id" $46)))))) '('('"_logical_id" '1386) '('"_id" '"cbafbb6a-31d4f688-39d75d9c-8b428ff2")))) (let $19 '($12 $16 $18)) (let $20 (DqCnResult (TDqOutput $18 '0) '('"id" '"column1"))) (let $21 (KqpTxResultBinding $9 '0 '0)) (let $22 (KqpPhysicalTx $19 '($20) '('($7 $21)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $22) '((KqpTxResultBinding (ListType $14) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapStats::AddRowsTableStandalone [GOOD] Test command err: Trying to start YDB, gRPC: 26590, MsgBus: 4335 2025-05-29T15:21:33.399808Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888159033036803:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.400041Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00270c/r3tmp/tmpJ9Z6yp/pdisk_1.dat 2025-05-29T15:21:33.550878Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.574621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.574645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting TServer::EnableGrpc on GrpcPort 26590, node 1 2025-05-29T15:21:33.583380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:33.588042Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.588049Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.588052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.588081Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4335 TClient is connected to server localhost:4335 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.770199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.773109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:21:34.391260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163328004727:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.391281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.428524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.444061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163328004812:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.444128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163328004812:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.444206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163328004812:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.444228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163328004812:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.444250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163328004812:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.444270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163328004812:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.444297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163328004812:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.444321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163328004812:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.444352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163328004812:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.444372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163328004812:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.444393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163328004812:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.444419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888163328004812:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.445647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:34.445687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:34.445703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:34.445709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:34.445730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:34.445736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:34.445747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:34.445752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:34.445763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:34.445768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:34.445774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:34.445779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:34.445801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:34.445810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:34.445831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:34.445838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:34.445852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:34.445857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecated ... 2TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=24288;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=24288;columns=3; >> KqpDecimalColumnShard::TestSimpleQueries [GOOD] >> KqpDecimalColumnShard::TestOrderByDecimal >> KqpOlapAggregations::Aggregation_Sum_NullGroupBy |57.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |57.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |57.8%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleRangeOlap [GOOD] Test command err: Trying to start YDB, gRPC: 12310, MsgBus: 28406 2025-05-29T15:21:33.631046Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888156026211594:2214];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026e1/r3tmp/tmpH0jjrz/pdisk_1.dat 2025-05-29T15:21:33.739223Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:33.842829Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: 
Notification cookie mismatch for subscription [1:7509888156026211402:2079] 1748532093612552 != 1748532093612555 2025-05-29T15:21:33.847886Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.848631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.848645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.855021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12310, node 1 2025-05-29T15:21:33.882267Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.882279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.882281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.882315Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28406 TClient is connected to server localhost:28406 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.056788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:21:34.060123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.068181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.101314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160321179398:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.101463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160321179398:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.111931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160321179398:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.111967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160321179398:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.111986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160321179398:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.112007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160321179398:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.112022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160321179398:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.112040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160321179398:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.112060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160321179398:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.112092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160321179398:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.112120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888160321179398:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.112138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160321179398:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.116285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160321179399:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.116300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160321179399:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.116337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160321179399:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.116355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160321179399:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.116371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160321179399:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.116393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160321179399:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.116416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160321179399:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.116433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160321179399:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.116451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160321179399:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.116468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160321179399:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.116484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888160321179399:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.116513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160321179399:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.141150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160321179400:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.141173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160321179400:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.141227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160321179400:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.141247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160321179400:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.141268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160321179400:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fl ... 
_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:35.372320Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:35.372326Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:35.372340Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:35.372345Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:35.372352Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:35.372359Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:35.372364Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:35.372433Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:35.372437Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:35.372557Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:35.372564Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:35.372577Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:35.372583Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:35.372602Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:35.372608Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:35.372620Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:35.372625Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:35.372635Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:35.372640Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:35.372647Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:35.372652Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:35.372677Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:35.372683Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:35.372704Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:35.372710Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:35.372724Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:35.372728Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:35.372736Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:35.372743Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:35.372750Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:35.372814Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:35.372818Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:35.394396Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.394493Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.394573Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.394658Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.403771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-05-29T15:21:35.409517Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:35.410266Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:35.410834Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2568;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=2568;columns=5; 2025-05-29T15:21:35.608311Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888165760151510:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:35.608358Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:35.608495Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888165760151545:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:35.609381Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:21:35.612193Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-05-29T15:21:35.612270Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888165760151547:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:21:35.674651Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888165760151598:2474] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:35.730838Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532095667, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleQueryOlapMeta [GOOD] Test command err: Trying to start YDB, gRPC: 10944, MsgBus: 5513 2025-05-29T15:21:34.015508Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888161655247642:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:34.015734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026b8/r3tmp/tmp5uTkYN/pdisk_1.dat 2025-05-29T15:21:34.171072Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10944, node 1 2025-05-29T15:21:34.199029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:34.199040Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:34.199041Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:34.199082Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5513 TClient is connected to server localhost:5513 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:21:34.343120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:34.343149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:34.344469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.375655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.384483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.392059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.420049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161655248150:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.420135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161655248150:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.420187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161655248150:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.420208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161655248150:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.420230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161655248150:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-05-29T15:21:34.420257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161655248150:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.420275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161655248150:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.420298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161655248150:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.420318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161655248150:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.420337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161655248150:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.420357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161655248150:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.420380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161655248150:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.429521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161655248156:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.429553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161655248156:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.429638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161655248156:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.429656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161655248156:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.429680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161655248156:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.429699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888161655248156:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.429716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161655248156:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.429734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161655248156:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.429753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161655248156:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.429769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161655248156:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.429793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161655248156:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.429810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161655248156:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.434670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161655248157:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.434702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161655248157:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.434817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161655248157:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.434841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161655248157:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.434861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161655248157:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.434885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:750988816165 ... 
abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:35.251781Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:35.251794Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:35.251799Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:35.251806Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:35.251811Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:35.251817Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:35.251872Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:35.251875Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:35.251922Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:35.251929Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:35.251939Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:35.251944Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:35.251961Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:35.251965Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:35.251975Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:35.251979Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:35.251987Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:35.251991Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:35.251997Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:35.252000Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:35.252018Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:35.252022Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:35.252040Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:35.252044Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:35.252056Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:35.252060Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:35.252066Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:35.252071Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:35.252075Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:35.252123Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:35.252131Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:35.277858Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.277950Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.278010Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.278079Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.280237Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-05-29T15:21:35.294440Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:35.295452Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:35.296298Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2568;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=2568;columns=5; 2025-05-29T15:21:35.500070Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888165449553376:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:35.500194Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:35.500319Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888165449553388:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:35.501204Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:21:35.504640Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-05-29T15:21:35.504682Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888165449553390:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:21:35.599541Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888165449553441:2480] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:35.648352Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532095555, txId: 18446744073709551615] shutting down 2025-05-29T15:21:35.712002Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532095674, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_UsesScanOnJoinWithDataShardTable [GOOD] Test command err: Trying to start YDB, gRPC: 25923, MsgBus: 21487 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026ee/r3tmp/tmpkWgtFM/pdisk_1.dat 2025-05-29T15:21:33.723028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:21:33.778875Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888156525116737:2079] 1748532093617874 != 1748532093617877 2025-05-29T15:21:33.779012Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25923, node 1 2025-05-29T15:21:33.811447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.811467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.819051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:33.819285Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.819289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.819292Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.819322Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21487 TClient is connected to server localhost:21487 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.975176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.995121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.004045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.027181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160820084738:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.027281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160820084738:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.027338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160820084738:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.027371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160820084738:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.027391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160820084738:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.027412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160820084738:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.027435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888160820084738:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.027459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160820084738:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.027485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160820084738:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.027504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160820084738:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.027525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160820084738:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.027551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160820084738:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.037552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160820084739:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.037589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160820084739:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.037631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160820084739:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.037650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160820084739:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.037670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160820084739:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.037690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160820084739:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.037707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;self_id=[1:7509888160820084739:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.037725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160820084739:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.037744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160820084739:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.037766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160820084739:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.037783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160820084739:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.037802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160820084739:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.049232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160820084740:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.049273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160820084740:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.049323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160820084740:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.049348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160820084740:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.049369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160820084740:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.049389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160820084740:231 ... 
ablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:35.278549Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:35.278555Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:35.278560Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:35.278610Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:35.278615Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:35.278879Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:35.278885Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:35.278896Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:35.278900Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:35.278919Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:35.278923Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:35.278934Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:35.278939Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:35.278948Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:35.278953Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2025-05-29T15:21:35.278960Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:35.278964Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:35.278987Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:35.278992Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:35.279009Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:35.279015Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:35.279027Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:35.279032Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:35.279039Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:35.279044Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:35.279049Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:35.279106Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:35.279110Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:35.316597Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:35.316874Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:35.316951Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: 
tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:35.317026Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:35.329121Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 2025-05-29T15:21:35.334024Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:21:35.335149Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:21:35.336051Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2568;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=2568;columns=5; 2025-05-29T15:21:35.453903Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888164683477208:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:35.453933Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:35.466850Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888164683477253:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:35.466879Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:35.467012Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888164683477258:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:35.467847Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480
2025-05-29T15:21:35.470555Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480
2025-05-29T15:21:35.470626Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888164683477260:2401], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-05-29T15:21:35.565989Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888164683477311:2597] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:35.704962Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532095520, txId: 18446744073709551615] shutting down >> KqpOlapSysView::StatsSysViewBytesDictActualization |57.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |57.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |57.8%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_ScanQuery [GOOD] Test command err: Trying to start YDB, gRPC: 7437, MsgBus: 25842 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026bc/r3tmp/tmpxWryXz/pdisk_1.dat 2025-05-29T15:21:33.786828Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:21:33.840976Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.841861Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888157621306278:2079] 1748532093644588 != 1748532093644591 2025-05-29T15:21:33.861215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.861237Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.862608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7437, node 1 2025-05-29T15:21:33.878896Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.878907Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.878910Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.878946Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25842 TClient is connected to server localhost:25842 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.021810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.027326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.040097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.071494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161916274277:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.071552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161916274277:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.071604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161916274277:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.071625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161916274277:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.071644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161916274277:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.071663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161916274277:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.071679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888161916274277:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.071703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161916274277:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.071721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161916274277:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.071740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161916274277:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.071757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161916274277:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.071775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888161916274277:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.088854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161916274278:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.088897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161916274278:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.088971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161916274278:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.089002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161916274278:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.089022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161916274278:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.089042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161916274278:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.089059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;self_id=[1:7509888161916274278:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.089077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161916274278:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.089096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161916274278:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.089113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161916274278:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.089129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161916274278:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.089153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161916274278:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.093778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161916274279:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.093804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161916274279:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.093846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161916274279:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.093866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161916274279:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.093886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161916274279:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.093905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161916274279:2316] ... 
id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:35.493796Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:35.493803Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:35.493810Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:35.493814Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:35.493883Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:35.493887Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:35.494014Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:35.494027Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:35.494038Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:35.494043Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:35.494063Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:35.494068Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:35.494079Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:35.494084Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:35.494094Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:35.494098Z 
node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:35.494106Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:35.494112Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:35.494139Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:35.494144Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:35.494163Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:35.494167Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:35.494180Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:35.494185Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:35.494192Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:35.494199Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:35.494204Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:35.494259Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:35.494262Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:35.519386Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:35.519474Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: 
tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:35.519562Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:35.519647Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:35.531755Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 2025-05-29T15:21:35.547782Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:21:35.548613Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:21:35.549312Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; 2025-05-29T15:21:35.696689Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888165443048960:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:35.696720Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:35.704102Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888165443048985:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:35.704131Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:35.704234Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888165443048990:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:35.705032Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480
2025-05-29T15:21:35.707740Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480
2025-05-29T15:21:35.707806Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888165443048992:2396], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-05-29T15:21:35.769960Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888165443049043:2590] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:35.826448Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532095758, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_GroupByNullMix [GOOD] Test command err: Trying to start YDB, gRPC: 23369, MsgBus: 22797 2025-05-29T15:21:33.709501Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888159235161520:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026c8/r3tmp/tmp0syVQ4/pdisk_1.dat 2025-05-29T15:21:33.763347Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:33.789038Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888159235161313:2079] 1748532093694816 != 1748532093694819 2025-05-29T15:21:33.791051Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23369, node 1 2025-05-29T15:21:33.819286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.819298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.819299Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.819327Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:33.862278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.862308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.862606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22797 TClient is connected to server localhost:22797 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.955575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.958368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.964114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:33.986732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159235162015:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.994943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159235162015:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.995022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159235162015:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.995043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159235162015:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.995062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159235162015:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.995083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159235162015:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.995101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888159235162015:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.995123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159235162015:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.995145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159235162015:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.995165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159235162015:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.995184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159235162015:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.995204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159235162015:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.019792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159235162016:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.019818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159235162016:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.019882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159235162016:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.019904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159235162016:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.019924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159235162016:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.019945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159235162016:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.019963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888159235162016:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.019982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159235162016:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.020003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159235162016:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.020021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159235162016:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.020041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159235162016:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.020062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159235162016:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.024847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888159235162019:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.030795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888159235162019:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.030897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888159235162019:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.030931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888159235162019:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.030966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888159235162019:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fl ... 
TxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:34.063083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:34.063089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:34.063109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:34.063113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:34.063126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:34.063131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.063138Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:34.063144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:34.063223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:34.063337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:34.063342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:34.093986Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.094944Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.095851Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.096664Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: 
tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SUM(id), SUM(level) FROM `/Root/tableWithNulls` WHERE id >= 5 GROUP BY level ORDER BY level; 2025-05-29T15:21:34.524213Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163530129569:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:34.524244Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:34.524398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163530129605:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:34.525199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-05-29T15:21:34.535171Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888163530129607:2419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:34.586438Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888163530129658:2484] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:35.781233Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532094603, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SUM(id), SUM(level) FROM `/Root/tableWithNulls` WHERE id >= 5 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [5, +∞)"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.level","Name":"Sort"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Sort-Aggregate"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [5, +∞)"],"Name":"TableRangeScan","Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle (KeyColumns: [\"level\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 
(TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1259) '('"_id" '"dcde366a-85630138-e16a1e1b-2ba435eb") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) (Int32 '1)) '((Nothing $2) (Int32 '0)))))))))))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Int64)) (let $11 (OptionalType $10)) (let $12 '('"level" $2)) (let $13 (StructType '('_yql_agg_0 $10) '('_yql_agg_1 $11) $12)) (let $14 '('('"_logical_id" '1318) '('"_id" '"2dd316d1-ffed9270-faa41ccf-edbd6f86") '('"_wide_channels" $13))) (let $15 (DqPhyStage '() (lambda '() (block '( (let $27 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $28 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $27 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (block '( (let $31 '('_yql_agg_0 'sum '"id")) (let $32 '('_yql_agg_1 'sum '"level")) (return (TKqpOlapAgg $30 '($31 $32) '('"level"))) ))))) (return (FromFlow $29)) ))) $14)) (let $16 (DqCnHashShuffle (TDqOutput $15 '0) '('2))) (let $17 (StructType '('"column1" $10) '('"column2" $11) $12)) (let $18 '('('"_logical_id" '2158) '('"_id" '"94b17790-1c47be69-9efdd906-92a6c86b") '('"_wide_channels" $17))) (let $19 (DqPhyStage '($16) (lambda '($33) (block '( (let $34 (lambda '($41 $42 $43 $44) $42 $43)) (let $35 (lambda '($45 $46 $47 $48 $49 $50) (AggrAdd $46 $49) (AggrAdd $47 $50))) (let $36 (lambda '($51 $52 $53) $52 $53 $51)) (let $37 (WideCombiner (ToFlow $33) '"" (lambda '($38 $39 $40) $40) $34 $35 $36)) (return (FromFlow (WideSort $37 '('('2 (Bool 'true)))))) ))) $18)) (let $20 (DqCnMerge (TDqOutput $19 '0) '('('2 '"Asc")))) (let $21 (DqPhyStage '($20) (lambda '($54) (FromFlow (NarrowMap (ToFlow $54) (lambda '($55 $56 $57) (AsStruct '('"column1" $55) '('"column2" $56) '('"level" $57)))))) '('('"_logical_id" '2170) '('"_id" '"5f6f5e3d-443d600d-a0a1ea34-217a5466")))) (let $22 '($15 $19 $21)) (let $23 '('"level" '"column1" '"column2")) (let $24 (DqCnResult (TDqOutput $21 '0) $23)) (let $25 (KqpTxResultBinding $9 '0 '0)) (let $26 (KqpPhysicalTx $22 '($24) '('($7 $25)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $26) '((KqpTxResultBinding (ListType $17) '1 '0)) '('('"type" '"scan_query")))) ) >> TColumnShardTestReadWrite::WriteReadStandalone >> KqpOlap::PKDescScan >> KqpOlapCompression::DefaultCompressionViaCSConfig >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes >> KqpOlapAggregations::Aggregation_Count_Null >> KqpOlapJson::DuplicationCompactionVariants >> KqpOlapJson::BloomCategoryIndexesVariants >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 >> KqpOlapJson::EmptyVariants [FAIL] >> KqpOlapDictionary::EmptyStringVariants [FAIL] >> TColumnShardTestReadWrite::WriteOverload+InStore+WithWritePortionsOnInsert >> KqpOlapLocks::DeleteAbsentMultipleShards+Reboot [FAIL] >> KqpOlapJson::RestoreJsonArrayVariants [FAIL] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 >> KqpOlapAggregations::AggregationCountGroupByPushdown [GOOD] >> 
TColumnShardTestReadWrite::CompactionInGranule_PKInt64 >> KqpOlap::OlapUpsertImmediate [FAIL] >> KqpOlap::OlapUpsert >> KqpOlap::ScanQueryOltpAndOlap [FAIL] >> KqpOlap::SelectLimit1ManyShards >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot >> KqpOlap::SingleShardRead [FAIL] >> KqpOlap::SimpleRequestHasProjections >> Normalizers::CleanEmptyPortionsNormalizer >> KqpOlap::PredicatePushdown_Datetime_QS [FAIL] >> KqpOlap::PredicatePushdownWithParametersILike >> TColumnShardTestReadWrite::RebootWriteRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::AggregationCountGroupByPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 15453, MsgBus: 21215 2025-05-29T15:21:33.566174Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888159059008676:2213];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.568697Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026ed/r3tmp/tmpUKkWVy/pdisk_1.dat 2025-05-29T15:21:33.654194Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15453, node 1 2025-05-29T15:21:33.671321Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.671332Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.671334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.671365Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:33.674980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.675004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.677924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21215 TClient is connected to server localhost:21215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:21:33.885589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.888338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.902399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:33.933611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159059009178:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.938842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159059009178:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.938951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159059009178:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.938986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159059009178:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.939017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159059009178:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.939046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159059009178:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.939074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159059009178:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.939106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159059009178:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.939135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159059009178:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.939163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888159059009178:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.939187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159059009178:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.939215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888159059009178:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.952833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159059009179:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.952861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159059009179:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.952911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159059009179:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.952932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159059009179:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.952954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159059009179:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.952974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159059009179:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.952993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159059009179:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.953012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159059009179:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.953032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159059009179:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.953049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888159059009179:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.953068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159059009179:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.953087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888159059009179:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.961427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888159059009198:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.961448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888159059009198:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.961489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888159059009198:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.961509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888159059009198:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.961536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888159059009198:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.961556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:750988815 ... 
s=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:35.727280Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:35.727288Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:35.727301Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:35.727309Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:35.727315Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:35.727325Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:35.727329Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:35.727395Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:35.727403Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:35.754195Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.754279Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.754356Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.754431Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.756500Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-05-29T15:21:35.766167Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: 
tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:35.767041Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:35.767882Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=12930912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=12930912;columns=5; 2025-05-29T15:21:36.012021Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888171745234768:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:36.012048Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:36.012225Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888171745234780:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:36.013094Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:21:36.015362Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888171745234782:2407], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:21:36.115562Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888171745234833:2512] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:37.018485Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532096066, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/olapStore\/olapTable","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["level"],"SsaProgram":{"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":4}]},"Column":{"Id":6}}],"KeyColumns":[{"Id":4}]}},{"Projection":{"Columns":[{"Id":6},{"Id":4}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.level","Name":"Sort"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Sort-Aggregate"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"columns":["level"],"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/olapStore\/olapTable","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["level"],"SsaProgram":{"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":4}]},"Column":{"Id":6}}],"KeyColumns":[{"Id":4}]}},{"Projection":{"Columns":[{"Id":6},{"Id":4}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"HashShuffle (KeyColumns: [\"level\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DataType 'Uint64)) (let $2 '('"level" (OptionalType (DataType 'Int32)))) (let $3 '('('"_logical_id" '599) '('"_id" '"2803bcdc-f2cccad7-fc1b6249-1624b89a") '('"_wide_channels" (StructType '('_yql_agg_0 $1) $2)))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $13 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $14 
'('"level")) (let $15 (KqpWideReadOlapTableRanges $13 (Void) $14 '() '() (lambda '($16) (TKqpOlapAgg $16 '('('_yql_agg_0 'count '"level")) $14)))) (return (FromFlow $15)) ))) $3)) (let $5 (DqCnHashShuffle (TDqOutput $4 '0) '('1))) (let $6 (StructType '('"column1" $1) $2)) (let $7 '('('"_logical_id" '1015) '('"_id" '"42aa25e6-8f705763-4b1a1e31-ae9c5c47") '('"_wide_channels" $6))) (let $8 (DqPhyStage '($5) (lambda '($17) (block '( (let $18 (lambda '($29 $30) $30 $29)) (let $19 (WideCombiner (ToFlow $17) '"" (lambda '($20 $21) $21) (lambda '($22 $23 $24) $23) (lambda '($25 $26 $27 $28) (AggrAdd $26 $28)) $18)) (return (FromFlow (WideSort $19 '('('1 (Bool 'true)))))) ))) $7)) (let $9 (DqCnMerge (TDqOutput $8 '0) '('('1 '"Asc")))) (let $10 (DqPhyStage '($9) (lambda '($31) (FromFlow (NarrowMap (ToFlow $31) (lambda '($32 $33) (AsStruct '('"column1" $32) '('"level" $33)))))) '('('"_logical_id" '1027) '('"_id" '"64677d9c-f1bc0230-2c216e25-6f963e67")))) (let $11 '($4 $8 $10)) (let $12 (DqCnResult (TDqOutput $10 '0) '('"level" '"column1"))) (return (KqpPhysicalQuery '((KqpPhysicalTx $11 '($12) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType $6) '0 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::Aggregation_Count_Null [GOOD] >> KqpOlapAggregations::Aggregation_Count_GroupByNullMix [GOOD] >> KqpOlap::PKDescScan [GOOD] >> KqpOlap::PredicateDoNotPushdown >> KqpDecimalColumnShard::TestOrderByDecimal [GOOD] >> KqpOlapAggregations::Aggregation_Sum_NullGroupBy [GOOD] >> KqpScanArrowFormat::AllTypesColumnsCellvec >> KqpScanArrowFormat::JoinWithParams >> KqpDecimalColumnShard::TestFilterCompare [GOOD] >> KqpOlapAggregations::Aggregation [GOOD] >> KqpOlapAggregations::AggregationAndFilterPushdownOnDiffCols >> KqpOlap::OlapUpsert [FAIL] >> KqpScanArrowInChanels::SingleKey >> KqpOlap::PredicatePushdown_Datetime_SQ [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_GroupByNullMix [GOOD] Test command err: Trying to start YDB, gRPC: 4325, MsgBus: 10492 2025-05-29T15:21:36.931506Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888172147941221:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:36.931576Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026a7/r3tmp/tmpI0mxTB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4325, node 1 2025-05-29T15:21:37.025665Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:37.026229Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888172147941048:2079] 1748532096928337 != 1748532096928340 2025-05-29T15:21:37.032514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:37.032560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:37.033665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:37.036895Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will 
use file: (empty maybe) 2025-05-29T15:21:37.036905Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:37.036906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:37.036944Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10492 TClient is connected to server localhost:10492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:37.109862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:37.112610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:37.152087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:37.170994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888176442909043:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:37.171069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888176442909043:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.171139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888176442909043:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.171190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888176442909043:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.171213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888176442909043:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:37.171237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888176442909043:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:37.171255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888176442909043:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:37.171276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888176442909043:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:37.171296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888176442909043:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:37.171316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888176442909043:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:37.171336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888176442909043:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.171382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888176442909043:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:37.174704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176442909042:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:37.174728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176442909042:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.174828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176442909042:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.174849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176442909042:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.174872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888176442909042:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:37.174893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176442909042:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:37.174909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176442909042:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:37.174938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176442909042:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:37.174965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176442909042:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:37.174983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176442909042:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:37.175002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176442909042:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.175020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176442909042:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:37.175617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:37.175633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:37.175646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:37.175651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:37.175677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:37.175681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switche ... 
zer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:37.183960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:37.183965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.183985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.183989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.184001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.184005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.184019Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:37.184030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:37.184034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:37.184085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:37.184088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:37.220864Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:37.221965Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:37.223061Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:37.223955Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(level) FROM `/Root/tableWithNulls` WHERE id > 5; 2025-05-29T15:21:37.352169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888176442909310:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.352207Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.352327Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888176442909337:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.353230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:37.356610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:21:37.356684Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888176442909339:2419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:37.444454Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888176442909390:2483] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:37.655084Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532097410, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(level) FROM `/Root/tableWithNulls` WHERE id > 5; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Final"},{"Inputs":[{"InternalOperatorId":2}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate-Limit-Aggregate"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 
'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (declare %kqp%tx_result_binding_1_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '750) '('"_id" '"62474ab-77b71411-8c11d67e-bd33481d") $3)) (let $5 (DqPhyStage '() (lambda '() (block '( (let $28 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) $28) '((Nothing $2) $28))))))))))) ))) $4)) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 (KqpPhysicalTx '($5) '($6) '() '('('"type" '"compute")))) (let $8 '"%kqp%tx_result_binding_0_0") (let $9 (TupleType $2 $1)) (let $10 (TupleType (ListType (TupleType $9 $9)))) (let $11 (DataType 'Uint64)) (let $12 '('('"_logical_id" '808) '('"_id" '"3c8772b2-f4b1d63d-1cba148e-52b56cfa") '('"_wide_channels" (StructType '('_yql_agg_0 $11))))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $29 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $30 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $29 %kqp%tx_result_binding_0_0 '('"level") '() $30 (lambda '($32) (TKqpOlapAgg $32 '('('_yql_agg_0 'count '"level")) '())))) (return (FromFlow $31)) ))) $12)) (let $14 (DqCnUnionAll (TDqOutput $13 '0))) (let $15 (DqPhyStage '($14) (lambda '($33) (block '( (let $34 (Bool 'false)) (let $35 (WideCondense1 (ToFlow $33) (lambda '($37) $37) (lambda '($38 $39) $34) (lambda '($40 $41) (AggrAdd $40 $41)))) (let $36 (Condense (NarrowMap (Take $35 (Uint64 '1)) (lambda '($42) (AsStruct '('Count0 $42)))) (Nothing (OptionalType (StructType '('Count0 $11)))) (lambda '($43 $44) $34) (lambda '($45 $46) (Just $45)))) (return (FromFlow (Map $36 (lambda '($47) (AsList (AsStruct '('"column0" (Coalesce (Member $47 'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '1363) '('"_id" '"6ead677b-723e159-86a0f195-ad6ef925")))) (let $16 (DqCnValue (TDqOutput $15 '0))) (let $17 (KqpTxResultBinding $10 '0 '0)) (let $18 '('('"type" '"scan"))) (let $19 (KqpPhysicalTx '($13 $15) '($16) '('($8 $17)) $18)) (let $20 '"%kqp%tx_result_binding_1_0") (let $21 (ListType (StructType '('"column0" $11)))) (let $22 '('('"_logical_id" '1466) '('"_id" '"f6550e7d-f8315f6-3ca98e5a-8c0853da") $3)) (let $23 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_1_0)) $22)) (let $24 (DqCnResult (TDqOutput $23 '0) '('"column0"))) (let $25 (KqpTxResultBinding $21 '1 '0)) (let $26 (KqpPhysicalTx '($23) '($24) '('($20 $25)) $18)) (let $27 '($7 $19 $26)) (return (KqpPhysicalQuery $27 '((KqpTxResultBinding $21 '"2" '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestOrderByDecimal [GOOD] Test command err: Trying to start YDB, gRPC: 23819, MsgBus: 13475 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026bb/r3tmp/tmpaoCmct/pdisk_1.dat 2025-05-29T15:21:33.898861Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:21:33.927788Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888157034861386:2079] 1748532093737607 != 1748532093737610 2025-05-29T15:21:33.929379Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23819, node 1 2025-05-29T15:21:33.954946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.954961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.954963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.955005Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:33.986970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.987004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.990891Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13475 TClient is connected to server localhost:13475 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.120239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.135171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:21:34.455493Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888161329829350:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.455522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.509939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.522481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161329829426:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.522576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161329829426:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.522633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161329829426:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.522651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161329829426:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.522672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161329829426:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.522690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161329829426:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.522712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161329829426:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.522894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161329829426:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.522924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161329829426:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.522945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161329829426:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.522963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161329829426:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.522982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888161329829426:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.525513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:34.525528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:34.525545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:34.525552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:34.525591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:34.525598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:34.525610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:34.525617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:34.525638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:34.525644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:34.525652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:34.525658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:34.525682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:34.525689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:34.525712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:34.525730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:34.525744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:34.525750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDepr ... COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:37.340119Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:37.340131Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:37.340135Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:37.340153Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:37.340157Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:37.340169Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:37.340177Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:37.340187Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:37.340191Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:37.340199Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:37.340203Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:37.340224Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:37.340231Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.340250Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.340255Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.340267Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.340271Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.340278Z node 4 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:37.340284Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:37.340288Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:37.340363Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:37.340367Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:37.386358Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int64TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=296;columns=3; 2025-05-29T15:21:37.423841Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509888176336235394:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.423889Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.424687Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509888176336235399:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.426045Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:37.433882Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:21:37.433995Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7509888176336235401:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:37.528490Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:7509888176336235452:2391] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:37.585305Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532097480, txId: 18446744073709551615] shutting down 2025-05-29T15:21:37.588726Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509888173311820877:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.588743Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.588855Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509888173311820882:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.589673Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:37.598889Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:21:37.598942Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7509888173311820884:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:37.672749Z node 4 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [4:7509888173311820935:2388] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:37.736800Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532097641, txId: 18446744073709551615] shutting down >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_NullGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 6277, MsgBus: 30339 2025-05-29T15:21:36.529716Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888170167483587:2279];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:36.529852Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026ad/r3tmp/tmp2ufeAI/pdisk_1.dat 2025-05-29T15:21:36.564049Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:36.566953Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888170167483332:2079] 1748532096474835 != 1748532096474838 TServer::EnableGrpc on GrpcPort 6277, node 1 2025-05-29T15:21:36.583654Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:36.583671Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:36.583673Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:36.583726Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30339 TClient is connected to server localhost:30339 2025-05-29T15:21:36.633579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:36.633605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:36.634439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:36.655657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:36.670570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:36.679887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:36.699183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888170167484034:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:36.699256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888170167484034:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:36.699317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888170167484034:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:36.699352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888170167484034:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:36.699375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888170167484034:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:36.699396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888170167484034:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:36.699415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888170167484034:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:36.699436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888170167484034:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:36.699462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888170167484034:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:36.699485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888170167484034:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:36.699505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888170167484034:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:36.699525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888170167484034:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:36.709302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170167484035:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:36.709327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170167484035:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:36.709397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170167484035:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:36.709418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170167484035:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:36.709442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170167484035:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:36.709463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170167484035:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:36.709480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888170167484035:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:36.709499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170167484035:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:36.709519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170167484035:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:36.709536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170167484035:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:36.709555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170167484035:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:36.709587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170167484035:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:36.714088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888170167484036:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:36.714421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888170167484036:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:36.714473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888170167484036:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:36.714494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888170167484036:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:36.714526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888170167484036:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;flin ... 
CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:36.723803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:36.723823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:36.723828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:36.723847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:36.723851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:36.723862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:36.723867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:36.723874Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:36.723879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:36.723883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:36.723941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:36.723945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:36.745494Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:36.746692Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:36.749000Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:36.750217Z node 1 :TX_COLUMNSHARD_TX 
WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SUM(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; 2025-05-29T15:21:36.907435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888170167484304:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:36.907459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888170167484292:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:36.907487Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:36.909786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:36.912403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:21:36.912502Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888170167484330:2419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:36.971475Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888170167484381:2483] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:37.901418Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532097004, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SUM(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.id","Name":"Sort"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Sort-Aggregate"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle (KeyColumns: [\"id\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 
(DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1035) '('"_id" '"fbb28e4-d0080b8a-f8ccb2cb-1f256711") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $24 (Int32 '1)) (let $25 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"6")) $24) $25))) (RangeCreate (AsList '($25 '((Just (Int32 '"7")) $24)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (OptionalType (DataType 'Int64))) (let $11 '('"id" $1)) (let $12 '('('"_logical_id" '1094) '('"_id" '"6325122e-ae8de73a-db2e5502-39678808") '('"_wide_channels" (StructType '('_yql_agg_0 $10) $11)))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id")) (let $28 '('('"UsedKeyColumns" $27) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (TKqpOlapAgg $30 '('('_yql_agg_0 'sum '"level")) $27)))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('1))) (let $15 (StructType '('"column1" $10) $11)) (let $16 '('('"_logical_id" '1661) '('"_id" '"fcb54fa9-7bcf862c-493560d9-2779f3f9") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($31) (block '( (let $32 (lambda '($43 $44) $44 $43)) (let $33 (WideCombiner (ToFlow $31) '"" (lambda '($34 $35) $35) (lambda '($36 $37 $38) $37) (lambda '($39 $40 $41 $42) (AggrAdd $40 $42)) $32)) (return (FromFlow (WideSort $33 '('('1 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('1 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($45) (FromFlow (NarrowMap (ToFlow $45) (lambda '($46 $47) (AsStruct '('"column1" $46) '('"id" $47)))))) '('('"_logical_id" '1673) '('"_id" '"eb4509a9-253a044d-9ec1b414-38b02c14")))) (let $20 '($13 $17 $19)) (let $21 (DqCnResult (TDqOutput $19 '0) '('"id" '"column1"))) (let $22 (KqpTxResultBinding $9 '0 '0)) (let $23 (KqpPhysicalTx $20 '($21) '('($7 $22)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $23) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlap::SimpleRequestHasProjections [GOOD] >> KqpOlap::PredicatePushdownWithParametersILike [GOOD] >> KqpOlap::PredicateDoNotPushdown [GOOD] >> KqpScanArrowFormat::AggregateByColumn ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestFilterCompare [GOOD] Test command err: Trying to start YDB, gRPC: 27145, MsgBus: 22380 2025-05-29T15:21:33.986552Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888158407517974:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.986670Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026b3/r3tmp/tmpHa4Luk/pdisk_1.dat 2025-05-29T15:21:34.192633Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription 
[1:7509888158407517800:2079] 1748532093981765 != 1748532093981768 2025-05-29T15:21:34.198791Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27145, node 1 2025-05-29T15:21:34.225224Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:34.225236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:34.225239Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:34.225278Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:34.243193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:34.243219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:34.243951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22380 TClient is connected to server localhost:22380 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.439766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.443718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:21:34.547892Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888162702485761:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.547924Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.604968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.624100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162702485838:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.624191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162702485838:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.624273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162702485838:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.624296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162702485838:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.624322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162702485838:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.624345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162702485838:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.624368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162702485838:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.624397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162702485838:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.624420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162702485838:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.624441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162702485838:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.624462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162702485838:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.624489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888162702485838:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.625680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:34.625690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:34.625704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:34.625710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:34.625742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:34.625748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:34.625760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:34.625766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:34.625778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:34.625783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:34.625791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:34.625796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:34.625820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:34.625826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:34.625854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:34.625860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:34.625875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:34.6 ... leanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:37.241983Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:37.241991Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:37.242004Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:37.242011Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:37.242020Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:37.242027Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:37.242056Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:37.242065Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.242089Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.242098Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.242132Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.242138Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.242149Z 
node 4 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:37.242156Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:37.242162Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:37.242326Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:37.242331Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:37.285968Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=296;columns=3; 2025-05-29T15:21:37.316873Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509888173210485171:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.316913Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.317026Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509888173210485176:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.317986Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:37.320740Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:21:37.320777Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7509888173210485178:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:37.398217Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:7509888173210485229:2391] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:37.453474Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532097375, txId: 18446744073709551615] shutting down 2025-05-29T15:21:37.521203Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532097480, txId: 18446744073709551615] shutting down 2025-05-29T15:21:37.596163Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532097550, txId: 18446744073709551615] shutting down 2025-05-29T15:21:37.678734Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532097620, txId: 18446744073709551615] shutting down 2025-05-29T15:21:37.682926Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509888174429233249:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.682956Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.683137Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509888174429233254:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.684033Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:37.687152Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:21:37.687269Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7509888174429233256:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:37.741103Z node 4 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [4:7509888174429233307:2391] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:37.827229Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532097739, txId: 18446744073709551615] shutting down 2025-05-29T15:21:37.893173Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532097844, txId: 18446744073709551615] shutting down 2025-05-29T15:21:37.966203Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532097914, txId: 18446744073709551615] shutting down 2025-05-29T15:21:38.037782Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532098000, txId: 18446744073709551615] shutting down >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] >> TColumnShardTestReadWrite::ReadStale ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::EmptyVariants [FAIL] Test command err: Trying to start YDB, gRPC: 11760, MsgBus: 22500 2025-05-29T15:21:33.642858Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888159470909773:2207];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.813097Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002611/r3tmp/tmp5ocMiA/pdisk_1.dat 2025-05-29T15:21:33.992682Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888159470909596:2079] 1748532093618813 != 1748532093618816 2025-05-29T15:21:34.020182Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:34.020354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:34.020366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:34.034195Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11760, node 1 2025-05-29T15:21:34.058895Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:34.058906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:34.058908Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:34.058947Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration TClient is connected to server localhost:22500 TClient is connected to server localhost:22500 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.208720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.211470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:21:34.563453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163765877545:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.563482Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.673117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.681523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163765877618:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.681792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163765877618:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.681849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163765877618:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.681867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163765877618:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.681881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163765877618:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.681895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163765877618:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.681919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163765877618:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.681933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163765877618:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.681953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163765877618:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.682013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163765877618:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.682028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163765877618:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.682046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888163765877618:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.686971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:34.686993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:34.687010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:34.687018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:34.687043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:34.687049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:34.687063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:34.687071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:34.687084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:34.687090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:34.687099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:34.687106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:34.687134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:34.687142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:34.687168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:34.687175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:34.687190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:34.687196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.687205Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:34.687212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:34.687218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:34.687387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:34.687392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:34.695554Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-05-29T15:21:34.708678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.709083Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163765877745:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.709220Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.715744Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710659; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0`) 2025-05-29T15:21:34.738636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.738842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163765877775:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.738864Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.746028Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710660; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES (1u), (2u), (3u), (4u) 2025-05-29T15:21:34.756354Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163765877806:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.756375Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.756485Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163765877811:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.757275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.759509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-05-29T15:21:34.759577Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888163765877813:2412], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-05-29T15:21:34.819688Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888163765877873:2455] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:34.835055Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888163765877882:2417], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:34.835666Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzZkY2RmNzMtNjViOWY0NmMtNGNmYjA5OTEtMmNhZWRlMg==, ActorId: [1:7509888163765877804:2407], ActorState: ExecuteState, TraceId: 01jwea4ss3c85g3xwcmm0bwms9, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:21:35.690147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163765877618:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:21:35.691041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163765877618:2332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:21:36.691979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163765877618:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:21:36.692629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163765877618:2332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; assertion failed at ydb/core/kqp/ut/olap/combinatory/execute.h:41, virtual TConclusionStatus NKikimr::NKqp::TDataCommand::DoExecute(TKikimrRunner &): (prepareResult.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13D433CB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13EFB238 2. /-S/ydb/core/kqp/ut/olap/combinatory/execute.h:41: DoExecute @ 0x265B8EA9 3. /-S/ydb/core/kqp/ut/olap/combinatory/abstract.h:75: Execute @ 0x265B0763 4. /tmp//-S/ydb/core/kqp/ut/olap/combinatory/executor.cpp:22: Execute @ 0x265B0763 5. /-S/ydb/core/kqp/ut/olap/combinatory/variator.h:27: Execute @ 0x13B8A7E7 6. /tmp//-S/ydb/core/kqp/ut/olap/json_ut.cpp:60: Execute_ @ 0x13B8A7E7 7. /tmp//-S/ydb/core/kqp/ut/olap/json_ut.cpp:27: operator() @ 0x13B90F46 8. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13EFD0ED 9. /tmp//-S/ydb/core/kqp/ut/olap/json_ut.cpp:27: Execute @ 0x13B9090C 10. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13EFD862 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F0F40C 12. ??:0: ?? @ 0x7F61B4CADD8F 13. ??:0: ?? @ 0x7F61B4CADE3F 14. ??:0: ?? @ 0x12AB1028 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns-isStream Test command err: Trying to start YDB, gRPC: 18899, MsgBus: 24424 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026b9/r3tmp/tmp80tbl0/pdisk_1.dat 2025-05-29T15:21:33.799137Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:21:33.881158Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888156836964723:2079] 1748532093646048 != 1748532093646051 2025-05-29T15:21:33.894908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.894929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.902637Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.903223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18899, node 1 2025-05-29T15:21:33.942891Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.942902Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.942903Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.942936Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24424 TClient is connected to server localhost:24424 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.135677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.138332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.151704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.192839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.257550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.284088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.387358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888161131933662:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.387381Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.508081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.530551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.570825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.609755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.636602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.668365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.692201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.727031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888161131934316:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.727061Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.727154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888161131934321:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.728039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.730793Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888161131934323:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:21:34.813400Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888161131934383:3401] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:34.947200Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888161131934392:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:34.947533Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDgzYWE0ZjEtNGI3ZDRmOTEtNzdjNmM5NmItMjM1ZWQxZTY=, ActorId: [1:7509888161131933635:2400], ActorState: ExecuteState, TraceId: 01jwea4sr6b0kra475tnw2y38p, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:21:34.951343Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13D61B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13D58B96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13EFB326 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x2648BA32 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2648B332 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x264AA42C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x264AA42C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x264AA42C 8. /-S/util/thread/pool.h:71: Process @ 0x264AA42C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13D69519 10. /-S/util/thread/factory.h:15: Execute @ 0x13D67F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13D67F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13D6337C 13. ??:0: ?? @ 0x7FF624E60AC2 14. ??:0: ?? @ 0x7FF624EF284F >> KqpOlap::BlockChannelForce >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] Test command err: 2025-05-29T15:21:36.981942Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:21:36.986015Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:21:36.986094Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:21:36.986921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:36.986984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:36.987023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:36.987045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:36.987068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:36.987095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:36.987113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:36.987133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:36.987152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:36.987172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:36.987194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:36.987222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:36.995618Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:21:36.995684Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:21:36.995698Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:21:36.995736Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:36.995781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:36.995796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:36.995801Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:21:36.995832Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:21:36.995842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:36.995850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:36.995855Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:21:36.995875Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:36.995885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:36.995893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:36.995898Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:21:36.995910Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:21:36.995917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:36.995926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:36.995931Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:21:36.995945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:36.995953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:36.995958Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:21:36.995968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:36.995977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:36.995983Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:21:36.996009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:36.996018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:36.996023Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:21:36.996045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:36.996054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:36.996059Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:21:36.996072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:36.996079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:36.996084Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:21:36.996093Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:36.996102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:36.996110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:36.996116Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:21:36.996184Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-05-29T15:21:36.996194Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-05-29T15:21:36.996203Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute; ... 
quest_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:39.142143Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-05-29T15:21:39.142156Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:230;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-05-29T15:21:39.142170Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:250;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-05-29T15:21:39.142221Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:394:2406];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-05-29T15:21:39.142239Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:39.142254Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:39.142265Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:39.142298Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:21:39.142311Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:39.142325Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:39.142331Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:410: Scan [1:395:2407] finished for tablet 9437184 2025-05-29T15:21:39.142425Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:394:2406];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.002}],"full":{"a":1748532099140000,"name":"_full_task","f":1748532099140000,"d_finished":0,"c":0,"l":1748532099142343,"d":2343},"events":[{"name":"bootstrap","f":1748532099140053,"d_finished":455,"c":1,"l":1748532099140508,"d":455},{"a":1748532099142295,"name":"ack","f":1748532099142103,"d_finished":165,"c":1,"l":1748532099142268,"d":213},{"a":1748532099142293,"name":"processing","f":1748532099140699,"d_finished":991,"c":10,"l":1748532099142269,"d":1041},{"name":"ProduceResults","f":1748532099140311,"d_finished":450,"c":13,"l":1748532099142328,"d":450},{"a":1748532099142329,"name":"Finish","f":1748532099142329,"d_finished":0,"c":0,"l":1748532099142343,"d":14},{"name":"task_result","f":1748532099140703,"d_finished":802,"c":9,"l":1748532099142059,"d":802}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:39.142436Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:394:2406];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-05-29T15:21:39.142481Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:394:2406];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.002}],"full":{"a":1748532099140000,"name":"_full_task","f":1748532099140000,"d_finished":0,"c":0,"l":1748532099142442,"d":2442},"events":[{"name":"bootstrap","f":1748532099140053,"d_finished":455,"c":1,"l":1748532099140508,"d":455},{"a":1748532099142295,"name":"ack","f":1748532099142103,"d_finished":165,"c":1,"l":1748532099142268,"d":312},{"a":1748532099142293,"name":"processing","f":1748532099140699,"d_finished":991,"c":10,"l":1748532099142269,"d":1140},{"name":"ProduceResults","f":1748532099140311,"d_finished":450,"c":13,"l":1748532099142328,"d":450},{"a":1748532099142329,"name":"Finish","f":1748532099142329,"d_finished":0,"c":0,"l":1748532099142442,"d":113},{"name":"task_result","f":1748532099140703,"d_finished":802,"c":9,"l":1748532099142059,"d":802}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:39.142498Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-05-29T15:21:39.139843Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2025-05-29T15:21:39.142505Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:190;event=scan_aborted;reason=unexpected on destructor; 2025-05-29T15:21:39.142560Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapDictionary::EmptyStringVariants [FAIL] Test command err: Trying to start YDB, gRPC: 25690, MsgBus: 13409 2025-05-29T15:21:34.050234Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888156208508875:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026d9/r3tmp/tmp4D1wu4/pdisk_1.dat 2025-05-29T15:21:34.052544Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:34.094976Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888156208508675:2079] 1748532093979106 != 1748532093979109 2025-05-29T15:21:34.111412Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25690, node 1 2025-05-29T15:21:34.126889Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:34.126899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:34.126901Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:34.126937Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:34.155058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:34.155083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:34.163032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13409 TClient is connected to server localhost:13409 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.287583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:21:34.295215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 Utf8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:21:34.665009Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888160503476605:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.665060Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.705516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.720601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160503476705:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.720679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160503476705:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.720721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160503476705:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.720742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160503476705:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.720762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160503476705:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.720783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160503476705:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.720798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160503476705:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.720814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160503476705:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.720835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160503476705:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.720851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160503476705:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.720873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160503476705:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.720889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888160503476705:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.731206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:34.731222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:34.731244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:34.731249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:34.731269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:34.731273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:34.731283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:34.731290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:34.731299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:34.731304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:34.731310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:34.731315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:34.731333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:34.731340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:34.731369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:34.731375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:34.731387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:34.731391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.731397Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:34.731403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:34.731415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:34.731528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:34.731532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:34.774058Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-05-29T15:21:34.787014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888160503476831:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.787034Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.791153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.808453Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710659; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`DICTIONARY`) 2025-05-29T15:21:34.842008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.842427Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888160503476861:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.842516Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.844442Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710660; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col1, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`DICTIONARY`) 2025-05-29T15:21:34.866093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.866797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888160503476891:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.866824Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.872703Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710661; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES (1u) 2025-05-29T15:21:34.884214Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888160503476922:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.884237Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.884370Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888160503476927:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.885140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.887455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-05-29T15:21:34.887518Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888160503476929:2417], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-05-29T15:21:34.956069Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888160503476980:2477] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:34.984863Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888160503476989:2421], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:21:34.985457Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWVkYTNkNDEtMWQyOWI4ZDEtYjU1MWU4NTItOTg4ZjJhNGQ=, ActorId: [1:7509888160503476920:2412], ActorState: ExecuteState, TraceId: 01jwea4sx389bd8qw3emg7v03d, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/kqp/ut/olap/combinatory/execute.h:41, virtual TConclusionStatus NKikimr::NKqp::TDataCommand::DoExecute(TKikimrRunner &): (prepareResult.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13D433CB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13EFB238
2. /-S/ydb/core/kqp/ut/olap/combinatory/execute.h:41: DoExecute @ 0x265B8EA9
3. /-S/ydb/core/kqp/ut/olap/combinatory/abstract.h:75: Execute @ 0x265B0763
4. /tmp//-S/ydb/core/kqp/ut/olap/combinatory/executor.cpp:22: Execute @ 0x265B0763
5. /-S/ydb/core/kqp/ut/olap/combinatory/variator.h:27: Execute @ 0x13B064A7
6. /tmp//-S/ydb/core/kqp/ut/olap/dictionary_ut.cpp:107: Execute_ @ 0x13B064A7
7. /tmp//-S/ydb/core/kqp/ut/olap/dictionary_ut.cpp:27: operator() @ 0x13B08AE6
8. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13EFD0ED
9. /tmp//-S/ydb/core/kqp/ut/olap/dictionary_ut.cpp:27: Execute @ 0x13B084A4
10. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13EFD862
11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F0F40C
12. ??:0: ?? @ 0x7F8461C48D8F
13. ??:0: ?? @ 0x7F8461C48E3F
14. ??:0: ?? @ 0x12AB1028
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicateDoNotPushdown [GOOD]
Test command err: Trying to start YDB, gRPC: 23014, MsgBus: 8683 2025-05-29T15:21:36.908957Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888172422943618:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:36.909218Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026aa/r3tmp/tmpaQ6EhN/pdisk_1.dat 2025-05-29T15:21:36.980755Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23014, node 1 2025-05-29T15:21:37.002766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:37.002778Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:37.002780Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:37.002819Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:37.012909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:37.012931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:37.015200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8683 TClient is connected to server localhost:8683 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:37.093690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:37.109273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:37.156381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:37.173446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888176717911577:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:37.173515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888176717911577:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.173592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888176717911577:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.173614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888176717911577:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.173640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888176717911577:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:37.173660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888176717911577:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:37.173678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;self_id=[1:7509888176717911577:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:37.173698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888176717911577:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:37.173718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888176717911577:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:37.173741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888176717911577:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:37.173761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888176717911577:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.173781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888176717911577:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:37.182299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176717911575:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:37.182325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176717911575:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.182372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176717911575:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.182392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176717911575:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.182414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176717911575:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:37.182436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176717911575:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:37.182453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888176717911575:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:37.182472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176717911575:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:37.182492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176717911575:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:37.182510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176717911575:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:37.182528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176717911575:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.182546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176717911575:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:37.187847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888176717911576:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:37.187872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888176717911576:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.187916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888176717911576:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.187938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888176717911576:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.187963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888176717911576:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:37.187984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:750988817671 ... 
on=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:38.386075Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:38.386093Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:38.386098Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:38.386110Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:38.386115Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:38.386122Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:38.386127Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:38.386132Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:38.386187Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:38.386191Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:38.386282Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:38.386287Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:38.386297Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:38.386302Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:38.386318Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:38.386322Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:38.386333Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:38.386338Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:38.386347Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:38.386351Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:38.386357Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:38.386361Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:38.386380Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:38.386384Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:38.386402Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:38.386408Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:38.386427Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:38.386432Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:38.386438Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:38.386443Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:38.386447Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:38.386499Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:38.386502Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:38.410177Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; 2025-05-29T15:21:38.410369Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; 2025-05-29T15:21:38.410439Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; 2025-05-29T15:21:38.410511Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-05-29T15:21:38.415700Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:21:38.420985Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710659; 2025-05-29T15:21:38.421936Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710659; 2025-05-29T15:21:38.422899Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710659; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5800;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5800;columns=5; 2025-05-29T15:21:38.571539Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888178870321791:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:38.571559Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888178870321780:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:38.571593Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:38.572556Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-05-29T15:21:38.581112Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-05-29T15:21:38.581221Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888178870321817:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-05-29T15:21:38.647638Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888178870321868:2481] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreJsonArrayVariants [FAIL] Test command err: Trying to start YDB, gRPC: 3699, MsgBus: 24014 2025-05-29T15:21:33.616100Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888155462273301:2196];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.616189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026eb/r3tmp/tmpTjK9WO/pdisk_1.dat 2025-05-29T15:21:33.963465Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888155462273144:2079] 1748532093612950 != 1748532093612953 2025-05-29T15:21:34.010688Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:34.010860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:34.010946Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:34.027160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3699, node 1 2025-05-29T15:21:34.062899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:34.062912Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:34.062914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:34.062946Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24014 TClient is connected to server localhost:24014 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.274872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.283238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:21:34.563461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888159757241099:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.563484Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.599378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.615870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159757241175:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.615931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159757241175:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.615988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159757241175:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.616009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159757241175:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.616030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159757241175:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.616052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159757241175:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.616075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159757241175:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.616095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159757241175:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.616115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159757241175:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.616133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159757241175:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.616157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159757241175:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.616177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888159757241175:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.621634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:34.621647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:34.621662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:34.621668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:34.621690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:34.621696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:34.621708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:34.621714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:34.621726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:34.621732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:34.621740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:34.621745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:34.621770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:34.621777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:34.621800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:34.621805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:34.621818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:34.621824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.621832Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:34.621837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:34.621842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:34.621969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:34.621979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:34.663739Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-05-29T15:21:34.685310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888159757241310:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.685341Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.686398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.696548Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710659; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0`) 2025-05-29T15:21:34.724551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888159757241340:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.724584Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.727155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.733582Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710660; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('["a", {"v" : 4}, 1,2,3,4,5,6,7,8,9,10,11,12]')) 2025-05-29T15:21:34.749327Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888159757241371:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.749348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.749491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888159757241376:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.750413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.753389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-05-29T15:21:34.753469Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888159757241378:2413], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-05-29T15:21:34.831718Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888159757241429:2456] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:34.878044Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888159757241438:2417], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:34.878621Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzM1M2IwZmEtMzJmZjVlYTctZjU3NWI0ZmItNjIzODZiM2I=, ActorId: [1:7509888159757241369:2408], ActorState: ExecuteState, TraceId: 01jwea4srw5xg1jgtvmwsefr39, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/olap/combinatory/execute.h:41, virtual TConclusionStatus NKikimr::NKqp::TDataCommand::DoExecute(TKikimrRunner &): (prepareResult.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13D433CB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13EFB238 2. /-S/ydb/core/kqp/ut/olap/combinatory/execute.h:41: DoExecute @ 0x265B8EA9 3. /-S/ydb/core/kqp/ut/olap/combinatory/abstract.h:75: Execute @ 0x265B0763 4. /tmp//-S/ydb/core/kqp/ut/olap/combinatory/executor.cpp:22: Execute @ 0x265B0763 5. /-S/ydb/core/kqp/ut/olap/combinatory/variator.h:27: Execute @ 0x13B8C627 6. /tmp//-S/ydb/core/kqp/ut/olap/json_ut.cpp:277: Execute_ @ 0x13B8C627 7. /tmp//-S/ydb/core/kqp/ut/olap/json_ut.cpp:27: operator() @ 0x13B90F46 8. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13EFD0ED 9. /tmp//-S/ydb/core/kqp/ut/olap/json_ut.cpp:27: Execute @ 0x13B9090C 10. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13EFD862 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F0F40C 12. ??:0: ?? @ 0x7FF5AF68FD8F 13. ??:0: ?? @ 0x7FF5AF68FE3F 14. ??:0: ?? @ 0x12AB1028 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapLocks::DeleteAbsentMultipleShards+Reboot [FAIL] Test command err: Trying to start YDB, gRPC: 1341, MsgBus: 21554 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026e6/r3tmp/tmpZiuOxb/pdisk_1.dat 2025-05-29T15:21:33.826880Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:21:33.876107Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888159544716628:2079] 1748532093667289 != 1748532093667292 2025-05-29T15:21:33.886930Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1341, node 1 2025-05-29T15:21:33.906412Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.906421Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.906423Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.906464Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:33.919350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.919367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:21554 2025-05-29T15:21:33.923399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21554 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.079660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.083163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ttt` (id Int64 NOT NULL, value Int32, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-05-29T15:21:34.627142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163839684592:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.627210Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.685740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.703691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163839684671:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.703782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163839684671:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.703832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163839684671:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.703860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163839684671:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.703878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163839684671:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.703897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163839684671:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.703917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163839684671:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.703936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163839684671:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.703958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163839684671:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.703982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163839684671:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.704001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163839684671:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.704020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888163839684671:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.709765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:34.709782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:34.709795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:34.709801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:34.709821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:34.709827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:34.709839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:34.709844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:34.709855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:34.709860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:34.709867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:34.709873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:34.709901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:34.709909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:34.709928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:34.709934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:34.709945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:34.709951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-0 ... 9888163839684675:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.711183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163839684675:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.711200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163839684675:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.711215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163839684675:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.711232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163839684675:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.711250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163839684675:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.711266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163839684675:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.711282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163839684675:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.711297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163839684675:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.712889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:34.712902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:34.712915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:34.712920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:34.712937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:34.712942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:34.712952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:34.712957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:34.712967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:34.712971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:34.712977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:34.712982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:34.713002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:34.713007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:34.713026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:34.713030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:34.713042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:34.713047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.713054Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:34.713060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:34.713064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:34.713126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:34.713130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:34.722101Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.723000Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:34.740161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163839684827:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.740198Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.740471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163839684832:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.741477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.745119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:21:34.745199Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888163839684834:2408], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:34.828229Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888163839684886:2433] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:34.851320Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888163839684910:2413], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:34.852159Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjFmMzBhZWItZDZjY2QxMTctZWI3MjU5ZTctMjU2MTY2Y2U=, ActorId: [1:7509888163839684825:2403], ActorState: ExecuteState, TraceId: 01jwea4srjf39cg5281hysnjec, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/olap/locks_ut.cpp:129, void NKikimr::NKqp::NTestSuiteKqpOlapLocks::TestDeleteAbsent(const size_t, bool): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13D433CB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13EFB238 2. /tmp//-S/ydb/core/kqp/ut/olap/locks_ut.cpp:129: TestDeleteAbsent @ 0x13BBB5D3 3. /tmp//-S/ydb/core/kqp/ut/olap/locks_ut.cpp:17: operator() @ 0x13BC1FA6 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13EFD0ED 5. /tmp//-S/ydb/core/kqp/ut/olap/locks_ut.cpp:17: Execute @ 0x13BC196C 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13EFD862 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F0F40C 8. ??:0: ?? @ 0x7FDF57631D8F 9. ??:0: ?? @ 0x7FDF57631E3F 10. ??:0: ?? @ 0x12AB1028 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] Test command err: 2025-05-29T15:21:37.032638Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:21:37.036687Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:21:37.036777Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:21:37.037499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:37.037548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.037592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.037606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.037619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:37.037637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:37.037649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:37.037662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:37.037675Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:37.037687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:37.037703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.037728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:37.044159Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:21:37.044228Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:21:37.044239Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:21:37.044273Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.044308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:37.044324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:37.044330Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:21:37.044340Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:21:37.044350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:37.044358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:37.044363Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:21:37.044382Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.044391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:37.044399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:37.044404Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:21:37.044415Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:21:37.044422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:37.044430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:37.044435Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:21:37.044451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:37.044458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:37.044463Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:21:37.044472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:37.044480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:37.044486Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:21:37.044513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:37.044522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.044527Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:21:37.044548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.044556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.044561Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:21:37.044575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.044582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.044587Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.044596Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:37.044605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:37.044613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:37.044618Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:21:37.044694Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-05-29T15:21:37.044706Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-05-29T15:21:37.044715Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute; ... 
=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:39.310325Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-05-29T15:21:39.310340Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:230;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-05-29T15:21:39.310355Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:250;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-05-29T15:21:39.310401Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:394:2406];bytes=2759;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-05-29T15:21:39.310417Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:39.310433Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:39.310446Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:39.310479Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:21:39.310493Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:39.310506Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:39.310513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:410: Scan [1:395:2407] finished for tablet 9437184 2025-05-29T15:21:39.310583Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:394:2406];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["l_task_result"],"t":0.001},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1748532099308255,"name":"_full_task","f":1748532099308255,"d_finished":0,"c":0,"l":1748532099310522,"d":2267},"events":[{"name":"bootstrap","f":1748532099308295,"d_finished":436,"c":1,"l":1748532099308731,"d":436},{"a":1748532099310476,"name":"ack","f":1748532099310288,"d_finished":161,"c":1,"l":1748532099310449,"d":207},{"a":1748532099310474,"name":"processing","f":1748532099308924,"d_finished":1011,"c":10,"l":1748532099310450,"d":1059},{"name":"ProduceResults","f":1748532099308542,"d_finished":451,"c":13,"l":1748532099310510,"d":451},{"a":1748532099310510,"name":"Finish","f":1748532099310510,"d_finished":0,"c":0,"l":1748532099310522,"d":12},{"name":"task_result","f":1748532099308928,"d_finished":825,"c":9,"l":1748532099310247,"d":825}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:39.310594Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:394:2406];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-05-29T15:21:39.310640Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:394:2406];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["l_task_result"],"t":0.001},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1748532099308255,"name":"_full_task","f":1748532099308255,"d_finished":0,"c":0,"l":1748532099310600,"d":2345},"events":[{"name":"bootstrap","f":1748532099308295,"d_finished":436,"c":1,"l":1748532099308731,"d":436},{"a":1748532099310476,"name":"ack","f":1748532099310288,"d_finished":161,"c":1,"l":1748532099310449,"d":285},{"a":1748532099310474,"name":"processing","f":1748532099308924,"d_finished":1011,"c":10,"l":1748532099310450,"d":1137},{"name":"ProduceResults","f":1748532099308542,"d_finished":451,"c":13,"l":1748532099310510,"d":451},{"a":1748532099310510,"name":"Finish","f":1748532099310510,"d_finished":0,"c":0,"l":1748532099310600,"d":90},{"name":"task_result","f":1748532099308928,"d_finished":825,"c":9,"l":1748532099310247,"d":825}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:39.310656Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-05-29T15:21:39.308105Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7928;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7928;selected_rows=0; 2025-05-29T15:21:39.310662Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:190;event=scan_aborted;reason=unexpected on destructor; 2025-05-29T15:21:39.310708Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> KqpOlapJson::DuplicationCompactionVariants [FAIL] >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] >> TColumnShardTestReadWrite::ReadStale [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants [FAIL] |57.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |57.9%| 
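
The `stats={...}` payloads in the two scan-shutdown lines above are compact JSON: `"p"` lists phase markers with times relative to task start (in seconds), while `"full"` and `"events"` carry absolute `f`irst/`l`ast timestamps that look like microseconds since the Unix epoch, plus `d_finished` (summed busy time), `c` (invocation count), and `d` (attributed span). A minimal decoder under those assumptions — the field meanings are inferred from this log, not taken from YDB documentation, and the helper name is made up:

    import json

    def summarize_scan_stats(raw: str) -> None:
        """Print a per-stage busy-time breakdown for one stats={...} blob."""
        stats = json.loads(raw)
        full = stats["full"]
        total_us = full["l"] - full["f"]  # whole-task wall span in microseconds
        print(f"total scan span: {total_us} us")
        for ev in stats["events"]:
            busy = ev.get("d_finished", 0)   # summed time spent inside the stage
            calls = ev.get("c", 0)           # how many times the stage ran
            share = 100.0 * busy / total_us if total_us else 0.0
            print(f'{ev["name"]:>14}: {busy:6d} us over {calls:2d} call(s) ({share:4.1f}%)')

On the first blob above this reports a 2267 us span with `processing` at 1011 us across 10 calls and `ProduceResults` at 451 us across 13 — the scan spent most of its short life shuttling empty result batches, which is consistent with `records_count:0` throughout.
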
[TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |57.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] |57.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadStale [GOOD] Test command err: 2025-05-29T15:21:39.431223Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:21:39.435381Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:21:39.435461Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:21:39.436289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:39.436349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:39.436392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:39.436424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:39.436453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:39.436475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:39.436495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:39.436516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:39.436536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:39.436555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:39.436582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:39.436606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:39.463154Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:21:39.463254Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:21:39.463268Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:21:39.463309Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:39.463354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:39.463375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:39.463382Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:21:39.463395Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:21:39.463408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:39.463418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:39.463424Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:21:39.463448Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:39.463459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:39.463467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:39.463472Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:21:39.463483Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:21:39.463492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:39.463502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:39.463509Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:21:39.463526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:39.463535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:39.463541Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:21:39.463563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:39.463574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:39.463581Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:21:39.463652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:39.463664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:39.463669Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:21:39.463692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:39.463700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:39.463704Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:21:39.463718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:39.463725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:39.463729Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:21:39.463738Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:39.463746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:39.463754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:39.463759Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:21:39.463862Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=14; 2025-05-29T15:21:39.463875Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=7; 2025-05-29T15:21:39.463886Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute ... 
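
The block above is the tablet's schema-update normalizer chain: `TTxUpdateSchema` starts with `normalizers_count=11` and walks Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, SyncPortionFromChunks, RestoreV1Chunks_V2, RestoreV2Chunks, CleanDeprecatedSnapshot, and RestoreV0ChunksMeta in order, each one logging `normalizer_init`, then `normalizer_finished`, then `normalizer_switched` to the next. A quick way to confirm a run completed is to count the `normalizer_finished` events; a sketch that assumes only the key=value layout visible above:

    import re

    FINISHED = re.compile(r"event=normalizer_finished;description=CLASS_NAME=(\w+);id=(\d+);")

    def finished_normalizers(log_text: str) -> dict[str, int]:
        """Map each normalizer class name to its reported numeric id."""
        return {m.group(1): int(m.group(2)) for m in FINISHED.finditer(log_text)}

For the ReadStale startup above, Granules (id=1) through CleanDeprecatedSnapshot (id=17) are visible before the output is truncated; a complete run (see the node-2 tablet later in this log) ends with RestoreV0ChunksMeta (id=18), for 11 entries in total.
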
_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:485;event=OnTieringModified;path_id=1; 2025-05-29T15:21:40.089101Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:214;event=finished_tx;tx_id=10; 2025-05-29T15:21:40.119116Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 2025-05-29T15:21:40.119198Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=54320;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=54320;columns=10; 2025-05-29T15:21:40.121825Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:210;event=register_operation;operation_id=1;last=1; 2025-05-29T15:21:40.121846Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:15;writing_size=54320;operation_id=9e90f066-3ca011f0-a19624ac-d7964a5f;in_flight=1;size_in_flight=54320; 2025-05-29T15:21:40.125087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:139:2170];write_id=1;table_id=1;entity_id=3;size=11104;limit=10240;r_count=999;fline=column_info.h:130;sizes=5552,5552;s_splitted=5616,5720;r_splitted=499,500; 2025-05-29T15:21:40.125315Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:139:2170];write_id=1;table_id=1;fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=74272;count=11;actions=__DEFAULT,;waiting=1;; 2025-05-29T15:21:40.131676Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:112;writing_size=54320;event=data_write_finished;writing_id=9e90f066-3ca011f0-a19624ac-d7964a5f; 2025-05-29T15:21:40.131797Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:54;memory_size=86;data_size=62;sum=86;count=1; 2025-05-29T15:21:40.131824Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:75;memory_size=566;data_size=558;sum=566;count=2;size_of_meta=144; 2025-05-29T15:21:40.131840Z node 1 :TX_COLUMNSHARD DEBUG: 
log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=638;data_size=630;sum=638;count=1;size_of_portion=216; 2025-05-29T15:21:40.132101Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 11 2025-05-29T15:21:40.132144Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:148;event=add_by_insert_id;id=1;operation_id=1; 2025-05-29T15:21:40.157050Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 11 2025-05-29T15:21:40.171822Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1748532100434 at tablet 9437184, mediator 0 2025-05-29T15:21:40.171861Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] execute at tablet 9437184 2025-05-29T15:21:40.171949Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=abstract.h:83;progress_tx_id=100;lock_id=1;broken=0; 2025-05-29T15:21:40.172122Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=tx_controller.cpp:214;event=finished_tx;tx_id=100; 2025-05-29T15:21:40.187198Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] complete at tablet 9437184 2025-05-29T15:21:40.187250Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=100;lock_id=1;broken=0; 2025-05-29T15:21:40.187319Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:177;event=remove_by_insert_id;id=1;operation_id=1; 2025-05-29T15:21:40.187325Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:180;event=remove_operation;operation_id=1; 2025-05-29T15:21:40.187349Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:21:40.187439Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:240;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-05-29T15:21:40.187446Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-05-29T15:21:40.187459Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:21:40.187477Z node 1 :TX_COLUMNSHARD DEBUG: 
log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=0; 2025-05-29T15:21:40.188001Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:21:40.188038Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:21:40.188044Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:21:40.188079Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-05-29T15:21:40.188222Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 1 version: {1748531740434:max} readable: {1748532100434:max} at tablet 9437184 2025-05-29T15:21:40.211092Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 1 at tablet 9437184 2025-05-29T15:21:40.211154Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1748531740434:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=constructor.cpp:18;event=overriden_columns;ids=1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043; 2025-05-29T15:21:40.211180Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:784: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1748531740434:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot build metadata withno ranges;details=Snapshot too old: {1748531740434:max}. CS min read snapshot: {1748531800434:max}. 
now: 2025-05-29T15:21:40.211172Z; 2025-05-29T15:21:40.227291Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1748531740434:max} readable: {1748532100434:max} at tablet 9437184 2025-05-29T15:21:40.243514Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-05-29T15:21:40.244037Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1748531740434:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-05-29T15:21:40.244062Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1748531740434:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-05-29T15:21:40.244255Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1748531740434:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[{"from":6}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"1,6","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,6","t":"Projection"},"w":18,"id":0}}}; 2025-05-29T15:21:40.244281Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:784: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1748531740434:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot build metadata withno ranges;details=Snapshot too old: {1748531740434:max}. CS min read snapshot: {1748531800434:max}. 
now: 2025-05-29T15:21:40.244275Z; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapUpsert [FAIL] Test command err: Trying to start YDB, gRPC: 30895, MsgBus: 32686 2025-05-29T15:21:33.844191Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888156283515719:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.844698Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026e3/r3tmp/tmpGscgg4/pdisk_1.dat 2025-05-29T15:21:33.949400Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30895, node 1 2025-05-29T15:21:33.975249Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.975259Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.975261Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.975290Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:33.989523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.989543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.990918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32686 TClient is connected to server localhost:32686 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.184476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
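
Both `TTxScan failed` rejections in the ReadStale output above come from the same staleness check: the scan asked for snapshot `{1748531740434:max}` while the shard's minimum readable snapshot was `{1748531800434:max}`. Treating the plan-step values as milliseconds since the Unix epoch (which matches the surrounding wall-clock timestamps), the arithmetic works out to a five-minute retention window:

    requested = 1748531740434     # snapshot version the scan requested
    min_readable = 1748531800434  # "CS min read snapshot" from the rejection
    current = 1748532100434       # upper bound of "readable" at scan time

    print((current - min_readable) / 1000)  # 300.0 s: shard keeps ~5 min of history
    print((current - requested) / 1000)     # 360.0 s: the request is ~6 min old -> rejected

That is exactly what the test name advertises: `ReadStale [GOOD]` means the shard correctly refused the stale read.
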
2025-05-29T15:21:34.187230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.667229Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888160578483507:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.667252Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.783480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.803475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160578483581:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.803544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160578483581:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.803586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160578483581:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.803604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160578483581:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.803628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160578483581:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.803648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160578483581:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.803666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160578483581:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.803691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160578483581:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.803714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160578483581:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.803734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160578483581:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.803758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160578483581:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.803779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888160578483581:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.814897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:34.814916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:34.814929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:34.814936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:34.814956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:34.814961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:34.814972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:34.814978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:34.814989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:34.814995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:34.815003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:34.815008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:34.815029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:34.815038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:34.815058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:34.815064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:34.815078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:34.815083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.815090Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_s ... E=TablesCleaner; 2025-05-29T15:21:38.147645Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[2:7509888180766309407:2333];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:38.147670Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[2:7509888180766309407:2333];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:38.147691Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[2:7509888180766309407:2333];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:38.147713Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[2:7509888180766309407:2333];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:38.147736Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[2:7509888180766309407:2333];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:38.147767Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[2:7509888180766309407:2333];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:38.147794Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[2:7509888180766309407:2333];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:38.147816Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[2:7509888180766309407:2333];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:38.148800Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 
2025-05-29T15:21:38.148816Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:38.148830Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:38.148835Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:38.148858Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:38.148864Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:38.148876Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:38.148882Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:38.148895Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:38.148900Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:38.148909Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:38.148914Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:38.148939Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:38.148948Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:38.148971Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:38.148978Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:38.148993Z node 2 :TX_COLUMNSHARD WARN: 
log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:38.148998Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:38.149007Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:38.149014Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:38.149019Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:38.149124Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:38.149135Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:38.183118Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:38.183727Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:38.192830Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888180766309497:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:38.192860Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:38.193026Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888180766309502:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:38.193960Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:38.197240Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:21:38.197357Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888180766309504:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:38.287776Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888180766309555:2404] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:38.299316Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888180766309564:2356], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:38.299955Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=M2RkYmI5NmUtNTFmNGFhZGEtNzcyZTViMGUtZGU3YWQ5Mjc=, ActorId: [2:7509888180766309313:2325], ActorState: ExecuteState, TraceId: 01jwea4x4g0nxr8c28ejwcz76z, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/olap/kqp_olap_ut.cpp:2329, void NKikimr::NKqp::NTestSuiteKqpOlap::TestOlapUpsert(ui32): (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13D433CB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13EFB238 2. /tmp//-S/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp:2329: TestOlapUpsert @ 0x13AA5232 3. /tmp//-S/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp:27: operator() @ 0x13AE2C06 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13EFD0ED 5. /tmp//-S/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp:27: Execute @ 0x13AE25C5 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13EFD862 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F0F40C 8. ??:0: ?? @ 0x7F72C6CA8D8F 9. ??:0: ?? @ 0x7F72C6CA8E3F 10. ??:0: ?? @ 0x12AB1028 |58.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdown_Datetime_SQ [FAIL] Test command err: Trying to start YDB, gRPC: 16175, MsgBus: 20385 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025f4/r3tmp/tmpMHDddF/pdisk_1.dat 2025-05-29T15:21:33.609456Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888156882904990:2283];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.609528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:33.668617Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.674792Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888156882904721:2079] 1748532093543631 != 1748532093543634 TServer::EnableGrpc on GrpcPort 16175, node 1 2025-05-29T15:21:33.710794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.710808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.710810Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.710853Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:33.711165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.711186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.715081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20385 TClient is connected to server localhost:20385 WaitRootIsUp 'Root'... 
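
The OlapUpsert failure above bottoms out in the query compiler (`yql_expr.h:1874: index out of range`); the earlier "Resource pool default not found" / "doublechecking" / "path exist, request accepts it" messages read as the usual workload-manager bootstrap race rather than the cause. The unittest then reports the status mismatch as `with diff: (INT|SUCC)E(RNAL_ERROR|SS)`, a notation that appears to interleave the two compared strings: `(left|right)` groups mark divergent runs and bare characters are shared. A small decoder under that assumption (the function name is made up):

    import re

    TOKEN = re.compile(r"\(([^|)]*)\|([^)]*)\)|([^()]+)")

    def split_diff(diff: str) -> tuple[str, str]:
        """Recover both sides of a unittest-style inline string diff."""
        left, right = [], []
        for l, r, shared in TOKEN.findall(diff):
            left.append(shared or l)
            right.append(shared or r)
        return "".join(left), "".join(right)

    print(split_diff("(INT|SUCC)E(RNAL_ERROR|SS)"))
    # ('INTERNAL_ERROR', 'SUCCESS')
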
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.874049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.882708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.891345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:33.919043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156882905412:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.919096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156882905412:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.919133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156882905412:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.919160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156882905412:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.919182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156882905412:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.919208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156882905412:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.919237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;self_id=[1:7509888156882905412:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.919262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156882905412:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.919288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156882905412:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.919308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156882905412:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.919333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156882905412:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.919364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888156882905412:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.926454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888156882905410:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.926475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888156882905410:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.926537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888156882905410:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.926557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888156882905410:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.926583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888156882905410:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.926603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888156882905410:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.926629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888156882905410:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.926648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888156882905410:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.926702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888156882905410:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.926727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888156882905410:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.926762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888156882905410:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.926781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888156882905410:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.931641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156882905411:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.931666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156882905411:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.931711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156882905411:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.931732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156882905411:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.931753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888156882905411:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fl ... 
et_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.750251Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.751307Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.751974Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.752613Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.753303Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.753621Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.754974Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.755093Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.755973Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.756234Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.756926Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.757279Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.757839Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.758343Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.758817Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: 
tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.759439Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.759828Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037918;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.760417Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.760826Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.761482Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.762574Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.763050Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.763663Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.764146Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.764699Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.765101Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.765795Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.766149Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.766848Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.767130Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: 
tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.768144Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.768233Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.769139Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.769380Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.770147Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:35.776410Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888165359853582:2721], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:35.776452Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:35.776555Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888165359853587:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:35.777506Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:35.780286Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888165359853589:2725], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:35.862956Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888165359853640:3595] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:35.888297Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888165359853656:2729], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:21:35.889066Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=NWZiMWQ1ZjgtNjhmYzlkZWYtNTAxN2U5ZDktMjdhOGFmYTQ=, ActorId: [2:7509888165359853580:2720], ActorState: ExecuteState, TraceId: 01jwea4trz011wpg45mfwasxk8, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/kqp/ut/olap/kqp_olap_ut.cpp:1663, virtual void NKikimr::NKqp::NTestSuiteKqpOlap::TTestCasePredicatePushdown_Datetime_SQ::Execute_(NUnitTest::TTestContext &): (insertRes.IsSuccess())
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13D433CB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13EFB238
2. /tmp//-S/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp:1663: Execute_ @ 0x13A8D26C
3. /tmp//-S/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp:27: operator() @ 0x13AE2C06
4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13EFD0ED
5. /tmp//-S/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp:27: Execute @ 0x13AE25C5
6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13EFD862
7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F0F40C
8. ??:0: ?? @ 0x7F1AD46FAD8F
9. ??:0: ?? @ 0x7F1AD46FAE3F
10. ??:0: ?? @ 0x12AB1028
|58.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD]
|58.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest
|58.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest
|58.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest
>> KqpOlap::SimpleRequestHasProjections [GOOD]
Test command err: Trying to start YDB, gRPC: 5113, MsgBus: 29695
2025-05-29T15:21:33.945304Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888156167675482:2204];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:21:33.945397Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026bf/r3tmp/tmpstOngh/pdisk_1.dat
2025-05-29T15:21:34.101259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:21:34.101283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:21:34.110853Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:21:34.111919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 5113, node 1
2025-05-29T15:21:34.142565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:21:34.142577Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:21:34.142579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:21:34.142614Z node 1
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29695 TClient is connected to server localhost:29695 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.319896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.323205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.528709Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888160462643241:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.528710Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888160462643276:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.528725Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.529453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.531342Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888160462643278:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-05-29T15:21:34.603417Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888160462643329:2325] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:34.701151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.717597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160462643432:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.721271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160462643433:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.721323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160462643433:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.721369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160462643433:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.721388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160462643433:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.721408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160462643433:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.721427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160462643433:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.721446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160462643433:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.721463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160462643433:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.721481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;self_id=[1:7509888160462643433:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.721497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160462643433:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.721512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160462643433:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.721528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160462643433:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.727447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:34.727464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:34.727478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:34.727483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:34.727504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:34.727509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:34.727521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:34.727527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:34.727538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:34.727543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:34.727551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:34.727556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Sy ... SS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.906075Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.906082Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.906093Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.906097Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.906103Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:37.906108Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:37.906112Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:37.906157Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:37.906160Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:37.906209Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:37.906213Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:37.906222Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:37.906227Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:37.906242Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 
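
The long normalizer_register / normalizer_switched / normalizer_finished runs above and below are each column shard tablet replaying its schema-normalization chain during TTxInitSchema/TTxUpdateSchema at startup. A toy sketch of that register-then-run-in-order lifecycle; the types and the main() driver are illustrative assumptions, not the column shard implementation, and the ids in the real log (1, 2, 4, 6, 8, ...) are class-specific constants rather than positions in the chain:

    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    // Illustrative lifecycle only; the real normalizers (Granules, Chunks,
    // TablesCleaner, ...) live in the column shard sources.
    struct TNormalizer {
        std::string Name;
        std::function<void()> Run;
    };

    int main() {
        std::vector<TNormalizer> chain;
        for (const char* name : {"Granules", "Chunks", "TablesCleaner", "CleanGranuleId"}) {
            std::cout << "event=normalizer_register;description=CLASS_NAME=" << name << ";\n";
            chain.push_back({name, [] { /* normalization work elided */ }});
        }
        // Each normalizer is switched to, run, and marked finished in order,
        // matching the event sequence in the log records.
        for (const auto& n : chain) {
            std::cout << "event=normalizer_switched;description=CLASS_NAME=" << n.Name << ";\n";
            n.Run();
            std::cout << "event=normalizer_finished;description=CLASS_NAME=" << n.Name << ";\n";
        }
    }
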
2025-05-29T15:21:37.906246Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:37.906256Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:37.906260Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:37.906267Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:37.906271Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:37.906277Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:37.906281Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:37.906300Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:37.906305Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.906322Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.906326Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.906336Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.906340Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.906346Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:37.906351Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:37.906355Z node 2 :TX_COLUMNSHARD 
WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:37.906395Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:37.906399Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:37.926347Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; 2025-05-29T15:21:37.926385Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; 2025-05-29T15:21:37.926416Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; 2025-05-29T15:21:37.926443Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; 2025-05-29T15:21:37.928492Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-05-29T15:21:37.935921Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710659; 2025-05-29T15:21:37.936550Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710659; 2025-05-29T15:21:37.937038Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710659; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=22056;columns=5; 2025-05-29T15:21:38.141489Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888178926073879:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:38.141528Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:38.141676Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888178926073902:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:38.142563Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-05-29T15:21:38.145219Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888178926073904:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-05-29T15:21:38.243583Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888178926073955:2482] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:38.315954Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532098194, txId: 18446744073709551615] shutting down 2025-05-29T15:21:38.523544Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532098320, txId: 18446744073709551615] shutting down >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdownWithParametersILike [GOOD] Test command err: Trying to start YDB, gRPC: 5920, MsgBus: 29325 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00263d/r3tmp/tmpt2H3t8/pdisk_1.dat 2025-05-29T15:21:33.442878Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:21:33.469058Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888158237465103:2079] 1748532093303572 != 1748532093303575 TServer::EnableGrpc on GrpcPort 5920, node 1 2025-05-29T15:21:33.488134Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.531110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.531144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.535122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:33.538908Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.538920Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.538922Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.538958Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29325 TClient is connected to server localhost:29325 WaitRootIsUp 'Root'... 
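
The recurring NOT_FOUND / "path exist, request accepts it" / "doublechecking" sequences around resource pool default (here and in the earlier suites) are one pattern: on a fresh database several actors race to lazily create the pool on first use, the losers tolerate the already-existing path, and everyone re-reads until the pool is visible. A compact sketch of that ensure-exists protocol; all names and types here are invented for illustration:

    #include <iostream>
    #include <set>
    #include <string>

    enum class EStatus { Success, NotFound, AlreadyExists };

    static std::set<std::string> SchemeBoard;  // stands in for the schemeshard

    EStatus FetchPool(const std::string& path) {
        return SchemeBoard.count(path) ? EStatus::Success : EStatus::NotFound;
    }

    EStatus CreatePool(const std::string& path) {
        // Mirrors "Check failed: path ... error: path exist, request accepts it".
        return SchemeBoard.insert(path).second ? EStatus::Success
                                               : EStatus::AlreadyExists;
    }

    void EnsureDefaultPool(const std::string& path) {
        if (FetchPool(path) == EStatus::NotFound) {
            // Both TPoolFetcherActor instances in the log hit this branch;
            // one create wins, the other sees the path already there.
            CreatePool(path);
        }
        // "Scheduled retry ... doublechecking": re-read until visible.
        while (FetchPool(path) != EStatus::Success) {
        }
    }

    int main() {
        EnsureDefaultPool("/Root/.metadata/workload_manager/pools/default");
        std::cout << "pool ready\n";
    }

The warnings are therefore benign startup noise: the scheme operation is accepted even when the path exists, and the retry converges on the first re-read.
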
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.761072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.768290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.422991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888162532433065:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.423022Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.529770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.725847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037943;self_id=[1:7509888162532433580:2361];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.725936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037943;self_id=[1:7509888162532433580:2361];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.725995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037943;self_id=[1:7509888162532433580:2361];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.726019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037943;self_id=[1:7509888162532433580:2361];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.726045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037943;self_id=[1:7509888162532433580:2361];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.726070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037943;self_id=[1:7509888162532433580:2361];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.726095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037943;self_id=[1:7509888162532433580:2361];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.726123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037943;self_id=[1:7509888162532433580:2361];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.726147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037943;self_id=[1:7509888162532433580:2361];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.726176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037943;self_id=[1:7509888162532433580:2361];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.726199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037943;self_id=[1:7509888162532433580:2361];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.726223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037943;self_id=[1:7509888162532433580:2361];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.726622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037947;self_id=[1:7509888162532433558:2340];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.726639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037947;self_id=[1:7509888162532433558:2340];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.726674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037947;self_id=[1:7509888162532433558:2340];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.726687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037947;self_id=[1:7509888162532433558:2340];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.726700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037947;self_id=[1:7509888162532433558:2340];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.726713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037947;self_id=[1:7509888162532433558:2340];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.726729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037947;self_id=[1:7509888162532433558:2340];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.726850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037947;self_id=[1:7509888162532433558:2340];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.726869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037947;self_id=[1:7509888162532433558:2340];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.726891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037947;self_id=[1:7509888162532433558:2340];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.726904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037947;self_id=[1:7509888162532433558:2340];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.726915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037947;self_id=[1:7509888162532433558:2340];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.732441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037909;self_id=[1:7509888162532433577:2358];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.732466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037909;self_id=[1:7509888162532433577:2358];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.732510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037909;self_id=[1:7509888162532433577:2358];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.732524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037909;self_id=[1:7509888162532433577:2358];tablet_id=72075186224037909;process=TTxInitSc ... 0109u];[3000113u];[3000114u];[3000118u];[3000119u];[3000123u];[3000124u];[3000128u];[3000129u];[3000133u];[3000134u];[3000138u];[3000139u];[3000143u];[3000144u];[3000148u];[3000149u];[3000153u];[3000154u];[3000158u];[3000159u];[3000163u];[3000164u];[3000168u];[3000169u];[3000173u];[3000174u];[3000178u];[3000179u];[3000183u];[3000184u];[3000188u];[3000189u];[3000193u];[3000194u];[3000198u];[3000199u];[3000203u];[3000204u];[3000208u];[3000209u];[3000213u];[3000214u];[3000218u];[3000219u];[3000223u];[3000224u];[3000228u];[3000229u];[3000233u];[3000234u];[3000238u];[3000239u];[3000243u];[3000244u];[3000248u];[3000249u];[3000253u];[3000254u];[3000258u];[3000259u];[3000263u];[3000264u];[3000268u];[3000269u];[3000273u];[3000274u];[3000278u];[3000279u];[3000283u];[3000284u];[3000288u];[3000289u];[3000293u];[3000294u];[3000298u];[3000299u];[3000303u];[3000304u];[3000308u];[3000309u];[3000313u];[3000314u];[3000318u];[3000319u];[3000323u];[3000324u];[3000328u];[3000329u];[3000333u];[3000334u];[3000338u];[3000339u];[3000343u];[3000344u];[3000348u];[3000349u];[3000353u];[3000354u];[3000358u];[3000359u];[3000363u];[3000364u];[3000368u];[3000369u];[3000373u];[3000374u];[3000378u];[3000379u];[3000383u];[3000384u];[3000388u];[3000389u];[3000393u];[3000394u];[3000398u];[3000399u];[3000403u];[3000404u];[3000408u];[3000409u];[3000413u];[3000414u];[3000418u];[3000419u];[3000423u];[3000424u];[3000428u];[3000429u];[3000433u];[3000434u];[3000438u];[3000439u];[3000443u];[3000444u];[3000448u];[3000449u];[3000453u];[3000454u];[3000458u];[3000459u];[3000463u];[3000464u];[3000468u];[3000469u];[3000473u];[3000474u];[3000478u];[3000479u];[3000483u];[3000484u];[3000488u];[3000489u];[3000493u];[3000494u];[3000498u];[3000499u];[3000503u];[3000504u];[3000508u];[3000509u];[3000513u];[3000514u];[3000518u];[3000519u];[3000523u];[3000524u];[3000528u];[3000529u];[3000533u];[3000534u];[3000538u];[3000539u];[3000543u];[3000544u];[3000548u];[3000549u];[3000553u];[3000554u];[3000558u];[3000559u];[3000563u];[3000564u];[3000568u];[3000569u];[3000573u];[3000574u];[3000578u];[3000579u];[3000583u];[3000584u];[3000588u];[3000589u];[3000593u];[3000594u];[3000598u];[3000599u];[3000603u];[3000604u];[3000608u];[3000609u];[3000613u];[3000614u];[3000618u];[3000619u];[3000623u];[3000624u];[3000628u];[3000629u];[3000633u];[3000634u];[3000638u];[3000639u];[3000643
u];[3000644u];[3000648u];[3000649u];[3000653u];[3000654u];[3000658u];[3000659u];[3000663u];[3000664u];[3000668u];[3000669u];[3000673u];[3000674u];[3000678u];[3000679u];[3000683u];[3000684u];[3000688u];[3000689u];[3000693u];[3000694u];[3000698u];[3000699u];[3000703u];[3000704u];[3000708u];[3000709u];[3000713u];[3000714u];[3000718u];[3000719u];[3000723u];[3000724u];[3000728u];[3000729u];[3000733u];[3000734u];[3000738u];[3000739u];[3000743u];[3000744u];[3000748u];[3000749u];[3000753u];[3000754u];[3000758u];[3000759u];[3000763u];[3000764u];[3000768u];[3000769u];[3000773u];[3000774u];[3000778u];[3000779u];[3000783u];[3000784u];[3000788u];[3000789u];[3000793u];[3000794u];[3000798u];[3000799u];[3000803u];[3000804u];[3000808u];[3000809u];[3000813u];[3000814u];[3000818u];[3000819u];[3000823u];[3000824u];[3000828u];[3000829u];[3000833u];[3000834u];[3000838u];[3000839u];[3000843u];[3000844u];[3000848u];[3000849u];[3000853u];[3000854u];[3000858u];[3000859u];[3000863u];[3000864u];[3000868u];[3000869u];[3000873u];[3000874u];[3000878u];[3000879u];[3000883u];[3000884u];[3000888u];[3000889u];[3000893u];[3000894u];[3000898u];[3000899u];[3000903u];[3000904u];[3000908u];[3000909u];[3000913u];[3000914u];[3000918u];[3000919u];[3000923u];[3000924u];[3000928u];[3000929u];[3000933u];[3000934u];[3000938u];[3000939u];[3000943u];[3000944u];[3000948u];[3000949u];[3000953u];[3000954u];[3000958u];[3000959u];[3000963u];[3000964u];[3000968u];[3000969u];[3000973u];[3000974u];[3000978u];[3000979u];[3000983u];[3000984u];[3000988u];[3000989u];[3000993u];[3000994u];[3000998u];[3000999u]] Received: [[3000003u];[3000004u];[3000008u];[3000009u];[3000013u];[3000014u];[3000018u];[3000019u];[3000023u];[3000024u];[3000028u];[3000029u];[3000033u];[3000034u];[3000038u];[3000039u];[3000043u];[3000044u];[3000048u];[3000049u];[3000053u];[3000054u];[3000058u];[3000059u];[3000063u];[3000064u];[3000068u];[3000069u];[3000073u];[3000074u];[3000078u];[3000079u];[3000083u];[3000084u];[3000088u];[3000089u];[3000093u];[3000094u];[3000098u];[3000099u];[3000103u];[3000104u];[3000108u];[3000109u];[3000113u];[3000114u];[3000118u];[3000119u];[3000123u];[3000124u];[3000128u];[3000129u];[3000133u];[3000134u];[3000138u];[3000139u];[3000143u];[3000144u];[3000148u];[3000149u];[3000153u];[3000154u];[3000158u];[3000159u];[3000163u];[3000164u];[3000168u];[3000169u];[3000173u];[3000174u];[3000178u];[3000179u];[3000183u];[3000184u];[3000188u];[3000189u];[3000193u];[3000194u];[3000198u];[3000199u];[3000203u];[3000204u];[3000208u];[3000209u];[3000213u];[3000214u];[3000218u];[3000219u];[3000223u];[3000224u];[3000228u];[3000229u];[3000233u];[3000234u];[3000238u];[3000239u];[3000243u];[3000244u];[3000248u];[3000249u];[3000253u];[3000254u];[3000258u];[3000259u];[3000263u];[3000264u];[3000268u];[3000269u];[3000273u];[3000274u];[3000278u];[3000279u];[3000283u];[3000284u];[3000288u];[3000289u];[3000293u];[3000294u];[3000298u];[3000299u];[3000303u];[3000304u];[3000308u];[3000309u];[3000313u];[3000314u];[3000318u];[3000319u];[3000323u];[3000324u];[3000328u];[3000329u];[3000333u];[3000334u];[3000338u];[3000339u];[3000343u];[3000344u];[3000348u];[3000349u];[3000353u];[3000354u];[3000358u];[3000359u];[3000363u];[3000364u];[3000368u];[3000369u];[3000373u];[3000374u];[3000378u];[3000379u];[3000383u];[3000384u];[3000388u];[3000389u];[3000393u];[3000394u];[3000398u];[3000399u];[3000403u];[3000404u];[3000408u];[3000409u];[3000413u];[3000414u];[3000418u];[3000419u];[3000423u];[3000424u];[3000428u];[3000429u];[3000433u];[3000434u];[3000438u];[3000439u];[3000443u];[3000444u];[3000448u
];[3000449u];[3000453u];[3000454u];[3000458u];[3000459u];[3000463u];[3000464u];[3000468u];[3000469u];[3000473u];[3000474u];[3000478u];[3000479u];[3000483u];[3000484u];[3000488u];[3000489u];[3000493u];[3000494u];[3000498u];[3000499u];[3000503u];[3000504u];[3000508u];[3000509u];[3000513u];[3000514u];[3000518u];[3000519u];[3000523u];[3000524u];[3000528u];[3000529u];[3000533u];[3000534u];[3000538u];[3000539u];[3000543u];[3000544u];[3000548u];[3000549u];[3000553u];[3000554u];[3000558u];[3000559u];[3000563u];[3000564u];[3000568u];[3000569u];[3000573u];[3000574u];[3000578u];[3000579u];[3000583u];[3000584u];[3000588u];[3000589u];[3000593u];[3000594u];[3000598u];[3000599u];[3000603u];[3000604u];[3000608u];[3000609u];[3000613u];[3000614u];[3000618u];[3000619u];[3000623u];[3000624u];[3000628u];[3000629u];[3000633u];[3000634u];[3000638u];[3000639u];[3000643u];[3000644u];[3000648u];[3000649u];[3000653u];[3000654u];[3000658u];[3000659u];[3000663u];[3000664u];[3000668u];[3000669u];[3000673u];[3000674u];[3000678u];[3000679u];[3000683u];[3000684u];[3000688u];[3000689u];[3000693u];[3000694u];[3000698u];[3000699u];[3000703u];[3000704u];[3000708u];[3000709u];[3000713u];[3000714u];[3000718u];[3000719u];[3000723u];[3000724u];[3000728u];[3000729u];[3000733u];[3000734u];[3000738u];[3000739u];[3000743u];[3000744u];[3000748u];[3000749u];[3000753u];[3000754u];[3000758u];[3000759u];[3000763u];[3000764u];[3000768u];[3000769u];[3000773u];[3000774u];[3000778u];[3000779u];[3000783u];[3000784u];[3000788u];[3000789u];[3000793u];[3000794u];[3000798u];[3000799u];[3000803u];[3000804u];[3000808u];[3000809u];[3000813u];[3000814u];[3000818u];[3000819u];[3000823u];[3000824u];[3000828u];[3000829u];[3000833u];[3000834u];[3000838u];[3000839u];[3000843u];[3000844u];[3000848u];[3000849u];[3000853u];[3000854u];[3000858u];[3000859u];[3000863u];[3000864u];[3000868u];[3000869u];[3000873u];[3000874u];[3000878u];[3000879u];[3000883u];[3000884u];[3000888u];[3000889u];[3000893u];[3000894u];[3000898u];[3000899u];[3000903u];[3000904u];[3000908u];[3000909u];[3000913u];[3000914u];[3000918u];[3000919u];[3000923u];[3000924u];[3000928u];[3000929u];[3000933u];[3000934u];[3000938u];[3000939u];[3000943u];[3000944u];[3000948u];[3000949u];[3000953u];[3000954u];[3000958u];[3000959u];[3000963u];[3000964u];[3000968u];[3000969u];[3000973u];[3000974u];[3000978u];[3000979u];[3000983u];[3000984u];[3000988u];[3000989u];[3000993u];[3000994u];[3000998u];[3000999u]] {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.timestamp","Name":"Sort"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","Predicate":"level \u003E $in_level AND Apply","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/olapStore\/olapTable","E-Rows":"No 
estimate","Table":"olapStore\/olapTable","ReadColumns":["level","timestamp","uid"],"SsaProgram":{"Command":[{"Assign":{"Column":{"Id":6},"Parameter":{"Name":"$in_level"}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":4},{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Column":{"Id":10},"Parameter":{"Name":"$in_uid"}}},{"Assign":{"Function":{"KernelName":"","KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":3},{"Id":10}]},"Column":{"Id":11}}},{"Assign":{"Function":{"YqlOperationId":0,"KernelIdx":3,"FunctionType":2,"Arguments":[{"Id":9},{"Id":11}]},"Column":{"Id":12}}},{"Filter":{"Predicate":{"Id":12}}},{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"Sort-Filter-TableFullScan"}],"Node Type":"Merge","SortColumns":["timestamp (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"columns":["level","timestamp","uid"],"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/olapStore\/olapTable","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["level","timestamp","uid"],"SsaProgram":{"Command":[{"Assign":{"Column":{"Id":6},"Parameter":{"Name":"$in_level"}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":4},{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Column":{"Id":10},"Parameter":{"Name":"$in_uid"}}},{"Assign":{"Function":{"KernelName":"","KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":3},{"Id":10}]},"Column":{"Id":11}}},{"Assign":{"Function":{"YqlOperationId":0,"KernelIdx":3,"FunctionType":2,"Arguments":[{"Id":9},{"Id":11}]},"Column":{"Id":12}}},{"Filter":{"Predicate":{"Id":12}}},{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level \u003E $in_level AND Apply","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"SortBy":"row.timestamp","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |58.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |58.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |58.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |58.0%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] Test command err: 2025-05-29T15:21:37.877242Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:21:37.880642Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:21:37.880712Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:21:37.881239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-05-29T15:21:37.881267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-05-29T15:21:37.881295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.881309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.881322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.881336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:37.881348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:37.881359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:37.881370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:37.881382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:37.881395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:37.881409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.881420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:37.888167Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:21:37.888219Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-05-29T15:21:37.888232Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-05-29T15:21:37.888283Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:323;tasks_for_remove=0;distribution=; 2025-05-29T15:21:37.888322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-05-29T15:21:37.888338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-05-29T15:21:37.888344Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-05-29T15:21:37.888363Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.888375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:37.888384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:37.888387Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-05-29T15:21:37.888393Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:21:37.888398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:37.888404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:37.888406Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-05-29T15:21:37.888418Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.888423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:37.888428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:37.888431Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-05-29T15:21:37.888439Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:21:37.888443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:37.888450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:37.888456Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:21:37.888468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:37.888476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:37.888481Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:21:37.888491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:37.888499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:37.888503Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:21:37.888527Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:37.888535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.888540Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:21:37.888561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.888569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.888574Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:21:37.888587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.888595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.888600Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 202 ... 
fId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-05-29T15:21:41.255841Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:75;event=DoApply;interval_idx=0; 2025-05-29T15:21:41.255850Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-05-29T15:21:41.255868Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-05-29T15:21:41.255876Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-05-29T15:21:41.255898Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-05-29T15:21:41.255907Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-05-29T15:21:41.255917Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:199;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-05-29T15:21:41.255985Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:21:41.256024Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-05-29T15:21:41.256030Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-05-29T15:21:41.256045Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:230;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-05-29T15:21:41.256062Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:250;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-05-29T15:21:41.256114Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:449:2447];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-05-29T15:21:41.256132Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-05-29T15:21:41.256147Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-05-29T15:21:41.256159Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-05-29T15:21:41.256805Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:21:41.256837Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-05-29T15:21:41.256873Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-05-29T15:21:41.256882Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:410: Scan [1:451:2448] finished for tablet 9437184 2025-05-29T15:21:41.257026Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:449:2447];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.078},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.079}],"full":{"a":1748532101177037,"name":"_full_task","f":1748532101177037,"d_finished":0,"c":0,"l":1748532101256939,"d":79902},"events":[{"name":"bootstrap","f":1748532101177106,"d_finished":589,"c":1,"l":1748532101177695,"d":589},{"a":1748532101256795,"name":"ack","f":1748532101255977,"d_finished":186,"c":1,"l":1748532101256163,"d":330},{"a":1748532101256788,"name":"processing","f":1748532101178080,"d_finished":42892,"c":9,"l":1748532101256163,"d":43043},{"name":"ProduceResults","f":1748532101177485,"d_finished":493,"c":12,"l":1748532101256877,"d":493},{"a":1748532101256878,"name":"Finish","f":1748532101256878,"d_finished":0,"c":0,"l":1748532101256939,"d":61},{"name":"task_result","f":1748532101178085,"d_finished":42658,"c":8,"l":1748532101255932,"d":42658}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-05-29T15:21:41.257044Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:449:2447];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-05-29T15:21:41.257101Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:449:2447];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.078},{"events":["l_ProduceResults","f_Finish"],"t":0.079},{"events":["l_ack","l_processing","l_Finish"],"t":0.08}],"full":{"a":1748532101177037,"name":"_full_task","f":1748532101177037,"d_finished":0,"c":0,"l":1748532101257051,"d":80014},"events":[{"name":"bootstrap","f":1748532101177106,"d_finished":589,"c":1,"l":1748532101177695,"d":589},{"a":1748532101256795,"name":"ack","f":1748532101255977,"d_finished":186,"c":1,"l":1748532101256163,"d":442},{"a":1748532101256788,"name":"processing","f":1748532101178080,"d_finished":42892,"c":9,"l":1748532101256163,"d":43155},{"name":"ProduceResults","f":1748532101177485,"d_finished":493,"c":12,"l":1748532101256877,"d":493},{"a":1748532101256878,"name":"Finish","f":1748532101256878,"d_finished":0,"c":0,"l":1748532101257051,"d":173},{"name":"task_result","f":1748532101178085,"d_finished":42658,"c":8,"l":1748532101255932,"d":42658}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-05-29T15:21:41.257120Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-05-29T15:21:41.176851Z;index_granules=0;index_portions=1;index_batches=440;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2025-05-29T15:21:41.257128Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:190;event=scan_aborted;reason=unexpected on destructor; 2025-05-29T15:21:41.257191Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[1:451:2448];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |58.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] Test command err: 2025-05-29T15:21:37.968976Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:21:37.972454Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:21:37.972537Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:21:37.973282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:37.973335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.973370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.973389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.973407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:37.973431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:37.973446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:37.973466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:37.973483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:37.973504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:37.973523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.973541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:37.986128Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:21:37.986194Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:21:37.986207Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:21:37.986244Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.986286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:37.986299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:37.986304Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:21:37.986313Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:21:37.986322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:37.986329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:37.986333Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:21:37.986372Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.986379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:37.986386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:37.986390Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:21:37.986400Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:21:37.986406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:37.986413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:37.986417Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:21:37.986429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:37.986436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:37.986439Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:21:37.986448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:37.986456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:37.986460Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:21:37.986483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:37.986492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.986496Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:21:37.986516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.986522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.986526Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:21:37.986539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.986545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.986549Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.986557Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:37.986564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:37.986570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:37.986574Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:21:37.986650Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=11; 2025-05-29T15:21:37.986659Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-05-29T15:21:37.986667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute ... ;;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:41.807818Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-05-29T15:21:41.807832Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:230;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-05-29T15:21:41.807845Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:250;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-05-29T15:21:41.807889Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1014:2870];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-05-29T15:21:41.807904Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: 
TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:41.807919Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:41.807932Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:41.807964Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:21:41.807976Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:41.807989Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:41.807995Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:410: Scan [1:1015:2871] finished for tablet 9437184 2025-05-29T15:21:41.808060Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1014:2870];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1748532101805907,"name":"_full_task","f":1748532101805907,"d_finished":0,"c":0,"l":1748532101808004,"d":2097},"events":[{"name":"bootstrap","f":1748532101805942,"d_finished":422,"c":1,"l":1748532101806364,"d":422},{"a":1748532101807961,"name":"ack","f":1748532101807781,"d_finished":154,"c":1,"l":1748532101807935,"d":197},{"a":1748532101807959,"name":"processing","f":1748532101806540,"d_finished":854,"c":10,"l":1748532101807936,"d":899},{"name":"ProduceResults","f":1748532101806178,"d_finished":392,"c":13,"l":1748532101807993,"d":392},{"a":1748532101807993,"name":"Finish","f":1748532101807993,"d_finished":0,"c":0,"l":1748532101808004,"d":11},{"name":"task_result","f":1748532101806544,"d_finished":677,"c":9,"l":1748532101807743,"d":677}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:41.808071Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1014:2870];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-05-29T15:21:41.808118Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1014:2870];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1748532101805907,"name":"_full_task","f":1748532101805907,"d_finished":0,"c":0,"l":1748532101808077,"d":2170},"events":[{"name":"bootstrap","f":1748532101805942,"d_finished":422,"c":1,"l":1748532101806364,"d":422},{"a":1748532101807961,"name":"ack","f":1748532101807781,"d_finished":154,"c":1,"l":1748532101807935,"d":270},{"a":1748532101807959,"name":"processing","f":1748532101806540,"d_finished":854,"c":10,"l":1748532101807936,"d":972},{"name":"ProduceResults","f":1748532101806178,"d_finished":392,"c":13,"l":1748532101807993,"d":392},{"a":1748532101807993,"name":"Finish","f":1748532101807993,"d_finished":0,"c":0,"l":1748532101808077,"d":84},{"name":"task_result","f":1748532101806544,"d_finished":677,"c":9,"l":1748532101807743,"d":677}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:21:41.808133Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-05-29T15:21:41.805780Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2025-05-29T15:21:41.808139Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:190;event=scan_aborted;reason=unexpected on destructor; 2025-05-29T15:21:41.808180Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DuplicationCompactionVariants [FAIL] Test command err: Trying to start YDB, gRPC: 14796, MsgBus: 2933 
2025-05-29T15:21:36.929367Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888168682864505:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:36.929377Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026a8/r3tmp/tmpVnIUgd/pdisk_1.dat 2025-05-29T15:21:37.051100Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:37.051186Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888168682864483:2079] 1748532096929135 != 1748532096929138 TServer::EnableGrpc on GrpcPort 14796, node 1 2025-05-29T15:21:37.078595Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:37.078607Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:37.078609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:37.078651Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2933 2025-05-29T15:21:37.110171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:37.110195Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:37.111360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2933 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:37.148151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:21:37.152621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:21:37.414999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888172977832437:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.415030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.454017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:37.465476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888172977832512:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:37.465554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888172977832512:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.465624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888172977832512:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.465647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888172977832512:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.465671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888172977832512:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:37.465689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888172977832512:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:37.465707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888172977832512:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:37.465726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888172977832512:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:37.465746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888172977832512:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:37.465772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888172977832512:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:37.465790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888172977832512:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.465810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888172977832512:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:37.466364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:37.466374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:37.466388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:37.466393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:37.466413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:37.466417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:37.466428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:37.466432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:37.466443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:37.466447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:37.466453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:37.466458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:37.466481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:37.466490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.466511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.466520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.466533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.466538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.466546Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:37.466552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:37.466557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:37.466646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:37.466650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:37.479915Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710658; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-05-29T15:21:37.488035Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888172977832638:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.488071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.488319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:21:37.490175Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710659; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0`) 2025-05-29T15:21:37.505797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888172977832668:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.505819Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.507013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-29T15:21:37.510549Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976710660; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4", "a" : "a4"}')) 2025-05-29T15:21:37.518716Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888172977832699:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.518752Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.518914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888172977832704:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.520936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-05-29T15:21:37.523477Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888172977832706:2412], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-05-29T15:21:37.588138Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888172977832757:2454] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:37.616447Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888172977832773:2416], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:37.617038Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTgyOTExOGMtNGNiMDAwNmQtMjVjMGY5MGEtOTU5ZjE5OGI=, ActorId: [1:7509888172977832697:2407], ActorState: ExecuteState, TraceId: 01jwea4wfdc0f3hc9rc4rb87tk, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:21:38.470973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888172977832512:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:21:38.471875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888172977832512:2332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:21:39.475256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888172977832512:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:21:39.476279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888172977832512:2332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; assertion failed at ydb/core/kqp/ut/olap/combinatory/execute.h:41, virtual TConclusionStatus NKikimr::NKqp::TDataCommand::DoExecute(TKikimrRunner &): (prepareResult.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13D433CB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13EFB238 2. /-S/ydb/core/kqp/ut/olap/combinatory/execute.h:41: DoExecute @ 0x265B8EA9 3. /-S/ydb/core/kqp/ut/olap/combinatory/abstract.h:75: Execute @ 0x265B0763 4. /tmp//-S/ydb/core/kqp/ut/olap/combinatory/executor.cpp:22: Execute @ 0x265B0763 5. /-S/ydb/core/kqp/ut/olap/combinatory/variator.h:27: Execute @ 0x13B8DEA7 6. /tmp//-S/ydb/core/kqp/ut/olap/json_ut.cpp:911: Execute_ @ 0x13B8DEA7 7. /tmp//-S/ydb/core/kqp/ut/olap/json_ut.cpp:27: operator() @ 0x13B90F46 8. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13EFD0ED 9. /tmp//-S/ydb/core/kqp/ut/olap/json_ut.cpp:27: Execute @ 0x13B9090C 10. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13EFD862 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F0F40C 12. ??:0: ?? @ 0x7F647BE20D8F 13. ??:0: ?? @ 0x7F647BE20E3F 14. ??:0: ?? @ 0x12AB1028 |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |58.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |58.1%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::BloomCategoryIndexesVariants [FAIL] Test command err: Trying to start YDB, gRPC: 24242, MsgBus: 28775 2025-05-29T15:21:37.023049Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888176265074715:2079];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:37.023333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026a6/r3tmp/tmpodY1qH/pdisk_1.dat 2025-05-29T15:21:37.121381Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:37.125899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:37.125922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24242, node 
1 2025-05-29T15:21:37.129147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:37.136426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:37.136438Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:37.136440Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:37.136471Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28775 TClient is connected to server localhost:28775 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:37.220940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:37.228683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-05-29T15:21:37.513578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888176265075322:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.513609Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.558026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:37.572509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176265075407:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:37.572585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176265075407:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.572643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176265075407:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.572666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176265075407:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.572688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176265075407:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:37.572710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176265075407:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:37.572733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176265075407:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:37.572754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176265075407:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:37.572775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176265075407:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:37.572799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176265075407:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:37.572820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176265075407:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.572844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888176265075407:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:37.577799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:37.577817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:37.577834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:37.577839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:37.577861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:37.577867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:37.577878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:37.577884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:37.577894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:37.577900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:37.577907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:37.577913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:37.577936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:37.577945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.577965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.577970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.577984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.577989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDepre ... ct.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.584001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.584012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.584022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.584029Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:37.584036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:37.584042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:37.584143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:37.584152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:37.615764Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:21:37.616687Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-05-29T15:21:37.622383Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888176265075564:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, 
issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.622405Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.624630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:21:37.627193Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:21:37.627322Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `FORCE_SIMD_PARSING`=`true`, `SCAN_FIRST_LEVEL_ONLY`=`true`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0`) 2025-05-29T15:21:37.633108Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888176265075598:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.633131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.635748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:21:37.641337Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:21:37.641485Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a.b.c" : "a1"}')), (2u, JsonDocument('{"a.b.c" : "a2"}')), (3u, JsonDocument('{"b.c.d" : "b3"}')), (4u, JsonDocument('{"b.c.d" : "b4", "a" : "a4"}')) 2025-05-29T15:21:37.647106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888176265075633:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.647126Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.647174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888176265075638:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.648024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-05-29T15:21:37.656273Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888176265075640:2418], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-05-29T15:21:37.723857Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888176265075691:2483] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:37.750221Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888176265075700:2422], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:37.750849Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTk3OWQxZDMtMWY0YTJmZDctYTc1MTYzY2MtZWFlZmViZWY=, ActorId: [1:7509888176265075631:2413], ActorState: ExecuteState, TraceId: 01jwea4wke5gfv9pht1desp2ks, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:21:38.583000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176265075407:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:21:38.583978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176265075407:2332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:21:38.586372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888176265075431:2341];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:21:38.591808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888176265075431:2341];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:21:39.586945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176265075407:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:21:39.587103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888176265075431:2341];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:21:39.587359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888176265075407:2332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:21:39.587378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888176265075431:2341];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; assertion failed at ydb/core/kqp/ut/olap/combinatory/execute.h:41, virtual TConclusionStatus NKikimr::NKqp::TDataCommand::DoExecute(TKikimrRunner &): (prepareResult.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13D433CB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13EFB238 2. /-S/ydb/core/kqp/ut/olap/combinatory/execute.h:41: DoExecute @ 0x265B8EA9 3. /-S/ydb/core/kqp/ut/olap/combinatory/abstract.h:75: Execute @ 0x265B0763 4. /tmp//-S/ydb/core/kqp/ut/olap/combinatory/executor.cpp:22: Execute @ 0x265B0763 5. /-S/ydb/core/kqp/ut/olap/combinatory/variator.h:27: Execute @ 0x13B8D967 6. /tmp//-S/ydb/core/kqp/ut/olap/json_ut.cpp:738: Execute_ @ 0x13B8D967 7. /tmp//-S/ydb/core/kqp/ut/olap/json_ut.cpp:27: operator() @ 0x13B90F46 8. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13EFD0ED 9. /tmp//-S/ydb/core/kqp/ut/olap/json_ut.cpp:27: Execute @ 0x13B9090C 10. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13EFD862 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F0F40C 12. ??:0: ?? @ 0x7F9454338D8F 13. ??:0: ?? @ 0x7F9454338E3F 14. ??:0: ?? @ 0x12AB1028 |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TColumnShardTestReadWrite::WriteOverload+InStore+WithWritePortionsOnInsert [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload+InStore+WithWritePortionsOnInsert [GOOD] Test command err: 2025-05-29T15:21:37.430840Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:21:37.435146Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:21:37.435226Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:21:37.436039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:37.436093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.436139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.436159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.436184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-05-29T15:21:37.436206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:37.436224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:37.436244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:37.436263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:37.436290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:37.436312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.436332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:37.443689Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:21:37.443757Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:21:37.443770Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:21:37.443815Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.443854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:37.443869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:37.443875Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:21:37.443885Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:21:37.443895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 
2025-05-29T15:21:37.443903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:37.443908Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:21:37.443928Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.443937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:37.443945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:37.443950Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:21:37.443961Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:21:37.443969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:37.443978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:37.443983Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:21:37.443997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:37.444004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:37.444009Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:21:37.444018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:37.444029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:37.444034Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:21:37.444063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:37.444071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.444076Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:21:37.444098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.444107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.444112Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:21:37.444126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.444158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.444163Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.444173Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:37.444182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:37.444190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:37.444195Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:21:37.444274Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=12; 2025-05-29T15:21:37.444286Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-05-29T15:21:37.444295Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute ... 
mnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=21;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:75;memory_size=254;data_size=244;sum=4572;count=36;size_of_meta=144; 2025-05-29T15:21:42.855488Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=21;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=326;data_size=316;sum=5868;count=18;size_of_portion=216; 2025-05-29T15:21:42.855743Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-05-29T15:21:42.855771Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=21;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:148;event=add_by_insert_id;id=18;operation_id=18; 2025-05-29T15:21:42.875046Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-05-29T15:21:42.875235Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:112;writing_size=6330728;event=data_write_finished;writing_id=9f5ed4ea-3ca011f0-bd81fa3a-ed2cce88; 2025-05-29T15:21:42.875304Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=22;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:54;memory_size=94;data_size=68;sum=1786;count=37; 2025-05-29T15:21:42.875322Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=22;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:75;memory_size=254;data_size=244;sum=4826;count=38;size_of_meta=144; 2025-05-29T15:21:42.875336Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=22;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=326;data_size=316;sum=6194;count=19;size_of_portion=216; 2025-05-29T15:21:42.875572Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-05-29T15:21:42.875598Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=22;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:148;event=add_by_insert_id;id=19;operation_id=19; 2025-05-29T15:21:42.887544Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-05-29T15:21:42.887736Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:112;writing_size=6330728;event=data_write_finished;writing_id=9f759b9e-3ca011f0-bc3cdc66-895aa85e; 2025-05-29T15:21:42.887808Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=23;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:54;memory_size=94;data_size=68;sum=1880;count=39; 2025-05-29T15:21:42.887830Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=23;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:75;memory_size=254;data_size=244;sum=5080;count=40;size_of_meta=144; 2025-05-29T15:21:42.887843Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=23;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=326;data_size=316;sum=6520;count=20;size_of_portion=216; 2025-05-29T15:21:42.888080Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-05-29T15:21:42.888107Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=23;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:148;event=add_by_insert_id;id=20;operation_id=20; 2025-05-29T15:21:42.899480Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-05-29T15:21:42.899675Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:112;writing_size=6330728;event=data_write_finished;writing_id=9f8b9f2a-3ca011f0-96fe4d66-9195b965; 2025-05-29T15:21:42.899745Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=24;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:54;memory_size=94;data_size=68;sum=1974;count=41; 2025-05-29T15:21:42.899768Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=24;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:75;memory_size=254;data_size=244;sum=5334;count=42;size_of_meta=144; 2025-05-29T15:21:42.899781Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=24;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=326;data_size=316;sum=6846;count=21;size_of_portion=216; 2025-05-29T15:21:42.900028Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-05-29T15:21:42.900054Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=24;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:148;event=add_by_insert_id;id=21;operation_id=21; 2025-05-29T15:21:42.911603Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-05-29T15:21:42.912644Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:210;event=register_operation;operation_id=22;last=22; 2025-05-29T15:21:42.912673Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:15;writing_size=6330728;operation_id=a03ac874-3ca011f0-96aecfd6-8300d7ce;in_flight=1;size_in_flight=6330728; 2025-05-29T15:21:43.080860Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:139:2170];write_id=22;table_id=1;fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8246112;count=1;actions=__DEFAULT,;waiting=1;; 2025-05-29T15:21:43.112447Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:112;writing_size=6330728;event=data_write_finished;writing_id=a03ac874-3ca011f0-96aecfd6-8300d7ce; 2025-05-29T15:21:43.112553Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=25;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:54;memory_size=94;data_size=68;sum=2068;count=43; 2025-05-29T15:21:43.112581Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=25;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:75;memory_size=254;data_size=244;sum=5588;count=44;size_of_meta=144; 2025-05-29T15:21:43.112596Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=25;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=326;data_size=316;sum=7172;count=22;size_of_portion=216; 2025-05-29T15:21:43.112904Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-05-29T15:21:43.112944Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=25;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:148;event=add_by_insert_id;id=22;operation_id=22; 2025-05-29T15:21:43.124742Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardExtSubDomainTest::CreateAndWait >> TSchemeShardExtSubDomainTest::Fake [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx >> KqpScanArrowInChanels::JoinWithParams >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain >> KqpScanArrowInChanels::AggregateWithFunction >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true >> KqpOlapWrite::TestRemoveTableBeforeIndexation [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false >> KqpScanArrowInChanels::AggregateCountStar >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed [GOOD] >> 
TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive >> TSchemeShardExtSubDomainTest::Create >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain |58.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |58.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |58.2%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive >> KqpScanArrowFormat::AggregateNoColumn >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::Drop >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapWrite::TestRemoveTableBeforeIndexation [GOOD] Test command err: Trying to start YDB, gRPC: 3448, MsgBus: 5362 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025d6/r3tmp/tmp6BCkcr/pdisk_1.dat 2025-05-29T15:21:33.450837Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:21:33.456431Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3448, node 1 2025-05-29T15:21:33.486881Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.486892Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.486894Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.486928Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5362 2025-05-29T15:21:33.518984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.519009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.523078Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5362 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.609700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.612603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.616513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:33.636985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888157795215665:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.641017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888157795215665:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.641083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888157795215665:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.641107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888157795215665:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.641128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888157795215665:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.641146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888157795215665:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
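[editor's note] The records above and below trace the column shard's startup repair pipeline: TTxInitSchema registers a fixed, ordered list of normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, and the rest continuing below for this tablet and its siblings), and a later TTxUpdateSchema walks that list in order, logging normalizer_switched, then normalizer_init with a stable seq_id, then normalizer_finished for each step. The sketch below is a minimal, hypothetical reconstruction of that control flow, not YDB's actual classes; TNormalizer, RunNormalizers, and the stub bodies are invented for illustration.

// Illustrative sketch only: a sequential "normalizer" pipeline mirroring the
// switched -> init -> finished event pattern in the log. Hypothetical names.
#include <cstdio>
#include <functional>
#include <string>
#include <vector>

struct TNormalizer {
    std::string Name;             // e.g. "TablesCleaner", "CleanGranuleId"
    int SeqId;                    // order key, matches seq_id in the log
    std::function<size_t()> Run;  // returns the number of chunks it repaired
};

// Runs every registered normalizer exactly once, in registration order,
// emitting the three lifecycle events the tablet logs per step.
void RunNormalizers(const std::vector<TNormalizer>& chain) {
    for (const auto& n : chain) {
        std::printf("event=normalizer_switched;description=CLASS_NAME=%s;\n", n.Name.c_str());
        std::printf("event=normalizer_init;seq_id=%d;type=%s;\n", n.SeqId, n.Name.c_str());
        size_t chunks = n.Run();
        std::printf("message=%zu chunks found;\n", chunks);
        std::printf("event=normalizer_finished;description=CLASS_NAME=%s;id=%d;\n", n.Name.c_str(), n.SeqId);
    }
}

int main() {
    // A fixed chain registered at TTxInitSchema time and replayed at
    // TTxUpdateSchema time; the stub bodies stand in for real repair work.
    // The seq_id values match the ones visible in the log above.
    std::vector<TNormalizer> chain = {
        {"TablesCleaner",   4,  [] { return size_t(0); }},
        {"CleanGranuleId",  6,  [] { return size_t(0); }},
        {"RestoreV2Chunks", 16, [] { return size_t(0); }},
    };
    RunNormalizers(chain);
}

On a clean tablet every step reports "0 chunks found" and finishes immediately, which is why the log shows the whole chain completing within a fraction of a millisecond. The verbatim registration log resumes below.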
2025-05-29T15:21:33.641162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888157795215665:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.641179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888157795215665:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.641196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888157795215665:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.641225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888157795215665:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.641246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888157795215665:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.641262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888157795215665:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.645159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.645172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.645211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.645228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.645246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.645263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.645280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.645298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.645315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.645332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.645348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.645364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.657451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888157795215667:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.657478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888157795215667:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.657525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888157795215667:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.657553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888157795215667:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.657590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888157795215667:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.657619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888157795215667:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.657636Z node 1 :TX_COLUMNSHARD WAR ... 
97682Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:21:43.697684Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:21:43.697705Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-05-29T15:21:43.697723Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Notified by mediator time cast with PlanStep# 1748532103000 at tablet 72075186224037890 2025-05-29T15:21:43.697727Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=1; 2025-05-29T15:21:43.697730Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:21:43.697733Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=0; 2025-05-29T15:21:43.697737Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:21:43.697739Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:21:43.697741Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:21:43.697747Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;queue=ttl;external_count=0;fline=granule.cpp:164;event=skip_actualization;waiting=0.999950s; 2025-05-29T15:21:43.697750Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888157795215666:2315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-05-29T15:21:43.697764Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-05-29T15:21:43.697785Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:243;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889; 2025-05-29T15:21:43.697793Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 2025-05-29T15:21:43.697798Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-05-29T15:21:43.697804Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:21:43.697812Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=0; 2025-05-29T15:21:43.697817Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:21:43.697819Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:21:43.697821Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:21:43.697830Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-05-29T15:21:43.697839Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Notified by mediator time cast with PlanStep# 1748532103000 at tablet 72075186224037889 2025-05-29T15:21:43.697842Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=1; 2025-05-29T15:21:43.697846Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:21:43.697848Z node 1 :TX_COLUMNSHARD 
DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=0; 2025-05-29T15:21:43.697852Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:21:43.697855Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:21:43.697856Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:21:43.697861Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;queue=ttl;external_count=0;fline=granule.cpp:164;event=skip_actualization;waiting=0.999968s; 2025-05-29T15:21:43.697864Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888157795215708:2322];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-05-29T15:21:43.697873Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888157795215667:2316];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-05-29T15:21:43.697877Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888157795215667:2316];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:243;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891; 2025-05-29T15:21:43.697883Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 
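[editor's note] Between wakeups the shard is idle: both TEvPeriodicWakeup and TEvNotifyPlanStep funnel into EnqueueBackgroundActivities, which probes indexation, cleanup, and ttl in turn and declines each with skip_reason=no_changes when nothing is pending (ttl additionally defers via skip_actualization until its waiting interval elapses). The sketch below is a hypothetical reconstruction of that dispatch loop, assuming each activity can cheaply report whether it has work; TActivity and the helper lambda are invented for illustration, and the same verbatim pattern repeats below for tablet 72075186224037891.

// Illustrative sketch only: a periodic wakeup fanning out into background
// activities that each decline when idle. Hypothetical names, not the real
// columnshard interfaces.
#include <cstdio>
#include <functional>
#include <optional>
#include <string>
#include <vector>

struct TActivity {
    std::string Name;    // "indexation", "cleanup", "ttl"
    size_t PendingUnits; // portions/chunks waiting for this activity
    // Returns a skip reason when idle, or nullopt when work should start.
    std::function<std::optional<std::string>(size_t)> ShouldSkip;
};

void EnqueueBackgroundActivities(const std::vector<TActivity>& activities, bool periodic) {
    std::printf("event=EnqueueBackgroundActivities;periodic=%d;\n", periodic ? 1 : 0);
    for (const auto& a : activities) {
        if (auto reason = a.ShouldSkip(a.PendingUnits)) {
            // Same shape as the log: background=<name>;skip_reason=<reason>;
            std::printf("background=%s;skip_reason=%s;\n", a.Name.c_str(), reason->c_str());
            continue;
        }
        std::printf("background=%s;event=started;\n", a.Name.c_str());
    }
}

int main() {
    auto skipIfEmpty = [](size_t pending) -> std::optional<std::string> {
        if (pending == 0) return "no_changes";
        return std::nullopt;
    };
    EnqueueBackgroundActivities({
        {"indexation", 0, skipIfEmpty},
        {"cleanup",    0, skipIfEmpty},
        {"ttl",        0, skipIfEmpty},
    }, /*periodic=*/true); // idle tablet: all three skip, as in the log
}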
2025-05-29T15:21:43.697886Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888157795215667:2316];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-05-29T15:21:43.697891Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888157795215667:2316];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:521;problem=Background activities cannot be started: no index at tablet; 2025-05-29T15:21:43.697895Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Notified by mediator time cast with PlanStep# 1748532103000 at tablet 72075186224037891 2025-05-29T15:21:43.697899Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888157795215667:2316];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=1; 2025-05-29T15:21:43.697901Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888157795215667:2316];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:521;problem=Background activities cannot be started: no index at tablet; 2025-05-29T15:21:43.719443Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037891;parent=[1:7509888157795215667:2316];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:43.719478Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;parent=[1:7509888157795215708:2322];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:44.190818Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[1:7509888157795215665:2314];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:44.190857Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037890;parent=[1:7509888157795215666:2315];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:44.222798Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037891;parent=[1:7509888157795215667:2316];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:44.222825Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037889;parent=[1:7509888157795215708:2322];fline=actor.cpp:33;event=skip_flush_writing; |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::Create [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst [GOOD] >> 
TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::Drop [GOOD] >> TSchemeShardExtSubDomainTest::Drop-ExternalHive |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::BlockChannelForce Test command err: Trying to start YDB, gRPC: 12048, MsgBus: 5841 2025-05-29T15:21:33.908000Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888155540323876:2217];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.908074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026b0/r3tmp/tmpH9KhCT/pdisk_1.dat 2025-05-29T15:21:34.037384Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888155540323673:2079] 1748532093890195 != 1748532093890198 2025-05-29T15:21:34.039238Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12048, node 1 2025-05-29T15:21:34.071194Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:34.071205Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:34.071207Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:34.071254Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:34.079662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:34.079687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:34.085405Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5841 TClient is connected to server localhost:5841 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.246321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.255131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.263563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.298314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:21:34.350320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.423927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.646557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888159835292608:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.646588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.743303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.778199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.788789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.801904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.816244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.830630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.847246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.874902Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888159835293262:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.874923Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.875038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888159835293267:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.879780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.883305Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888159835293269:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:21:34.938444Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888159835293329:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:35.059787Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888159835293345:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:35.059901Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmNkMTAzMGItMTAzMmNlYS0yNGRjMzY5Yi0xYTI5NDExOQ==, ActorId: [1:7509888159835292581:2400], ActorState: ExecuteState, TraceId: 01jwea4swp74bgyv56rm8j7x5f, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:21:35.062826Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13D61B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13D58B96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13EFB326 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x2648BA32 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2648B332 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x264AA42C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x264AA42C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x264AA42C 8. /-S/util/thread/pool.h:71: Process @ 0x264AA42C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13D69519 10. /-S/util/thread/factory.h:15: Execute @ 0x13D67F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13D67F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13D6337C 13. ??:0: ?? @ 0x7FD10C00AAC2 14. ??:0: ?? @ 0x7FD10C09C84F Trying to start YDB, gRPC: 12668, MsgBus: 22084 2025-05-29T15:21:39.449189Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888182052014189:2222];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:39.449234Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026b0/r3tmp/tmp7wN7Uu/pdisk_1.dat 2025-05-29T15:21:39.522980Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:39.523215Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888182052014006:2079] 1748532099447412 != 1748532099447415 TServer::EnableGrpc on GrpcPort 12668, node 1 2025-05-29T15:21:39.538936Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:39.538952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:39.538955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:39.538997Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22084 TClient is connected to server localhost:22084 WaitRootIsUp 'Root'... 
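[editor's note] The trace above is the section's one real failure: query compilation died inside yql_expr.h ("index out of range, code: 1"), the test helper CreateSampleTables then hit AssertSuccessResult on the failed status, and because that assertion fired on a thread-pool thread ("assertion failed in non-unittest thread"), RaiseError escalated to a VERIFY panic that aborted the whole test binary; the runner then restarted the suite on fresh ports, which is the second "Trying to start YDB" block this excerpt has already entered. The real declaration is the one quoted in the trace (ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus&)); the stand-alone sketch below only reproduces its shape, with a faked TStatus, and everything else in it is illustrative.

// Illustrative sketch only: the shape of an AssertSuccessResult-style helper.
// TStatus here is a stand-in for NYdb::TStatus, not the real type.
#include <cstdio>
#include <cstdlib>
#include <string>

struct TStatus {
    bool Success;
    std::string Issues; // flattened issue tree, e.g. "Fatal: Execution, code: 1060"
    bool IsSuccess() const { return Success; }
    const std::string& GetIssues() const { return Issues; }
};

// On failure, prints the server-side issues and aborts. Inside the unittest
// framework the failure is routed through the registrar; on a plain
// thread-pool thread (as in the trace above) it panics the whole process
// instead of marking just one test as failed.
void AssertSuccessResult(const TStatus& result) {
    if (!result.IsSuccess()) {
        std::fprintf(stderr, "assertion failed: (result.IsSuccess())\n%s\n",
                     result.GetIssues().c_str());
        std::abort();
    }
}

int main() {
    AssertSuccessResult({true, ""}); // passes silently
    AssertSuccessResult({false, "Fatal: yql_expr.h:1874: index out of range, code: 1"});
}

The verbatim log of the restarted run continues below.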
TClient::Ls request: Root 2025-05-29T15:21:39.599139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:39.599171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-29T15:21:39.603094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:39.639532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:39.651086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:39.668027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:39.742482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:21:39.767972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:21:39.830385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:40.171409Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888186346982949:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:40.171443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:40.264801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:40.283039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:40.298429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:40.318963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:40.337048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:40.352221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:40.363850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:40.406823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888186346983602:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:40.406870Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:40.407090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888186346983607:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:40.408315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:40.412176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:21:40.412242Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888186346983609:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:21:40.475020Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888186346983669:3401] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:40.663025Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888186346983678:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:40.663289Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjBlNDM5ZGItZGNmY2E3NTgtZjNjYTFlMDYtZDlmODBiNg==, ActorId: [1:7509888186346982922:2400], ActorState: ExecuteState, TraceId: 01jwea4z9gead31nw884ssbyfa, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:21:40.664181Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13D61B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13D58B96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13EFB326 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x2648BA32 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2648B332 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x264AA42C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x264AA42C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x264AA42C 8. /-S/util/thread/pool.h:71: Process @ 0x264AA42C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13D69519 10. /-S/util/thread/factory.h:15: Execute @ 0x13D67F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13D67F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13D6337C 13. ??:0: ?? @ 0x7F0150CB5AC2 14. ??:0: ?? @ 0x7F0150D4784F |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlter [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive >> KqpOlapAggregations::Aggregation_ResultL_FilterL_OrderL_Limit2 [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true >> TPQCDTest::TestDiscoverClusters >> TPQCDTest::TestUnavailableWithoutClustersList >> TPQCDTest::TestUnavailableWithoutNetClassifier >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false >> TPQCDTest::TestPrioritizeLocalDatacenter |58.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |58.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |58.3%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::Drop-ExternalHive [GOOD] >> 
TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultL_FilterL_OrderL_Limit2 [GOOD] Test command err: Trying to start YDB, gRPC: 27185, MsgBus: 13846 2025-05-29T15:21:33.769333Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888156270954356:2156];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026cd/r3tmp/tmpCaRMxk/pdisk_1.dat 2025-05-29T15:21:33.845414Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 27185, node 1 2025-05-29T15:21:33.879098Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:21:33.882931Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:21:33.883100Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.883102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.883104Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.883152Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:33.903223Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.939112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.939139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:13846 2025-05-29T15:21:33.945179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13846 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.075527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.079219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.096173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.137009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160565922210:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.137218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160565922210:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.143127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160565922210:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.143166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160565922210:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.143200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160565922210:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.143220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160565922210:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.143237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888160565922210:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.143256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160565922210:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.143280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160565922210:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.143318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160565922210:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.143341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160565922210:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.143359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160565922210:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.180307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160565922211:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.180338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160565922211:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.180422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160565922211:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.180446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160565922211:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.180474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160565922211:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.180502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160565922211:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.180522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888160565922211:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.180542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160565922211:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.180564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160565922211:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.180584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160565922211:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.180606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160565922211:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.180627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160565922211:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.194111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160565922212:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.198922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160565922212:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.199023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160565922212:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.199045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160565922212:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.199068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888160565922212:2316];tablet_id=72075186224037888;proces ... :21:42.288141Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:434: ActorId: [2:1768:2929] TxId: 281474976715662. Ctx: { TraceId: 01jwea4x7hd8g0pkf5hy2b4y0g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1773:3032], task: 65, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 5649 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 361 FinishTimeMs: 1748532098856 InputRows: 2 InputBytes: 14 OutputRows: 2 OutputBytes: 14 ResultRows: 2 ResultBytes: 14 ComputeCpuTimeUs: 130 BuildCpuTimeUs: 231 HostName: "ghrun-lxxdcki4qu" NodeId: 2 CreateTimeMs: 1748532098785 CurrentWaitOutputTimeUs: 3431843 UpdateTimeMs: 1748532102288 } MaxMemoryUsage: 1048576 } 2025-05-29T15:21:42.288166Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:645: ActorId: [2:1768:2929] TxId: 281474976715662. Ctx: { TraceId: 01jwea4x7hd8g0pkf5hy2b4y0g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1773:3032], 2025-05-29T15:21:42.519007Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[3:1279:2391];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-05-29T15:21:42.519053Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[3:1279:2391];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:42.743045Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[3:1279:2391];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:42.806944Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [2:1773:3032], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jwea4x7hd8g0pkf5hy2b4y0g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-05-29T15:21:42.806983Z node 2 :KQP_COMPUTE DEBUG: kqp_compute_scheduler.h:167: SelfId: [2:1773:3032], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jwea4x7hd8g0pkf5hy2b4y0g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-05-29T15:21:42.807068Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2021: SelfId: [2:1773:3032], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jwea4x7hd8g0pkf5hy2b4y0g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Send stats to executor actor [2:1768:2929] TaskId: 65 Stats: CpuTimeUs: 5796 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 361 FinishTimeMs: 1748532098856 InputRows: 2 InputBytes: 14 OutputRows: 2 OutputBytes: 14 ResultRows: 2 ResultBytes: 14 ComputeCpuTimeUs: 130 BuildCpuTimeUs: 231 HostName: "ghrun-lxxdcki4qu" NodeId: 2 CreateTimeMs: 1748532098785 CurrentWaitOutputTimeUs: 3950835 UpdateTimeMs: 1748532102807 } MaxMemoryUsage: 1048576 2025-05-29T15:21:42.807128Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:434: ActorId: [2:1768:2929] TxId: 281474976715662. Ctx: { TraceId: 01jwea4x7hd8g0pkf5hy2b4y0g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1773:3032], task: 65, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 5796 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 361 FinishTimeMs: 1748532098856 InputRows: 2 InputBytes: 14 OutputRows: 2 OutputBytes: 14 ResultRows: 2 ResultBytes: 14 ComputeCpuTimeUs: 130 BuildCpuTimeUs: 231 HostName: "ghrun-lxxdcki4qu" NodeId: 2 CreateTimeMs: 1748532098785 CurrentWaitOutputTimeUs: 3950835 UpdateTimeMs: 1748532102807 } MaxMemoryUsage: 1048576 } 2025-05-29T15:21:42.807157Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:645: ActorId: [2:1768:2929] TxId: 281474976715662. Ctx: { TraceId: 01jwea4x7hd8g0pkf5hy2b4y0g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1773:3032], 2025-05-29T15:21:43.195717Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[3:1279:2391];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-05-29T15:21:43.195772Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[3:1279:2391];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:43.542943Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[3:1279:2391];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:43.674417Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [2:1773:3032], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jwea4x7hd8g0pkf5hy2b4y0g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-05-29T15:21:43.674451Z node 2 :KQP_COMPUTE DEBUG: kqp_compute_scheduler.h:167: SelfId: [2:1773:3032], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jwea4x7hd8g0pkf5hy2b4y0g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-05-29T15:21:43.674556Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2021: SelfId: [2:1773:3032], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jwea4x7hd8g0pkf5hy2b4y0g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Send stats to executor actor [2:1768:2929] TaskId: 65 Stats: CpuTimeUs: 5953 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 361 FinishTimeMs: 1748532098856 InputRows: 2 InputBytes: 14 OutputRows: 2 OutputBytes: 14 ResultRows: 2 ResultBytes: 14 ComputeCpuTimeUs: 130 BuildCpuTimeUs: 231 HostName: "ghrun-lxxdcki4qu" NodeId: 2 CreateTimeMs: 1748532098785 CurrentWaitOutputTimeUs: 4818313 UpdateTimeMs: 1748532103674 } MaxMemoryUsage: 1048576 2025-05-29T15:21:43.674631Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:434: ActorId: [2:1768:2929] TxId: 281474976715662. Ctx: { TraceId: 01jwea4x7hd8g0pkf5hy2b4y0g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1773:3032], task: 65, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 5953 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 361 FinishTimeMs: 1748532098856 InputRows: 2 InputBytes: 14 OutputRows: 2 OutputBytes: 14 ResultRows: 2 ResultBytes: 14 ComputeCpuTimeUs: 130 BuildCpuTimeUs: 231 HostName: "ghrun-lxxdcki4qu" NodeId: 2 CreateTimeMs: 1748532098785 CurrentWaitOutputTimeUs: 4818313 UpdateTimeMs: 1748532103674 } MaxMemoryUsage: 1048576 } 2025-05-29T15:21:43.674658Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:645: ActorId: [2:1768:2929] TxId: 281474976715662. Ctx: { TraceId: 01jwea4x7hd8g0pkf5hy2b4y0g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1773:3032], 2025-05-29T15:21:44.099024Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[3:1279:2391];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-05-29T15:21:44.099082Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[3:1279:2391];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:44.439055Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[3:1279:2391];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:44.518163Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [2:1773:3032], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jwea4x7hd8g0pkf5hy2b4y0g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-05-29T15:21:44.518197Z node 2 :KQP_COMPUTE DEBUG: kqp_compute_scheduler.h:167: SelfId: [2:1773:3032], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jwea4x7hd8g0pkf5hy2b4y0g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-05-29T15:21:44.518300Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2021: SelfId: [2:1773:3032], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jwea4x7hd8g0pkf5hy2b4y0g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Send stats to executor actor [2:1768:2929] TaskId: 65 Stats: CpuTimeUs: 6126 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 361 FinishTimeMs: 1748532098856 InputRows: 2 InputBytes: 14 OutputRows: 2 OutputBytes: 14 ResultRows: 2 ResultBytes: 14 ComputeCpuTimeUs: 130 BuildCpuTimeUs: 231 HostName: "ghrun-lxxdcki4qu" NodeId: 2 CreateTimeMs: 1748532098785 CurrentWaitOutputTimeUs: 5662058 UpdateTimeMs: 1748532104518 } MaxMemoryUsage: 1048576 2025-05-29T15:21:44.518371Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:434: ActorId: [2:1768:2929] TxId: 281474976715662. Ctx: { TraceId: 01jwea4x7hd8g0pkf5hy2b4y0g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1773:3032], task: 65, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 6126 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 361 FinishTimeMs: 1748532098856 InputRows: 2 InputBytes: 14 OutputRows: 2 OutputBytes: 14 ResultRows: 2 ResultBytes: 14 ComputeCpuTimeUs: 130 BuildCpuTimeUs: 231 HostName: "ghrun-lxxdcki4qu" NodeId: 2 CreateTimeMs: 1748532098785 CurrentWaitOutputTimeUs: 5662058 UpdateTimeMs: 1748532104518 } MaxMemoryUsage: 1048576 } 2025-05-29T15:21:44.518399Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:645: ActorId: [2:1768:2929] TxId: 281474976715662. Ctx: { TraceId: 01jwea4x7hd8g0pkf5hy2b4y0g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzY3OGNkZmMtY2Y5ZTg2MTktYjJlZGVmOGQtOGRhODU3ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1773:3032], 2025-05-29T15:21:44.837626Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[3:1279:2391];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-05-29T15:21:44.837672Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[3:1279:2391];fline=actor.cpp:33;event=skip_flush_writing; >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive >> TPQCDTest::TestRelatedServicesAreRunning >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive >> TPQCDTest::TestUnavailableWithoutBoth >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> 
TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [FAIL] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true >> TPQCDTest::TestUnavailableWithoutNetClassifier [FAIL] >> TPQCDTest::TestDiscoverClusters [FAIL] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent >> TPQCDTest::TestPrioritizeLocalDatacenter [FAIL] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:21:44.701456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:21:44.701499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:21:44.701505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2025-05-29T15:21:44.701511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:21:44.701518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:21:44.701522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:21:44.701532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:21:44.701547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:21:44.701698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:21:44.701789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:21:44.716064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:44.716088Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:44.719605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:21:44.719743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:21:44.719788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:21:44.728290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:21:44.730880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:21:44.731026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.731080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:21:44.731776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.731815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:21:44.732060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.732069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.732086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:21:44.732097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, 
domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.732103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:21:44.732135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.734722Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:21:44.755842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:21:44.755911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.755972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:21:44.756023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:21:44.756034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.756914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.756941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:21:44.756999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.757009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:21:44.757015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:21:44.757021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:21:44.757520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.757534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:21:44.757540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 
2025-05-29T15:21:44.758149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.758177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.758189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.758199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:21:44.759004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:21:44.764446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:21:44.764551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:21:44.764820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.764880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:44.764893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.764998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:21:44.765009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.765051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:44.765066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:21:44.765872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.765887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.765947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 2057594046678944 2025-05-29T15:21:46.706809Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:21:46.706815Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:21:46.707179Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:46.707197Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:46.707206Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:46.707214Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:21:46.707249Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:21:46.707574Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:21:46.707615Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:21:46.707812Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:46.707837Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 30064773226 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:46.707845Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:46.707911Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:21:46.707919Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-05-29T15:21:46.707952Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:46.707965Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:21:46.708365Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:46.708376Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:46.708424Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:46.708431Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-29T15:21:46.708518Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:46.708527Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-29T15:21:46.708540Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:21:46.708545Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:21:46.708552Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:21:46.708555Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:21:46.708560Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-29T15:21:46.708565Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:21:46.708570Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1:0 2025-05-29T15:21:46.708575Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1:0 2025-05-29T15:21:46.708588Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:21:46.708594Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-29T15:21:46.708599Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-29T15:21:46.708691Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:21:46.708705Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:21:46.708712Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-05-29T15:21:46.708718Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-05-29T15:21:46.708723Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:46.708736Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-05-29T15:21:46.709332Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-05-29T15:21:46.709434Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-05-29T15:21:46.709647Z node 7 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [7:273:2263] Bootstrap 2025-05-29T15:21:46.711646Z node 7 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [7:273:2263] Become StateWork (SchemeCache [7:278:2268]) 2025-05-29T15:21:46.712447Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:21:46.712491Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1102: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } 2025-05-29T15:21:46.712498Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1108: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, path /MyRoot/USER_1 2025-05-29T15:21:46.712533Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp:1125, at schemeshard: 72057594046678944 2025-05-29T15:21:46.712543Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path 
hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp:1125, at schemeshard: 72057594046678944 2025-05-29T15:21:46.712895Z node 7 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [7:273:2263] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:21:46.713495Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Invalid AlterExtSubDomain request: Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp:1125" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:46.713532Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp:1125, operation: ALTER DATABASE, path: /MyRoot/USER_1 2025-05-29T15:21:46.713658Z node 7 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest
>> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:21:44.398481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:21:44.398510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:21:44.398517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:21:44.398524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:21:44.398530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:21:44.398534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:21:44.398544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured:
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:21:44.398559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:21:44.398690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:21:44.398800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:21:44.414155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:44.414180Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:44.416879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:21:44.416996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:21:44.417035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:21:44.418429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:21:44.418572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:21:44.418688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.418754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:21:44.419238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.419281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:21:44.419548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.419559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.419577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:21:44.419587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.419593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:21:44.419629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.421106Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is 
[1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:21:44.443931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:21:44.444009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.444078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:21:44.444131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:21:44.444143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.445003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.445036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:21:44.445095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.445106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:21:44.445112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:21:44.445118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:21:44.445593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.445606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:21:44.445611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:21:44.445985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.445996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.446005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.446012Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:21:44.446730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:21:44.447132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:21:44.447175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:21:44.447366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.447392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:44.447399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.447466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:21:44.447473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.447509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:44.447521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:21:44.447934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.447943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.447991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
ersion: 6 2025-05-29T15:21:46.760630Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-05-29T15:21:46.760643Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-05-29T15:21:46.761148Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5809: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-05-29T15:21:46.761166Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:21:46.761183Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:567: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:399:2369], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:21:46.761196Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-05-29T15:21:46.761201Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-05-29T15:21:46.761223Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-05-29T15:21:46.761229Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:496:2438], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-05-29T15:21:46.761504Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-05-29T15:21:46.761598Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:21:46.761608Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 103:0 ProgressState 2025-05-29T15:21:46.761621Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:21:46.761626Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation 
IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:21:46.761631Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:21:46.761634Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:21:46.761638Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-05-29T15:21:46.761644Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:21:46.761648Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:21:46.761653Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:21:46.761667Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-05-29T15:21:46.761728Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:21:46.761738Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-05-29T15:21:46.762063Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:21:46.762073Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:21:46.762139Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:21:46.762159Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:21:46.762164Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:576:2516] TestWaitNotification: OK eventTxId 103 2025-05-29T15:21:46.762237Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:21:46.762265Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 37us result status StatusSuccess 2025-05-29T15:21:46.762346Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:46.762400Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:21:46.762413Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 15us result status StatusSuccess 2025-05-29T15:21:46.762453Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:46.762502Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2025-05-29T15:21:46.762516Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186234409546 describe path "/MyRoot/USER_0" took 16us result status StatusSuccess 2025-05-29T15:21:46.762555Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme 
DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546
|58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest
>> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD]
>> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest
>> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:21:44.471218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:21:44.471244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:21:44.471250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:21:44.471255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:21:44.471266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:21:44.471271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:21:44.471280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:21:44.471294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:21:44.471395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:21:44.471461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:21:44.495556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:44.495582Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:44.502660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:21:44.502809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:21:44.502851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:21:44.504332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:21:44.504464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:21:44.504567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.504610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:21:44.505033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.505071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:21:44.505316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.505326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.505343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:21:44.505353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.505359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:21:44.505391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.509514Z node 1 :HIVE 
INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:21:44.534799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:21:44.534860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.534910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:21:44.534961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:21:44.534973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.535536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.535561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:21:44.535609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.535619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:21:44.535625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:21:44.535631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:21:44.536017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.536030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:21:44.536035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:21:44.536383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.536393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.536402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.536410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:21:44.537051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:21:44.537432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:21:44.537469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:21:44.537661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.537685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:44.537692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.537744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:21:44.537751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.537781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:44.537792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:21:44.538194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.538203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.538241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
ion: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:21:46.981256Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:21:46.981260Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:21:46.981265Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-05-29T15:21:46.981277Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-05-29T15:21:46.981717Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72075186233409546 at ss 72057594046678944 2025-05-29T15:21:46.981730Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:3 hive 72075186233409546 at ss 72057594046678944 2025-05-29T15:21:46.981735Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72075186233409546 at ss 72057594046678944 2025-05-29T15:21:46.981740Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:4 hive 72075186233409546 at ss 72057594046678944 2025-05-29T15:21:46.982039Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:21:46.982049Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 103:0 ProgressState 2025-05-29T15:21:46.982062Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:21:46.982067Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:21:46.982072Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:21:46.982076Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:21:46.982081Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-05-29T15:21:46.982086Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:21:46.982092Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:21:46.982096Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:21:46.982132Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-05-29T15:21:46.982312Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:21:46.982940Z node 7 :HIVE INFO: 
tablet_helpers.cpp:1356: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:21:46.983003Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:46.983058Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:21:46.983118Z node 7 :HIVE INFO: tablet_helpers.cpp:1356: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 2025-05-29T15:21:46.983181Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:21:46.983208Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186234409547 2025-05-29T15:21:46.983502Z node 7 :HIVE INFO: tablet_helpers.cpp:1356: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 2025-05-29T15:21:46.983739Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:21:46.984415Z node 7 :HIVE INFO: tablet_helpers.cpp:1356: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186234409548 2025-05-29T15:21:46.984486Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:21:46.984531Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186234409546 2025-05-29T15:21:46.984659Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:21:46.984681Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186234409548 2025-05-29T15:21:46.985177Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:21:46.985266Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:21:46.985272Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:21:46.985291Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:21:46.985340Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:21:46.985343Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:21:46.985352Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:46.985764Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:21:46.985774Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:21:46.985783Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:21:46.985786Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-05-29T15:21:46.985793Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:21:46.985796Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-05-29T15:21:46.986083Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:21:46.986093Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-05-29T15:21:46.986130Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:21:46.986146Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-05-29T15:21:46.986200Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:21:46.986206Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:21:46.986252Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:21:46.986266Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:21:46.986269Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:584:2524] TestWaitNotification: OK eventTxId 103 2025-05-29T15:21:46.986318Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:21:46.986361Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 50us result status StatusPathDoesNotExist 2025-05-29T15:21:46.986393Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest
>> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:21:44.396446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:21:44.396473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:21:44.396479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:21:44.396485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:21:44.396491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:21:44.396495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:21:44.396504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:21:44.396518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:21:44.396631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:21:44.396702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:21:44.410814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:44.410840Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:44.413645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:21:44.413778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:21:44.413814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:21:44.415492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:21:44.415668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:21:44.415804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.415865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:21:44.416472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.416522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:21:44.416835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.416857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.416888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:21:44.416902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.416911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:21:44.416958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.418667Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:21:44.440024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:21:44.440084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.440143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:21:44.440188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:21:44.440200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.441029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.441052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:21:44.441103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.441113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:21:44.441119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:21:44.441124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:21:44.441494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.441508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:21:44.441513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:21:44.441887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.441899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.441907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.441914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 
1/1 2025-05-29T15:21:44.442625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:21:44.442974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:21:44.443010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:21:44.443184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.443208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:44.443215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.443266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:21:44.443274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.443304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:44.443316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:21:44.443662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.443671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.443708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:21:46.954778Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:46.954849Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:21:46.954906Z node 6 :HIVE INFO: tablet_helpers.cpp:1356: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 2025-05-29T15:21:46.954941Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:21:46.954963Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:21:46.955178Z node 6 :HIVE INFO: tablet_helpers.cpp:1356: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 2025-05-29T15:21:46.955221Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:21:46.955245Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:21:46.955470Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:21:46.956117Z node 6 :TX_DATASHARD ERROR: datashard.cpp:3573: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186234409549 2025-05-29T15:21:46.956129Z node 6 :TX_DATASHARD ERROR: datashard.cpp:3573: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186234409550 Forgetting tablet 72075186234409547 2025-05-29T15:21:46.956330Z node 6 :HIVE INFO: tablet_helpers.cpp:1356: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186234409548 Forgetting tablet 72075186234409546 Forgetting tablet 72075186234409548 2025-05-29T15:21:46.956591Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:21:46.956633Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:21:46.956931Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:21:46.956940Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 
72057594046678944 2025-05-29T15:21:46.956961Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:21:46.957036Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:21:46.957041Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:21:46.957051Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:46.959623Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:21:46.959639Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:21:46.959655Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:21:46.959658Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-05-29T15:21:46.959726Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:21:46.959733Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-05-29T15:21:46.960049Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:21:46.960059Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-05-29T15:21:46.960100Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:21:46.960112Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-05-29T15:21:46.960180Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-05-29T15:21:46.960188Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-05-29T15:21:46.960246Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-05-29T15:21:46.960264Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-29T15:21:46.960269Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:793:2700] TestWaitNotification: OK eventTxId 105 2025-05-29T15:21:46.960346Z node 6 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:21:46.960381Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir/table_1" took 46us result status StatusPathDoesNotExist 2025-05-29T15:21:46.960412Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/dir/table_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0/dir/table_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:21:46.960465Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:21:46.960476Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 12us result status StatusPathDoesNotExist 2025-05-29T15:21:46.960492Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:21:46.960537Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:21:46.960558Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 22us result status StatusSuccess 2025-05-29T15:21:46.960620Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:21:44.193076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:21:44.193112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:21:44.193118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:21:44.193124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:21:44.193131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:21:44.193135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:21:44.193145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:21:44.193159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-05-29T15:21:44.193286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:21:44.193373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:21:44.207617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:44.207647Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:44.210575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:21:44.210712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:21:44.210780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:21:44.212490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:21:44.212664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:21:44.212782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.212837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:21:44.213324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.213365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:21:44.213640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.213655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.213681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:21:44.213690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.213696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:21:44.213732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.215133Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:21:44.236913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:21:44.236979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.237035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:21:44.237081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:21:44.237091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.237803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.237829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:21:44.237883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.237894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:21:44.237900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:21:44.237905Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:21:44.238290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.238301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:21:44.238307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:21:44.238620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.238630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.238636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.238643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:21:44.239294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 
0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:21:44.239640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:21:44.239680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:21:44.239855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.239879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:44.239886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.239936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:21:44.239942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.239971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:44.239981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:21:44.240339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.240348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.240385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
Id: 2] 2025-05-29T15:21:47.164309Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:0 128 -> 134 2025-05-29T15:21:47.164428Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:21:47.164630Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:21:47.164855Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:21:47.164864Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:104: TDropExtSubdomain TDeleteExternalShards, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:21:47.164888Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:0 134 -> 135 2025-05-29T15:21:47.164909Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:47.164916Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 102 2025-05-29T15:21:47.165188Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:47.165196Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:47.165217Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:21:47.165238Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:47.165242Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-05-29T15:21:47.165247Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-29T15:21:47.165290Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:21:47.165296Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:398: [72057594046678944] TDeleteParts opId# 102:0 ProgressState 2025-05-29T15:21:47.165300Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:0 135 -> 240 2025-05-29T15:21:47.165390Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:21:47.165399Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:21:47.165403Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:21:47.165407Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:21:47.165410Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:21:47.165510Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:21:47.165522Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:21:47.165526Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:21:47.165530Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:21:47.165533Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:21:47.165544Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-05-29T15:21:47.166956Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:21:47.166971Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-29T15:21:47.166984Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:21:47.166989Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:21:47.166993Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:21:47.166997Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:21:47.167001Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-29T15:21:47.167006Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:21:47.167010Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:21:47.167014Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:21:47.167027Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:21:47.167103Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:21:47.167109Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:21:47.167120Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:21:47.167141Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:21:47.167145Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:21:47.167157Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:47.167255Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:21:47.167463Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:21:47.167776Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:21:47.167789Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-05-29T15:21:47.167835Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:21:47.167842Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:21:47.167908Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:21:47.167922Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:21:47.167927Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:347:2337] TestWaitNotification: OK eventTxId 102 2025-05-29T15:21:47.167992Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:21:47.168018Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 37us result status StatusPathDoesNotExist 2025-05-29T15:21:47.168047Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TPQCDTest::TestRelatedServicesAreRunning [FAIL] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:21:44.598508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:21:44.598538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:21:44.598543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:21:44.598548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:21:44.598554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:21:44.598558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:21:44.598566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:21:44.598578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:21:44.598680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:21:44.598768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:21:44.612464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:44.612492Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:44.615209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:21:44.615328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:21:44.615362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:21:44.616919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:21:44.617129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:21:44.617246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.617304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:21:44.617897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.617938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:21:44.618185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.618195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.618213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:21:44.618223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.618229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:21:44.618262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.621341Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for 
TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:21:44.645384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:21:44.645446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.645498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:21:44.645542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:21:44.645553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.646282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.646308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:21:44.646358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.646368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:21:44.646373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:21:44.646377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:21:44.646838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.646850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:21:44.646856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:21:44.647256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.647268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.647276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-05-29T15:21:44.647283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:21:44.647877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:21:44.648280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:21:44.648314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:21:44.648477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.648501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:44.648507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.648557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:21:44.648564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.648592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:44.648602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:21:44.648994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.649004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.649042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
pose ProgressState leave, operationId 103:0, at tablet# 72057594046678944 2025-05-29T15:21:47.463725Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-05-29T15:21:47.463758Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:21:47.464099Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-05-29T15:21:47.464137Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-05-29T15:21:47.464225Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:47.464249Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 30064773226 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:47.464256Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-05-29T15:21:47.464327Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 103:0 128 -> 240 2025-05-29T15:21:47.464335Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-05-29T15:21:47.464357Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-05-29T15:21:47.464396Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:575: Send TEvUpdateTenantSchemeShard, to actor: [7:399:2369], msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72057594046678944 2025-05-29T15:21:47.464828Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5822: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186234409546, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4 2025-05-29T15:21:47.464853Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:79: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72075186234409546 2025-05-29T15:21:47.464890Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:588: Cannot publish paths for unknown operation id#0 
FAKE_COORDINATOR: Erasing txId 103 2025-05-29T15:21:47.464964Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:47.464970Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:21:47.465011Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:47.465016Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-05-29T15:21:47.465111Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:21:47.465121Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:787: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2025-05-29T15:21:47.465126Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 103:0 240 -> 240 2025-05-29T15:21:47.465226Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:21:47.465241Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:21:47.465246Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:21:47.465251Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-05-29T15:21:47.465258Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-05-29T15:21:47.465275Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-05-29T15:21:47.465824Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5809: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-05-29T15:21:47.465847Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:21:47.465866Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:567: DoUpdateTenant no hasChanges, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:399:2369], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:21:47.465882Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-05-29T15:21:47.465888Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-05-29T15:21:47.465914Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-05-29T15:21:47.465920Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:496:2438], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-05-29T15:21:47.466210Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-05-29T15:21:47.466337Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:21:47.466348Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 103:0 ProgressState 2025-05-29T15:21:47.466362Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:21:47.466367Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:21:47.466373Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:21:47.466377Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:21:47.466381Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-05-29T15:21:47.466388Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:21:47.466393Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:21:47.466401Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:21:47.466415Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-05-29T15:21:47.466477Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-05-29T15:21:47.466487Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
TestModificationResult got TxId: 103, wait until txId: 104
TestModificationResults wait txId: 104
TestModificationResult got TxId: 104, wait until txId: 104
TestWaitNotification wait txId: 103
2025-05-29T15:21:47.466905Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion
2025-05-29T15:21:47.466942Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103
2025-05-29T15:21:47.467024Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944
2025-05-29T15:21:47.467043Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult
2025-05-29T15:21:47.467048Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:578:2518]
TestWaitNotification: OK eventTxId 103
|58.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo
>> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false
|58.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:21:44.466504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:21:44.466528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:21:44.466534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:21:44.466541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:21:44.466552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:21:44.466556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:21:44.466566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout#
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:21:44.466581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:21:44.466688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:21:44.466774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:21:44.481555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:44.481596Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:44.484328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:21:44.484457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:21:44.484500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:21:44.485918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:21:44.486058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:21:44.486169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.486215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:21:44.486654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.486696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:21:44.487003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.487015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.487036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:21:44.487048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.487055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:21:44.487091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.488438Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] 
sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:21:44.510525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:21:44.510599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.510670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:21:44.510723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:21:44.510735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.511499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.511526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:21:44.511588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.511602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:21:44.511608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:21:44.511613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:21:44.511980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.511990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:21:44.511996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:21:44.512291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.512300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.512309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.512317Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:21:44.513091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:21:44.513449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:21:44.513489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:21:44.513708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.513733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:44.513741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.513801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:21:44.513808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.513844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:44.513857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:21:44.514263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.514272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.514314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
wnerId: 72057594046678944, cookie: 103 2025-05-29T15:21:47.270557Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:21:47.270560Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:21:47.270564Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:21:47.270628Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:21:47.270635Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:21:47.270637Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:21:47.270640Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:21:47.270642Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-05-29T15:21:47.270648Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-05-29T15:21:47.271030Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:21:47.271039Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:21:47.271043Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:21:47.271072Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:21:47.271077Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 103:0 ProgressState 2025-05-29T15:21:47.271087Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:21:47.271092Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:21:47.271096Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:21:47.271098Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:21:47.271101Z node 7 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-05-29T15:21:47.271105Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:21:47.271108Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:21:47.271111Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:21:47.271133Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:21:47.271348Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:21:47.271416Z node 7 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:21:47.271453Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:47.271502Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:21:47.271576Z node 7 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-05-29T15:21:47.271703Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186233409546 2025-05-29T15:21:47.272180Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:21:47.272309Z node 7 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-05-29T15:21:47.272404Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:21:47.272436Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 2025-05-29T15:21:47.272533Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:21:47.272547Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-05-29T15:21:47.272789Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at 
schemeshard: 72057594046678944 2025-05-29T15:21:47.272798Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:21:47.272813Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:21:47.272967Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:21:47.272973Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:21:47.272981Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:47.273464Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:21:47.273480Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:21:47.273497Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:21:47.273501Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-29T15:21:47.273518Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:21:47.273524Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:21:47.273823Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:21:47.273846Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-05-29T15:21:47.273910Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:21:47.273919Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:21:47.273988Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:21:47.274008Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:21:47.274014Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:540:2488] TestWaitNotification: OK eventTxId 103 2025-05-29T15:21:47.274090Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:21:47.274126Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 48us result status StatusPathDoesNotExist
2025-05-29T15:21:47.274177Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
|58.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo
|58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest
|58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:21:44.746076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:21:44.746110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:21:44.746116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:21:44.746122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:21:44.746128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:21:44.746133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:21:44.746143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:21:44.746159Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:21:44.746275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:21:44.746364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:21:44.767525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:44.767549Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:44.772056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:21:44.772210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:21:44.772248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:21:44.776719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:21:44.777272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:21:44.777424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.777486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:21:44.784882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.784946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:21:44.785259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.785272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.785291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:21:44.785299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.785306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:21:44.785343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.787563Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:21:44.812888Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:21:44.812962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.813025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:21:44.813077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:21:44.813088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.814383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.814417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:21:44.814482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.814496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:21:44.814503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:21:44.814508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:21:44.816204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.816226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:21:44.816233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:21:44.816766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.816781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.816788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.816795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready 
parts: 1/1 2025-05-29T15:21:44.817614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:21:44.818113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:21:44.818156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:21:44.818382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.818414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:44.818425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.818492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:21:44.818502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.818536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:44.818549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:21:44.819055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.819065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.819108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:21:47.758555Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:0 2025-05-29T15:21:47.758559Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:0 2025-05-29T15:21:47.758565Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-05-29T15:21:47.758570Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-05-29T15:21:47.758572Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 6 2025-05-29T15:21:47.759901Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5822: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409546, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2 2025-05-29T15:21:47.759928Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:79: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72075186233409546 2025-05-29T15:21:47.759965Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:588: Cannot publish paths for unknown operation id#0 2025-05-29T15:21:47.760004Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:47.760011Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:21:47.760053Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:47.760059Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2211], at schemeshard: 72057594046678944, txId: 104, path id: 2 FAKE_COORDINATOR: Erasing txId 104 2025-05-29T15:21:47.760187Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:21:47.760199Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:21:47.760203Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-05-29T15:21:47.760208Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 
2025-05-29T15:21:47.760214Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:21:47.760231Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-05-29T15:21:47.760689Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5809: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 2 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-05-29T15:21:47.760703Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:21:47.760721Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:567: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:360:2337], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 2, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 2, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:21:47.760849Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-05-29T15:21:47.760855Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-05-29T15:21:47.760880Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-05-29T15:21:47.760884Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:453:2404], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-05-29T15:21:47.761007Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 0 2025-05-29T15:21:47.761016Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-05-29T15:21:47.761025Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-05-29T15:21:47.761085Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion
2025-05-29T15:21:47.761093Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104
2025-05-29T15:21:47.761164Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944
2025-05-29T15:21:47.761179Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult
2025-05-29T15:21:47.761183Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:550:2499]
TestWaitNotification: OK eventTxId 104
2025-05-29T15:21:47.761251Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:21:47.761275Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 31us result status StatusSuccess
2025-05-29T15:21:47.761352Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:21:47.761412Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546
2025-05-29T15:21:47.761424Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 13us result status StatusSuccess
2025-05-29T15:21:47.761452Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546
>> Sharding::XXUsage [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:21:44.600402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:21:44.600427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:21:44.600433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:21:44.600439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:21:44.600444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:21:44.600449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:21:44.600458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:21:44.600471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s,
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:21:44.600585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:21:44.600646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:21:44.612093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:44.612116Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:44.614437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:21:44.614547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:21:44.614581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:21:44.616510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:21:44.616748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:21:44.616866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.616922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:21:44.617315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.617347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:21:44.617538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.617545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.617576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:21:44.617582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.617586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:21:44.617612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.618557Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:21:44.632585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { 
Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:21:44.632656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.632726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:21:44.632768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:21:44.632778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.633376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.633406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:21:44.633468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.633479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:21:44.633485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:21:44.633491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:21:44.633926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.633937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:21:44.633944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:21:44.634264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.634276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.634282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.634289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:21:44.634986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:21:44.635406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:21:44.635444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:21:44.635610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.635633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:44.635640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.635698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:21:44.635706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.635737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:44.635749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:21:44.636140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.636149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.636192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
D DEBUG: schemeshard__operation_common_subdomain.cpp:120: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409549 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2025-05-29T15:21:47.799601Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:0 3 -> 128 2025-05-29T15:21:47.811201Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:21:47.811282Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:21:47.811293Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:21:47.811301Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 102:0, at tablet# 72057594046678944 2025-05-29T15:21:47.811312Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-05-29T15:21:47.811359Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:21:47.819201Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-05-29T15:21:47.819262Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-05-29T15:21:47.819375Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:47.819410Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 34359740523 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:47.819421Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-05-29T15:21:47.819518Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:0 128 -> 240 2025-05-29T15:21:47.819531Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-05-29T15:21:47.819565Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 6 2025-05-29T15:21:47.819594Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:567: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[8:361:2334], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 72075186233409549, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-05-29T15:21:47.827278Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:47.827304Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:21:47.827364Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:47.827371Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:205:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-29T15:21:47.827455Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:21:47.827466Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:787: [72057594046678944] TSyncHive, operationId 102:0, ProgressState, NeedSyncHive: 0 2025-05-29T15:21:47.827474Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:0 240 -> 240 2025-05-29T15:21:47.827663Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:21:47.827676Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:21:47.827681Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:21:47.827688Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-05-29T15:21:47.827695Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-05-29T15:21:47.827716Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-05-29T15:21:47.843169Z node 8 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:21:47.843204Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-29T15:21:47.843229Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:21:47.843235Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:21:47.843241Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:21:47.843245Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:21:47.843251Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-29T15:21:47.843271Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:299:2289] message: TxId: 102 2025-05-29T15:21:47.843281Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:21:47.843288Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:21:47.843294Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:21:47.843353Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-05-29T15:21:47.843500Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:21:47.848886Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:21:47.848911Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:505:2443] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-05-29T15:21:47.849840Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:21:47.849884Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1102: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } 2025-05-29T15:21:47.849890Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1108: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, path /MyRoot/USER_0 2025-05-29T15:21:47.849920Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 103:0, 
explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-05-29T15:21:47.849928Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-05-29T15:21:47.851125Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:47.851171Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, operation: ALTER DATABASE, path: /MyRoot/USER_0 TestModificationResult got TxId: 103, wait until txId: 103 >> KqpOlapLocks::TwoQueriesWithRestartTablet [FAIL] |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest |58.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |58.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |58.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] Test command err: 15816748729620057094 3446294902387777564 4737000194841405505 5514154011729751550 13889309480827660807 17811821957134981582 1007196958907729442 17087663269720341298 15549252380079336562 1527359544822870606 908719726516615128 2692824268490615864 10760544359790659939 394528813202235384 12565695254038100487 16036018212523498834 15076766830590090701 8414912114725266411 8059648131965705235 5778886673607263117 17479902808771915750 8471234746579952937 10509811430388358437 5488076660738342296 16923890965037046620 13313635920212982964 9590348522794572977 5810115152982731494 3913252350813331939 14787062455031314794 10206759138305689309 8395582940709268852 4175798806572424762 15082624637204282987 13140465509305362596 9373650137292556154 7727067855836782676 4398143119480334737 13482534160837076245 14392043352053526133 12340834484481963313 14136231029975499885 16975244711762329054 12252194267891948756 11840004683441111462 364616555571782607 11112738316829990132 17892070046649302015 17235582342214271381 8010916774423224249 1750302349509622455 4164768259797174920 10050405648900607520 15691605756057894457 17835985877153576363 2829400459961180668 15885505239275366321 8296536113077518590 7321362498820919441 3021700205606080405 9940663889374234286 14641113253562656496 13441449661503407343 5937632866110037977 7440137528504489485 6958522165864662671 9398310837237487646 1502869616124066664 7585420412827857363 12258077452945955933 1395027393782670275 400953951346461642 13194464943026243243 16387570061687469071 18099365523727423081 
10114168001772028891 16912101519253686766 8641436102757695822 5109804549325554922 6294867075272718439 16518208641159065680 17873996208903049249 2455769706064234165 5098371154227886870 6475960680588452211 5040563419058338651 2748442738105949437 14632837705466520628 14517291827140163824 11937182219923713247 |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest |58.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |58.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |58.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |58.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |58.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/fqrun/fqrun |58.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |58.5%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] >> KqpScanArrowFormat::AggregateWithFunction >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |58.5%| [TA] $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |58.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |58.5%| [TA] {RESULT} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest >> KqpScanArrowInChanels::AggregateEmptySum |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] Test command err: 2025-05-29T15:21:45.647084Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888208314385074:2093];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:45.647368Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001222/r3tmp/tmp2hCLov/pdisk_1.dat 2025-05-29T15:21:45.716784Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2040, node 1 2025-05-29T15:21:45.751000Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001222/r3tmp/yandex9vaSul.tmp 2025-05-29T15:21:45.751015Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001222/r3tmp/yandex9vaSul.tmp 2025-05-29T15:21:45.751078Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001222/r3tmp/yandex9vaSul.tmp 2025-05-29T15:21:45.751099Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:45.783261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:45.783291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:45.783924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:46.132580Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888212609352926:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.132605Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.132681Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888212609352938:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.133863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-05-29T15:21:46.137533Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888212609352940:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-05-29T15:21:46.235542Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888212609353001:2316] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:46.297407Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888212609353011:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:46.298129Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWI1OWVkNzMtZGRhODczZTAtNDdkNzkxMTYtMWFhMDA5OGE=, ActorId: [1:7509888212609352924:2329], ActorState: ExecuteState, TraceId: 01jwea54wm76sja0awksvhrxkf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:46.305959Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:47.316598Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888216904320395:2369], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:47.317264Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OGViZDZmOWYtZGNjNjBkZTMtMmI0ZmFhNTUtZTRkNjBmYzU=, ActorId: [1:7509888216904320388:2365], ActorState: ExecuteState, TraceId: 01jwea561d2vg36v5rpcaet96r, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:47.317440Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:48.320908Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888221199287745:2395], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:48.321004Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=N2YwNzhlLTFhNjM1ODhkLTM5MzE4OGZhLTMxMjVjYzgz, ActorId: [1:7509888221199287743:2394], ActorState: ExecuteState, TraceId: 01jwea570w8xnm30wsey4bk72f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:48.321145Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |58.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |58.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |58.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |58.6%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut >> KqpOlapAggregations::Aggregation_NoPushdownOnDisabledEmitAggApply [GOOD] |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams Test command err: Trying to start YDB, gRPC: 23533, MsgBus: 22793 2025-05-29T15:21:33.091875Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888157234141117:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.091898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00181a/r3tmp/tmpru0pDd/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23533, node 1 2025-05-29T15:21:33.174282Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.174551Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888157234141098:2079] 1748532093091702 != 1748532093091705 2025-05-29T15:21:33.174633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.174640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.174643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2025-05-29T15:21:33.174689Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22793 2025-05-29T15:21:33.195040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.195074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.199447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22793 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.279686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.282563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.292956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.320918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.344569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.407782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.625382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888157234142743:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.625414Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.695586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.727665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.747787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.777105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.846990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.872330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.894722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.917986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888157234143396:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.918009Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.918161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888157234143401:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.918950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:33.922111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:21:33.922188Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888157234143403:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:21:34.015780Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888161529110750:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:34.257589Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888161529110759:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 VERIFY failed (2025-05-29T15:21:34.259696Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:21:34.258861Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2YzZTdlZjUtYjQ5YzNiM2QtZTVhZmNlYjktMjQxY2QyNQ==, ActorId: [1:7509888157234142725:2401], ActorState: ExecuteState, TraceId: 01jwea4ryx5czm12es6tt8p4c0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A80ED5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A77ED6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C19C66 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B97B2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B90B2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260DACEC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260DACEC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260DACEC 8. /-S/util/thread/pool.h:71: Process @ 0x260DACEC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A88859 10. /-S/util/thread/factory.h:15: Execute @ 0x13A87249 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A87249 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A826BC 13. ??:0: ?? @ 0x7F1187EBBAC2 14. ??:0: ?? @ 0x7F1187F4D84F Trying to start YDB, gRPC: 63713, MsgBus: 18060 2025-05-29T15:21:38.631490Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888178982691780:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:38.632780Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00181a/r3tmp/tmpDoOSmH/p ... :375: AssertSuccessResult @ 0x260B97B2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B90B2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260DACEC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260DACEC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260DACEC 8. /-S/util/thread/pool.h:71: Process @ 0x260DACEC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A88859 10. /-S/util/thread/factory.h:15: Execute @ 0x13A87249 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A87249 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A826BC 13. ??:0: ?? @ 0x7F6DD71AFAC2 14. ??:0: ?? 
@ 0x7F6DD724184F Trying to start YDB, gRPC: 19901, MsgBus: 25703 2025-05-29T15:21:44.366918Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888203055390311:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:44.413888Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00181a/r3tmp/tmpyzN5Ea/pdisk_1.dat 2025-05-29T15:21:44.449883Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888203055390277:2079] 1748532104364179 != 1748532104364182 2025-05-29T15:21:44.453387Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19901, node 1 2025-05-29T15:21:44.475028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:44.475038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:44.475039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:44.475077Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25703 2025-05-29T15:21:44.519109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:44.519153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:44.519983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:21:44.604345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:21:44.614730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:21:44.680760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:44.713942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:44.727060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:44.804696Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888203055391911:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:44.804738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:44.850277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:44.916627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:44.937465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:44.953102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:44.967595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:44.988740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:45.006394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:45.024157Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888207350359862:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:45.024183Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:45.024301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888207350359867:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:45.025165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:45.028289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:21:45.028448Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888207350359869:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:21:45.092916Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888207350359920:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:45.225745Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888207350359936:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:45.225951Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODQxNTJhN2YtMjNkNWMzMC05MjczNzYwYy00NTU0MzY2ZQ==, ActorId: [1:7509888203055391908:2401], ActorState: ExecuteState, TraceId: 01jwea53sz1n0bfkhdttensa79, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:21:45.226919Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A80ED5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A77ED6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C19C66 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B97B2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B90B2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260DACEC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260DACEC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260DACEC 8. /-S/util/thread/pool.h:71: Process @ 0x260DACEC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A88859 10. /-S/util/thread/factory.h:15: Execute @ 0x13A87249 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A87249 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A826BC 13. ??:0: ?? @ 0x7FDA3CFACAC2 14. ??:0: ?? @ 0x7FDA3D03E84F >> KqpScanArrowInChanels::AggregateByColumn >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] >> KqpScanArrowFormat::AggregateEmptySum >> KqpRbo::Filter >> KqpRbo::Bench_JoinFilter >> KqpRbo::Bench_Filter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_NoPushdownOnDisabledEmitAggApply [GOOD] Test command err: Trying to start YDB, gRPC: 22699, MsgBus: 6348 2025-05-29T15:21:33.545507Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888158451652266:2091];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.545776Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002796/r3tmp/tmppqHyfW/pdisk_1.dat 2025-05-29T15:21:33.634857Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22699, node 1 2025-05-29T15:21:33.672362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.672374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.672376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.672417Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:33.695179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.695210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.699042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6348 TClient is connected to server localhost:6348 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:21:33.863163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.867103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.883791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:33.914499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158451652877:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.914546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158451652877:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.914597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158451652877:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.914624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158451652877:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.914649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158451652877:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.914670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158451652877:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.914690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888158451652877:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.914715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158451652877:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.914752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158451652877:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.914773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158451652877:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.914799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158451652877:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.914824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888158451652877:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.920932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158451652885:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.920955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158451652885:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.921009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158451652885:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.921031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158451652885:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.921054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158451652885:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.921075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158451652885:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.921093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888158451652885:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.921113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158451652885:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.921143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158451652885:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.921161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158451652885:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.921181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158451652885:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.921207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158451652885:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.925912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888158451652891:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.925938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888158451652891:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.925982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888158451652891:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.926007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888158451652891:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.926029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888158451652891:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.926051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:750988815845 ... 
ks { TaskId: 1 CpuTimeUs: 143 FinishTimeMs: 1748532104010 OutputRows: 1 OutputBytes: 5 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 7 BuildCpuTimeUs: 136 HostName: "ghrun-lxxdcki4qu" NodeId: 2 CreateTimeMs: 1748532104010 CurrentWaitOutputTimeUs: 3397986 UpdateTimeMs: 1748532107408 } MaxMemoryUsage: 1048576 2025-05-29T15:21:47.408405Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:434: ActorId: [2:1915:2929] TxId: 281474976715663. Ctx: { TraceId: 01jwea4yt01v7ygk7cdk1rhjva, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1918:3037], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 1016 Tasks { TaskId: 1 CpuTimeUs: 143 FinishTimeMs: 1748532104010 OutputRows: 1 OutputBytes: 5 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 7 BuildCpuTimeUs: 136 HostName: "ghrun-lxxdcki4qu" NodeId: 2 CreateTimeMs: 1748532104010 CurrentWaitOutputTimeUs: 3397986 UpdateTimeMs: 1748532107408 } MaxMemoryUsage: 1048576 } 2025-05-29T15:21:47.408429Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:645: ActorId: [2:1915:2929] TxId: 281474976715663. Ctx: { TraceId: 01jwea4yt01v7ygk7cdk1rhjva, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1918:3037], 2025-05-29T15:21:47.632114Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[3:1277:2391];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-05-29T15:21:47.632205Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[3:1277:2391];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:47.849669Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[3:1277:2391];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:47.919500Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [2:1918:3037], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=. CustomerSuppliedId : . TraceId : 01jwea4yt01v7ygk7cdk1rhjva. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-05-29T15:21:47.919532Z node 2 :KQP_COMPUTE DEBUG: kqp_compute_scheduler.h:167: SelfId: [2:1918:3037], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=. CustomerSuppliedId : . TraceId : 01jwea4yt01v7ygk7cdk1rhjva. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-05-29T15:21:47.919600Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2021: SelfId: [2:1918:3037], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=. CustomerSuppliedId : . TraceId : 01jwea4yt01v7ygk7cdk1rhjva. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
Send stats to executor actor [2:1915:2929] TaskId: 1 Stats: CpuTimeUs: 1150 Tasks { TaskId: 1 CpuTimeUs: 143 FinishTimeMs: 1748532104010 OutputRows: 1 OutputBytes: 5 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 7 BuildCpuTimeUs: 136 HostName: "ghrun-lxxdcki4qu" NodeId: 2 CreateTimeMs: 1748532104010 CurrentWaitOutputTimeUs: 3909245 UpdateTimeMs: 1748532107919 } MaxMemoryUsage: 1048576 2025-05-29T15:21:47.919724Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:434: ActorId: [2:1915:2929] TxId: 281474976715663. Ctx: { TraceId: 01jwea4yt01v7ygk7cdk1rhjva, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1918:3037], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 1150 Tasks { TaskId: 1 CpuTimeUs: 143 FinishTimeMs: 1748532104010 OutputRows: 1 OutputBytes: 5 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 7 BuildCpuTimeUs: 136 HostName: "ghrun-lxxdcki4qu" NodeId: 2 CreateTimeMs: 1748532104010 CurrentWaitOutputTimeUs: 3909245 UpdateTimeMs: 1748532107919 } MaxMemoryUsage: 1048576 } 2025-05-29T15:21:47.919749Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:645: ActorId: [2:1915:2929] TxId: 281474976715663. Ctx: { TraceId: 01jwea4yt01v7ygk7cdk1rhjva, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1918:3037], 2025-05-29T15:21:48.154363Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[3:1277:2391];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-05-29T15:21:48.154409Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[3:1277:2391];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:48.345364Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[3:1277:2391];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:48.517064Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [2:1918:3037], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=. CustomerSuppliedId : . TraceId : 01jwea4yt01v7ygk7cdk1rhjva. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-05-29T15:21:48.517097Z node 2 :KQP_COMPUTE DEBUG: kqp_compute_scheduler.h:167: SelfId: [2:1918:3037], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=. CustomerSuppliedId : . TraceId : 01jwea4yt01v7ygk7cdk1rhjva. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-05-29T15:21:48.517181Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2021: SelfId: [2:1918:3037], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=. CustomerSuppliedId : . TraceId : 01jwea4yt01v7ygk7cdk1rhjva. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
Send stats to executor actor [2:1915:2929] TaskId: 1 Stats: CpuTimeUs: 1322 Tasks { TaskId: 1 CpuTimeUs: 143 FinishTimeMs: 1748532104010 OutputRows: 1 OutputBytes: 5 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 7 BuildCpuTimeUs: 136 HostName: "ghrun-lxxdcki4qu" NodeId: 2 CreateTimeMs: 1748532104010 CurrentWaitOutputTimeUs: 4506810 UpdateTimeMs: 1748532108517 } MaxMemoryUsage: 1048576 2025-05-29T15:21:48.517254Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:434: ActorId: [2:1915:2929] TxId: 281474976715663. Ctx: { TraceId: 01jwea4yt01v7ygk7cdk1rhjva, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1918:3037], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 1322 Tasks { TaskId: 1 CpuTimeUs: 143 FinishTimeMs: 1748532104010 OutputRows: 1 OutputBytes: 5 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 7 BuildCpuTimeUs: 136 HostName: "ghrun-lxxdcki4qu" NodeId: 2 CreateTimeMs: 1748532104010 CurrentWaitOutputTimeUs: 4506810 UpdateTimeMs: 1748532108517 } MaxMemoryUsage: 1048576 } 2025-05-29T15:21:48.517280Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:645: ActorId: [2:1915:2929] TxId: 281474976715663. Ctx: { TraceId: 01jwea4yt01v7ygk7cdk1rhjva, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1918:3037], 2025-05-29T15:21:48.791794Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[3:1277:2391];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-05-29T15:21:48.791844Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[3:1277:2391];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:48.996519Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[3:1277:2391];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:21:49.096395Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [2:1918:3037], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=. CustomerSuppliedId : . TraceId : 01jwea4yt01v7ygk7cdk1rhjva. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2025-05-29T15:21:49.096440Z node 2 :KQP_COMPUTE DEBUG: kqp_compute_scheduler.h:167: SelfId: [2:1918:3037], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=. CustomerSuppliedId : . TraceId : 01jwea4yt01v7ygk7cdk1rhjva. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2025-05-29T15:21:49.096529Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2021: SelfId: [2:1918:3037], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=. CustomerSuppliedId : . TraceId : 01jwea4yt01v7ygk7cdk1rhjva. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
Send stats to executor actor [2:1915:2929] TaskId: 1 Stats: CpuTimeUs: 1472 Tasks { TaskId: 1 CpuTimeUs: 143 FinishTimeMs: 1748532104010 OutputRows: 1 OutputBytes: 5 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 7 BuildCpuTimeUs: 136 HostName: "ghrun-lxxdcki4qu" NodeId: 2 CreateTimeMs: 1748532104010 CurrentWaitOutputTimeUs: 5086156 UpdateTimeMs: 1748532109096 } MaxMemoryUsage: 1048576 2025-05-29T15:21:49.096607Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:434: ActorId: [2:1915:2929] TxId: 281474976715663. Ctx: { TraceId: 01jwea4yt01v7ygk7cdk1rhjva, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1918:3037], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 1472 Tasks { TaskId: 1 CpuTimeUs: 143 FinishTimeMs: 1748532104010 OutputRows: 1 OutputBytes: 5 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 7 BuildCpuTimeUs: 136 HostName: "ghrun-lxxdcki4qu" NodeId: 2 CreateTimeMs: 1748532104010 CurrentWaitOutputTimeUs: 5086156 UpdateTimeMs: 1748532109096 } MaxMemoryUsage: 1048576 } 2025-05-29T15:21:49.096635Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:645: ActorId: [2:1915:2929] TxId: 281474976715663. Ctx: { TraceId: 01jwea4yt01v7ygk7cdk1rhjva, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ1NmZjNjUtNzhmYTljOTEtYTBiMWI4ODMtZWIzNTMxNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1918:3037], 2025-05-29T15:21:49.342997Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[3:1277:2391];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-05-29T15:21:49.343049Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[3:1277:2391];fline=actor.cpp:33;event=skip_flush_writing; |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] Test command err: 2025-05-29T15:21:46.609654Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888215128555628:2207];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:46.609733Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00120c/r3tmp/tmpDTY8bo/pdisk_1.dat 2025-05-29T15:21:46.726904Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12743, node 1 2025-05-29T15:21:46.775787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:46.775860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:46.776725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:46.776837Z node 
1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:46.776841Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:46.776843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:46.776896Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:47.096735Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888219423523348:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:47.096785Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:47.096887Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888219423523398:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:47.098139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-05-29T15:21:47.105048Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888219423523400:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-05-29T15:21:47.220281Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888219423523461:2321] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:47.331886Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888219423523470:2338], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:47.332546Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWExYjcyY2UtOWUyMTIyMjYtZjk3ZjY3NDAtOGVjMTY2ZmE=, ActorId: [1:7509888219423523346:2327], ActorState: ExecuteState, TraceId: 01jwea55tqbnzy4w1b50zwx01r, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:47.335870Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:48.340101Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888223718490859:2369], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:48.340168Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWQ4NTBiNWEtNjMwY2ZiYzctMjk3NzYyMjctNDFjNTlhMzc=, ActorId: [1:7509888223718490852:2365], ActorState: ExecuteState, TraceId: 01jwea571g4qvbqpgxvrd8nkr4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:48.340260Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:49.346776Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888228013458209:2395], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:49.346868Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjkyMTllN2EtNDM5NTRmMjItM2I4ZTZiMGMtN2M0ZGUyMDE=, ActorId: [1:7509888228013458207:2394], ActorState: ExecuteState, TraceId: 01jwea580w4tr5fmk44b0g20kp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:49.346980Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |58.6%| [TA] $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpRbo::Bench_Select ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapLocks::TwoQueriesWithRestartTablet [FAIL] Test command err: Trying to start YDB, gRPC: 1378, MsgBus: 1556 2025-05-29T15:21:33.502940Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888156889847106:2198];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.503129Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026be/r3tmp/tmp4NCx9s/pdisk_1.dat 2025-05-29T15:21:33.601005Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888156889846947:2079] 1748532093500514 != 1748532093500517 2025-05-29T15:21:33.604329Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1378, node 1 2025-05-29T15:21:33.642943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.642960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.642962Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.642998Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:33.674808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.674840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.679621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1556 TClient is connected to server localhost:1556 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.790173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.793130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.099183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888161184814900:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.099204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.140297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.185966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161184815020:2334];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.186014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161184815020:2334];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.186069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161184815020:2334];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.186090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161184815020:2334];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.186108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161184815020:2334];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.186130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161184815020:2334];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.186147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161184815020:2334];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.186169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161184815020:2334];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.186188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161184815020:2334];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.186211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161184815020:2334];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.186231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161184815020:2334];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.186250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;self_id=[1:7509888161184815020:2334];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.186538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161184815021:2335];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.186568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161184815021:2335];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.186609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161184815021:2335];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.186629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161184815021:2335];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.186648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161184815021:2335];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.186666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161184815021:2335];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.186683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161184815021:2335];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.186705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161184815021:2335];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.204889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161184815021:2335];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.204953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161184815021:2335];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.204973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161184815021:2335];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.204993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888161184815021:2335];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.209162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161184815019:2333];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.209176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161184815019:2333];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.209223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161184815019:2333];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_regist ... -29T15:21:35.352340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037894;self_id=[1:7509888165479782887:2478];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=18; 2025-05-29T15:21:35.353654Z node 1 :TX_COLUMNSHARD_TX INFO: log.cpp:784: tablet_id=72075186224037894;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:90;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-05-29T15:21:35.353806Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037894;self_id=[1:7509888165479782887:2478];process=SwitchToWork;fline=columnshard__progress_tx.cpp:120;event=EnqueueProgressTx;tablet_id=72075186224037894;tx_id=NO_VALUE_OPTIONAL; 2025-05-29T15:21:35.353809Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037894;self_id=[1:7509888165479782887:2478];process=SwitchToWork;fline=columnshard__progress_tx.cpp:125;event=EnqueueProgressTxStart;tablet_id=72075186224037894;tx_id=NO_VALUE_OPTIONAL;tx_current=NO_VALUE_OPTIONAL; 2025-05-29T15:21:35.357036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888165479782934:2483];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=18; 2025-05-29T15:21:35.358036Z node 1 :TX_COLUMNSHARD_TX INFO: log.cpp:784: tablet_id=72075186224037890;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:90;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-05-29T15:21:35.358163Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888165479782934:2483];process=SwitchToWork;fline=columnshard__progress_tx.cpp:120;event=EnqueueProgressTx;tablet_id=72075186224037890;tx_id=NO_VALUE_OPTIONAL; 2025-05-29T15:21:35.358165Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888165479782934:2483];process=SwitchToWork;fline=columnshard__progress_tx.cpp:125;event=EnqueueProgressTxStart;tablet_id=72075186224037890;tx_id=NO_VALUE_OPTIONAL;tx_current=NO_VALUE_OPTIONAL; 2025-05-29T15:21:35.359499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037896;self_id=[1:7509888165479782884:2475];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=18; 
2025-05-29T15:21:35.360683Z node 1 :TX_COLUMNSHARD_TX INFO: log.cpp:784: tablet_id=72075186224037896;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:90;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-05-29T15:21:35.360834Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037896;self_id=[1:7509888165479782884:2475];process=SwitchToWork;fline=columnshard__progress_tx.cpp:120;event=EnqueueProgressTx;tablet_id=72075186224037896;tx_id=NO_VALUE_OPTIONAL; 2025-05-29T15:21:35.360837Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037896;self_id=[1:7509888165479782884:2475];process=SwitchToWork;fline=columnshard__progress_tx.cpp:125;event=EnqueueProgressTxStart;tablet_id=72075186224037896;tx_id=NO_VALUE_OPTIONAL;tx_current=NO_VALUE_OPTIONAL; 2025-05-29T15:21:35.365805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509888165479782886:2477];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=18; 2025-05-29T15:21:35.365943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888165479782931:2480];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=18; 2025-05-29T15:21:35.367353Z node 1 :TX_COLUMNSHARD_TX INFO: log.cpp:784: tablet_id=72075186224037889;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:90;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-05-29T15:21:35.367521Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888165479782931:2480];process=SwitchToWork;fline=columnshard__progress_tx.cpp:120;event=EnqueueProgressTx;tablet_id=72075186224037889;tx_id=NO_VALUE_OPTIONAL; 2025-05-29T15:21:35.367524Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888165479782931:2480];process=SwitchToWork;fline=columnshard__progress_tx.cpp:125;event=EnqueueProgressTxStart;tablet_id=72075186224037889;tx_id=NO_VALUE_OPTIONAL;tx_current=NO_VALUE_OPTIONAL; 2025-05-29T15:21:35.371914Z node 1 :TX_COLUMNSHARD_TX INFO: log.cpp:784: tablet_id=72075186224037893;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:90;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-05-29T15:21:35.372096Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509888165479782886:2477];process=SwitchToWork;fline=columnshard__progress_tx.cpp:120;event=EnqueueProgressTx;tablet_id=72075186224037893;tx_id=NO_VALUE_OPTIONAL; 2025-05-29T15:21:35.372099Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509888165479782886:2477];process=SwitchToWork;fline=columnshard__progress_tx.cpp:125;event=EnqueueProgressTxStart;tablet_id=72075186224037893;tx_id=NO_VALUE_OPTIONAL;tx_current=NO_VALUE_OPTIONAL; 2025-05-29T15:21:35.372773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888165479782935:2484];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=18; 2025-05-29T15:21:35.373981Z node 1 :TX_COLUMNSHARD_TX INFO: log.cpp:784: 
tablet_id=72075186224037888;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:90;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-05-29T15:21:35.374107Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888165479782935:2484];process=SwitchToWork;fline=columnshard__progress_tx.cpp:120;event=EnqueueProgressTx;tablet_id=72075186224037888;tx_id=NO_VALUE_OPTIONAL; 2025-05-29T15:21:35.374110Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888165479782935:2484];process=SwitchToWork;fline=columnshard__progress_tx.cpp:125;event=EnqueueProgressTxStart;tablet_id=72075186224037888;tx_id=NO_VALUE_OPTIONAL;tx_current=NO_VALUE_OPTIONAL; 2025-05-29T15:21:35.376955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888165479782932:2481];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=18; 2025-05-29T15:21:35.378248Z node 1 :TX_COLUMNSHARD_TX INFO: log.cpp:784: tablet_id=72075186224037891;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:90;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-05-29T15:21:35.378394Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888165479782932:2481];process=SwitchToWork;fline=columnshard__progress_tx.cpp:120;event=EnqueueProgressTx;tablet_id=72075186224037891;tx_id=NO_VALUE_OPTIONAL; 2025-05-29T15:21:35.378397Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888165479782932:2481];process=SwitchToWork;fline=columnshard__progress_tx.cpp:125;event=EnqueueProgressTxStart;tablet_id=72075186224037891;tx_id=NO_VALUE_OPTIONAL;tx_current=NO_VALUE_OPTIONAL; 2025-05-29T15:21:35.379435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037897;self_id=[1:7509888165479782915:2479];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=18; 2025-05-29T15:21:35.380663Z node 1 :TX_COLUMNSHARD_TX INFO: log.cpp:784: tablet_id=72075186224037897;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:90;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2025-05-29T15:21:35.380800Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037897;self_id=[1:7509888165479782915:2479];process=SwitchToWork;fline=columnshard__progress_tx.cpp:120;event=EnqueueProgressTx;tablet_id=72075186224037897;tx_id=NO_VALUE_OPTIONAL; 2025-05-29T15:21:35.380803Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037897;self_id=[1:7509888165479782915:2479];process=SwitchToWork;fline=columnshard__progress_tx.cpp:125;event=EnqueueProgressTxStart;tablet_id=72075186224037897;tx_id=NO_VALUE_OPTIONAL;tx_current=NO_VALUE_OPTIONAL; 2025-05-29T15:21:35.382928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;self_id=[1:7509888165479782933:2482];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=18; 2025-05-29T15:21:35.384151Z node 1 :TX_COLUMNSHARD_TX INFO: log.cpp:784: 
tablet_id=72075186224037892;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:90;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0;
2025-05-29T15:21:35.384333Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037892;self_id=[1:7509888165479782933:2482];process=SwitchToWork;fline=columnshard__progress_tx.cpp:120;event=EnqueueProgressTx;tablet_id=72075186224037892;tx_id=NO_VALUE_OPTIONAL;
2025-05-29T15:21:35.384341Z node 1 :TX_COLUMNSHARD_TX DEBUG: log.cpp:784: tablet_id=72075186224037892;self_id=[1:7509888165479782933:2482];process=SwitchToWork;fline=columnshard__progress_tx.cpp:125;event=EnqueueProgressTxStart;tablet_id=72075186224037892;tx_id=NO_VALUE_OPTIONAL;tx_current=NO_VALUE_OPTIONAL;
2025-05-29T15:21:38.502812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7509888156889847106:2198];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:21:38.502840Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.cpp:595, void NKikimr::NKqp::CompareYson(const TString &, const TString &, const TString &): (ReformatYson(expected) == ReformatYson(actual)) failed: ("[]" != "[[1u;[\"test1\"];10];[2u;[\"test2\"];11];[3u;[\"test3\"];13];[4u;[\"test4\"];14]]") , with diff: "[(|[1u;[\"test1\")](|;10];[2u;[\)"(|test2\"];11];[3u;[\"test3\"];13];[4u;[\"test4\"];14]]")
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13D433CB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13EFB238
2. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:595: CompareYson @ 0x2648CC5E
3. /tmp//-S/ydb/core/kqp/ut/olap/locks_ut.cpp:70: Execute_ @ 0x13BB891C
4. /tmp//-S/ydb/core/kqp/ut/olap/locks_ut.cpp:17: operator() @ 0x13BC1FA6
5. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13EFD0ED
6. /tmp//-S/ydb/core/kqp/ut/olap/locks_ut.cpp:17: Execute @ 0x13BC196C
7. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13EFD862
8. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F0F40C
9. ??:0: ?? @ 0x7FA42038DD8F
10. ??:0: ?? @ 0x7FA42038DE3F
11. ??:0: ?? @ 0x12AB1028
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:21:44.617038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:21:44.617065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:21:44.617071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:21:44.617077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:21:44.617101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:21:44.617105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:21:44.617115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:21:44.617130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:21:44.617257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:21:44.617318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:21:44.631197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:21:44.631222Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:21:44.633846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:21:44.633963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:21:44.634009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:21:44.635545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:21:44.635705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:21:44.635817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured
yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.635854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:21:44.636347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.636390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:21:44.636633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.636648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:21:44.636671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:21:44.636680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.636686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:21:44.636717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.637958Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:21:44.659386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:21:44.659444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.659489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:21:44.659534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:21:44.659546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.660152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.660175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2025-05-29T15:21:44.660220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.660229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:21:44.660235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:21:44.660240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:21:44.660710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.660724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:21:44.660730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:21:44.661075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.661087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:21:44.661092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.661098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:21:44.661814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:21:44.662187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:21:44.662219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:21:44.662372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:21:44.662397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:21:44.662404Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.662456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:21:44.662464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:21:44.662487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:21:44.662498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:21:44.663033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:21:44.663043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:21:44.663073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... HARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-05-29T15:21:50.263059Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 9] 2025-05-29T15:21:50.263073Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-05-29T15:21:50.263077Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:730:2631], at schemeshard: 72075186233409546, txId: 116, path id: 1 2025-05-29T15:21:50.263083Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:730:2631], at schemeshard: 72075186233409546, txId: 116, path id: 9 2025-05-29T15:21:50.263206Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-05-29T15:21:50.263216Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 116:0 ProgressState, operation type: TxCreateTable, at tablet# 72075186233409546 2025-05-29T15:21:50.263260Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:357: TCreateParts opId# 116:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 
72057594046678944 PathId: 2 } 2025-05-29T15:21:50.263360Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-05-29T15:21:50.263376Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-05-29T15:21:50.263380Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-05-29T15:21:50.263385Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 16 2025-05-29T15:21:50.263391Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 13 2025-05-29T15:21:50.263567Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-05-29T15:21:50.263580Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-05-29T15:21:50.263584Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-05-29T15:21:50.263588Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 9], version: 1 2025-05-29T15:21:50.263592Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 4 2025-05-29T15:21:50.263604Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 116, ready parts: 0/1, is published: true 2025-05-29T15:21:50.264057Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72057594037968897 cookie: 72075186233409546:11 msg type: 268697601 2025-05-29T15:21:50.264085Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72057594037968897 2025-05-29T15:21:50.264091Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1779: TOperation RegisterRelationByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-05-29T15:21:50.264288Z node 7 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { 
StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-05-29T15:21:50.264339Z node 7 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72075186233409546, OwnerIdx 11, type DataShard, boot OK, tablet id 72075186233409556 2025-05-29T15:21:50.264368Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5867: Handle TEvCreateTabletReply at schemeshard: 72075186233409546 message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-05-29T15:21:50.264374Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1793: TOperation FindRelatedPartByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-05-29T15:21:50.264392Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 116:0, at schemeshard: 72075186233409546, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-05-29T15:21:50.264398Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:175: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, at tabletId: 72075186233409546 2025-05-29T15:21:50.264404Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:178: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-05-29T15:21:50.264423Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 116:0 2 -> 3 2025-05-29T15:21:50.264725Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-05-29T15:21:50.264779Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-05-29T15:21:50.265034Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 116:0, at schemeshard: 72075186233409546 2025-05-29T15:21:50.265075Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-05-29T15:21:50.265083Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_table.cpp:200: TCreateTable TConfigureParts operationId# 116:0 ProgressState at tabletId# 72075186233409546 2025-05-29T15:21:50.265093Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:220: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 seqNo: 3:8 2025-05-29T15:21:50.265161Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:236: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 677 RawX2: 30064773661 } TxBody: "\n\236\004\n\007Table11\020\t\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 
\010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\n\000\220\000\000\020\000\001\020\t:\004\010\003\020\010" TxId: 116 ExecLevel: 0 Flags: 0 SchemeShardId: 72075186233409546 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } SubDomainPathId: 1 2025-05-29T15:21:50.265837Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72075186233409556 cookie: 72075186233409546:11 msg type: 269549568 2025-05-29T15:21:50.265873Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72075186233409556 TestModificationResult got TxId: 116, wait until txId: 116 TestModificationResults wait txId: 117 2025-05-29T15:21:50.270592Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table12" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key" } } TxId: 117 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2025-05-29T15:21:50.271061Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 117, response: Status: StatusQuotaExceeded Reason: "Request exceeded a limit on the number of schema operations, try again later." 
TxId: 117 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546
2025-05-29T15:21:50.271095Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 117, database: /MyRoot/USER_0, subject: , status: StatusQuotaExceeded, reason: Request exceeded a limit on the number of schema operations, try again later., operation: CREATE TABLE, path: /MyRoot/USER_0/Table12
TestModificationResult got TxId: 117, wait until txId: 117
>> KqpRbo::Bench_CrossFilter
>> KqpRbo::CrossFilter
>> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD]
>> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD]
>> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD]
>> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks
>> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [FAIL]
Test command err:
2025-05-29T15:21:45.633101Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888207800709785:2199];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:21:45.634000Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00121b/r3tmp/tmputTpTt/pdisk_1.dat
2025-05-29T15:21:45.742934Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888207800709625:2079] 1748532105632196 != 1748532105632199
2025-05-29T15:21:45.742938Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 10060, node 1
2025-05-29T15:21:45.753450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/00121b/r3tmp/yandex9hic5z.tmp
2025-05-29T15:21:45.753463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/00121b/r3tmp/yandex9hic5z.tmp
2025-05-29T15:21:45.753518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/00121b/r3tmp/yandex9hic5z.tmp
2025-05-29T15:21:45.753552Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:62989
PQClient connected to localhost:10060
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:45.814862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:45.814888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:45.815646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:21:45.817300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:21:45.819160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:21:45.827223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:46.215451Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888212095677648:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.215479Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.215923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888212095677660:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.216791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-05-29T15:21:46.226905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-05-29T15:21:46.227066Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888212095677662:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-05-29T15:21:46.263310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:46.315333Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888212095677800:2414] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:46.339267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:46.350810Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888212095677808:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:46.351625Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWE4N2VmY2EtZjA0YzQyYzQtNTQwNDI5NTAtNTUzNjcwNDE=, ActorId: [1:7509888212095677632:2329], ActorState: ExecuteState, TraceId: 01jwea54z40g941p56m2af9nt4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:46.352192Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:46.368676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:21:46.438065Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888212095678002:2378], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:46.438654Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Zjc3MTFmMGItOTE0NDFiOTEtNTFhYWIwMTktOGRmNzYzZDI=, ActorId: [1:7509888212095677999:2376], ActorState: ExecuteState, TraceId: 01jwea55516e339tvaeg644qfc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
TBackTrace::Capture()+28 (0x137DF35C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x139926E9)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x136D9654)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x136D84E8)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x136B3322)
NKikimr::NPQCDTests::NTestSuiteTPQCDTest::TTestCaseTestCloudClientsAreConsistentlyDistributed::Execute_(NUnitTest::TTestContext&)+805 (0x136C7E95)
NKikimr::NPQCDTests::NTestSuiteTPQCDTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136CE677)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1399459E)
NKikimr::NPQCDTests::NTestSuiteTPQCDTest::TCurrentTest::Execute()+424 (0x136CDED8)
NUnitTest::TTestFactory::Execute()+803 (0x13994D13)
NUnitTest::RunMain(int, char**)+3021 (0x139A68BD)
??+0 (0x7FD20C387D90)
__libc_start_main+128 (0x7FD20C387E40)
_start+41 (0x1280D029)
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestDiscoverClusters [FAIL]
Test command err:
2025-05-29T15:21:45.618460Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888208969701336:2134];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:21:45.619448Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012cc/r3tmp/tmpVRQgzr/pdisk_1.dat
2025-05-29T15:21:45.749600Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:21:45.750322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:21:45.750345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:21:45.755264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 30730, node 1
2025-05-29T15:21:45.810987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/0012cc/r3tmp/yandexJy2ghA.tmp
2025-05-29T15:21:45.810997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/0012cc/r3tmp/yandexJy2ghA.tmp
2025-05-29T15:21:45.811059Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/0012cc/r3tmp/yandexJy2ghA.tmp
2025-05-29T15:21:45.811102Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:1699
PQClient connected to localhost:30730
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:45.931476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:45.935451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:21:46.226121Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888213264669250:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.226194Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888213264669230:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.226206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.230946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-05-29T15:21:46.239568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-05-29T15:21:46.242803Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888213264669267:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-05-29T15:21:46.266242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:46.319586Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888213264669404:2413] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:46.335983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:46.344344Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888213264669421:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:46.344771Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NGU4MGIzM2QtMzExNWQ1ZjgtYWZmZmI5MGMtZTQ4OWQzNGM=, ActorId: [1:7509888213264669227:2329], ActorState: ExecuteState, TraceId: 01jwea54zhd2xpq74shgq4y22f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:46.345095Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:46.402578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:21:46.452430Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888213264669606:2378], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:46.453144Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWMyNTAzZmQtMTYwMGM1ZDktZmRmNjZkODEtYzdmMTJmNGQ=, ActorId: [1:7509888213264669603:2376], ActorState: ExecuteState, TraceId: 01jwea55656nvqb2td7ykcv7j4, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
TBackTrace::Capture()+28 (0x137DF35C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x139926E9)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x136D9654)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x136D84E8)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x136B3322)
NKikimr::NPQCDTests::NTestSuiteTPQCDTest::TTestCaseTestDiscoverClusters::Execute_(NUnitTest::TTestContext&)+482 (0x136B3F12)
NKikimr::NPQCDTests::NTestSuiteTPQCDTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136CE677)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1399459E)
NKikimr::NPQCDTests::NTestSuiteTPQCDTest::TCurrentTest::Execute()+424 (0x136CDED8)
NUnitTest::TTestFactory::Execute()+803 (0x13994D13)
NUnitTest::RunMain(int, char**)+3021 (0x139A68BD)
??+0 (0x7F833C312D90)
__libc_start_main+128 (0x7F833C312E40)
_start+41 (0x1280D029)
>> ApplyClusterEndpointTest::NoPorts [GOOD]
>> ApplyClusterEndpointTest::PortFromCds [GOOD]
>> ApplyClusterEndpointTest::PortFromDriver [GOOD]
>> BasicUsage::MaxByteSizeEqualZero
>> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression
>> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD]
>> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD]
>> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD]
>> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD]
>> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD]
>> ReadSessionImplTest::HoleBetweenOffsets [GOOD]
>> ReadSessionImplTest::LOGBROKER_7702 [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier [FAIL]
Test command err:
2025-05-29T15:21:45.758501Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888207671029720:2090];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:21:45.758800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00128f/r3tmp/tmp51w9i9/pdisk_1.dat
2025-05-29T15:21:45.907741Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 21064, node 1
2025-05-29T15:21:45.922897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:21:45.922909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:21:45.922911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:21:45.922948Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:65407
PQClient connected to localhost:21064
WaitRootIsUp 'Root'...
TClient::Ls request: Root 2025-05-29T15:21:45.951069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:45.951094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:45.953029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:45.991554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:45.999741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:46.009112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:21:46.386222Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888211965997642:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.386629Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.390807Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888211965997679:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.393485Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888211965997684:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.393514Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.394083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-05-29T15:21:46.396815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-05-29T15:21:46.396875Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888211965997685:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-05-29T15:21:46.445530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:46.475669Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888211965997809:2411] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:46.497482Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888211965997818:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:46.498634Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmY4N2I4ZDMtMjk3NzA5ZmQtN2ViODJkLTYyOGUzYjU4, ActorId: [1:7509888211965997638:2327], ActorState: ExecuteState, TraceId: 01jwea554a7n9964a8tn4y17kq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:46.499129Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:46.514424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:46.570977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:21:46.626441Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888211965998020:2378], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:46.627144Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2MzMzhlYTYtY2E2YTMxMTctNmViZWJlZDMtNTJmMDVlMzI=, ActorId: [1:7509888211965998017:2376], ActorState: ExecuteState, TraceId: 01jwea55b9ek1wxdvspb4bf8ae, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
TBackTrace::Capture()+28 (0x137DF35C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x139926E9)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x136D9654)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x136D84E8)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x136B3322)
NKikimr::NPQCDTests::NTestSuiteTPQCDTest::TTestCaseTestUnavailableWithoutNetClassifier::Execute_(NUnitTest::TTestContext&)+278 (0x136C6E36)
NKikimr::NPQCDTests::NTestSuiteTPQCDTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136CE677)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1399459E)
NKikimr::NPQCDTests::NTestSuiteTPQCDTest::TCurrentTest::Execute()+424 (0x136CDED8)
NUnitTest::TTestFactory::Execute()+803 (0x13994D13)
NUnitTest::RunMain(int, char**)+3021 (0x139A68BD)
??+0 (0x7F978B2B1D90)
__libc_start_main+128 (0x7F978B2B1E40)
_start+41 (0x1280D029)
|58.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest
>> KqpRbo::Filter [GOOD]
>> KqpYql::TestUuidDefaultColumn
>> KqpScripting::ScriptingCreateAndAlterTableTest
>> KqpRbo::Bench_Filter [GOOD]
>> KqpRbo::Bench_JoinFilter [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD]
Test command err:
2025-05-29T15:21:51.196168Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-05-29T15:21:51.196176Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-05-29T15:21:51.196181Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s
2025-05-29T15:21:51.196311Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session
2025-05-29T15:21:51.196456Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321
2025-05-29T15:21:51.197758Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-05-29T15:21:51.197859Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL)
2025-05-29T15:21:51.198214Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-05-29T15:21:51.198219Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-05-29T15:21:51.198222Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s
2025-05-29T15:21:51.198280Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session
2025-05-29T15:21:51.198367Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321
2025-05-29T15:21:51.198396Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-05-29T15:21:51.198453Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1.
Read offset: (NULL) 2025-05-29T15:21:51.198529Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-05-29T15:21:51.198823Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.198828Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.198832Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:21:51.198891Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:21:51.199073Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:21:51.199105Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.199167Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:21:51.199373Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.199481Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:21:51.199524Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:21:51.199535Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-05-29T15:21:51.199842Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.199847Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.199852Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:21:51.199904Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:21:51.200027Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:21:51.200075Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.200306Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2025-05-29T15:21:51.200523Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:21:51.200559Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-05-29T15:21:51.200661Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-05-29T15:21:51.200679Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-05-29T15:21:51.200709Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:21:51.200716Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-05-29T15:21:51.200722Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-05-29T15:21:51.200760Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2025-05-29T15:21:51.200779Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-05-29T15:21:51.200783Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-05-29T15:21:51.200786Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-05-29T15:21:51.200805Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2025-05-29T15:21:51.200818Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-05-29T15:21:51.200822Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-05-29T15:21:51.200825Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-05-29T15:21:51.200841Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2025-05-29T15:21:51.200851Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-05-29T15:21:51.200855Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-05-29T15:21:51.200858Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-05-29T15:21:51.200873Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2025-05-29T15:21:51.201226Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.201230Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.201233Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:21:51.201287Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:21:51.201350Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:21:51.201452Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.201483Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-05-29T15:21:51.201659Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:21:51.201690Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-05-29T15:21:51.201729Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-05-29T15:21:51.201739Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-05-29T15:21:51.201759Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:21:51.201764Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-05-29T15:21:51.201768Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-05-29T15:21:51.201771Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-05-29T15:21:51.201776Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-05-29T15:21:51.201813Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 5). Partition stream id: 1 GOT RANGE 0 5 Getting new event 2025-05-29T15:21:51.201829Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-05-29T15:21:51.201832Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-05-29T15:21:51.201835Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-05-29T15:21:51.201838Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-05-29T15:21:51.201842Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-05-29T15:21:51.201860Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 GOT RANGE 5 9 2025-05-29T15:21:51.202169Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.202173Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.202176Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:21:51.202218Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:21:51.202271Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:21:51.202295Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.202323Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. 
Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:21:51.202448Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:21:51.202548Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:21:51.202578Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-05-29T15:21:51.202585Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-05-29T15:21:51.202603Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:21:51.202608Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-05-29T15:21:51.202613Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-05-29T15:21:51.202617Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-05-29T15:21:51.202623Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-05-29T15:21:51.202627Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-05-29T15:21:51.202651Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 end_offset: 3 } } RANGE 0 3 2025-05-29T15:21:51.202678Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 12). 
Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 start_offset: 3 end_offset: 12 } } RANGE 3 12 >> KqpRbo::Bench_Select [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::Filter [GOOD] Test command err: Trying to start YDB, gRPC: 10707, MsgBus: 15586 2025-05-29T15:21:50.451674Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888230275757857:2081];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:50.451875Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002385/r3tmp/tmpFMysYH/pdisk_1.dat 2025-05-29T15:21:50.524682Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:50.524979Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888230275757789:2079] 1748532110449188 != 1748532110449191 TServer::EnableGrpc on GrpcPort 10707, node 1 2025-05-29T15:21:50.540962Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:50.540977Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:50.540979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:50.541027Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:50.551363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:50.551390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:50.552494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15586 TClient is connected to server localhost:15586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:50.612945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:21:50.971399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888230275758450:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.971436Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.006729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.071992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888234570725850:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.072016Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.072057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888234570725855:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.072908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:51.076057Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888234570725857:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:51.170565Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888234570725908:2383] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] >> KqpYql::UuidPrimaryKeyBulkUpsert >> KqpPragma::OrderedColumns >> KqpScripting::ScriptExplainCreatedTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::Bench_Filter [GOOD] Test command err: Trying to start YDB, gRPC: 24154, MsgBus: 12109 2025-05-29T15:21:50.595118Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888230143654546:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:50.595525Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00239a/r3tmp/tmpQWeA7N/pdisk_1.dat 2025-05-29T15:21:50.679377Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24154, node 1 2025-05-29T15:21:50.696040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:50.696072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:50.696941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:50.700972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:50.700983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:50.700985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:50.701034Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12109 TClient is connected to server localhost:12109 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:50.797483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:50.803983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:51.036663Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888234438622458:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.036697Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.090988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.157443Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888234438622562:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.157470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.157549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888234438622567:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.158460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:51.161039Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888234438622569:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:51.250449Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888234438622620:2383] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> KqpScripting::ScanQuery >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestPrioritizeLocalDatacenter [FAIL] Test command err: 2025-05-29T15:21:45.780337Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888207897247518:2091];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:45.780590Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00125c/r3tmp/tmpt9eGeB/pdisk_1.dat 2025-05-29T15:21:45.922168Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12704, node 1 2025-05-29T15:21:45.957914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/00125c/r3tmp/yandexGN3Nax.tmp 2025-05-29T15:21:45.957928Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/00125c/r3tmp/yandexGN3Nax.tmp 2025-05-29T15:21:45.957988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/00125c/r3tmp/yandexGN3Nax.tmp 2025-05-29T15:21:45.958031Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:45.963097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:45.963142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:45.966321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4439 PQClient connected to localhost:12704 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:46.007668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:46.016601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:46.023176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:21:46.049170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-05-29T15:21:46.319043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888212192215436:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.319177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.319396Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888212192215473:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.320367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-05-29T15:21:46.329046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-05-29T15:21:46.332438Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888212192215475:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710661 completed, doublechecking } 2025-05-29T15:21:46.384672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:46.384968Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888212192215560:2382] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:46.425226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:46.441677Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888212192215590:2346], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
<main>:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:46.442340Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGUwMDFlMzUtZGM1YmZhOWUtMTRiNTY5NTAtNmQ2ZTY1ZTg=, ActorId: [1:7509888212192215433:2328], ActorState: ExecuteState, TraceId: 01jwea552a18yfnjdnxxrjpknm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:46.443137Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:46.506839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:21:46.582181Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888212192215814:2378], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:46.582881Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjFjYWQzMi03MGI4M2FiYy02OGJmMmEyZC0xOTEzMDMzNg==, ActorId: [1:7509888212192215811:2376], ActorState: ExecuteState, TraceId: 01jwea55a445dp2fpnphnvh50r, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x137DF35C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x139926E9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x136D9654) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x136D84E8) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x136B3322) NKikimr::NPQCDTests::NTestSuiteTPQCDTest::TTestCaseTestPrioritizeLocalDatacenter::Execute_(NUnitTest::TTestContext&)+474 (0x136C319A) NKikimr::NPQCDTests::NTestSuiteTPQCDTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136CE677) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1399459E) NKikimr::NPQCDTests::NTestSuiteTPQCDTest::TCurrentTest::Execute()+424 (0x136CDED8) NUnitTest::TTestFactory::Execute()+803 (0x13994D13) NUnitTest::RunMain(int, char**)+3021 (0x139A68BD) ??+0 (0x7FC1411E2D90) __libc_start_main+128 (0x7FC1411E2E40) _start+41 (0x1280D029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::Bench_JoinFilter [GOOD] Test command err: Trying to start YDB, gRPC: 4354, MsgBus: 13306 2025-05-29T15:21:50.482934Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888229710847981:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:50.483346Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00237e/r3tmp/tmpV1UERJ/pdisk_1.dat 2025-05-29T15:21:50.551007Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4354, node 1 2025-05-29T15:21:50.573124Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:50.573140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:50.573143Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:50.573194Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:50.584085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:50.584121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:50.585477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13306 TClient is connected to server localhost:13306 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:50.654888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:50.658718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:50.965136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888229710848593:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.965165Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.021699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.037649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.059794Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888234005816062:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.059826Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.059916Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888234005816067:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.060725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-05-29T15:21:51.063293Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888234005816069:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710660 completed, doublechecking } 2025-05-29T15:21:51.118661Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888234005816120:2429] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::Bench_Select [GOOD] Test command err: Trying to start YDB, gRPC: 18601, MsgBus: 18102 2025-05-29T15:21:50.777017Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888231624831538:2079];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:50.777342Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002368/r3tmp/tmpQcOQ3J/pdisk_1.dat 2025-05-29T15:21:50.871314Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:50.879860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:50.879894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 18601, node 1 2025-05-29T15:21:50.886379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:50.904045Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:50.904055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:50.904057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:50.904092Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18102 TClient is connected to server localhost:18102 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-05-29T15:21:50.978426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:51.204099Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888235919799440:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.204103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888235919799428:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.204125Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.205094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:21:51.207276Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888235919799442:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:21:51.277680Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888235919799493:2324] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> KqpYql::EvaluateExprPgNull >> KqpRbo::Bench_CrossFilter [GOOD] >> KqpRbo::CrossFilter [GOOD] |58.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |58.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning [FAIL] Test command err: 2025-05-29T15:21:46.602869Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888214038713687:2087];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:46.603118Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001209/r3tmp/tmpjQ0pGH/pdisk_1.dat 2025-05-29T15:21:46.722394Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888214038713615:2079] 1748532106592333 != 1748532106592336 2025-05-29T15:21:46.724360Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15277, node 1 2025-05-29T15:21:46.767017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001209/r3tmp/yandexQVWyRa.tmp 2025-05-29T15:21:46.767030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001209/r3tmp/yandexQVWyRa.tmp 2025-05-29T15:21:46.767098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001209/r3tmp/yandexQVWyRa.tmp 2025-05-29T15:21:46.767150Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:46.767629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:46.767651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:46.776732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8214 PQClient connected to localhost:15277 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:46.841510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:46.844752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:21:47.074381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888218333681621:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:47.074425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:47.074539Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888218333681648:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:47.075255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-05-29T15:21:47.077550Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888218333681650:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-05-29T15:21:47.143567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:47.168145Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888218333681815:2433] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:47.175490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:47.189802Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888218333681828:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:47.190424Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODA5Yjk0NWUtZTY4NjJjYmItZTgxZjllNzktYjc1ZTVkNTg=, ActorId: [1:7509888218333681618:2327], ActorState: ExecuteState, TraceId: 01jwea55t1a63tznw3zbkyfmbv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:47.190919Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:47.206565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:21:47.258522Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888218333681987:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:21:47.258826Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjliZTI2YzAtMjAzNDlkZDItMzUwZDcxNzEtY2I0YmVjZWU=, ActorId: [1:7509888218333681984:2374], ActorState: ExecuteState, TraceId: 01jwea55z1464tg9mwa9wetvyb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
TBackTrace::Capture()+28 (0x137DF35C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x139926E9)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x136D9654)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x136D84E8)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x136B3322)
NKikimr::NPQCDTests::NTestSuiteTPQCDTest::TTestCaseTestRelatedServicesAreRunning::Execute_(NUnitTest::TTestContext&)+467 (0x136AF9F3)
NKikimr::NPQCDTests::NTestSuiteTPQCDTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136CE677)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1399459E)
NKikimr::NPQCDTests::NTestSuiteTPQCDTest::TCurrentTest::Execute()+424 (0x136CDED8)
NUnitTest::TTestFactory::Execute()+803 (0x13994D13)
NUnitTest::RunMain(int, char**)+3021 (0x139A68BD)
??+0 (0x7FAC6DAC7D90)
__libc_start_main+128 (0x7FAC6DAC7E40)
_start+41 (0x1280D029)
|58.7%| [TA] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|58.7%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut
>> TNodeBrokerTest::NodesMigrationReuseRemovedID
>> TTenantPoolTests::TestSensorsConfigForStaticSlot
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::Bench_CrossFilter [GOOD]
Test command err: Trying to start YDB, gRPC: 31182, MsgBus: 1026 2025-05-29T15:21:51.013266Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888233984637908:2081];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:51.013578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002371/r3tmp/tmpM51LE2/pdisk_1.dat 2025-05-29T15:21:51.082086Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31182, node 1 2025-05-29T15:21:51.108989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:51.109002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:51.109005Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:51.109049Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:51.113446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:51.113476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:51.114804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1026 TClient is connected to server localhost:1026 WaitRootIsUp 'Root'... 
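The TPQCDTest failure above is an assertion in a test helper: the backtrace shows RunYqlDataQueryWithParams raising through the unittest framework when the query result is not successful. A minimal sketch of that pattern, assuming the classic YDB C++ SDK table client and the Arcadia unittest macros — the includes, names, and signatures below are assumptions for illustration, not the actual ydb/core/testlib/test_pq_client.h source:

    // Hedged sketch of a RunYqlDataQueryWithParams-style helper.
    // The UNIT_ASSERT_C on result.IsSuccess() is the check that fires in the
    // log: the compiler's INTERNAL_ERROR ("yql_expr.h:1874: index out of
    // range") yields a non-success status, so the assert raises and the
    // framework prints the captured backtrace.
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <library/cpp/testing/unittest/registar.h>
    #include <util/generic/maybe.h>

    TMaybe<NYdb::NTable::TDataQueryResult> RunYqlDataQueryWithParams(
            NYdb::NTable::TSession& session,
            const TString& query,
            const NYdb::TParams& params) {
        // Run the YQL data query in a single serializable transaction.
        auto result = session.ExecuteDataQuery(
                query,
                NYdb::NTable::TTxControl::BeginTx().CommitTx(),
                params)
            .GetValueSync();
        // Abort the test with the accumulated issues if the query failed.
        UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
        return result;
    }

Under this pattern any non-success status aborts the test at the assertion site, which matches the test_pq_client.h:537 location reported in the log.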
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:51.186664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:51.428187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888233984638499:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.428219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.481479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.545983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.561152Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888233984638673:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.561178Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.561279Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888233984638678:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.562002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-05-29T15:21:51.564615Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888233984638680:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-05-29T15:21:51.627055Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888233984638731:2429] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-05-29T15:21:51.151806Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.151814Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.151819Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:21:51.151962Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:21:51.152170Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:21:51.154156Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.154285Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:21:51.154580Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:21:51.154678Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:21:51.154776Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-05-29T15:21:51.154797Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:21:51.154862Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:21:51.154870Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-05-29T15:21:51.154880Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-05-29T15:21:51.154886Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-05-29T15:21:51.155381Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.155386Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.155391Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:21:51.155449Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:21:51.155536Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:21:51.155591Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.155638Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-05-29T15:21:51.155839Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:21:51.155872Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-05-29T15:21:51.155912Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-05-29T15:21:51.155922Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-05-29T15:21:51.155951Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:21:51.155957Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-05-29T15:21:51.155963Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-05-29T15:21:51.156001Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2025-05-29T15:21:51.156022Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-05-29T15:21:51.156026Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-05-29T15:21:51.156030Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-05-29T15:21:51.156049Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2025-05-29T15:21:51.156061Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-05-29T15:21:51.156065Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-05-29T15:21:51.156069Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-05-29T15:21:51.156081Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2025-05-29T15:21:51.156091Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-05-29T15:21:51.156096Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-05-29T15:21:51.156100Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-05-29T15:21:51.156117Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2025-05-29T15:21:51.156430Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.156436Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.156439Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:21:51.156512Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:21:51.156638Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:21:51.156710Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:51.156752Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2025-05-29T15:21:51.156899Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:21:51.156929Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-05-29T15:21:51.156968Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-05-29T15:21:51.156982Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-05-29T15:21:51.157014Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-05-29T15:21:51.157020Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-05-29T15:21:51.157038Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 GOT RANGE 0 2 Getting new event 2025-05-29T15:21:51.157054Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-05-29T15:21:51.157058Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-05-29T15:21:51.157068Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 GOT RANGE 2 3 Getting new event 2025-05-29T15:21:51.157079Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-05-29T15:21:51.157085Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-05-29T15:21:51.157095Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 GOT RANGE 3 4 Getting new event 2025-05-29T15:21:51.157106Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-05-29T15:21:51.157109Z :DEBUG: [db] [sessionid] [cluster] The application data ... er". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-05-29T15:21:51.966942Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 201). Partition stream id: 1 GOT RANGE 0 201 2025-05-29T15:21:51.988844Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-05-29T15:21:51.988851Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-05-29T15:21:51.988855Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:21:51.988970Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-05-29T15:21:51.989100Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:21:51.989160Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-05-29T15:21:51.989219Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-05-29T15:21:52.021704Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-05-29T15:21:52.021919Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:21:52.022181Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-05-29T15:21:52.022570Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-05-29T15:21:52.022696Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-05-29T15:21:52.023310Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-05-29T15:21:52.023449Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-05-29T15:21:52.023581Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-05-29T15:21:52.023706Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-05-29T15:21:52.024761Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2025-05-29T15:21:52.024886Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-05-29T15:21:52.024906Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-05-29T15:21:52.024980Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-05-29T15:21:52.026701Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 11). Partition stream id: 1 GOT RANGE 0 11 2025-05-29T15:21:52.027263Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:52.027268Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:52.027271Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:21:52.040988Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:21:52.041163Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:21:52.041250Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:52.042827Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:21:52.042957Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-05-29T15:21:52.043334Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:52.043339Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:52.043344Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:21:52.043770Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-05-29T15:21:52.043883Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:21:52.043939Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:52.046998Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:52.047046Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:21:52.047075Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:21:52.047085Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-05-29T15:21:52.047121Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 >> TNodeBrokerTest::NodesMigration999Nodes >> BasicUsage::MaxByteSizeEqualZero [FAIL] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> TSlotIndexesPoolTest::Ranges [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [FAIL] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [FAIL] >> PersQueueSdkReadSessionTest::StopResumeReadingData ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::CrossFilter [GOOD] Test command err: Trying to start YDB, gRPC: 18597, MsgBus: 20597 2025-05-29T15:21:51.204240Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888236885487419:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:51.204383Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002363/r3tmp/tmpnd5f2i/pdisk_1.dat 2025-05-29T15:21:51.261733Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18597, node 1 2025-05-29T15:21:51.281802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:51.281820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:51.281823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:51.281891Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20597 2025-05-29T15:21:51.306078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:51.306119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:51.307278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20597 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:51.355454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:51.358214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:51.663278Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888236885487905:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.663305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.708089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.776191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.789829Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888236885488079:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.789873Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.789896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888236885488084:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.790876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:21:51.796411Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888236885488086:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:21:51.872178Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888236885488137:2428] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } |58.7%| [TA] $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId |58.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} |58.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Ranges [GOOD] >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-true >> TNodeBrokerTest::NodeNameExpiration >> TSlotIndexesPoolTest::Init [GOOD] >> TNodeBrokerTest::ResolveScopeIdForServerless >> TNodeBrokerTest::ListNodesEpochDeltasPersistance >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] >> KqpOlap::PredicatePushdown [GOOD] >> KqpOlap::PredicatePushdownCastErrors >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TNodeBrokerTest::UpdateNodesLog >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 16565, MsgBus: 13355 2025-05-29T15:21:51.918821Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888233256542027:2082];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:51.919180Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00240f/r3tmp/tmpICnaqm/pdisk_1.dat 2025-05-29T15:21:52.000075Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:52.003818Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888233256541971:2079] 1748532111900783 != 1748532111900786 TServer::EnableGrpc on GrpcPort 16565, node 1 2025-05-29T15:21:52.022942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:52.022955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:52.022957Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:52.022999Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13355 2025-05-29T15:21:52.067136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:52.067179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-05-29T15:21:52.068087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13355 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:52.125031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.131471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:52.535071Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888237551509924:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.535123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.632919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.707436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888237551510034:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.707461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.707538Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888237551510039:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.708397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:52.710966Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888237551510041:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-05-29T15:21:52.806859Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888237551510092:2390] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } |58.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |58.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |58.8%| [TA] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] Test command err: 2025-05-29T15:21:52.785073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:21:52.785102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:21:52.785108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:21:52.785113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:21:52.785128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:21:52.785132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:21:52.785142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:21:52.785154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:21:52.785256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:21:52.785334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:21:52.789698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:52.789720Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:52.791386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:21:52.791423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:21:52.791444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2025-05-29T15:21:52.792176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:21:52.792276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:21:52.792373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2025-05-29T15:21:52.792453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:21:52.792991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:21:52.793040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:21:52.793288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:21:52.793297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:21:52.793326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:21:52.793337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-05-29T15:21:52.793344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:21:52.793364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2025-05-29T15:21:52.834283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2025-05-29T15:21:52.834369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:21:52.834441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2025-05-29T15:21:52.834501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2025-05-29T15:21:52.834513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:21:52.839653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2025-05-29T15:21:52.839690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2025-05-29T15:21:52.839760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:21:52.839773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046578944 2025-05-29T15:21:52.839780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:21:52.839787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:21:52.840592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:21:52.840610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046578944 2025-05-29T15:21:52.840617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:21:52.841135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:21:52.841149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:21:52.841168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:21:52.841176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:21:52.841940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:21:52.842547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:21:52.842609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:21:52.842831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2025-05-29T15:21:52.842842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-05-29T15:21:52.842848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion 
transaction is registered, txId: 1, at schemeshard: 72057594046578944 2025-05-29T15:21:53.112076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2025-05-29T15:21:53.112132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2025-05-29T15:21:53.112146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:21:53.112255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:21:53.112265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:21:53.112302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2025-05-29T15:21:53.112316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:21:53.115518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:21:53.115546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-05-29T15:21:53.115602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:21:53.115608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:261:2249], at schemeshard: 72057594046578944, txId: 1, path id: 1 202 ... 
{ Items { Kind: 10 Id: 4 Generation: 1 } } } AffectedKinds: 10 RawConsoleConfig { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } Version { Items { Kind: 10 Id: 1 Generation: 1 } Items { Kind: 10 Id: 2 Generation: 1 } Items { Kind: 10 Id: 3 Generation: 1 } Items { Kind: 10 Id: 4 Generation: 1 } } } } 2025-05-29T15:21:53.449093Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:221: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-05-29T15:21:53.449135Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: MonitoringConfigItem 2025-05-29T15:21:53.449157Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [1:401:2357]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2025-05-29T15:21:53.449163Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: MonitoringConfigItem 2025-05-29T15:21:53.449171Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [1:404:2355]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2025-05-29T15:21:53.449672Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:486: TDomainTenantPool(dc-1) Got new monitoring config: MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } 2025-05-29T15:21:53.449679Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:452: TDomainTenantPool(dc-1) static slot label modified from static to static-again 2025-05-29T15:21:53.449682Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:401:2357] 2025-05-29T15:21:53.449712Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [1:401:2357], Recipient [1:400:2356]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-05-29T15:21:53.449719Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-05-29T15:21:53.449744Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [1:404:2355], Recipient [1:400:2356]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-05-29T15:21:53.449747Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-05-29T15:21:53.471636Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273285146, Sender [1:406:2356], Recipient [1:400:2356]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false EnableScaleRecommender: true } Version { Items { Kind: 10 Id: 5 Generation: 1 } } } AffectedKinds: 10 RawConsoleConfig { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } Version { Items { Kind: 10 Id: 1 Generation: 1 } Items { Kind: 10 Id: 2 Generation: 1 } Items { Kind: 10 Id: 3 
Generation: 1 } Items { Kind: 10 Id: 4 Generation: 1 } Items { Kind: 10 Id: 5 Generation: 1 } } } } 2025-05-29T15:21:53.471670Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:221: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-05-29T15:21:53.471710Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: MonitoringConfigItem 2025-05-29T15:21:53.471728Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [1:401:2357]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2025-05-29T15:21:53.471736Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: MonitoringConfigItem 2025-05-29T15:21:53.471746Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [1:404:2355]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2025-05-29T15:21:53.472740Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:486: TDomainTenantPool(dc-1) Got new monitoring config: MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } 2025-05-29T15:21:53.472783Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [1:401:2357], Recipient [1:400:2356]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-05-29T15:21:53.472792Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-05-29T15:21:53.472813Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [1:404:2355], Recipient [1:400:2356]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-05-29T15:21:53.472817Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-05-29T15:21:53.494816Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273285146, Sender [1:406:2356], Recipient [1:400:2356]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false EnableScaleRecommender: true } Version { Items { Kind: 10 Id: 6 Generation: 1 } } } AffectedKinds: 10 RawConsoleConfig { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } Version { Items { Kind: 10 Id: 1 Generation: 1 } Items { Kind: 10 Id: 2 Generation: 1 } Items { Kind: 10 Id: 3 Generation: 1 } Items { Kind: 10 Id: 4 Generation: 1 } Items { Kind: 10 Id: 5 Generation: 1 } Items { Kind: 10 Id: 6 Generation: 1 } } } } 2025-05-29T15:21:53.494844Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:221: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-05-29T15:21:53.494886Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: MonitoringConfigItem 2025-05-29T15:21:53.494905Z node 1 :CONFIGS_DISPATCHER TRACE: 
configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [1:401:2357]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2025-05-29T15:21:53.494913Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: MonitoringConfigItem 2025-05-29T15:21:53.494925Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [1:404:2355]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2025-05-29T15:21:53.495992Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:486: TDomainTenantPool(dc-1) Got new monitoring config: MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } 2025-05-29T15:21:53.496032Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [1:401:2357], Recipient [1:400:2356]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-05-29T15:21:53.496040Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-05-29T15:21:53.496060Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [1:404:2355], Recipient [1:400:2356]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-05-29T15:21:53.496064Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-05-29T15:21:53.518107Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273285146, Sender [1:406:2356], Recipient [1:400:2356]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false EnableScaleRecommender: true } Version { Items { Kind: 10 Id: 7 Generation: 1 } } } AffectedKinds: 10 RawConsoleConfig { MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } Version { Items { Kind: 10 Id: 1 Generation: 1 } Items { Kind: 10 Id: 2 Generation: 1 } Items { Kind: 10 Id: 3 Generation: 1 } Items { Kind: 10 Id: 4 Generation: 1 } Items { Kind: 10 Id: 5 Generation: 1 } Items { Kind: 10 Id: 6 Generation: 1 } Items { Kind: 10 Id: 7 Generation: 1 } } } } 2025-05-29T15:21:53.518137Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:221: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-05-29T15:21:53.518177Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: MonitoringConfigItem 2025-05-29T15:21:53.518194Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [1:401:2357]: Config { MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2025-05-29T15:21:53.518200Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: MonitoringConfigItem 2025-05-29T15:21:53.518210Z node 1 :CONFIGS_DISPATCHER 
TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [1:404:2355]: Config { MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2025-05-29T15:21:53.519213Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:486: TDomainTenantPool(dc-1) Got new monitoring config: MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } 2025-05-29T15:21:53.519252Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [1:401:2357], Recipient [1:400:2356]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-05-29T15:21:53.519260Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-05-29T15:21:53.519278Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [1:404:2355], Recipient [1:400:2356]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-05-29T15:21:53.519283Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse |58.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Init [GOOD] >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [GOOD] >> TNodeBrokerTest::NodesMigration1001Nodes >> TNodeBrokerTest::NodesMigrationExpireRemoved >> TNodeBrokerTest::NodesMigrationNewExpiredNode >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [FAIL] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] |58.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.8%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [FAIL] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> PersQueueSdkReadSessionTest::StopResumeReadingData [FAIL] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange >> KqpYql::TestUuidDefaultColumn [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] Test command err: 2025-05-29T15:21:53.284920Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.285139Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.292677Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.292797Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.329589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:53.329621Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:21:53.338134Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:21:53.338631Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:21:53.338722Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:21:53.338980Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:21:53.339673Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:21:53.339864Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:21:53.339922Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 
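The NODE_BROKER trace that follows shows the broker bootstrapping its first epoch (#1.1) and then registering dynamic nodes #1024 and #1025, bumping the epoch version on each registration so subscribers holding a cached version can ask for just the delta. Below is a minimal, hypothetical C++ sketch of that versioned-registry idea — illustrative names and types only, not the NKikimr implementation:

// Hypothetical sketch (not the NKikimr source): a registry that, like the
// NODE_BROKER trace, assigns dynamic node IDs from a fixed base (1024) and
// bumps an epoch version on every registration so a subscriber can request
// "everything newer than version N".
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

struct NodeRecord {
    uint32_t nodeId;
    std::string host;
    uint16_t port;
    uint64_t version;   // epoch version at which this record was added
};

class NodeRegistry {
public:
    // Registers a node and bumps the epoch version, mirroring the
    // "[Committed] Update current epoch version from 1 to 2" lines.
    NodeRecord Register(const std::string& host, uint16_t port) {
        NodeRecord rec{nextId_++, host, port, ++version_};
        log_.push_back(rec);
        return rec;
    }

    // Returns records added after `sinceVersion`, i.e. the delta a
    // subscriber with a cached version would receive (TEvUpdateNodes).
    std::vector<NodeRecord> UpdatesSince(uint64_t sinceVersion) const {
        std::vector<NodeRecord> out;
        for (const auto& rec : log_)
            if (rec.version > sinceVersion) out.push_back(rec);
        return out;
    }

private:
    uint32_t nextId_ = 1024;  // dynamic node IDs start at 1024 in the trace
    uint64_t version_ = 1;    // epoch #1.1
    std::vector<NodeRecord> log_;
};

int main() {
    NodeRegistry reg;
    reg.Register("host1", 1001);          // -> #1024.v2
    reg.Register("host2", 1001);          // -> #1025.v3
    for (const auto& rec : reg.UpdatesSince(1))
        std::cout << "#" << rec.nodeId << ".v" << rec.version
                  << " " << rec.host << ":" << rec.port << "\n";
}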
2025-05-29T15:21:53.339938Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:53.339944Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:21:53.339960Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:21:53.339973Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:21:53.339979Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:21:53.339983Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:21:53.339988Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:53.340010Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1 2025-05-29T15:21:53.340016Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes 2025-05-29T15:21:53.366588Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:21:53.366636Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-05-29T15:21:53.366648Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-05-29T15:21:53.366659Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z nodes=0 expired=0 removed=0 2025-05-29T15:21:53.411247Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:201:2197], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:53.411305Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:201:2197] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } ... waiting for nameservers are connected (done) 2025-05-29T15:21:53.411877Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:18:2065], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 0 SeqNo: 0 } 2025-05-29T15:21:53.411893Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:53.411902Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:18:2065], seqNo: 0, version: 0, server pipe id: [1:201:2197] 2025-05-29T15:21:53.411914Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v0 -> v1 to [1:18:2065] ... 
blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 2025-05-29T15:21:53.411964Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:205:2201], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:53.412009Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:203:2199], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:21:53.412015Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:21:53.412024Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:21:53.412087Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:21:53.412104Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:16:2063], path# /dc-1, domainOwnerId# 72057594046678944 2025-05-29T15:21:53.423569Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-05-29T15:21:53.423660Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 
72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:207:2202] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:21:53.423710Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:16:2063], cacheItem# { Subscriber: { Subscriber: [1:207:2202] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:21:53.423801Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:214:2203], recipient# [1:206:2176], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:21:53.423816Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:21:53.423839Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finish ... 
ePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:21:53.863661Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [2:212:2201], recipient# [2:204:2176], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:21:53.863676Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:21:53.863693Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:21:53.863707Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [2:204:2176], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:53.863712Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:53.863733Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:21:53.863737Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-05-29T15:21:53.863768Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.host1.host1 address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:21:53.863820Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v2 host1:1001 2025-05-29T15:21:53.863827Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2 2025-05-29T15:21:53.863831Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=2 2025-05-29T15:21:53.881580Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:21:53.881611Z node 2 :NODE_BROKER DEBUG: 
node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-05-29T15:21:53.881623Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-05-29T15:21:53.881628Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v2 host1:1001 to epoch cache 2025-05-29T15:21:53.881654Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-05-29T15:21:53.881696Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } 2025-05-29T15:21:53.881823Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [2:216:2205], Recipient [2:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:53.881861Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [2:201:2197], Recipient [2:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:21:53.881868Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:21:53.881877Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:21:53.881919Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:21:53.881944Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [2:16:2063], cacheItem# { Subscriber: { Subscriber: [2:205:2200] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:21:53.881985Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [2:218:2206], recipient# [2:217:2176], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { 
Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:21:53.881998Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:21:53.882010Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:21:53.882022Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [2:217:2176], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:53.882028Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:53.882045Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:21:53.882049Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-05-29T15:21:53.882076Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v3 host2:1001 to database state=Active resolvehost=host2.host2.host2 address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-05-29T15:21:53.882131Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v3 host2:1001 2025-05-29T15:21:53.882139Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 2 to 3 2025-05-29T15:21:53.882143Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=3 2025-05-29T15:21:53.897900Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:21:53.897928Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v3 host2:1001 2025-05-29T15:21:53.897938Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 2 to 3 2025-05-29T15:21:53.897945Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v3 host2:1001 to epoch cache 2025-05-29T15:21:53.897972Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v3 to update nodes log 2025-05-29T15:21:53.898016Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200023000 Name: "slot-1" } ... 
waiting for cache miss 2025-05-29T15:21:53.898105Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:525: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1024 Deadline: 1.107024s } 2025-05-29T15:21:53.898127Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:626: New cache miss: nodeId# 1024, deadline# 1.107024s 2025-05-29T15:21:53.898133Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:630: Schedule wakeup for new earliest deadline 1.107024s 2025-05-29T15:21:53.898142Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:525: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1025 Deadline: 1.107024s } 2025-05-29T15:21:53.898148Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:626: New cache miss: nodeId# 1025, deadline# 1.107024s ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) 2025-05-29T15:21:53.967258Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:873: HandleWakeup at 1.108024s 2025-05-29T15:21:53.967302Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:134: Cache miss failed: nodeId=1024, error=Deadline exceeded 2025-05-29T15:21:53.967315Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:134: Cache miss failed: nodeId=1025, error=Deadline exceeded ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateWithFunction Test command err: Trying to start YDB, gRPC: 25055, MsgBus: 19754 2025-05-29T15:21:33.057333Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888156452032496:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.057390Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0017f5/r3tmp/tmpc8shPl/pdisk_1.dat 2025-05-29T15:21:33.127149Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888156452032476:2079] 1748532093057166 != 1748532093057169 2025-05-29T15:21:33.127484Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25055, node 1 2025-05-29T15:21:33.140682Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.140699Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.140710Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.140751Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:33.160073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.160100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.161186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19754 TClient is connected 
to server localhost:19754 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.209429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.212165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.235504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.295514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.339413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.403243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.531387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888156452034113:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.531418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.585082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.606513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.629354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.658141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.675030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.695673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.720302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.747557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888156452034768:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.747580Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.747736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888156452034773:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.748518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:33.755379Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888156452034775:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:21:33.831907Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888156452034826:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:34.088128Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888156452034835:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:34.090145Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDZiYmUwMTUtNzNjMDJhYWMtNmNlYTExOGEtYmQ5NzQ0ZjM=, ActorId: [1:7509888156452034101:2400], ActorState: ExecuteState, TraceId: 01jwea4rsk718vvzn2vps63vn5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:21:34.093410Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A80ED5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A77ED6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C19C66 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B97B2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B90B2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260DACEC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260DACEC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260DACEC 8. /-S/util/thread/pool.h:71: Process @ 0x260DACEC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A88859 10. /-S/util/thread/factory.h:15: Execute @ 0x13A87249 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A87249 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A826BC 13. ??:0: ?? @ 0x7F376E07FAC2 14. ??:0: ?? @ 0x7F376E11184F Trying to start YDB, gRPC: 11259, MsgBus: 7255 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0017f5/r3tmp/tmpFf43QB/pdisk_1.dat 2025-05-29T15:21:38.486891Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888176999177766:2218];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:38.533211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:38.556808Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888176999177562:2079] 1748532098480 ... qp_ut_common.h:375: AssertSuccessResult @ 0x260B97B2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B90B2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260DACEC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260DACEC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260DACEC 8. /-S/util/thread/pool.h:71: Process @ 0x260DACEC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A88859 10. /-S/util/thread/factory.h:15: Execute @ 0x13A87249 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A87249 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A826BC 13. ??:0: ?? @ 0x7F4E487D7AC2 14. ??:0: ?? 
@ 0x7F4E4886984F Trying to start YDB, gRPC: 28051, MsgBus: 17808 2025-05-29T15:21:49.259148Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888227639263443:2081];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:49.259437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0017f5/r3tmp/tmpEjZ1io/pdisk_1.dat 2025-05-29T15:21:49.355841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:49.355879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:49.363354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:49.364250Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28051, node 1 2025-05-29T15:21:49.402987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:49.403001Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:49.403004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:49.403052Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17808 TClient is connected to server localhost:17808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:49.628075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:49.632774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:49.719380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
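The VERIFY failure above comes from AssertSuccessResult checking result.IsSuccess() on a worker thread ("assertion failed in non-unittest thread") and aborting the whole test binary rather than reporting a soft test failure. A hypothetical sketch of that assertion pattern — illustrative only, not the YDB source:

// Hypothetical sketch of the pattern behind the stack trace above
// (AssertSuccessResult -> RaiseError -> abort). It shows why a failed
// IsSuccess() check inside a thread-pool worker takes the process down
// with "VERIFY failed" instead of a normal unittest failure: an exception
// thrown from the worker would be swallowed rather than reported.
#include <cstdlib>
#include <iostream>
#include <string>
#include <vector>

struct TStatus {
    bool success;
    std::vector<std::string> issues;
    bool IsSuccess() const { return success; }
};

// Mirrors the spirit of NKikimr::NKqp::AssertSuccessResult: print the
// accumulated issues and abort hard.
void AssertSuccessResult(const TStatus& result) {
    if (!result.IsSuccess()) {
        std::cerr << "VERIFY failed: assertion failed: (result.IsSuccess())\n";
        for (const auto& issue : result.issues)
            std::cerr << "  " << issue << "\n";
        std::abort();  // non-unittest thread: fail the whole binary
    }
}

int main() {
    TStatus ok{true, {}};
    AssertSuccessResult(ok);  // passes silently

    TStatus bad{false, {"Fatal: Execution, code: 1060",
                        "Fatal: yql_expr.h: index out of range, code: 1"}};
    AssertSuccessResult(bad); // aborts, as in the log above
}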
2025-05-29T15:21:49.771685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:49.813841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:49.838033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:49.903390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888227639265008:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:49.903422Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:49.952155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:49.962931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:49.971995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:49.985238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.000701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.026381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.037716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.059404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888231934232958:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.059446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.059572Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888231934232963:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.060602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:50.064256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-05-29T15:21:50.064343Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888231934232965:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-05-29T15:21:50.163707Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888231934233016:3395] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:21:50.264905Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888231934233025:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:21:50.266660Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDUxMjVlNWUtODBmZDg5NDUtZGE1YTQ0OTMtNTM1ZTRlZWM=, ActorId: [1:7509888227639265005:2401], ActorState: ExecuteState, TraceId: 01jwea58qa0dsnbvjdjzt53erz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:21:50.267685Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A80ED5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A77ED6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C19C66
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B97B2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B90B2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260DACEC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260DACEC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260DACEC
8. /-S/util/thread/pool.h:71: Process @ 0x260DACEC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A88859
10. /-S/util/thread/factory.h:15: Execute @ 0x13A87249
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A87249
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A826BC
13. ??:0: ?? @ 0x7F4512EB4AC2
14. ??:0: ?? @ 0x7F4512F4684F
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD]
Test command err:
2025-05-29T15:21:53.684999Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:21:53.685720Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:21:53.698712Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:21:53.698785Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:21:53.698887Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:21:53.698935Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:21:53.699066Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:21:53.699249Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:21:53.699396Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:21:53.700204Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:21:53.737988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:21:53.738015Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ...
waiting for nameservers are connected 2025-05-29T15:21:53.743555Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:21:53.744040Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:21:53.744114Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:21:53.744328Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:21:53.745108Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:21:53.745159Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:21:53.745210Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:21:53.745224Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:53.745229Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:21:53.745244Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:21:53.745280Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:21:53.745285Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:21:53.745289Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:21:53.745294Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:53.745313Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1 2025-05-29T15:21:53.745319Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes 2025-05-29T15:21:53.776832Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:21:53.776872Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-05-29T15:21:53.776883Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-05-29T15:21:53.776894Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z nodes=0 expired=0 removed=0 2025-05-29T15:21:53.841000Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:251:2198], Recipient [1:217:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:53.841049Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:251:2198] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } 2025-05-29T15:21:53.841524Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:19:2066], Recipient 
[1:217:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2025-05-29T15:21:53.841539Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:53.841566Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:53.852010Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:253:2199], Recipient [1:217:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:53.852063Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:253:2199] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } ... waiting for nameservers are connected (done) 2025-05-29T15:21:53.852178Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [2:41:2066], Recipient [1:253:2199] 2025-05-29T15:21:53.852185Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:53.852202Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:53.852216Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:257:2203], Recipient [1:217:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:53.852244Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:255:2201], Recipient [1:217:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:53.852247Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:53.852253Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:53.854310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-05-29T15:21:53.859787Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:291:2235], Recipient [1:217:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:53.859843Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:255:2201], Recipient [1:217:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "/dc-1/SharedDB" } 2025-05-29T15:21:53.859851Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:21:53.859861Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 
1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "/dc-1/SharedDB" 2025-05-29T15:21:53.859921Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:17:2064], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/SharedDB TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:21:53.859938Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:17:2064], path# /dc-1/SharedDB, domainOwnerId# 72057594046678944 2025-05-29T15:21:53.865039Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:17:2064], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/SharedDB PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/SharedDB" PathDescription { Self { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944 } 2025-05-29T15:21:53.865126Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:17:2064], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/SharedDB PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/SharedDB" PathDescription { Self { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } P ... 
: Registration request from host1:1001 (not fixed) tenant: /dc-1/SharedDB 2025-05-29T15:21:53.865405Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2025-05-29T15:21:53.865447Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v2 host1:1001 2025-05-29T15:21:53.865454Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2 2025-05-29T15:21:53.865457Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=2 2025-05-29T15:21:53.886680Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:21:53.886708Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-05-29T15:21:53.886718Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-05-29T15:21:53.886726Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v2 host1:1001 to epoch cache 2025-05-29T15:21:53.886773Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-05-29T15:21:53.886812Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } 2025-05-29T15:21:53.887099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 2025-05-29T15:21:53.890548Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:331:2267], Recipient [1:217:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:53.890612Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:255:2201], Recipient [1:217:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "/dc-1/ServerlessDB" } 2025-05-29T15:21:53.890619Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:21:53.890629Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "/dc-1/ServerlessDB" 2025-05-29T15:21:53.890675Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:17:2064], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/ServerlessDB TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:21:53.890691Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:17:2064], path# /dc-1/ServerlessDB, domainOwnerId# 72057594046678944 2025-05-29T15:21:53.890959Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:17:2064], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/ServerlessDB PathId: [OwnerId: 72057594046678944, LocalPathId: 3] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/ServerlessDB" PathDescription { Self { Name: "ServerlessDB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944 } 2025-05-29T15:21:53.891004Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:17:2064], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/ServerlessDB PathId: [OwnerId: 72057594046678944, LocalPathId: 3] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/ServerlessDB" PathDescription { Self { Name: "ServerlessDB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:333:2268] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 
IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:21:53.891034Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:17:2064], cacheItem# { Subscriber: { Subscriber: [1:333:2268] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 5000002 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] DomainId: [OwnerId: 72057594046678944, LocalPathId: 3] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/ServerlessDB TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:21:53.891077Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:340:2269], recipient# [1:332:2177], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/ServerlessDB TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared Users: [] Groups: [] } }] } 2025-05-29T15:21:53.891090Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/ServerlessDB TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared Users: [] Groups: [] } } 2025-05-29T15:21:53.891105Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "/dc-1/ServerlessDB": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:3 2025-05-29T15:21:53.891121Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:332:2177], Recipient [1:217:2177]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:53.891126Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:53.891138Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:21:53.891141Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:1001 (not fixed) tenant: /dc-1/ServerlessDB 2025-05-29T15:21:53.891169Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v3 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:3 slotindex=0 authorizedbycertificate=false 
2025-05-29T15:21:53.891204Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v3 host2:1001 2025-05-29T15:21:53.891210Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 2 to 3 2025-05-29T15:21:53.891214Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=3 2025-05-29T15:21:53.902062Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:21:53.902105Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v3 host2:1001 2025-05-29T15:21:53.902116Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 2 to 3 2025-05-29T15:21:53.902121Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v3 host2:1001 to epoch cache 2025-05-29T15:21:53.902148Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v3 to update nodes log 2025-05-29T15:21:53.902189Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200025000 Name: "slot-0" } >> KqpOlapSysView::StatsSysViewRanges [GOOD] >> TDynamicNameserverTest::TestCacheUsage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateEmptySum Test command err: Trying to start YDB, gRPC: 13438, MsgBus: 14271 2025-05-29T15:21:33.092604Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888155626307621:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.092668Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0017d1/r3tmp/tmpUjUq6I/pdisk_1.dat 2025-05-29T15:21:33.176033Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888155626307455:2079] 1748532093091499 != 1748532093091502 2025-05-29T15:21:33.176236Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13438, node 1 2025-05-29T15:21:33.195078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.195109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.196590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:33.199237Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.199256Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.199258Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.199308Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14271 TClient is 
connected to server localhost:14271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.310885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.320161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.364878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.405446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.462370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.495844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.719491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888155626309107:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.719532Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.804352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.820802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.840967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.854723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.874923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.938320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.950212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.968978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888155626309765:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.969002Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.969158Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888155626309770:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.970145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:33.974119Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888155626309772:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:21:34.055544Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888159921277119:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:21:34.270146Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888159921277128:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:21:34.271641Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWRhNGU4ZTItMTQ4ODdjMWMtMWFlOGE3YzktYzY2MjU2MDQ=, ActorId: [1:7509888155626309081:2401], ActorState: ExecuteState, TraceId: 01jwea4s0gd4gwj7msat2qwz6z, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:21:34.274970Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A80ED5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A77ED6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C19C66
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B97B2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B90B2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260DACEC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260DACEC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260DACEC
8. /-S/util/thread/pool.h:71: Process @ 0x260DACEC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A88859
10. /-S/util/thread/factory.h:15: Execute @ 0x13A87249
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A87249
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A826BC
13. ??:0: ?? @ 0x7F6930458AC2
14. ??:0: ?? @ 0x7F69304EA84F
Trying to start YDB, gRPC: 17347, MsgBus: 11277
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0017d1/r3tmp/tmpazHIue/pdisk_1.dat
2025-05-29T15:21:39.042579Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888177892419559:2272];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:21:39.042652Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-05-29T15:21:39.077779Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888177892419313:2079] 174853209898 ...
mon.h:375: AssertSuccessResult @ 0x260B97B2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B90B2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260DACEC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260DACEC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260DACEC
8. /-S/util/thread/pool.h:71: Process @ 0x260DACEC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A88859
10. /-S/util/thread/factory.h:15: Execute @ 0x13A87249
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A87249
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A826BC
13. ??:0: ?? @ 0x7FDD29B45AC2
14. ??:0: ??
@ 0x7FDD29BD784F Trying to start YDB, gRPC: 9751, MsgBus: 7007 2025-05-29T15:21:49.875616Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888227844720709:2263];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:49.875650Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0017d1/r3tmp/tmpUH42W6/pdisk_1.dat 2025-05-29T15:21:49.951014Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:49.951310Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888227844720485:2079] 1748532109873829 != 1748532109873832 TServer::EnableGrpc on GrpcPort 9751, node 1 2025-05-29T15:21:49.968422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:49.968435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:49.968437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:49.968497Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7007 2025-05-29T15:21:50.021276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:50.021311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:50.022186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7007 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:50.149228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:21:50.159115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:50.233726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:50.300455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:21:50.332095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:50.356108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.401866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888232139689417:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.401900Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.460464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.472765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.482298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.495131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.509396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.525840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.537250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.556324Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888232139690068:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.556367Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.556594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888232139690073:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.557605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:50.564457Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888232139690075:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:21:50.653977Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888232139690126:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:21:50.780483Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888232139690135:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:21:50.781215Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTJjMjUzMTUtNGE5MTc3N2ItY2JlMjFhZTMtMjBmNDk3NmI=, ActorId: [1:7509888232139689414:2401], ActorState: ExecuteState, TraceId: 01jwea596vbj4fcycymr350twp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:21:50.783373Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A80ED5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A77ED6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C19C66
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B97B2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B90B2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260DACEC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260DACEC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260DACEC
8. /-S/util/thread/pool.h:71: Process @ 0x260DACEC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A88859
10. /-S/util/thread/factory.h:15: Execute @ 0x13A87249
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A87249
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A826BC
13. ??:0: ?? @ 0x7F3196B4DAC2
14. ??:0: ?? @ 0x7F3196BDF84F
>> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-false
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewRanges [GOOD]
Test command err:
Trying to start YDB, gRPC: 28706, MsgBus: 22867
2025-05-29T15:21:33.475087Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888158777341513:2220];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026d7/r3tmp/tmp5zrnbn/pdisk_1.dat
2025-05-29T15:21:33.503136Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-05-29T15:21:33.559103Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888158777341302:2079] 1748532093464307 != 1748532093464310
2025-05-29T15:21:33.560486Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 28706, node 1
2025-05-29T15:21:33.580868Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:21:33.580886Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:21:33.580888Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:21:33.580934Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
2025-05-29T15:21:33.603177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:21:33.603201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:21:33.607181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:22867
TClient is connected to server localhost:22867
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.739367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.751518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.798129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:33.813404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158777341998:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.813628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158777341998:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.813671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158777341998:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.813693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158777341998:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.813716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158777341998:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.813735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158777341998:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.813753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888158777341998:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.813782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158777341998:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.813802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158777341998:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.813820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158777341998:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.813843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158777341998:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.813874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888158777341998:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.818149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158777341996:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.818161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158777341996:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.818193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158777341996:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.818212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158777341996:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.818231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158777341996:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.818251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158777341996:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.818274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888158777341996:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.818293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158777341996:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.818314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158777341996:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.818332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158777341996:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.818350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158777341996:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.818368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888158777341996:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.819035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:33.819047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:33.819064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:33.819068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:33.820887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:33.820913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switc ... 
tionId: 14 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037890:1:14:16:3:7184:0] EntityType: COL BlobRangeSize: 7184 PathId: 5 Rows: 841 RawBytes: 12615 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: uid PortionId: 14 InternalEntityId: 3 ChunkIdx: 1 BlobId: [72075186224037890:1:14:16:4:7208:0] EntityType: COL BlobRangeSize: 7208 PathId: 5 Rows: 1681 RawBytes: 6724 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: level PortionId: 14 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037890:1:14:16:5:4136:0] EntityType: COL BlobRangeSize: 4136 PathId: 5 Rows: 560 RawBytes: 636221 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 14 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037890:1:14:16:6:7976:0] EntityType: COL BlobRangeSize: 7976 PathId: 5 Rows: 560 RawBytes: 639172 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 14 InternalEntityId: 5 ChunkIdx: 1 BlobId: [72075186224037890:1:14:16:7:7968:0] EntityType: COL BlobRangeSize: 7968 PathId: 5 Rows: 561 RawBytes: 640265 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 14 InternalEntityId: 5 ChunkIdx: 2 BlobId: [72075186224037890:1:14:16:8:8032:0] EntityType: COL BlobRangeSize: 8032 PathId: 5 Rows: 1597 RawBytes: 12776 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: timestamp PortionId: 16 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:16:18:0:6600:0] EntityType: COL BlobRangeSize: 6600 PathId: 5 Rows: 798 RawBytes: 6028 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: resource_id PortionId: 16 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:16:18:1:6128:0] EntityType: COL BlobRangeSize: 6128 PathId: 5 Rows: 799 RawBytes: 6392 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: resource_id PortionId: 16 InternalEntityId: 2 ChunkIdx: 1 BlobId: [72075186224037890:1:16:18:2:6512:0] EntityType: COL BlobRangeSize: 6512 PathId: 5 Rows: 798 RawBytes: 11970 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: uid PortionId: 16 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037890:1:16:18:3:6856:0] EntityType: COL BlobRangeSize: 6856 PathId: 5 Rows: 799 RawBytes: 11985 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: uid PortionId: 16 InternalEntityId: 3 ChunkIdx: 1 BlobId: [72075186224037890:1:16:18:4:6872:0] EntityType: COL BlobRangeSize: 6872 PathId: 5 Rows: 1597 RawBytes: 6388 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: level PortionId: 16 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037890:1:16:18:5:3904:0] EntityType: COL BlobRangeSize: 3904 PathId: 5 Rows: 532 RawBytes: 603600 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 16 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037890:1:16:18:6:7624:0] EntityType: COL BlobRangeSize: 7624 PathId: 5 Rows: 532 RawBytes: 607260 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: 
INSERTED EntityName: message PortionId: 16 InternalEntityId: 5 ChunkIdx: 1 BlobId: [72075186224037890:1:16:18:7:7616:0] EntityType: COL BlobRangeSize: 7616 PathId: 5 Rows: 533 RawBytes: 609002 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 16 InternalEntityId: 5 ChunkIdx: 2 BlobId: [72075186224037890:1:16:18:8:7664:0] EntityType: COL BlobRangeSize: 7664 PathId: 5 Rows: 1642 RawBytes: 13136 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: timestamp PortionId: 18 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:18:20:0:6784:0] EntityType: COL BlobRangeSize: 6784 PathId: 5 Rows: 821 RawBytes: 6195 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: resource_id PortionId: 18 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:18:20:1:6272:0] EntityType: COL BlobRangeSize: 6272 PathId: 5 Rows: 821 RawBytes: 6568 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: resource_id PortionId: 18 InternalEntityId: 2 ChunkIdx: 1 BlobId: [72075186224037890:1:18:20:2:6664:0] EntityType: COL BlobRangeSize: 6664 PathId: 5 Rows: 821 RawBytes: 12315 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: uid PortionId: 18 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037890:1:18:20:3:7024:0] EntityType: COL BlobRangeSize: 7024 PathId: 5 Rows: 821 RawBytes: 12315 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: uid PortionId: 18 InternalEntityId: 3 ChunkIdx: 1 BlobId: [72075186224037890:1:18:20:4:7064:0] EntityType: COL BlobRangeSize: 7064 PathId: 5 Rows: 1642 RawBytes: 6568 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: level PortionId: 18 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037890:1:18:20:5:3944:0] EntityType: COL BlobRangeSize: 3944 PathId: 5 Rows: 547 RawBytes: 623729 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 18 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037890:1:18:20:6:7808:0] EntityType: COL BlobRangeSize: 7808 PathId: 5 Rows: 547 RawBytes: 621463 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 18 InternalEntityId: 5 ChunkIdx: 1 BlobId: [72075186224037890:1:18:20:7:7808:0] EntityType: COL BlobRangeSize: 7808 PathId: 5 Rows: 548 RawBytes: 627128 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 18 InternalEntityId: 5 ChunkIdx: 2 BlobId: [72075186224037890:1:18:20:8:7864:0] EntityType: COL BlobRangeSize: 7864 PathId: 5 Rows: 1648 RawBytes: 13184 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: timestamp PortionId: 20 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:20:22:0:6800:0] EntityType: COL BlobRangeSize: 6800 PathId: 5 Rows: 824 RawBytes: 6221 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: resource_id PortionId: 20 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:20:22:1:6256:0] EntityType: COL BlobRangeSize: 6256 PathId: 5 Rows: 824 RawBytes: 6592 BlobRangeOffset: 0 TierName: __DEFAULT 
Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: resource_id PortionId: 20 InternalEntityId: 2 ChunkIdx: 1 BlobId: [72075186224037890:1:20:22:2:6680:0] EntityType: COL BlobRangeSize: 6680 PathId: 5 Rows: 824 RawBytes: 12360 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: uid PortionId: 20 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037890:1:20:22:3:7048:0] EntityType: COL BlobRangeSize: 7048 PathId: 5 Rows: 824 RawBytes: 12360 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: uid PortionId: 20 InternalEntityId: 3 ChunkIdx: 1 BlobId: [72075186224037890:1:20:22:4:7080:0] EntityType: COL BlobRangeSize: 7080 PathId: 5 Rows: 1648 RawBytes: 6592 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: level PortionId: 20 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037890:1:20:22:5:3968:0] EntityType: COL BlobRangeSize: 3968 PathId: 5 Rows: 549 RawBytes: 624671 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 20 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037890:1:20:22:6:7856:0] EntityType: COL BlobRangeSize: 7856 PathId: 5 Rows: 549 RawBytes: 622686 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 20 InternalEntityId: 5 ChunkIdx: 1 BlobId: [72075186224037890:1:20:22:7:7864:0] EntityType: COL BlobRangeSize: 7864 PathId: 5 Rows: 550 RawBytes: 627994 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 20 InternalEntityId: 5 ChunkIdx: 2 BlobId: [72075186224037890:1:20:22:8:7872:0] EntityType: COL BlobRangeSize: 7872 PathId: 5 ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE PathId == UInt64("3") AND Activity == 1 GROUP BY TabletId, PathId, Kind ORDER BY TabletId, Kind RESULT: 2025-05-29T15:21:48.554806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:21:48.554828Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:48.854520Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532105745, txId: 281474976715668] shutting down TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/.sys/store_primary_index_stats` GROUP BY PathId, Kind, TabletId ORDER BY PathId DESC, Kind DESC, TabletId DESC ; RESULT: 2025-05-29T15:21:51.716237Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532108913, txId: 281474976715670] shutting down TabletId: 72075186224037891 Kind: INSERTED PathId: 5 TabletId: 72075186224037890 Kind: INSERTED PathId: 5 TabletId: 72075186224037888 Kind: INSERTED PathId: 5 TabletId: 72075186224037891 Kind: INSERTED PathId: 4 TabletId: 72075186224037889 Kind: INSERTED PathId: 4 TabletId: 72075186224037888 Kind: INSERTED PathId: 4 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 
72075186224037888 Kind: INSERTED PathId: 3 ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE PathId > UInt64("0") AND PathId < UInt32("4") OR PathId > UInt64("4") AND PathId <= UInt64("5") GROUP BY PathId, Kind, TabletId ORDER BY PathId DESC, Kind DESC, TabletId DESC ; RESULT: TabletId: 72075186224037891 Kind: INSERTED PathId: 5 TabletId: 72075186224037890 Kind: INSERTED PathId: 5 TabletId: 72075186224037888 Kind: INSERTED PathId: 5 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037888 Kind: INSERTED PathId: 3 2025-05-29T15:21:54.527807Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532111782, txId: 281474976715672] shutting down >> TNodeBrokerTest::NodesSubscriberDisconnect >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-true >> BasicUsage::BrokenCredentialsProvider [FAIL] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [FAIL] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> TDynamicNameserverTest::TestCacheUsage [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-true ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateEmptySum Test command err: Trying to start YDB, gRPC: 15374, MsgBus: 6907 2025-05-29T15:21:33.219689Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888155443550650:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.219933Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0017e5/r3tmp/tmpcBSK6p/pdisk_1.dat 2025-05-29T15:21:33.308371Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888155443550490:2079] 1748532093217513 != 1748532093217516 2025-05-29T15:21:33.311320Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15374, node 1 2025-05-29T15:21:33.335932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.335948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.335951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.336005Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:33.355202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.355247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.356185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6907 
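A note on the three sys-view queries above before the runner output moves on: the second (unfiltered) query shows that the store holds PathIds 3, 4 and 5, and the third query's predicate, PathId > 0 AND PathId < 4 OR PathId > 4 AND PathId <= 5, covers (0,4) and (4,5], so PathId 4 is exactly the row set missing from its RESULT block. A sketch of issuing the same scan through the C++ SDK, assuming the StreamExecuteScanQuery entry point (sys views are read via scan queries in these suites; the function and variable names below are illustrative, not taken from the test source):

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    // Sketch: scan /Root/olapStore/.sys/store_primary_index_stats with the
    // range predicate from the log; rows come back as (PathId, Kind, TabletId).
    void DumpIndexStats(NYdb::NTable::TTableClient& client) {
        auto it = client.StreamExecuteScanQuery(R"sql(
            SELECT PathId, Kind, TabletId
            FROM `/Root/olapStore/.sys/store_primary_index_stats`
            WHERE PathId > UInt64("0") AND PathId < UInt32("4")
               OR PathId > UInt64("4") AND PathId <= UInt64("5")
            GROUP BY PathId, Kind, TabletId
            ORDER BY PathId DESC, Kind DESC, TabletId DESC;
        )sql").GetValueSync();
        for (;;) {
            auto part = it.ReadNext().GetValueSync();
            if (!part.IsSuccess()) {
                break; // end of stream or error
            }
            // Each part carries a result set shaped like the RESULT block above,
            // with PathId 4 filtered out by the two half-open ranges.
        }
    }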
TClient is connected to server localhost:6907 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.464743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.468137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.471276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.507998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.545846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.571959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.179472Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888159738519428:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.179502Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.285616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.302556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.324978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.343724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.367119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.385143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.400695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.420494Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888159738520089:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.420520Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.420679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888159738520094:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.421696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.425527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:21:34.425632Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888159738520096:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:21:34.491641Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888159738520147:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:21:34.609528Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888159738520156:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:21:34.610860Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDMyZTM3ZWItYzJmOGQ0NGEtYmQ3ZWI3NjEtZTFlNzUyNTM=, ActorId: [1:7509888159738519402:2401], ActorState: ExecuteState, TraceId: 01jwea4sek4f5f999ayma775nj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:21:34.611940Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A80ED5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A77ED6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C19C66
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B97B2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B90B2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260DACEC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260DACEC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260DACEC
8. /-S/util/thread/pool.h:71: Process @ 0x260DACEC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A88859
10. /-S/util/thread/factory.h:15: Execute @ 0x13A87249
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A87249
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A826BC
13. ??:0: ?? @ 0x7FF97287EAC2
14. ??:0: ?? @ 0x7FF97291084F
Trying to start YDB, gRPC: 1953, MsgBus: 20252
2025-05-29T15:21:39.259945Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888181451409716:2209];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:21:39.260125Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0017e5/r3tmp/tmpOb45Ht/pdisk ... n.h:375: AssertSuccessResult @ 0x260B97B2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B90B2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260DACEC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260DACEC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260DACEC
8. /-S/util/thread/pool.h:71: Process @ 0x260DACEC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A88859
10. /-S/util/thread/factory.h:15: Execute @ 0x13A87249
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A87249
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A826BC
13. ??:0: ?? @ 0x7F791A5ACAC2
14. ??:0: ?? @ 0x7F791A63E84F
Trying to start YDB, gRPC: 14241, MsgBus: 7601
2025-05-29T15:21:50.483075Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888229290913803:2134];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:21:50.483941Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0017e5/r3tmp/tmpzlvpOR/pdisk_1.dat
2025-05-29T15:21:50.553817Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:21:50.555842Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888229290913705:2079] 1748532110482408 != 1748532110482411
TServer::EnableGrpc on GrpcPort 14241, node 1
2025-05-29T15:21:50.570920Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:21:50.570933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:21:50.570934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:21:50.570978Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
2025-05-29T15:21:50.585414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:21:50.585451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:21:50.586789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:7601
TClient is connected to server localhost:7601
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:21:50.654299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
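Frame 4 of the recurring trace points at CreateSampleTables (kqp_ut_common.cpp:365), the shared fixture that populates the sample tables before each test body runs. A sketch of the step that dies, assuming (not taken from the YDB sources) the usual fixture shape of running setup YQL through a session and asserting on the returned status:

    // Sketch: session is an NYdb::NTable::TSession and the table name is
    // illustrative. With the yql_expr.h:1874 bug, compilation returns
    // INTERNAL_ERROR ("ReplyQueryCompileError" in the KQP_SESSION lines),
    // the status is not successful, and AssertSuccessResult panics the binary.
    auto result = session.ExecuteDataQuery(R"sql(
        REPLACE INTO `/Root/KeyValue` (Key, Value) VALUES (1u, "One");
    )sql", NYdb::NTable::TTxControl::BeginTx().CommitTx()).GetValueSync();
    AssertSuccessResult(result);

Because every test in the affected suites funnels through this one fixture, a single compiler regression surfaces as a wall of identical INTERNAL_ERROR failures rather than as one focused test failure.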
2025-05-29T15:21:50.656955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:50.658187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:50.685122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:50.711112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:50.771616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:50.956525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888229290915339:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.956555Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.998841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.007874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.020198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.034862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.049273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.062336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.076824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.095434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888233585883291:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.095456Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.095475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888233585883296:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.096212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:51.102595Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888233585883298:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:21:51.183315Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888233585883349:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:21:51.289577Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888233585883365:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:21:51.289703Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTE1NGFkMzgtNzYxNjMxMDgtNmU2MjhjYTQtNDI1MjE1YjQ=, ActorId: [1:7509888229290915321:2401], ActorState: ExecuteState, TraceId: 01jwea59qq2asatwsyvp179t6h, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:21:51.294818Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A80ED5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A77ED6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C19C66
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B97B2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B90B2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260DACEC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260DACEC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260DACEC
8. /-S/util/thread/pool.h:71: Process @ 0x260DACEC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A88859
10. /-S/util/thread/factory.h:15: Execute @ 0x13A87249
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A87249
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A826BC
13. ??:0: ?? @ 0x7FC90CDDAAC2
14. ??:0: ?? @ 0x7FC90CE6C84F
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateByColumn
Test command err:
Trying to start YDB, gRPC: 13343, MsgBus: 27774
2025-05-29T15:21:33.236012Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888155709093197:2200];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00180e/r3tmp/tmpVMJhVb/pdisk_1.dat
2025-05-29T15:21:33.289045Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-05-29T15:21:33.320762Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888155709093037:2079] 1748532093215140 != 1748532093215143
2025-05-29T15:21:33.320926Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 13343, node 1
2025-05-29T15:21:33.349485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:21:33.349494Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:21:33.349496Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:21:33.349532Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
2025-05-29T15:21:33.382921Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:21:33.382950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:21:33.387055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:27774
TClient is connected to server localhost:27774
WaitRootIsUp 'Root'...
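Frames 5 through 8 of the trace (future-inl.h, async.h, pool.h) explain the "non-unittest thread" wording: the fixture runs through library/cpp/threading/future on a worker pool, so the assertion fires on a thread the unittest registar does not own. A sketch of that dispatch, assuming conventional use of the headers named in the trace (kikimr stands in for the test's server fixture):

    #include <library/cpp/threading/future/async.h>
    #include <util/thread/pool.h>

    // Sketch: run setup on a pool thread and block on the future. An assert
    // inside the lambda cannot be routed to the unittest framework, so
    // registar.cpp:36 raises "requirement UnittestThread failed" and the
    // process aborts via util/system/yassert.cpp instead of failing one test.
    TThreadPool pool;
    pool.Start(1);
    auto future = NThreading::Async([&] {
        CreateSampleTables(kikimr); // panics here in this run
    }, pool);
    future.GetValueSync();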
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:21:33.533509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:21:33.536458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.543642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.609533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.688571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.723376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.895221Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888155709094683:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:33.895241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.001014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.026942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.038221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.054388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.075522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.100669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.117630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.147399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888160004062630:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.147457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.147653Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888160004062638:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.148524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:34.152005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:21:34.152101Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888160004062640:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:21:34.216830Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888160004062691:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:21:34.463800Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888160004062700:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:21:34.465627Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmM2NjI2MmYtOTg4Mjc0OGUtYjExNjAxMmEtYzhjYWM3ZGE=, ActorId: [1:7509888155709094680:2401], ActorState: ExecuteState, TraceId: 01jwea4s5zcmbyth2h4ta9cyph, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:21:34.467095Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A80ED5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A77ED6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C19C66
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B97B2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B90B2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260DACEC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260DACEC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260DACEC
8. /-S/util/thread/pool.h:71: Process @ 0x260DACEC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A88859
10. /-S/util/thread/factory.h:15: Execute @ 0x13A87249
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A87249
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A826BC
13. ??:0: ?? @ 0x7F75F1BF1AC2
14. ??:0: ?? @ 0x7F75F1C8384F
Trying to start YDB, gRPC: 9392, MsgBus: 61467
2025-05-29T15:21:38.578486Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888180706245875:2220];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00180e/r3tmp/tmpJBG18j/pdisk_1.dat
2025-05-29T15:21:38.617176Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered ... 0x13A80ED5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A77ED6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C19C66
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B97B2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B90B2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260DACEC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260DACEC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260DACEC
8. /-S/util/thread/pool.h:71: Process @ 0x260DACEC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A88859
10. /-S/util/thread/factory.h:15: Execute @ 0x13A87249
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A87249
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A826BC
13. ??:0: ?? @ 0x7F3DA10E8AC2
14. ??:0: ?? @ 0x7F3DA117A84F
Trying to start YDB, gRPC: 10575, MsgBus: 4038
2025-05-29T15:21:50.257814Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888232396824534:2206];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:21:50.295764Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00180e/r3tmp/tmpShYC2q/pdisk_1.dat
2025-05-29T15:21:50.326022Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888232396824354:2079] 1748532110247155 != 1748532110247158
2025-05-29T15:21:50.327528Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 10575, node 1
2025-05-29T15:21:50.350662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:21:50.350677Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:21:50.350680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:21:50.350728Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:4038
2025-05-29T15:21:50.403183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:21:50.403232Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:21:50.404354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:4038
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:21:50.423987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:21:50.430159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:21:50.500664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:50.569087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:50.587463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:50.719361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888232396825996:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.719397Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.768727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.780377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.790073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.845993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.860171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.915656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.930176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:50.945314Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888232396826655:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.945340Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888232396826660:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.945344Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:50.946058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:50.948206Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888232396826662:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:21:51.031611Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888236691794009:3398] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:51.149673Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888236691794025:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:51.149795Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWVhYmQ1ZTMtY2U3MDcxMzktODBmZTg3MzQtZTBjOGJkYzg=, ActorId: [1:7509888232396825968:2399], ActorState: ExecuteState, TraceId: 01jwea59k0bmyeben325d2rpj7, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:21:51.152225Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
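Note: the VERIFY message this line ends with names the failing helper exactly: NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus&) at ydb/core/kqp/ut/common/kqp_ut_common.h:375, failing on (result.IsSuccess()). A minimal sketch of a helper with that signature, assuming the stock library/cpp/testing/unittest macros rather than the exact YDB source:

    #include <library/cpp/testing/unittest/registar.h>

    // Hedged sketch: fail the test when the server returns a non-success status,
    // attaching the server-side issues as the assertion message.
    inline void AssertSuccessResult(const NYdb::TStatus& result) {
        UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
    }

Because CreateSampleTables runs on a thread-pool thread here (frames 5-8 of the stack traces printed with each failure), RaiseError sees "requirement UnittestThread failed" and escalates to Panic, which is why the whole test binary aborts instead of reporting a single failed test case.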
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A80ED5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A77ED6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C19C66 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B97B2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B90B2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260DACEC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260DACEC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260DACEC 8. /-S/util/thread/pool.h:71: Process @ 0x260DACEC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A88859 10. /-S/util/thread/factory.h:15: Execute @ 0x13A87249 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A87249 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A826BC 13. ??:0: ?? @ 0x7F80D52A9AC2 14. ??:0: ?? @ 0x7F80D533B84F >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] >> TNodeBrokerTest::NodesMigration999Nodes [GOOD] >> TPQTest::TestReadRuleVersions >> TPartitionTests::IncorrectRange >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-true [GOOD] |58.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |58.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |58.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf >> TNodeBrokerTest::NodesMigrationReuseRemovedID [GOOD] >> TNodeBrokerTest::NodesMigrationNewExpiredNode [GOOD] >> KqpScripting::SecondaryIndexes |58.9%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPartitionTests::CorrectRange_Commit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration999Nodes [GOOD] Test command err: 2025-05-29T15:21:52.895644Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.895701Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.895730Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.895767Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.895799Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.895821Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.902282Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.902412Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.902463Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.902504Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.902541Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.902577Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.902651Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.902682Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.903526Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.903574Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.903607Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.903638Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.903673Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.903708Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.903768Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.911543Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.911635Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 
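Note: every KQP_COMPILE_ACTOR failure earlier in this log carries the same payload, "yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1": during query compilation something asked an expression node for a child index it does not have, the bounds check threw, and the session replied ReplyQueryCompileError with INTERNAL_ERROR. Purely illustrative sketch of the kind of accessor that raises such a message (type and member names are invented, not the actual yql_expr.h code):

    #include <util/generic/vector.h>
    #include <util/generic/yexception.h>

    struct TNodeSketch {
        TVector<const TNodeSketch*> Children;
        const TNodeSketch& Child(ui32 index) const {
            // This style of check is what surfaces as "index out of range".
            Y_ENSURE(index < Children.size(), "index out of range");
            return *Children[index];
        }
    };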
2025-05-29T15:21:52.911676Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.912673Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.912727Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.912771Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.912908Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.912939Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.912971Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.913008Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.913080Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.913116Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.913148Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.913177Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.913381Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.913414Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.913516Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.913670Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.913739Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.914537Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.914556Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.914567Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.914580Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.914591Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.914606Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.924080Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.924280Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.924326Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.924420Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.925308Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.925623Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.926153Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.926221Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.926331Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.926464Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.926589Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.926707Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.927982Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.928275Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.928389Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.928620Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.957603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:52.957632Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-05-29T15:21:52.963058Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:21:52.963557Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:21:52.963633Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:21:52.963890Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:21:52.964841Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:21:52.964869Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:21:52.964940Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:21:52.964957Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:52.964963Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:21:52.964982Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:21:52.965007Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:21:52.965014Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:21:52.965018Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:21:52.965026Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:52.965048Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330 ... 
date nodes log 2025-05-29T15:21:54.553925Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1573.v504 to update nodes log 2025-05-29T15:21:54.553932Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1572.v504 to update nodes log 2025-05-29T15:21:54.553938Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1571.v504 to update nodes log 2025-05-29T15:21:54.553944Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1570.v504 to update nodes log 2025-05-29T15:21:54.553949Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1569.v504 to update nodes log 2025-05-29T15:21:54.553956Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1568.v504 to update nodes log 2025-05-29T15:21:54.553971Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1567.v504 to update nodes log 2025-05-29T15:21:54.553977Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1566.v504 to update nodes log 2025-05-29T15:21:54.553984Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1565.v504 to update nodes log 2025-05-29T15:21:54.553991Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1564.v504 to update nodes log 2025-05-29T15:21:54.553997Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1563.v504 to update nodes log 2025-05-29T15:21:54.554004Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1562.v504 to update nodes log 2025-05-29T15:21:54.554011Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1561.v504 to update nodes log 2025-05-29T15:21:54.554017Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1560.v504 to update nodes log 2025-05-29T15:21:54.554024Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1559.v504 to update nodes log 2025-05-29T15:21:54.554030Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1558.v504 to update nodes log 2025-05-29T15:21:54.554038Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1557.v504 to update nodes log 2025-05-29T15:21:54.554044Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1556.v504 to update nodes log 2025-05-29T15:21:54.554051Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1555.v504 to update nodes log 2025-05-29T15:21:54.554057Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1554.v504 to update nodes log 2025-05-29T15:21:54.554063Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1553.v504 to update nodes log 2025-05-29T15:21:54.554070Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1552.v504 to update nodes log 2025-05-29T15:21:54.554076Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1551.v504 to update nodes log 2025-05-29T15:21:54.554082Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1550.v504 to update nodes log 2025-05-29T15:21:54.554088Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1549.v504 to update nodes log 2025-05-29T15:21:54.554094Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1548.v504 to update nodes log 2025-05-29T15:21:54.554100Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1547.v504 to update nodes log 2025-05-29T15:21:54.554107Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1546.v504 to update nodes log 2025-05-29T15:21:54.554113Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1545.v504 to update nodes log 2025-05-29T15:21:54.554119Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1544.v504 to update nodes log 2025-05-29T15:21:54.554125Z node 1 :NODE_BROKER DEBUG: 
node_broker.cpp:659: Add node #1543.v504 to update nodes log 2025-05-29T15:21:54.554131Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1542.v504 to update nodes log 2025-05-29T15:21:54.554137Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1541.v504 to update nodes log 2025-05-29T15:21:54.554143Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1540.v504 to update nodes log 2025-05-29T15:21:54.554148Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1536.v504 to update nodes log 2025-05-29T15:21:54.554155Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1787.v504 to update nodes log 2025-05-29T15:21:54.554425Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2764:3794], Recipient [1:2755:3788]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.554540Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2766:3796] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:54.554560Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2768:3798] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:54.554590Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2766:3796], Recipient [1:2755:3788]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.554678Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2767:3797] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:54.554690Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2767:3797], Recipient [1:2755:3788]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.554698Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2770:3800] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:54.554708Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2764:3794] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:54.554730Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2768:3798], Recipient [1:2755:3788]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.554779Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2770:3800], Recipient [1:2755:3788]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.554870Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:2764:3794] 2025-05-29T15:21:54.554877Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.554892Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:21:54.554907Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient 
[1:2766:3796] 2025-05-29T15:21:54.554910Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.554918Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:21:54.554925Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:2767:3797] 2025-05-29T15:21:54.554929Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.554935Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:21:54.554951Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:2768:3798] 2025-05-29T15:21:54.554955Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.554961Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:21:54.564491Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:2770:3800] 2025-05-29T15:21:54.564523Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.564544Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:21:54.570795Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2812:3837], Recipient [1:2755:3788]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.570860Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:2755:3788]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:54.570866Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.570881Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:21:54.573119Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2814:3839], Recipient [1:2755:3788]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.573181Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:2755:3788]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:54.573190Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.573207Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:21:54.577758Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2816:3841], Recipient [1:2755:3788]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.577811Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:2755:3788]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:54.577821Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.577837Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:21:54.581229Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2818:3843], Recipient [1:2755:3788]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.581281Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:2755:3788]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 503 } 2025-05-29T15:21:54.581286Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.581298Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2025-05-29T15:21:55.006858Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.007249Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.010329Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.010414Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.039939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:55.039964Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:21:55.044429Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:21:55.045269Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:21:55.045347Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:21:55.045565Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:21:55.046207Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:21:55.046259Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:21:55.046306Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 
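Note for reading the nameservice traces: the recurring "Deadline: 18446744073709.551615s" in TEvGetNode is not a configured timeout but TInstant::Max() rendered in seconds. TInstant stores microseconds in a ui64, and 2^64 - 1 microseconds is exactly 18446744073709.551615 seconds, i.e. "no deadline":

    #include <util/datetime/base.h>

    // The sentinel "no deadline" value seen throughout these traces:
    const TInstant noDeadline = TInstant::Max(); // 18446744073709551615 microseconds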
2025-05-29T15:21:55.046323Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:21:55.046329Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:21:55.046343Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:21:55.046358Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:21:55.046363Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:21:55.046367Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:21:55.046372Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:21:55.046389Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1 2025-05-29T15:21:55.046394Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes 2025-05-29T15:21:55.067727Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:21:55.067765Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.024000Z 2025-05-29T15:21:55.067777Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-05-29T15:21:55.067789Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z nodes=0 expired=0 removed=0 2025-05-29T15:21:55.108785Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:201:2197], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.108842Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:201:2197] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } ... 
waiting for nameservers are connected (done) 2025-05-29T15:21:55.109435Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2025-05-29T15:21:55.109456Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.109471Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:21:55.109584Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:205:2201], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.109624Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:203:2199], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:21:55.109630Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:21:55.109641Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:21:55.109703Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:21:55.109719Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:16:2063], path# /dc-1, domainOwnerId# 72057594046678944 2025-05-29T15:21:55.117416Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-05-29T15:21:55.117528Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:207:2202] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:21:55.117607Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:16:2063], cacheItem# { Subscriber: { Subscriber: [1:207:2202] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:21:55.117696Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:214:2203], recipient# [1:206:2176], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:21:55.117716Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: 
false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:21:55.117740Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <7205759404667894 ... [] } } 2025-05-29T15:21:55.938251Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:21:55.938259Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [2:204:2176], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:55.938263Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:55.938281Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:21:55.938284Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-05-29T15:21:55.938306Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.host1.host1 address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:21:55.938347Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v2 host1:1001 2025-05-29T15:21:55.938351Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2 2025-05-29T15:21:55.938354Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=2 2025-05-29T15:21:55.949237Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:21:55.949269Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-05-29T15:21:55.949281Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-05-29T15:21:55.949288Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v2 host1:1001 to epoch cache 2025-05-29T15:21:55.949313Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-05-29T15:21:55.949364Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } 2025-05-29T15:21:55.949430Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.949466Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: 
StateWork, received event# 272039952, Sender [2:18:2065], Recipient [2:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest { SeqNo: 0 } 2025-05-29T15:21:55.949473Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:255: StateWork, processing event TEvNodeBroker::TEvSyncNodesRequest 2025-05-29T15:21:55.949482Z node 2 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v1 -> v2 to [2:18:2065] 2025-05-29T15:21:55.949519Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:724: Handle NKikimrNodeBroker.TUpdateNodes Epoch { Id: 1 Version: 2 Start: 23000 End: 3600023000 NextEnd: 7200023000 } Updates { Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } } SeqNo: 0 2025-05-29T15:21:55.949542Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:789: Handle NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesResponse { SeqNo: 0 } 2025-05-29T15:21:55.949576Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.949591Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039952, Sender [2:18:2065], Recipient [2:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest { SeqNo: 0 } 2025-05-29T15:21:55.949596Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:255: StateWork, processing event TEvNodeBroker::TEvSyncNodesRequest 2025-05-29T15:21:55.949604Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:789: Handle NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesResponse { SeqNo: 0 } 2025-05-29T15:21:55.949671Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [2:216:2205], Recipient [2:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.949702Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [2:201:2197], Recipient [2:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:21:55.949708Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:21:55.949719Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:21:55.949763Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:21:55.949790Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [2:16:2063], cacheItem# { Subscriber: { Subscriber: [2:205:2200] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:21:55.949837Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [2:218:2206], recipient# [2:217:2176], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:21:55.949852Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:21:55.949885Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:21:55.949897Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [2:217:2176], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:55.949902Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:55.949919Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:21:55.949923Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-05-29T15:21:55.949947Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v3 host2:1001 to database state=Active resolvehost=host2.host2.host2 address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-05-29T15:21:55.950001Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v3 host2:1001 2025-05-29T15:21:55.950007Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 2 to 3 2025-05-29T15:21:55.950012Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=3 2025-05-29T15:21:55.968338Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:21:55.968377Z node 2 
:NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v3 host2:1001 2025-05-29T15:21:55.968388Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 2 to 3 2025-05-29T15:21:55.968397Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v3 host2:1001 to epoch cache 2025-05-29T15:21:55.968424Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v3 to update nodes log 2025-05-29T15:21:55.968474Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200023000 Name: "slot-1" } 2025-05-29T15:21:55.968566Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.968616Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039952, Sender [2:18:2065], Recipient [2:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest { SeqNo: 0 } 2025-05-29T15:21:55.968624Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:255: StateWork, processing event TEvNodeBroker::TEvSyncNodesRequest 2025-05-29T15:21:55.968633Z node 2 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [2:18:2065] 2025-05-29T15:21:55.968670Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:724: Handle NKikimrNodeBroker.TUpdateNodes Epoch { Id: 1 Version: 3 Start: 23000 End: 3600023000 NextEnd: 7200023000 } Updates { Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200023000 Name: "slot-1" } } SeqNo: 0 2025-05-29T15:21:55.968691Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:789: Handle NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesResponse { SeqNo: 0 }
>> ReadSessionImplTest::DataReceivedCallback [GOOD]
>> ReadSessionImplTest::CommonHandler [GOOD]
>> TNodeBrokerTest::ListNodesEpochDeltasPersistance [GOOD]
>> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration [GOOD]
>> TPartitionTests::IncorrectRange [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD]
Test command err:
2025-05-29T15:21:55.332993Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.333203Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.340217Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.340326Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.372284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:55.372304Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ...
waiting for nameservers are connected 2025-05-29T15:21:55.379834Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:21:55.380296Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:21:55.380368Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:21:55.380540Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:21:55.381240Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:21:55.384635Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:21:55.384710Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:21:55.384728Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:55.384733Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:21:55.384761Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:21:55.384785Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:21:55.384791Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:21:55.384795Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:21:55.384800Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:55.384827Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1 2025-05-29T15:21:55.384833Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes 2025-05-29T15:21:55.407266Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:21:55.407312Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-05-29T15:21:55.407324Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-05-29T15:21:55.407335Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z nodes=0 expired=0 removed=0 2025-05-29T15:21:55.450979Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:201:2197], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.451048Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:201:2197] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } ... 
waiting for nameservers are connected (done) 2025-05-29T15:21:55.451665Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2025-05-29T15:21:55.451689Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.451704Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:55.451774Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:205:2201], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.451816Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:203:2199], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:21:55.451822Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:21:55.451832Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:21:55.451893Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:21:55.451908Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:16:2063], path# /dc-1, domainOwnerId# 72057594046678944 2025-05-29T15:21:55.457867Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-05-29T15:21:55.457942Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:207:2202] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:21:55.457979Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:16:2063], cacheItem# { Subscriber: { Subscriber: [1:207:2202] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:21:55.458058Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:214:2203], recipient# [1:206:2176], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:21:55.458071Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: 
false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:21:55.458089Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <7205759404667894 ... inOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:21:55.862902Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:21:55.862917Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:21:55.862932Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [2:204:2176], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:55.862937Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:55.862961Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:21:55.862966Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-05-29T15:21:55.863000Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.host1.host1 address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:21:55.863090Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v2 host1:1001 2025-05-29T15:21:55.863100Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2 2025-05-29T15:21:55.863104Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=2 
2025-05-29T15:21:55.874047Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:21:55.874074Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-05-29T15:21:55.874088Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-05-29T15:21:55.874095Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v2 host1:1001 to epoch cache 2025-05-29T15:21:55.874122Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-05-29T15:21:55.874195Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } 2025-05-29T15:21:55.874321Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [2:216:2205], Recipient [2:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.874355Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [2:201:2197], Recipient [2:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:21:55.874362Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:21:55.874373Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:21:55.874438Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:21:55.874466Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [2:16:2063], cacheItem# { Subscriber: { Subscriber: [2:205:2200] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:21:55.874523Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [2:218:2206], recipient# [2:217:2176], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false 
Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:21:55.874539Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:21:55.874555Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:21:55.874569Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [2:217:2176], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:55.874574Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:55.874589Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:21:55.874607Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-05-29T15:21:55.874639Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v3 host2:1001 to database state=Active resolvehost=host2.host2.host2 address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-05-29T15:21:55.874692Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v3 host2:1001 2025-05-29T15:21:55.874698Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 2 to 3 2025-05-29T15:21:55.874702Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=3 2025-05-29T15:21:55.885726Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:21:55.885751Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v3 host2:1001 2025-05-29T15:21:55.885760Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 2 to 3 2025-05-29T15:21:55.885765Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v3 host2:1001 to epoch cache 2025-05-29T15:21:55.885793Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v3 to update nodes log 2025-05-29T15:21:55.885840Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { 
DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200023000 Name: "slot-1" } ... waiting for cache miss 2025-05-29T15:21:55.885907Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:525: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1024 Deadline: 1.107024s } 2025-05-29T15:21:55.885917Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:626: New cache miss: nodeId# 1024, deadline# 1.107024s 2025-05-29T15:21:55.885920Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:630: Schedule wakeup for new earliest deadline 1.107024s 2025-05-29T15:21:55.885927Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:525: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1025 Deadline: 1.107024s } 2025-05-29T15:21:55.885930Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:626: New cache miss: nodeId# 1025, deadline# 1.107024s ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) 2025-05-29T15:21:55.896110Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:21:55.896140Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:21:55.896166Z node 2 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v1 -> v3 to [2:18:2065] ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 2025-05-29T15:21:55.958552Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:873: HandleWakeup at 1.108024s 2025-05-29T15:21:55.958590Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:134: Cache miss failed: nodeId=1024, error=Deadline exceeded 2025-05-29T15:21:55.958603Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:134: Cache miss failed: nodeId=1025, error=Deadline exceeded >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline >> TPartitionTests::GetPartitionWriteInfoSuccess >> TPQTabletTests::Partition_Send_Predicate_With_False ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNewExpiredNode [GOOD] Test command err: 2025-05-29T15:21:54.480319Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.480385Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.480421Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.480468Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.480506Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.480537Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.490153Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.490310Z node 3 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.490375Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.490431Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.490494Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.490542Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.490642Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.490681Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.491264Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.491312Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.491349Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.491376Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.491410Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.491442Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.491503Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.527238Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.527529Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.527571Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.528773Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.528803Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.528862Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.528910Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.528947Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.528975Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.529042Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.529284Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.529363Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.529395Z node 5 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.529424Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.529453Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.529490Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.529544Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.529680Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.529805Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.531133Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.531187Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.531205Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.531219Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.531235Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.531248Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.542030Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.542149Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.542189Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.542261Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.543668Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.543868Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.544529Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.544609Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 
2025-05-29T15:21:54.544691Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.544748Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.545004Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.545100Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.546142Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.546598Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.546785Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.547037Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.604507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:54.604534Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:21:54.609951Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:21:54.610426Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:21:54.610497Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:21:54.610712Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:21:54.611424Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:21:54.611448Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:21:54.611513Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 
2025-05-29T15:21:54.611531Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:21:54.611536Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:21:54.611553Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:21:54.611619Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:21:54.611624Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:21:54.611629Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:21:54.611636Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:21:54.611651Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330 ... 25-05-29T15:21:55.010042Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:988: [DB] Loaded current epoch: #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2025-05-29T15:21:55.010049Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1012: [DB] Approximate epoch start is changed: #3.4 2025-05-29T15:21:55.010054Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1034: [DB] Loaded main nodes table: Nodes 2025-05-29T15:21:55.010080Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:291: [Dirty] Added expired node #1024.v0 host1:1001 2025-05-29T15:21:55.010187Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1113: [DB] Loaded node #1024.v0 { NodeId: 1024, State: Expired, Version: 0, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:21:55.010204Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1263: [DB] Migrating new expired node #1024.v4 { NodeId: 1024, State: Expired, Version: 4, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:21:55.010225Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:21:55.010276Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:21:55.010282Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 1, NewVersionUpdateNodes left 0 2025-05-29T15:21:55.010292Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v4 host1:1001 to database state=Expired resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:21:55.010338Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:21:55.010344Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #3.4 2025-05-29T15:21:55.023466Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: 
TTxMigrateState Complete 2025-05-29T15:21:55.023518Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T03:00:00.024000Z 2025-05-29T15:21:55.023531Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z, approximate epoch start #3.4 nodes=0 expired=1 2025-05-29T15:21:55.023561Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z nodes=0 expired=1 removed=0 2025-05-29T15:21:55.023567Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v4 to update nodes log 2025-05-29T15:21:55.023687Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:683:2248], Recipient [1:674:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.023774Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:685:2250], Recipient [1:674:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.023807Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:685:2250] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:55.023826Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:683:2248] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:55.023838Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:686:2251] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:55.023850Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:686:2251], Recipient [1:674:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.023859Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:687:2252] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:55.023887Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:687:2252], Recipient [1:674:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.023965Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:686:2251] 2025-05-29T15:21:55.023972Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.023985Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2025-05-29T15:21:55.024003Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:687:2252] 2025-05-29T15:21:55.024007Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.024013Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 
2025-05-29T15:21:55.024027Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:683:2248] 2025-05-29T15:21:55.024031Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.024038Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2025-05-29T15:21:55.024062Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [2:53:2072], Recipient [1:685:2250] 2025-05-29T15:21:55.024066Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.024072Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2025-05-29T15:21:55.024317Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:715:2275], Recipient [1:674:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.024353Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:625:2213], Recipient [1:674:2242]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:55.024359Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.024367Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2025-05-29T15:21:55.024447Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:717:2277], Recipient [1:674:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.024465Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:625:2213], Recipient [1:674:2242]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:55.024469Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.024476Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2025-05-29T15:21:55.024542Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:719:2279], Recipient [1:674:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.024561Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:625:2213], Recipient [1:674:2242]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:55.024566Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.024572Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2025-05-29T15:21:55.024632Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:721:2281], Recipient [1:674:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.024660Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 
272039936, Sender [1:625:2213], Recipient [1:674:2242]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 4 } 2025-05-29T15:21:55.024665Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.024671Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2025-05-29T15:21:55.024743Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:723:2283], Recipient [1:674:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.024767Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:625:2213], Recipient [1:674:2242]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 4 SeqNo: 2 } 2025-05-29T15:21:55.024776Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:55.024783Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:625:2213], seqNo: 2, version: 4, server pipe id: [1:723:2283] 2025-05-29T15:21:55.024792Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v4 -> v4 to [1:625:2213] 2025-05-29T15:21:55.024853Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:723:2283], Recipient [1:674:2242]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:55.024860Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:625:2213], seqNo: 2, server pipe id: [1:723:2283] 2025-05-29T15:21:55.024889Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:725:2285], Recipient [1:674:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.024911Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:625:2213], Recipient [1:674:2242]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:21:55.024917Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:21:55.024936Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } }
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseRemovedID [GOOD]
Test command err:
2025-05-29T15:21:52.788337Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.788404Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.788439Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.788482Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.788512Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.788540Z node 7 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.796272Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.796423Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.796481Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.796530Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.796586Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.796633Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.796714Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.796751Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.796925Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.796950Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.796972Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.796989Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.797012Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.797031Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.797069Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.828686Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.828962Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.828997Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.830131Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.830160Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.830208Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.830238Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.830297Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.830326Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.830386Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.830595Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.830671Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.830706Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.830752Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.830785Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.830819Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:52.830870Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.830985Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.831103Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.832085Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.832118Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.832129Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.832140Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.832152Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.832163Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.836944Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.837152Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.837201Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.837295Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.838336Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.838361Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.839168Z node 1 
:NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.839522Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.839627Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.839648Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.839895Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.839967Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.840647Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.840962Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.841292Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.841658Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:52.872683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:52.872712Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:21:52.877381Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:21:52.877814Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:21:52.877884Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:21:52.878110Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:21:52.878831Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:21:52.878854Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:21:52.878914Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 
2025-05-29T15:21:52.878930Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:52.878935Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:21:52.878952Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:21:52.878970Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:21:52.878976Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:21:52.878981Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:21:52.878987Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:52.879005Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330 ... Index: 0, ServicedSubDomain: 0:0 } 2025-05-29T15:21:54.672872Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1214: [DB] Migrating changed node #1024.v7 { NodeId: 1024, State: Active, Version: 7, Host: host2, Port: 1001, ResolveHost: host2.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 05:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:21:54.672879Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 6 to 7 2025-05-29T15:21:54.672893Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:21:54.672954Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:21:54.672960Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 1 2025-05-29T15:21:54.672967Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:54.672990Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v7 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:21:54.673021Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:21:54.691620Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:21:54.691675Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T04:00:00.025000Z 2025-05-29T15:21:54.691687Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z, approximate epoch start #4.5 nodes=1 expired=0 2025-05-29T15:21:54.691723Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z nodes=1 expired=0 removed=0 2025-05-29T15:21:54.691730Z node 1 :NODE_BROKER DEBUG: 
node_broker.cpp:659: Add node #1024.v7 to update nodes log 2025-05-29T15:21:54.691871Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:764:2289], Recipient [1:754:2283]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.691904Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:765:2290], Recipient [1:754:2283]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.692032Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:764:2289] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:54.692050Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:765:2290] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:54.692063Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:767:2292] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:54.692075Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:768:2293] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:54.692086Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:767:2292], Recipient [1:754:2283]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.692116Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:768:2293], Recipient [1:754:2283]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.692123Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:769:2294] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:54.692153Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:769:2294], Recipient [1:754:2283]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.692246Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:764:2289] 2025-05-29T15:21:54.692252Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.692260Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-05-29T15:21:54.692268Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:765:2290] 2025-05-29T15:21:54.692271Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.692276Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-05-29T15:21:54.692283Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:767:2292] 2025-05-29T15:21:54.692287Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.692291Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 
2025-05-29T15:21:54.692298Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:768:2293] 2025-05-29T15:21:54.692302Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.692306Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-05-29T15:21:54.692313Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:769:2294] 2025-05-29T15:21:54.692320Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.692324Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-05-29T15:21:54.692479Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:797:2317], Recipient [1:754:2283]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.692510Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:754:2283]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:54.692514Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.692523Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:54.692593Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:799:2319], Recipient [1:754:2283]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.692610Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:754:2283]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:54.692614Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.692619Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:54.692677Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:801:2321], Recipient [1:754:2283]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.692694Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:754:2283]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:54.692699Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.692706Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:54.692758Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:803:2323], Recipient [1:754:2283]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.692782Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:754:2283]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2025-05-29T15:21:54.692786Z 
node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:54.692792Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:54.692858Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:805:2325], Recipient [1:754:2283]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.692879Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:626:2214], Recipient [1:754:2283]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 6 SeqNo: 2 } 2025-05-29T15:21:54.692885Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:54.692893Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:626:2214], seqNo: 2, version: 6, server pipe id: [1:805:2325] 2025-05-29T15:21:54.692905Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v6 -> v7 to [1:626:2214] 2025-05-29T15:21:54.692971Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:805:2325], Recipient [1:754:2283]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:54.692977Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:626:2214], seqNo: 2, server pipe id: [1:805:2325] 2025-05-29T15:21:54.693002Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:807:2327], Recipient [1:754:2283]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:54.693022Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:626:2214], Recipient [1:754:2283]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:21:54.693027Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:21:54.693063Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18000025000 Name: "slot-0" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidDefaultColumn [FAIL] Test command err: Trying to start YDB, gRPC: 8284, MsgBus: 26984 2025-05-29T15:21:51.505002Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888234333723091:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:51.505030Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002449/r3tmp/tmp9CUkc7/pdisk_1.dat 2025-05-29T15:21:51.573400Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8284, node 1 2025-05-29T15:21:51.593463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-05-29T15:21:51.593480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:51.593482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:51.593524Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:51.605178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:51.605208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:51.606429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26984 TClient is connected to server localhost:26984 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:51.659128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:51.666902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:51.913995Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888234333723703:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.914031Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.969341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.038984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888238628691103:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.039012Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.039101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888238628691108:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.039931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:21:52.042621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:21:52.042702Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888238628691110:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:21:52.119970Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888238628691161:2384] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:52.147401Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888238628691170:2345], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:52.148506Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzU1Y2I3ZjEtYTE5ZDY4N2ItYTY4YTI3NjQtZTYxNmMyZTQ=, ActorId: [1:7509888234333723685:2326], ActorState: ExecuteState, TraceId: 01jwea5an60j9yhm8qb5ef4dfj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/yql/kqp_yql_ut.cpp:912, virtual void NKikimr::NKqp::NTestSuiteKqpYql::TTestCaseTestUuidDefaultColumn::Execute_(NUnitTest::TTestContext &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15E305AB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x15FE8418 2. /tmp//-S/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp:912: Execute_ @ 0x15D264C7 3. /tmp//-S/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp:14: operator() @ 0x15D2FE86 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x15FEA2CD 5. /tmp//-S/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp:14: Execute @ 0x15D2F844 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x15FEAA42 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x15FFC5EC 8. ??:0: ?? @ 0x7FC127D6FD8F 9. ??:0: ?? @ 0x7FC127D6FE3F 10. ??:0: ?? @ 0x14CFE028 >> TPQTest::TestReadRuleVersions [GOOD] >> TPQTabletTests::Partition_Send_Predicate_With_False [GOOD] >> TPQTest::TestSeveralOwners >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] >> TPartitionTests::CorrectRange_Commit [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline [GOOD] >> TPQTabletTests::ProposeTx_Missing_Operations >> GroupWriteTest::WithRead >> TNodeBrokerTest::NodeNameExpiration [GOOD] >> KqpScripting::ScriptExplain >> TPartitionTests::CorrectRange_Multiple_Consumers >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [FAIL] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> KqpScripting::StreamOperationTimeout >> TPQTabletTests::ProposeTx_Missing_Operations [GOOD] >> KqpScripting::ScanQueryDisable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ListNodesEpochDeltasPersistance [GOOD] Test command err: 2025-05-29T15:21:53.649806Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.649875Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.649911Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.649960Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.649997Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.650026Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.658629Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.658916Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.658999Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.659050Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.659111Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.659163Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.659298Z 
node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.659340Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.659560Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.659594Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.659624Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.659644Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.659670Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.659694Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.659742Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.685203Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.685477Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.685518Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.687065Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.687104Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.687156Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.687191Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.687243Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.687272Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.687485Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.687555Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.687590Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.687621Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.687651Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.687685Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.687711Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.687749Z node 8 
:NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.687871Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.688994Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.689015Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.689025Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.689036Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.689048Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.689058Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.689091Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.689137Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.694309Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.694614Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.694653Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.694704Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.694730Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.694765Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.694809Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.696091Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.696887Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.697013Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.697370Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.697538Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.697707Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.698371Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.698553Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.698721Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.699110Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.699361Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.699504Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.701883Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.701957Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.702018Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.702473Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.702550Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.702653Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.702687Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.703130Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.733872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:53.733900Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Ta ... 
broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1001:2498], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.109511Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:638:2214], Recipient [1:936:2442]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 10 SeqNo: 27 } 2025-05-29T15:21:55.109517Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:55.109525Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:638:2214], seqNo: 27, version: 10, server pipe id: [1:1001:2498] 2025-05-29T15:21:55.109533Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v10 -> v10 to [1:638:2214] 2025-05-29T15:21:55.109622Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:1001:2498], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:55.109630Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:638:2214], seqNo: 27, server pipe id: [1:1001:2498] 2025-05-29T15:21:55.109667Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1003:2500], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.109682Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:638:2214], Recipient [1:936:2442]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 9 SeqNo: 28 } 2025-05-29T15:21:55.109686Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:55.109693Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:638:2214], seqNo: 28, version: 9, server pipe id: [1:1003:2500] 2025-05-29T15:21:55.109698Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v9 -> v10 to [1:638:2214] 2025-05-29T15:21:55.109754Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:1003:2500], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:55.109761Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:638:2214], seqNo: 28, server pipe id: [1:1003:2500] 2025-05-29T15:21:55.109787Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1005:2502], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.109809Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:638:2214], Recipient [1:936:2442]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 8 SeqNo: 29 } 2025-05-29T15:21:55.109813Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:55.109818Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:638:2214], seqNo: 29, version: 8, server pipe id: [1:1005:2502] 2025-05-29T15:21:55.109822Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v8 -> v10 to [1:638:2214] 2025-05-29T15:21:55.109888Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:1005:2502], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:55.109893Z node 
1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:638:2214], seqNo: 29, server pipe id: [1:1005:2502] 2025-05-29T15:21:55.109921Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1007:2504], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.109938Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:638:2214], Recipient [1:936:2442]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 7 SeqNo: 30 } 2025-05-29T15:21:55.109942Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:55.109947Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:638:2214], seqNo: 30, version: 7, server pipe id: [1:1007:2504] 2025-05-29T15:21:55.109952Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v7 -> v10 to [1:638:2214] 2025-05-29T15:21:55.110015Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:1007:2504], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:55.110020Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:638:2214], seqNo: 30, server pipe id: [1:1007:2504] 2025-05-29T15:21:55.110039Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1009:2506], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.110054Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:638:2214], Recipient [1:936:2442]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 6 SeqNo: 31 } 2025-05-29T15:21:55.110058Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:55.110062Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:638:2214], seqNo: 31, version: 6, server pipe id: [1:1009:2506] 2025-05-29T15:21:55.110067Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v6 -> v10 to [1:638:2214] 2025-05-29T15:21:55.110133Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:1009:2506], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:55.110139Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:638:2214], seqNo: 31, server pipe id: [1:1009:2506] 2025-05-29T15:21:55.110167Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1011:2508], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.110181Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:638:2214], Recipient [1:936:2442]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 5 SeqNo: 32 } 2025-05-29T15:21:55.110185Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:55.110189Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:638:2214], seqNo: 32, version: 5, server pipe id: [1:1011:2508] 2025-05-29T15:21:55.110193Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v5 -> v10 to [1:638:2214] 2025-05-29T15:21:55.110256Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:1011:2508], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:55.110262Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:638:2214], seqNo: 32, server pipe id: [1:1011:2508] 2025-05-29T15:21:55.110285Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1013:2510], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.110305Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:638:2214], Recipient [1:936:2442]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 4 SeqNo: 33 } 2025-05-29T15:21:55.110309Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:55.110313Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:638:2214], seqNo: 33, version: 4, server pipe id: [1:1013:2510] 2025-05-29T15:21:55.110318Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v4 -> v10 to [1:638:2214] 2025-05-29T15:21:55.110382Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:1013:2510], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:55.110386Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:638:2214], seqNo: 33, server pipe id: [1:1013:2510] 2025-05-29T15:21:55.110415Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1015:2512], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.110432Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:638:2214], Recipient [1:936:2442]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 3 SeqNo: 34 } 2025-05-29T15:21:55.110436Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:55.110440Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:638:2214], seqNo: 34, version: 3, server pipe id: [1:1015:2512] 2025-05-29T15:21:55.110444Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v3 -> v10 to [1:638:2214] 2025-05-29T15:21:55.110506Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:1015:2512], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:55.110511Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:638:2214], seqNo: 34, server pipe id: [1:1015:2512] 2025-05-29T15:21:55.110570Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1017:2514], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.110609Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:638:2214], Recipient [1:936:2442]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 2 SeqNo: 35 } 2025-05-29T15:21:55.110613Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:55.110618Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:638:2214], seqNo: 35, version: 2, server pipe id: [1:1017:2514] 
2025-05-29T15:21:55.110623Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v10 to [1:638:2214] 2025-05-29T15:21:55.110686Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:1017:2514], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:55.110691Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:638:2214], seqNo: 35, server pipe id: [1:1017:2514] 2025-05-29T15:21:55.110719Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1019:2516], Recipient [1:936:2442]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.110735Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:638:2214], Recipient [1:936:2442]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 1 SeqNo: 36 } 2025-05-29T15:21:55.111457Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:55.111469Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:638:2214], seqNo: 36, version: 1, server pipe id: [1:1019:2516] 2025-05-29T15:21:55.111480Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v1 -> v10 to [1:638:2214] >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] >> TNodeBrokerTest::ConfigPipelining >> TQuotaTracker::TestSmallMessages [GOOD] >> TQuotaTracker::TestBigMessages [GOOD] >> TSourceIdTests::ExpensiveCleanup >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer >> TNodeBrokerTest::NodesMigration1001Nodes [GOOD] >> TPQTabletTests::ProposeTx_Unknown_Partition_1 >> TPQTabletTests::ProposeTx_Unknown_Partition_1 [GOOD] >> TPartitionTests::SetOffset >> KqpYql::EvaluateExprYsonAndType |58.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |58.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |58.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameExpiration [GOOD] Test command err: 2025-05-29T15:21:53.533338Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.533381Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.533403Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.533433Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.533459Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.533477Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.540147Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.540275Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.540330Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.540372Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.540411Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.540442Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.540499Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.540525Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.540674Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.540693Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.540706Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.540721Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.540736Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.540750Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.540788Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.545384Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.545443Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.545475Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.546354Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.546391Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.546425Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.546505Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.546521Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.546537Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.546554Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.546595Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.546613Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.546628Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.546644Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.546804Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.546834Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.546957Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.547065Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.547119Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.547885Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.547917Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.547929Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.547941Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.547952Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.552367Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.552490Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.552533Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.553651Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.554074Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.554118Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.554216Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.554286Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.554414Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.554535Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.554627Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.556419Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.556455Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.556621Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.556746Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.561082Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.561308Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.561739Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.561968Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.596776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:53.596812Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-05-29T15:21:53.606800Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:21:53.607290Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:21:53.607368Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:21:53.607626Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:21:53.608501Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:21:53.608532Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:21:53.608599Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:21:53.608616Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:53.608621Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:21:53.608639Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:21:53.608662Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:21:53.608667Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:21:53.608672Z node 1 :NODE_BROKER DEBU ... node #1024.v9 to update nodes log 2025-05-29T15:21:55.534438Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1026.v10 to update nodes log 2025-05-29T15:21:55.534443Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1027.v11 to update nodes log 2025-05-29T15:21:55.534448Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v12 to update nodes log 2025-05-29T15:21:55.534459Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.12 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:55.534469Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.12 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:55.534476Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.12 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:55.534484Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.12 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:55.534490Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.12 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:55.534497Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.12 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:55.534504Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.12 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:55.534511Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: 
Send TEvNodesInfo for epoch #4.12 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:55.559074Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:793:2323], Recipient [1:560:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.559125Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:660:2244], Recipient [1:560:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:55.559133Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.559150Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.12 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:55.559235Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:795:2325], Recipient [1:560:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.559277Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:660:2244], Recipient [1:560:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" } 2025-05-29T15:21:55.559284Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:21:55.559294Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" 2025-05-29T15:21:55.559358Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:21:55.559377Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:23:2070], path# /dc-1/my-database, domainOwnerId# 72057594046678944 2025-05-29T15:21:55.559706Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/my-database PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/my-database" PathDescription { Self { Name: "my-database" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { 
SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944 } 2025-05-29T15:21:55.559755Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/my-database PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/my-database" PathDescription { Self { Name: "my-database" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:797:2326] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:21:55.559794Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:797:2326] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 5000001 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:21:55.559882Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:804:2327], recipient# [1:796:2184], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:21:55.559898Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { 
Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:21:55.559912Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2025-05-29T15:21:55.559931Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:796:2184], Recipient [1:560:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:55.559938Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:55.559961Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:21:55.559966Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host5:19001 (not fixed) tenant: /dc-1/my-database 2025-05-29T15:21:55.559993Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v13 host5:19001 to database state=Active resolvehost=host5 address= dc= location= lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=1 authorizedbycertificate=false 2025-05-29T15:21:55.560054Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v13 host5:19001 2025-05-29T15:21:55.560065Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 12 to 13 2025-05-29T15:21:55.560070Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=13 2025-05-29T15:21:55.573096Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:21:55.573122Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v13 host5:19001 2025-05-29T15:21:55.573136Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 12 to 13 2025-05-29T15:21:55.573146Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v13 host5:19001 to epoch cache 2025-05-29T15:21:55.573165Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v13 to update nodes log 2025-05-29T15:21:55.573215Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { } Expire: 18000025000 Name: "slot-1" } >> TPQTabletTests::ProposeTx_Unknown_WriteId >> TNodeBrokerTest::NodesMigrationExpireRemoved [GOOD] >> TPartitionTests::CorrectRange_Multiple_Consumers [GOOD] >> TSourceIdTests::ExpensiveCleanup [GOOD] >> TNodeBrokerTest::UpdateNodesLog [GOOD] >> TPQTest::TestSeveralOwners [GOOD] >> TPQTest::TestReserveBytes >> TPartitionTests::ConflictingTxIsAborted >> TPQTabletTests::ProposeTx_Unknown_WriteId [GOOD] >> GroupWriteTest::WriteHardRateDispatcher >> TPQTabletTests::ProposeTx_Unknown_Partition_2 ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] Test command err: 2025-05-29T15:21:37.760758Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:21:37.764498Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:21:37.764585Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:21:37.765408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:37.765477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.765528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.765550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.765588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:37.765614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:37.765633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:37.765652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:37.765672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:37.765689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:37.765709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.765733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:37.779847Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:21:37.779926Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:21:37.779940Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:21:37.779977Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.780018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:37.780032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:37.780039Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:21:37.780050Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:21:37.780061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:37.780070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:37.780075Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:21:37.780099Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.780108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:37.780116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:37.780121Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:21:37.780134Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:21:37.780142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:37.780153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:37.780158Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:21:37.780173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:37.780182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:37.780187Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:21:37.780198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:37.780208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:37.780214Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:21:37.780243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:37.780253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.780258Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:21:37.780283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.780293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.780298Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:21:37.780313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.780321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.780327Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.780337Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:37.780347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:37.780356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:37.780362Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:21:37.780445Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=11; 2025-05-29T15:21:37.780458Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=7; 2025-05-29T15:21:37.780469Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute ... :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=7204; 2025-05-29T15:21:56.409576Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=2; 2025-05-29T15:21:56.410087Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=500; 2025-05-29T15:21:56.410098Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=7778; 2025-05-29T15:21:56.410126Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=19; 2025-05-29T15:21:56.410272Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=17; 2025-05-29T15:21:56.410284Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=152; 2025-05-29T15:21:56.410303Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=12; 2025-05-29T15:21:56.410319Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=12; 2025-05-29T15:21:56.410384Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=58; 2025-05-29T15:21:56.410443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=50; 2025-05-29T15:21:56.414513Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4052; 2025-05-29T15:21:56.418239Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=3699; 2025-05-29T15:21:56.418270Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=9; 2025-05-29T15:21:56.418277Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=2; 2025-05-29T15:21:56.418281Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=0; 2025-05-29T15:21:56.418285Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=0; 2025-05-29T15:21:56.418290Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-05-29T15:21:56.418303Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=9; 2025-05-29T15:21:56.418311Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-05-29T15:21:56.418330Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=11; 2025-05-29T15:21:56.418335Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-05-29T15:21:56.418344Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=5; 2025-05-29T15:21:56.418357Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=9; 2025-05-29T15:21:56.418370Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=9; 2025-05-29T15:21:56.418373Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=16790; 2025-05-29T15:21:56.418409Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7523144;raw_bytes=7389250;count=1;records=75200} inactive {blob_bytes=117955568;raw_bytes=114675450;count=218;records=1277000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-05-29T15:21:56.418442Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:5155:7145];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-05-29T15:21:56.418451Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:5155:7145];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-05-29T15:21:56.418465Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:5155:7145];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-05-29T15:21:56.418471Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:5155:7145];process=SwitchToWork;fline=column_engine_logs.cpp:493;event=OnTieringModified;new_count_tierings=0; 2025-05-29T15:21:56.418521Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-05-29T15:21:56.418534Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:21:56.418557Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=10; 2025-05-29T15:21:56.418572Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:288;event=StartCleanupStop;snapshot=plan_step=1748531801085;tx_id=18446744073709551615;;current_snapshot_ts=1748532099259; 2025-05-29T15:21:56.418579Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=10;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:21:56.418590Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:21:56.418593Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:21:56.418613Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-05-29T15:21:56.419276Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:21:56.419296Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: self_id=[1:5194:7176];tablet_id=9437184;parent=[1:5155:7145];fline=manager.cpp:85;event=ask_data;request=request_id=40;1={portions_count=219};; 2025-05-29T15:21:56.420237Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: 
tablet_id=9437184;self_id=[1:5155:7145];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-05-29T15:21:56.420311Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:5155:7145];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:243;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-05-29T15:21:56.420314Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 2025-05-29T15:21:56.420317Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-05-29T15:21:56.420321Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:5155:7145];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-05-29T15:21:56.420330Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:5155:7145];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:21:56.420339Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:5155:7145];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=10; 2025-05-29T15:21:56.420351Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:5155:7145];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:288;event=StartCleanupStop;snapshot=plan_step=1748531801085;tx_id=18446744073709551615;;current_snapshot_ts=1748532099259; 2025-05-29T15:21:56.420359Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:5155:7145];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=10;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:21:56.420369Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:5155:7145];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:21:56.420374Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:5155:7145];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:21:56.420387Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:5155:7145];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:164;event=skip_actualization;waiting=0.999000s; 2025-05-29T15:21:56.420393Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:5155:7145];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> TPartitionTests::ConflictingTxIsAborted [GOOD] >> GroupWriteTest::TwoTables >> TPartitionTests::GetPartitionWriteInfoSuccess [GOOD] >> TPQTabletTests::ProposeTx_Unknown_Partition_2 [GOOD] >> TPartitionTests::SetOffset [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> 
TNodeBrokerTest::UpdateNodesLog [GOOD] Test command err: 2025-05-29T15:21:53.935570Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.935625Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.935655Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.935692Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.935725Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.935749Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.943341Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.943470Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.943521Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.943561Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.943609Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.943648Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.943724Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.943755Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.943930Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.943958Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.943981Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.944003Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.944028Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.944050Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.944092Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.955695Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.955929Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.955962Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.957000Z node 2 
:NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.957025Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.957066Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.957106Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.957140Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.957164Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.957216Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.957422Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.957482Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.957510Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.957537Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.957574Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.957602Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.957642Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.957737Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.957847Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.958754Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.958783Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.958793Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.958802Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.958813Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.958826Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.963724Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.963815Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.963834Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.963848Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.963896Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.965208Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.965496Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.965684Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.965738Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.965841Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.966044Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.966182Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.966477Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.966556Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.966717Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.966975Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.967204Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.967241Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.967489Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.967650Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.967664Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 
18446744073709.551615s } 2025-05-29T15:21:53.968254Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.975387Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.979688Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.010009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:54.010029Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:21:54.014541Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:21:54.014998Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:21:54.015073Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:21:54.015306Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:21:54.016176Z node 1 :NODE_BROKER DEBUG: node_ ... 0.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z, approximate epoch start #5.13 nodes=1 expired=0 2025-05-29T15:21:56.274465Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##5.14 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z nodes=1 expired=0 removed=2 2025-05-29T15:21:56.274472Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v11 to update nodes log 2025-05-29T15:21:56.274485Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v13 to update nodes log 2025-05-29T15:21:56.274490Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1026.v14 to update nodes log 2025-05-29T15:21:56.274632Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:931:2398], Recipient [1:922:2392]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.274706Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:931:2398] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-05-29T15:21:56.274774Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:932:2399], Recipient [1:922:2392]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.274787Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:935:2402] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-05-29T15:21:56.274799Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:934:2401], Recipient [1:922:2392]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.274817Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:932:2399] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-05-29T15:21:56.274829Z node 7 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:934:2401] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-05-29T15:21:56.274854Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:935:2402], Recipient [1:922:2392]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.274935Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [2:53:2072], Recipient [1:931:2398] 2025-05-29T15:21:56.274942Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.274949Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #6 2025-05-29T15:21:56.274964Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:934:2401] 2025-05-29T15:21:56.274969Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.274973Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #6 2025-05-29T15:21:56.274981Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:935:2402] 2025-05-29T15:21:56.274984Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.274989Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #6 2025-05-29T15:21:56.275000Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:932:2399] 2025-05-29T15:21:56.275004Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.275009Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #6 2025-05-29T15:21:56.275168Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:963:2425], Recipient [1:922:2392]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.275196Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:634:2214], Recipient [1:922:2392]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:56.275201Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.275211Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.14 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:21:56.275281Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:965:2427], Recipient [1:922:2392]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.275307Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:634:2214], Recipient [1:922:2392]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 14 SeqNo: 21 } 2025-05-29T15:21:56.275313Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:56.275321Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:634:2214], seqNo: 21, version: 14, server 
pipe id: [1:965:2427] 2025-05-29T15:21:56.275329Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v14 -> v14 to [1:634:2214] 2025-05-29T15:21:56.275394Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:965:2427], Recipient [1:922:2392]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:56.275401Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:634:2214], seqNo: 21, server pipe id: [1:965:2427] 2025-05-29T15:21:56.275426Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:967:2429], Recipient [1:922:2392]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.275444Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:634:2214], Recipient [1:922:2392]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 13 SeqNo: 22 } 2025-05-29T15:21:56.275449Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:56.275453Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:634:2214], seqNo: 22, version: 13, server pipe id: [1:967:2429] 2025-05-29T15:21:56.275459Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v13 -> v14 to [1:634:2214] 2025-05-29T15:21:56.275511Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:967:2429], Recipient [1:922:2392]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:56.275517Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:634:2214], seqNo: 22, server pipe id: [1:967:2429] 2025-05-29T15:21:56.275540Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:969:2431], Recipient [1:922:2392]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.275557Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:634:2214], Recipient [1:922:2392]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 12 SeqNo: 23 } 2025-05-29T15:21:56.275564Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:56.275569Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:634:2214], seqNo: 23, version: 12, server pipe id: [1:969:2431] 2025-05-29T15:21:56.275573Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v12 -> v14 to [1:634:2214] 2025-05-29T15:21:56.275624Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:969:2431], Recipient [1:922:2392]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:56.275629Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:634:2214], seqNo: 23, server pipe id: [1:969:2431] 2025-05-29T15:21:56.275655Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:971:2433], Recipient [1:922:2392]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.275671Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:634:2214], Recipient [1:922:2392]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 11 SeqNo: 24 } 2025-05-29T15:21:56.275676Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event 
TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:56.275680Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:634:2214], seqNo: 24, version: 11, server pipe id: [1:971:2433] 2025-05-29T15:21:56.275685Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v11 -> v14 to [1:634:2214] 2025-05-29T15:21:56.275737Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:971:2433], Recipient [1:922:2392]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:56.275743Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:634:2214], seqNo: 24, server pipe id: [1:971:2433] 2025-05-29T15:21:56.275766Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:973:2435], Recipient [1:922:2392]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.275782Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:634:2214], Recipient [1:922:2392]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 10 SeqNo: 25 } 2025-05-29T15:21:56.275787Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:56.275791Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:634:2214], seqNo: 25, version: 10, server pipe id: [1:973:2435] 2025-05-29T15:21:56.275796Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v10 -> v14 to [1:634:2214] 2025-05-29T15:21:56.275849Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:973:2435], Recipient [1:922:2392]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:56.275854Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:634:2214], seqNo: 25, server pipe id: [1:973:2435] 2025-05-29T15:21:56.275877Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:975:2437], Recipient [1:922:2392]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.275893Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:634:2214], Recipient [1:922:2392]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 0 SeqNo: 26 } 2025-05-29T15:21:56.275897Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:56.275903Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:634:2214], seqNo: 26, version: 0, server pipe id: [1:975:2437] 2025-05-29T15:21:56.275908Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v0 -> v14 to [1:634:2214] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TSourceIdTests::ExpensiveCleanup [GOOD] Test command err: 2025-05-29T15:21:56.041425Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:21:56.041453Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:21:56.045001Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:21:56.045117Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step 
TInitInternalFieldsStep 2025-05-29T15:21:56.045200Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:180:2194] 2025-05-29T15:21:56.045447Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Initializing completed. 2025-05-29T15:21:56.045457Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:180:2194] 2025-05-29T15:21:56.045466Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:21:56.045574Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-05-29T15:21:56.045635Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:21:56.045640Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-05-29T15:21:56.045644Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-05-29T15:21:56.045648Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-05-29T15:21:56.045652Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cclient-1 2025-05-29T15:21:56.045655Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uclient-1 2025-05-29T15:21:56.045659Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] _config_1 2025-05-29T15:21:56.045663Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-05-29T15:21:56.045667Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-05-29T15:21:56.045681Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:21:56.045718Z node 1 :PERSQUEUE INFO: partition.cpp:3710: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2025-05-29T15:21:56.045740Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|194c8e1d-bfd283c5-133aa390-ed3ac16c_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send disk status response with cookie: 0 2025-05-29T15:21:56.045827Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:21:56.045849Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:35: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 1 2025-05-29T15:21:56.045907Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1704: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 1 2025-05-29T15:21:56.045923Z node 1 :PERSQUEUE DEBUG: partition.cpp:3630: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1: Cookie: 1 2025-05-29T15:21:56.045940Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 0 partNo 0 2025-05-29T15:21:56.045983Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2025-05-29T15:21:56.046031Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1623: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 d0000000001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 2025-05-29T15:21:56.046047Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:21:56.046051Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-05-29T15:21:56.046056Z node 1 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 1, State: StateIdle] [x0000000001, x0000000002) 2025-05-29T15:21:56.046059Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-05-29T15:21:56.046063Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001pSourceId 2025-05-29T15:21:56.046067Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] d0000000001_00000000000000000100_00000_0000000001_00000| 2025-05-29T15:21:56.046070Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-05-29T15:21:56.046074Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-05-29T15:21:56.046077Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-05-29T15:21:56.100593Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:21:56.100634Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. 
Partition: 1 2025-05-29T15:21:56.100653Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 0, partNo: 0, Offset: 100 is stored on disk 2025-05-29T15:21:56.100690Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 100 ReadingTimestamp 0 rrg 0 2025-05-29T15:21:56.427224Z node 1 :PERSQUEUE INFO: partition.cpp:3710: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2025-05-29T15:21:56.447550Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1704: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 2 2025-05-29T15:21:56.447605Z node 1 :PERSQUEUE DEBUG: partition.cpp:3630: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1: Cookie: 2 2025-05-29T15:21:56.447642Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 1 partNo 0 2025-05-29T15:21:56.447685Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1295: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob sourceId 'SourceId' seqNo 1 partNo 0 result is x0000000001_00000000000000000100_00000_0000000001_00000 size 104 2025-05-29T15:21:56.447698Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1049: [PQ: 72057594037927937, Partition: 1, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 old key x0000000001_00000000000000000100_00000_0000000001_00000 new key d0000000001_00000000000000000100_00000_0000000001_00000 size 104 WTime 1329 2025-05-29T15:21:56.447750Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 1 partNo 0 FormedBlobsCount 1 NewHead: Offset 200 PartNo 0 PackedSize 118 count 1 nextOffset 201 batches 1 2025-05-29T15:21:56.447790Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1623: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 200,1 HeadOffset 100 endOffset 101 curOffset 201 d0000000001_00000000000000000200_00000_0000000001_00000| size 105 WTime 1329 2025-05-29T15:21:56.447802Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:21:56.447821Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-05-29T15:21:56.447827Z node 1 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 1, State: StateIdle] [x0000000001, x0000000002) 2025-05-29T15:21:56.447832Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-05-29T15:21:56.447836Z node 1 :PERSQUEUE DEBUG: 
partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] d0000000001_00000000000000000100_00000_0000000001_00000 2025-05-29T15:21:56.447841Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001pSourceId 2025-05-29T15:21:56.447845Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] d0000000001_00000000000000000200_00000_0000000001_00000| 2025-05-29T15:21:56.447849Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-05-29T15:21:56.447853Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-05-29T15:21:56.447857Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-05-29T15:21:56.470928Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:21 ... 2 Iteration 143 Iteration 144 Iteration 145 Iteration 146 Iteration 147 Iteration 148 Iteration 149 Iteration 150 Iteration 151 Iteration 152 Iteration 153 Iteration 154 Iteration 155 Iteration 156 Iteration 157 Iteration 158 Iteration 159 Iteration 160 Iteration 161 Iteration 162 Iteration 163 Iteration 164 Iteration 165 Iteration 166 Iteration 167 Iteration 168 Iteration 169 Iteration 170 Iteration 171 Iteration 172 Iteration 173 Iteration 174 Iteration 175 Iteration 176 Iteration 177 Iteration 178 Iteration 179 Iteration 180 Iteration 181 Iteration 182 Iteration 183 Iteration 184 Iteration 185 Iteration 186 Iteration 187 Iteration 188 Iteration 189 Iteration 190 Iteration 191 Iteration 192 Iteration 193 Iteration 194 Iteration 195 Iteration 196 Iteration 197 Iteration 198 Iteration 199 Iteration 200 Iteration 201 Iteration 202 Iteration 203 Iteration 204 Iteration 205 Iteration 206 Iteration 207 Iteration 208 Iteration 209 Iteration 210 Iteration 211 Iteration 212 Iteration 213 Iteration 214 Iteration 215 Iteration 216 Iteration 217 Iteration 218 Iteration 219 Iteration 220 Iteration 221 Iteration 222 Iteration 223 Iteration 224 Iteration 225 Iteration 226 Iteration 227 Iteration 228 Iteration 229 Iteration 230 Iteration 231 Iteration 232 Iteration 233 Iteration 234 Iteration 235 Iteration 236 Iteration 237 Iteration 238 Iteration 239 Iteration 240 Iteration 241 Iteration 242 Iteration 243 Iteration 244 Iteration 245 Iteration 246 Iteration 247 Iteration 248 Iteration 249 Iteration 250 Iteration 251 Iteration 252 Iteration 253 Iteration 254 Iteration 255 Iteration 256 Iteration 257 Iteration 258 Iteration 259 Iteration 260 Iteration 261 Iteration 262 Iteration 263 Iteration 264 Iteration 265 Iteration 266 Iteration 267 Iteration 268 Iteration 269 Iteration 270 Iteration 271 Iteration 272 Iteration 273 Iteration 274 Iteration 275 Iteration 276 Iteration 277 Iteration 278 Iteration 279 Iteration 280 Iteration 281 Iteration 282 Iteration 283 Iteration 284 Iteration 285 Iteration 286 Iteration 287 Iteration 288 Iteration 289 Iteration 290 Iteration 291 Iteration 292 Iteration 293 Iteration 294 Iteration 295 Iteration 296 Iteration 297 Iteration 298 Iteration 299 Iteration 300 Iteration 301 Iteration 302 Iteration 303 Iteration 304 Iteration 305 Iteration 306 Iteration 307 Iteration 308 Iteration 309 Iteration 310 Iteration 311 Iteration 312 
Iteration 313 ... Iteration 819
Iteration 820 Iteration 821 Iteration 822 Iteration 823 Iteration 824 Iteration 825 Iteration 826 Iteration 827 Iteration 828 Iteration 829 Iteration 830 Iteration 831 Iteration 832 Iteration 833 Iteration 834 Iteration 835 Iteration 836 Iteration 837 Iteration 838 Iteration 839 Iteration 840 Iteration 841 Iteration 842 Iteration 843 Iteration 844 Iteration 845 Iteration 846 Iteration 847 Iteration 848 Iteration 849 Iteration 850 Iteration 851 Iteration 852 Iteration 853 Iteration 854 Iteration 855 Iteration 856 Iteration 857 Iteration 858 Iteration 859 Iteration 860 Iteration 861 Iteration 862 Iteration 863 Iteration 864 Iteration 865 Iteration 866 Iteration 867 Iteration 868 Iteration 869 Iteration 870 Iteration 871 Iteration 872 Iteration 873 Iteration 874 Iteration 875 Iteration 876 Iteration 877 Iteration 878 Iteration 879 Iteration 880 Iteration 881 Iteration 882 Iteration 883 Iteration 884 Iteration 885 Iteration 886 Iteration 887 Iteration 888 Iteration 889 Iteration 890 Iteration 891 Iteration 892 Iteration 893 Iteration 894 Iteration 895 Iteration 896 Iteration 897 Iteration 898 Iteration 899 Iteration 900 Iteration 901 Iteration 902 Iteration 903 Iteration 904 Iteration 905 Iteration 906 Iteration 907 Iteration 908 Iteration 909 Iteration 910 Iteration 911 Iteration 912 Iteration 913 Iteration 914 Iteration 915 Iteration 916 Iteration 917 Iteration 918 Iteration 919 Iteration 920 Iteration 921 Iteration 922 Iteration 923 Iteration 924 Iteration 925 Iteration 926 Iteration 927 Iteration 928 Iteration 929 Iteration 930 Iteration 931 Iteration 932 Iteration 933 Iteration 934 Iteration 935 Iteration 936 Iteration 937 Iteration 938 Iteration 939 Iteration 940 Iteration 941 Iteration 942 Iteration 943 Iteration 944 Iteration 945 Iteration 946 Iteration 947 Iteration 948 Iteration 949 Iteration 950 Iteration 951 Iteration 952 Iteration 953 Iteration 954 Iteration 955 Iteration 956 Iteration 957 Iteration 958 Iteration 959 Iteration 960 Iteration 961 Iteration 962 Iteration 963 Iteration 964 Iteration 965 Iteration 966 Iteration 967 Iteration 968 Iteration 969 Iteration 970 Iteration 971 Iteration 972 Iteration 973 Iteration 974 Iteration 975 Iteration 976 Iteration 977 Iteration 978 Iteration 979 Iteration 980 Iteration 981 Iteration 982 Iteration 983 Iteration 984 Iteration 985 Iteration 986 Iteration 987 Iteration 988 Iteration 989 Iteration 990 Iteration 991 Iteration 992 Iteration 993 Iteration 994 Iteration 995 Iteration 996 Iteration 997 Iteration 998 Iteration 999 >> TPartitionTests::ConflictingTxProceedAfterRollback >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] >> TPartitionTests::OldPlanStep >> TNodeBrokerTest::NodesSubscriberDisconnect [GOOD] >> GroupWriteTest::Simple ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1001Nodes [GOOD] Test command err: 2025-05-29T15:21:54.167268Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.167332Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.167365Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.167407Z 
node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.167442Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.167469Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.176744Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.176907Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.176989Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.177040Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.177091Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.177140Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.177226Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.177266Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.177480Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.177513Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.177538Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.177583Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.177611Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.177637Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.177690Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.186389Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.186476Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.186521Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.187938Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.187998Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.188039Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.188186Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.188217Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 
2025-05-29T15:21:54.188250Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.188279Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.188308Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.188765Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.188810Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.188841Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.188875Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.188920Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.189040Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.189179Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.189252Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.190341Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.190416Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.190428Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.190441Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.190453Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.196446Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.196572Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.196597Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.196615Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.196625Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.197886Z node 1 
:NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.198492Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.198666Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.198778Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.198845Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.198954Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.199369Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.199481Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.199923Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.199986Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.200626Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.200922Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.201115Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.201351Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.238496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:54.238528Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-05-29T15:21:54.244023Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:21:54.244451Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:21:54.244513Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:21:54.244746Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:21:54.245614Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:21:54.245655Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:21:54.245723Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:21:54.245740Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:54.245745Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:21:54.245761Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:21:54.245785Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:21:54.245792Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:21:54.245796Z node 1 :NODE_BROKER DEBU ... e 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1598.v505 to update nodes log 2025-05-29T15:21:55.707351Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1597.v505 to update nodes log 2025-05-29T15:21:55.707354Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1596.v505 to update nodes log 2025-05-29T15:21:55.707357Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1595.v505 to update nodes log 2025-05-29T15:21:55.707360Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1594.v505 to update nodes log 2025-05-29T15:21:55.707364Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1593.v505 to update nodes log 2025-05-29T15:21:55.707367Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1592.v505 to update nodes log 2025-05-29T15:21:55.707370Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1591.v505 to update nodes log 2025-05-29T15:21:55.707374Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1590.v505 to update nodes log 2025-05-29T15:21:55.707377Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1589.v505 to update nodes log 2025-05-29T15:21:55.707380Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1588.v505 to update nodes log 2025-05-29T15:21:55.707384Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1587.v505 to update nodes log 2025-05-29T15:21:55.707387Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1586.v505 to update nodes log 2025-05-29T15:21:55.707390Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1585.v505 to update nodes log 2025-05-29T15:21:55.707393Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1584.v505 to update nodes log 2025-05-29T15:21:55.707397Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1583.v505 to update nodes log 
2025-05-29T15:21:55.707400Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1582.v505 to update nodes log 2025-05-29T15:21:55.707403Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1581.v505 to update nodes log 2025-05-29T15:21:55.707406Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1580.v505 to update nodes log 2025-05-29T15:21:55.707410Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1579.v505 to update nodes log 2025-05-29T15:21:55.707413Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1578.v505 to update nodes log 2025-05-29T15:21:55.707418Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1577.v505 to update nodes log 2025-05-29T15:21:55.707422Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1576.v505 to update nodes log 2025-05-29T15:21:55.707425Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1575.v505 to update nodes log 2025-05-29T15:21:55.707429Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1574.v505 to update nodes log 2025-05-29T15:21:55.707432Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1573.v505 to update nodes log 2025-05-29T15:21:55.707436Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1572.v505 to update nodes log 2025-05-29T15:21:55.707439Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1571.v505 to update nodes log 2025-05-29T15:21:55.707442Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1570.v505 to update nodes log 2025-05-29T15:21:55.707446Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1569.v505 to update nodes log 2025-05-29T15:21:55.707450Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1568.v505 to update nodes log 2025-05-29T15:21:55.707453Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1567.v505 to update nodes log 2025-05-29T15:21:55.707456Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1566.v505 to update nodes log 2025-05-29T15:21:55.707459Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1565.v505 to update nodes log 2025-05-29T15:21:55.707463Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1564.v505 to update nodes log 2025-05-29T15:21:55.707466Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1563.v505 to update nodes log 2025-05-29T15:21:55.707469Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1562.v505 to update nodes log 2025-05-29T15:21:55.707472Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1561.v505 to update nodes log 2025-05-29T15:21:55.707476Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1560.v505 to update nodes log 2025-05-29T15:21:55.707479Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1559.v505 to update nodes log 2025-05-29T15:21:55.707483Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1558.v505 to update nodes log 2025-05-29T15:21:55.707486Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1557.v505 to update nodes log 2025-05-29T15:21:55.707489Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1556.v505 to update nodes log 2025-05-29T15:21:55.707493Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1555.v505 to update nodes log 2025-05-29T15:21:55.707496Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1554.v505 to update nodes log 2025-05-29T15:21:55.707499Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1553.v505 to update nodes log 2025-05-29T15:21:55.707503Z node 1 :NODE_BROKER DEBUG: 
node_broker.cpp:659: Add node #1552.v505 to update nodes log 2025-05-29T15:21:55.707506Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1551.v505 to update nodes log 2025-05-29T15:21:55.707509Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1550.v505 to update nodes log 2025-05-29T15:21:55.707512Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1549.v505 to update nodes log 2025-05-29T15:21:55.707516Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1548.v505 to update nodes log 2025-05-29T15:21:55.707519Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1547.v505 to update nodes log 2025-05-29T15:21:55.707522Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1546.v505 to update nodes log 2025-05-29T15:21:55.707526Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1545.v505 to update nodes log 2025-05-29T15:21:55.707529Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1544.v505 to update nodes log 2025-05-29T15:21:55.707532Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1543.v505 to update nodes log 2025-05-29T15:21:55.707537Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1542.v505 to update nodes log 2025-05-29T15:21:55.707541Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1541.v505 to update nodes log 2025-05-29T15:21:55.707544Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1540.v505 to update nodes log 2025-05-29T15:21:55.707548Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1536.v505 to update nodes log 2025-05-29T15:21:55.707551Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1787.v505 to update nodes log 2025-05-29T15:21:55.707723Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2780:3806], Recipient [1:2770:3800]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.707787Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2781:3807], Recipient [1:2770:3800]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.707813Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2781:3807] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:55.707839Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2780:3806] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:55.707897Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [2:53:2072], Recipient [1:2781:3807] 2025-05-29T15:21:55.707901Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.707911Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:21:55.707921Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:2780:3806] 2025-05-29T15:21:55.707924Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.707928Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 
1970-01-01T04:00:00.025000Z 2025-05-29T15:21:55.711099Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2836:3857], Recipient [1:2770:3800]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.711138Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:2770:3800]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:55.711142Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.711151Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:21:55.712482Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2838:3859], Recipient [1:2770:3800]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.712507Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:2770:3800]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:55.712510Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.712517Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:21:55.714659Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2840:3861], Recipient [1:2770:3800]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.714686Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:2770:3800]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:55.714690Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.714696Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:21:55.715934Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2842:3863], Recipient [1:2770:3800]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:55.715976Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:2770:3800]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 504 } 2025-05-29T15:21:55.715982Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:55.715990Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireRemoved [GOOD] Test command err: 2025-05-29T15:21:54.180921Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.180985Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 
18446744073709.551615s } 2025-05-29T15:21:54.181019Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.181060Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.181095Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.181124Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.190129Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.190275Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.190333Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.190381Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.190450Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.190496Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.190574Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.190611Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.190835Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.190869Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.190892Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.190917Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.190943Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.190966Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.191013Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.202666Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.202964Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.202999Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.204241Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.204270Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.204313Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.204356Z node 5 
:NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.204397Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.204426Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.204485Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.204672Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.204731Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.204763Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.204793Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.204818Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.204848Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.204892Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.205005Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.205120Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.206171Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.206202Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.206214Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.206224Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.206234Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.206250Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.211077Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.211246Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.211331Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 
18446744073709.551615s } 2025-05-29T15:21:54.211356Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.212040Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.212516Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.212893Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.213232Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.213294Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.213489Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.213628Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.213741Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.215472Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.215611Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.215987Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.216122Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.217130Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.217773Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.247287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:54.247307Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-05-29T15:21:54.250855Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:21:54.251203Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:21:54.251254Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:21:54.251419Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:21:54.251967Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:21:54.251986Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:21:54.252026Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:21:54.252038Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:54.252042Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:21:54.252054Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:21:54.252076Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:21:54.252079Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:21:54.252083Z node 1 :NODE_BROKER DEBU ... .yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:21:56.044237Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:21:56.044241Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #6.8 2025-05-29T15:21:56.062117Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:21:56.062187Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T06:00:00.025000Z 2025-05-29T15:21:56.062200Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #6.8 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z, approximate epoch start #6.8 nodes=0 expired=1 2025-05-29T15:21:56.062231Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##6.8 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z nodes=0 expired=1 removed=0 2025-05-29T15:21:56.062239Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v8 to update nodes log 2025-05-29T15:21:56.062393Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:775:2298], Recipient [1:766:2292]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.062475Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:776:2299] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:56.062493Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle 
NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:777:2300] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:56.062503Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:778:2301] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:56.062528Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:776:2299], Recipient [1:766:2292]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.062539Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:775:2298] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:56.062570Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:780:2303] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:56.062583Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:781:2304] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:21:56.062592Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:777:2300], Recipient [1:766:2292]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.062607Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:778:2301], Recipient [1:766:2292]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.062620Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:780:2303], Recipient [1:766:2292]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.062629Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:781:2304], Recipient [1:766:2292]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.062680Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:775:2298] 2025-05-29T15:21:56.062685Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.062694Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:21:56.062703Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:776:2299] 2025-05-29T15:21:56.062705Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.062709Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:21:56.062756Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:777:2300] 2025-05-29T15:21:56.062761Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.062767Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 
1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:21:56.062789Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:778:2301] 2025-05-29T15:21:56.062793Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.062799Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:21:56.062823Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:780:2303] 2025-05-29T15:21:56.062827Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.062834Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:21:56.062863Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:781:2304] 2025-05-29T15:21:56.062867Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.062873Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:21:56.063063Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:809:2327], Recipient [1:766:2292]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.063094Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:627:2213], Recipient [1:766:2292]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:56.063099Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.063106Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:21:56.063173Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:811:2329], Recipient [1:766:2292]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.063190Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:627:2213], Recipient [1:766:2292]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:56.063194Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.063199Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:21:56.063260Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:813:2331], Recipient [1:766:2292]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.063275Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:627:2213], Recipient [1:766:2292]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:56.063280Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.063285Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:21:56.063338Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:815:2333], Recipient [1:766:2292]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.063359Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:627:2213], Recipient [1:766:2292]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 8 } 2025-05-29T15:21:56.063364Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.063370Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:21:56.063423Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:817:2335], Recipient [1:766:2292]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.063443Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:627:2213], Recipient [1:766:2292]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 8 SeqNo: 2 } 2025-05-29T15:21:56.063450Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:56.063457Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2213], seqNo: 2, version: 8, server pipe id: [1:817:2335] 2025-05-29T15:21:56.063465Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v8 -> v8 to [1:627:2213] 2025-05-29T15:21:56.063524Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:817:2335], Recipient [1:766:2292]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:56.063531Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2213], seqNo: 2, server pipe id: [1:817:2335] 2025-05-29T15:21:56.063557Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:819:2337], Recipient [1:766:2292]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.063578Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:627:2213], Recipient [1:766:2292]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:21:56.063583Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:21:56.063598Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } >> TPartitionTests::GetPartitionWriteInfoError >> TPQTabletTests::ProposeTx_Command_After_Propose |59.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest |59.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange [GOOD] >> GroupWriteTest::WithRead [GOOD] >> TPartitionTests::OldPlanStep [GOOD] >> 
BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [FAIL] >> BasicUsage::TWriteSession_WriteEncoded >> TPartitionTests::ReserveSubDomainOutOfSpace >> TPartitionTests::GetPartitionWriteInfoError [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesSubscriberDisconnect [GOOD] Test command err: 2025-05-29T15:21:55.768258Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.768307Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.768339Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.768376Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.768402Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.768422Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.776749Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.776910Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.776966Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.777017Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.777073Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.777120Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.777206Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.777244Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.777601Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.777636Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.777659Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.777679Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.777706Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.777730Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.777783Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.782709Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.782800Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.782834Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.783749Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.783789Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.783816Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.783900Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.783923Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.783941Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.783964Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.784006Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.784025Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.784041Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.784058Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:55.784249Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.784271Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.784343Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.784424Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.784460Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.785031Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.785043Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.785050Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.785057Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.785065Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 
4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.785193Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.788718Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.788871Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.788900Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.788947Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.788960Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.788968Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.789892Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.790076Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.790181Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.790286Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.790534Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.790714Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.790856Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.790896Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.791294Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.791359Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.791824Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.792039Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.793611Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.793884Z node 8 
:NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:55.816391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:55.816414Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:21:55.820527Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:21:55.820895Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:21:55.820943Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:21:55.821109Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:21:55.822073Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:21:55.822096Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:21:55.822144Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:21:55.822156Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:55.822159Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:21:55.822170Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Compl ... rizedbycertificate=false 2025-05-29T15:21:56.099949Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1026.v4 host3:1001 2025-05-29T15:21:56.099957Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 3 to 4 2025-05-29T15:21:56.099962Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=4 2025-05-29T15:21:56.111074Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:21:56.111107Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1026.v4 host3:1001 2025-05-29T15:21:56.111120Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 3 to 4 2025-05-29T15:21:56.111129Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1026.v4 host3:1001 to epoch cache 2025-05-29T15:21:56.111161Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1026.v4 to update nodes log 2025-05-29T15:21:56.111212Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1026 Host: "host3" Port: 1001 ResolveHost: "host3.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-2" } 2025-05-29T15:21:56.121508Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:562:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:21:56.121539Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:21:56.543037Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, 
received event# 269877761, Sender [1:702:2248], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.543115Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 3 SeqNo: 2 } 2025-05-29T15:21:56.543125Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:56.543134Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:630:2214], seqNo: 2, version: 3, server pipe id: [1:702:2248] 2025-05-29T15:21:56.543149Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v3 -> v4 to [1:630:2214] 2025-05-29T15:21:56.543172Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:703:2249], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.543188Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:56.543194Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.543209Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.4 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:56.543322Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:705:2251], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.543362Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:21:56.543368Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:21:56.543378Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:21:56.543435Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:21:56.543464Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:634:2217] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:21:56.543525Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:707:2252], recipient# [1:706:2184], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:21:56.543543Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:21:56.543555Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:21:56.543572Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:706:2184], Recipient [1:562:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:56.543577Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:56.543602Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:21:56.543608Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host4:1001 (not fixed) tenant: dc-1 2025-05-29T15:21:56.543645Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1027.v5 host4:1001 to database state=Active resolvehost=host4.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=3 authorizedbycertificate=false 2025-05-29T15:21:56.543707Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1027.v5 host4:1001 2025-05-29T15:21:56.543715Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 4 to 5 2025-05-29T15:21:56.543719Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=5 2025-05-29T15:21:56.563091Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:21:56.563125Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1027.v5 host4:1001 
2025-05-29T15:21:56.563138Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 4 to 5 2025-05-29T15:21:56.563145Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1027.v5 host4:1001 to epoch cache 2025-05-29T15:21:56.563176Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1027.v5 to update nodes log 2025-05-29T15:21:56.563232Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1027 Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-3" } 2025-05-29T15:21:56.563396Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:711:2256], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.563418Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:56.563425Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.563438Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.5 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:56.574873Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:562:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:21:56.574898Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:21:56.574912Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v4 -> v5 to [1:630:2214] 2025-05-29T15:21:56.575091Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:713:2258], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.575119Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:56.575124Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.575137Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.5 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:56.575188Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039952, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest { SeqNo: 2 } 2025-05-29T15:21:56.575193Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:255: StateWork, processing event TEvNodeBroker::TEvSyncNodesRequest |59.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange [GOOD] Test command err: 2025-05-29T15:21:54.737991Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.738054Z node 3 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.738091Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.738134Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.738173Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.738199Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.747785Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.747931Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.748001Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.748051Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.748099Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.748146Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.748235Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.748269Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.748442Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.748467Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.748488Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.748504Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.748525Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.748544Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.748581Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.800741Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.801021Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.801061Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.802209Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.802242Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.802294Z node 4 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.802347Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.802387Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.802418Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.806179Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.806291Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.806342Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.806386Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.806426Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.806479Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.806520Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:54.806566Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.806726Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.811848Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.811897Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.811919Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.811936Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.811958Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.811975Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.812059Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.812123Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.817715Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.817799Z node 5 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.817945Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.818014Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.818053Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.819694Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.820166Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.820270Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.820316Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.820505Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.820647Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.820726Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.820906Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.821206Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.822019Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.822146Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.822409Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:54.871012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:54.871045Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-05-29T15:21:54.877859Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:21:54.878340Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:21:54.878421Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:21:54.878677Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:21:54.879537Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:21:54.879568Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:21:54.879625Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:21:54.879642Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:21:54.879648Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:21:54.879663Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:21:54.879686Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:21:54.879692Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:21:54.879698Z node 1 :NODE_BROKER DEBU ... DE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:926:2418], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.714968Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:876:2379]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:56.714973Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.714979Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.14 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:56.715035Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:928:2420], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.715051Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:876:2379]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 13 } 2025-05-29T15:21:56.715056Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.715062Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.14 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:56.715117Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:930:2422], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.715132Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient 
[1:876:2379]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:56.715136Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.715142Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.14 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:56.715199Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:932:2424], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.715216Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:876:2379]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 12 } 2025-05-29T15:21:56.715221Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.715227Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.14 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:56.715282Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:934:2426], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.715296Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:876:2379]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:56.715302Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.715308Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.14 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:56.715365Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:936:2428], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.715386Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:876:2379]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 11 } 2025-05-29T15:21:56.715390Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:56.715397Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.14 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:21:56.715460Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:938:2430], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.715480Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:628:2214], Recipient [1:876:2379]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 14 SeqNo: 6 } 2025-05-29T15:21:56.715487Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:56.715495Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:628:2214], seqNo: 6, version: 14, server pipe id: [1:938:2430] 2025-05-29T15:21:56.715505Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send 
TEvUpdateNodes v14 -> v14 to [1:628:2214] 2025-05-29T15:21:56.715582Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:938:2430], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:56.715589Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:628:2214], seqNo: 6, server pipe id: [1:938:2430] 2025-05-29T15:21:56.715614Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:940:2432], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.715630Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:628:2214], Recipient [1:876:2379]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 13 SeqNo: 7 } 2025-05-29T15:21:56.715635Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:56.715640Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:628:2214], seqNo: 7, version: 13, server pipe id: [1:940:2432] 2025-05-29T15:21:56.715645Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v13 -> v14 to [1:628:2214] 2025-05-29T15:21:56.715707Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:940:2432], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:56.715712Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:628:2214], seqNo: 7, server pipe id: [1:940:2432] 2025-05-29T15:21:56.715739Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:942:2434], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.715847Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:628:2214], Recipient [1:876:2379]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 12 SeqNo: 8 } 2025-05-29T15:21:56.715854Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:56.715859Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:628:2214], seqNo: 8, version: 12, server pipe id: [1:942:2434] 2025-05-29T15:21:56.715864Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v12 -> v14 to [1:628:2214] 2025-05-29T15:21:56.715925Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:942:2434], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:56.715930Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:628:2214], seqNo: 8, server pipe id: [1:942:2434] 2025-05-29T15:21:56.715954Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:944:2436], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.715969Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:628:2214], Recipient [1:876:2379]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 11 SeqNo: 9 } 2025-05-29T15:21:56.715973Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:21:56.715978Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New 
subscriber [1:628:2214], seqNo: 9, version: 11, server pipe id: [1:944:2436] 2025-05-29T15:21:56.715983Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v11 -> v14 to [1:628:2214] 2025-05-29T15:21:56.716045Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:944:2436], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:21:56.716050Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:628:2214], seqNo: 9, server pipe id: [1:944:2436] 2025-05-29T15:21:56.716075Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:946:2438], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.716097Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:628:2214], Recipient [1:876:2379]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:21:56.716102Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:21:56.716142Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18000025000 Name: "slot-0" } } 2025-05-29T15:21:56.716210Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:948:2440], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.716228Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:628:2214], Recipient [1:876:2379]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-05-29T15:21:56.716232Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:21:56.716241Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2025-05-29T15:21:56.716295Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:950:2442], Recipient [1:876:2379]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:56.716313Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:628:2214], Recipient [1:876:2379]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1026 } 2025-05-29T15:21:56.716319Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:21:56.716326Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> GroupWriteTest::WithRead [GOOD] Test command err: RandomSeed# 8006378758788421118 2025-05-29T15:21:57.454782Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 3 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-05-29T15:21:57.459284Z 1 
00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-05-29T15:21:57.459307Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 going to send TEvBlock {TabletId# 3 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-05-29T15:21:57.459867Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-05-29T15:21:57.472149Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:21:57.472940Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-05-29T15:21:58.310811Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-05-29T15:21:58.310840Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:21:58.310849Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-05-29T15:21:58.310854Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:21:58.321313Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} 2025-05-29T15:21:58.321344Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK} >> TPartitionTests::NonConflictingCommitsBatch >> KqpQueryPerf::Replace+QueryService-UseSink >> KqpOlapSysView::StatsSysViewBytesColumnActualization [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] Test command err: 2025-05-29T15:21:56.768424Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:21:56.770436Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:21:56.770533Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037927937] doesn't have tx info 2025-05-29T15:21:56.770559Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:21:56.770564Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-05-29T15:21:56.770571Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:21:56.770581Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:21:56.770592Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:21:56.778030Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:207:2212], now have 1 active actors on pipe 2025-05-29T15:21:56.778059Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:21:56.780014Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:21:56.780822Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:21:56.780843Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:21:56.781013Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:21:56.781043Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:21:56.781109Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:21:56.781180Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:215:2218] 2025-05-29T15:21:56.781327Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [topic:0:Initializer] Initializing completed. 2025-05-29T15:21:56.781332Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:215:2218] 2025-05-29T15:21:56.781339Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:21:56.781437Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:21:56.781454Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-05-29T15:21:56.781459Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-05-29T15:21:56.781481Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:21:56.781484Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:21:56.781487Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:21:56.781490Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:21:56.781492Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-05-29T15:21:56.781494Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-05-29T15:21:56.781497Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:21:56.781500Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:21:56.781516Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:21:56.781539Z node 1 :PERSQUEUE DEBUG: read.h:262: CacheProxy. 
Passthrough write request to KV 2025-05-29T15:21:56.782260Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:21:56.782331Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:222:2223], now have 1 active actors on pipe 2025-05-29T15:21:56.782411Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:225:2225], now have 1 active actors on pipe 2025-05-29T15:21:56.782576Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3222: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 178 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 2 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-05-29T15:21:56.782584Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3383: [PQ: 72057594037927937] distributed transaction 2025-05-29T15:21:56.782598Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3697: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-05-29T15:21:56.782604Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-05-29T15:21:56.782607Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-05-29T15:21:56.782611Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3922: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-05-29T15:21:56.782615Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-05-29T15:21:56.782620Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3818: [PQ: 72057594037927937] write key for TxId 67890 2025-05-29T15:21:56.782640Z node 1 :PERSQUEUE DEBUG: transaction.cpp:374: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 2 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 178 RawX2: 4294969488 } Partitions { } 2025-05-29T15:21:56.782652Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3635: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-05-29T15:21:56.783701Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1231: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-05-29T15:21:56.783734Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:21:56.783740Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-05-29T15:21:56.783745Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-05-29T15:21:56.784576Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3409: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 178 RawX2: 4294969488 } } Step: 100 2025-05-29T15:21:56.784601Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARED 2025-05-29T15:21:56.784606Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State PREPARED 2025-05-29T15:21:56.784612Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 2025-05-29T15:21:56.784620Z node 1 :PERSQUEUE DEBUG: 
pq_impl.cpp:3798: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2025-05-29T15:21:56.784629Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3818: [PQ: 72057594037927937] write key for TxId 67890 2025-05-29T15:21:56.784673Z node 1 :PERSQUEUE DEBUG: transaction.cpp:374: [TxId: 67890] save tx TxId: 67890 State: PLANNED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 2 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 178 RawX2: 4294969488 } Partitions { } 2025-05-29T15:21:56.784696Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3635: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-05-29T15:21:56.785884Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1231: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-05-29T15:21:56.785904Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PLANNING 2025-05-29T15:21:56.785909Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State PLANNING 2025-05-29T15:21:56.785914Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState PLANNED 2025-05-29T15:21:56.785920Z node 1 ... "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 6 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 6 Important: false } 2025-05-29T15:21:58.211658Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:21:58.211714Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:21:58.211755Z node 6 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [6:185:2197] 2025-05-29T15:21:58.211922Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:55: [topic:0:Initializer] Initializing completed. 2025-05-29T15:21:58.211929Z node 6 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [6:185:2197] 2025-05-29T15:21:58.211935Z node 6 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:21:58.211983Z node 6 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:21:58.211994Z node 6 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 6 2025-05-29T15:21:58.212001Z node 6 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 6 done 2025-05-29T15:21:58.212016Z node 6 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:21:58.212019Z node 6 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:21:58.212022Z node 6 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:21:58.212044Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:21:58.212048Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-05-29T15:21:58.212052Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-05-29T15:21:58.212056Z node 6 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:21:58.212059Z node 6 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:21:58.212073Z node 6 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:21:58.212100Z node 6 :PERSQUEUE DEBUG: read.h:262: CacheProxy. 
Passthrough write request to KV 2025-05-29T15:21:58.212688Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:21:58.212749Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [6:192:2202], now have 1 active actors on pipe 2025-05-29T15:21:58.212834Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [6:195:2204], now have 1 active actors on pipe 2025-05-29T15:21:58.212841Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'topic' requestId: 2025-05-29T15:21:58.212846Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-05-29T15:21:58.212852Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037927937] partition {0, {0, 3}, 100000} for WriteId {0, 3} 2025-05-29T15:21:58.212872Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3582: [PQ: 72057594037927937] send TEvSubscribeLock for WriteId {0, 3} 2025-05-29T15:21:58.212885Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3635: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-05-29T15:21:58.213246Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1231: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-05-29T15:21:58.213313Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:21:58.213351Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:21:58.213385Z node 6 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateInit] bootstrapping {0, {0, 3}, 100000} [6:201:2209] 2025-05-29T15:21:58.213498Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitDiskStatusStep 2025-05-29T15:21:58.213679Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitMetaStep 2025-05-29T15:21:58.213709Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitInfoRangeStep 2025-05-29T15:21:58.213738Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitDataRangeStep 2025-05-29T15:21:58.213755Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitDataStep 2025-05-29T15:21:58.213758Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:21:58.213761Z node 6 :PERSQUEUE INFO: partition_init.cpp:774: [topic:{0, {0, 3}, 100000}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:21:58.213764Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:55: [topic:{0, {0, 3}, 100000}:Initializer] Initializing completed. 
2025-05-29T15:21:58.213770Z node 6 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateInit] init complete for topic 'topic' partition {0, {0, 3}, 100000} generation 2 [6:201:2209] 2025-05-29T15:21:58.213776Z node 6 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateInit] SYNC INIT topic topic partitition {0, {0, 3}, 100000} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:21:58.213779Z node 6 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] Process pending events. Count 0 2025-05-29T15:21:58.213845Z node 6 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie -=[ 0wn3r ]=-|8b77e97c-8b1a5816-57644a48-b50392df_0 generated for partition {0, {0, 3}, 100000} topic 'topic' owner -=[ 0wn3r ]=- 2025-05-29T15:21:58.213862Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:35: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] TPartition::ReplyOwnerOk. Partition: {0, {0, 3}, 100000} 2025-05-29T15:21:58.213872Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 4 2025-05-29T15:21:58.213925Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037927937] server disconnected, pipe [6:195:2204] destroyed 2025-05-29T15:21:58.213932Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:138: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] TPartition::DropOwner. 2025-05-29T15:21:58.213949Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [6:213:2216], now have 1 active actors on pipe 2025-05-29T15:21:58.213983Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3222: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 176 RawX2: 25769805966 } TxId: 2 Data { Operations { PartitionId: 0 Path: "/topic" SupportivePartition: 100000 } Immediate: false WriteId { NodeId: 0 KeyId: 3 } } 2025-05-29T15:21:58.213987Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3248: [PQ: 72057594037927937] PartitionId {0, {0, 3}, 100000} for WriteId {0, 3} 2025-05-29T15:21:58.213991Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3337: [PQ: 72057594037927937] TxId 2 has WriteId {0, 3} 2025-05-29T15:21:58.213994Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3383: [PQ: 72057594037927937] distributed transaction 2025-05-29T15:21:58.214002Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3697: [PQ: 72057594037927937] Propose TxId 2, WriteId {0, 3} 2025-05-29T15:21:58.214005Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3709: [PQ: 72057594037927937] Link TxId 2 with WriteId {0, 3} 2025-05-29T15:21:58.214009Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-05-29T15:21:58.214012Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 2, State UNKNOWN 2025-05-29T15:21:58.214015Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3922: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-05-29T15:21:58.214021Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 2, NewState PREPARING 2025-05-29T15:21:58.214026Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3818: [PQ: 72057594037927937] write key for TxId 2 2025-05-29T15:21:58.214043Z node 6 :PERSQUEUE DEBUG: transaction.cpp:374: [TxId: 2] save tx TxId: 2 State: PREPARED MinStep: 230 MaxStep: 30230 Operations { PartitionId: 0 Path: "/topic" SupportivePartition: 100000 } Kind: 
KIND_DATA SourceActor { RawX1: 176 RawX2: 25769805966 } WriteId { NodeId: 0 KeyId: 3 } Partitions { } 2025-05-29T15:21:58.214055Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3635: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-05-29T15:21:58.214784Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1231: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-05-29T15:21:58.214799Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:21:58.214804Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 2, State PREPARING 2025-05-29T15:21:58.214809Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 2, NewState PREPARED 2025-05-29T15:21:58.214858Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [6:220:2222], now have 1 active actors on pipe 2025-05-29T15:21:58.214871Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'topic' requestId: 2025-05-29T15:21:58.214875Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-05-29T15:21:58.214881Z node 6 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'topic error: it is forbidden to write after a commit 2025-05-29T15:21:58.214890Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1424: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 2, Error it is forbidden to write after a commit 2025-05-29T15:21:58.214894Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:401: Answer error topic: 'topic' partition: 0 messageNo: 0 requestId: error: it is forbidden to write after a commit >> TNodeBrokerTest::ConfigPipelining [GOOD] >> TPartitionTests::ReserveSubDomainOutOfSpace [GOOD] >> GroupWriteTest::TwoTables [GOOD] >> KqpQueryPerf::Delete-QueryService-UseSink >> TPartitionTests::ShadowPartitionCounters >> KqpQueryPerf::Update+QueryService+UseSink >> KqpQueryPerf::IndexInsert-QueryService-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewBytesColumnActualization [GOOD] Test command err: Trying to start YDB, gRPC: 12799, MsgBus: 28343 2025-05-29T15:21:33.837532Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888159170701292:2196];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.837655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026bd/r3tmp/tmpe4od0Z/pdisk_1.dat 2025-05-29T15:21:34.023888Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:34.024317Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888159170701135:2079] 1748532093835518 != 1748532093835521 2025-05-29T15:21:34.035552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:34.035578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12799, node 1 2025-05-29T15:21:34.043417Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-05-29T15:21:34.051123Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:34.051131Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:34.051133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:34.051166Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28343 TClient is connected to server localhost:28343 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.214768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:21:34.217535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.228666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.256381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163465669112:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.256457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163465669112:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.256525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163465669112:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.256546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163465669112:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.256570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163465669112:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.256596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163465669112:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.256615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163465669112:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.256639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163465669112:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.256659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163465669112:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.256677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163465669112:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.256698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888163465669112:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.256724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163465669112:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.269445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163465669124:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.269474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163465669124:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.269532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163465669124:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.269555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163465669124:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.269591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163465669124:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.269613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163465669124:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.269629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163465669124:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.269652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163465669124:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.269672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163465669124:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.269691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163465669124:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.269707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888163465669124:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.269725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163465669124:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.278296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163465669121:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.278327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163465669121:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.278372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163465669121:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.278394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163465669121:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.278415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163465669121:2315];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fl ... 
027334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5192,5192;s_splitted=5288,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.027513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5188,5188;s_splitted=5280,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.027686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5188,5188;s_splitted=5288,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.027858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5192,5192;s_splitted=5288,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.028024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5188,5188;s_splitted=5280,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.028195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5188,5188;s_splitted=5280,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.028360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5188,5188;s_splitted=5280,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.028532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5188,5188;s_splitted=5288,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.028696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5192,5192;s_splitted=5288,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.028867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5188,5188;s_splitted=5280,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.029033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5188,5188;s_splitted=5288,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.029220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5188,5188;s_splitted=5280,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.029393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5188,5188;s_splitted=5288,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.029576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5188,5188;s_splitted=5280,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.029742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5188,5188;s_splitted=5288,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.029908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5192,5192;s_splitted=5288,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.030074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5188,5188;s_splitted=5280,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.030245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5192,5192;s_splitted=5288,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.030412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5188,5188;s_splitted=5288,5288;r_splitted=1269,1270; 2025-05-29T15:21:41.030601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;parent_id=[1:7509888163465669124:2316];path_id=3;entity_id=2;size=1070088;limit=10240;r_count=266648;fline=column_info.h:130;sizes=5296,5296;s_splitted=5392,5392;r_splitted=1296,1296; WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('new_column_ui64') RESULT: 2025-05-29T15:21:46.991776Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888215005277177:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.991803Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.991938Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888215005277189:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:46.992896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:21:46.996276Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888215005277191:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:21:47.063667Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888219300244538:2611] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:47.219858Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532107166, txId: 281474976715662] shutting down Wait changes: 0/0 2025-05-29T15:21:49.014770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:21:49.014786Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('new_column_ui64') RESULT: 0/0/0 2025-05-29T15:21:52.352618Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532112292, txId: 281474976715664] shutting down 2025-05-29T15:21:52.372343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.376002Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715666; 2025-05-29T15:21:52.376114Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715666; 2025-05-29T15:21:52.376236Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715666; 2025-05-29T15:21:52.376349Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715666; 2025-05-29T15:21:52.385256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.391724Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715667; 2025-05-29T15:21:52.391876Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715667; 2025-05-29T15:21:52.392022Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: 
tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715667; 2025-05-29T15:21:52.392087Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715667; waiting actualization: 3/0.000011s ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('new_column_ui64','_yql_delete_flag') RESULT: 2025-05-29T15:21:53.484063Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532113452, txId: 281474976715668] shutting down Wait changes: 0/0 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('new_column_ui64','_yql_delete_flag') RESULT: 0/0/0 2025-05-29T15:21:58.568411Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532118532, txId: 281474976715670] shutting down >> KqpQueryPerf::Update+QueryService-UseSink >> KqpQueryPerf::Upsert+QueryService-UseSink >> KqpQueryPerf::IndexReplace+QueryService-UseSink >> KqpQueryPerf::IndexUpsert-QueryService-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> GroupWriteTest::TwoTables [GOOD] Test command err: RandomSeed# 17985792271456077832 2025-05-29T15:21:58.083378Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058679074007041 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-05-29T15:21:58.083403Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058502699329537 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-05-29T15:21:58.086940Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-05-29T15:21:58.086957Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 going to send TEvBlock {TabletId# 72058679074007041 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-05-29T15:21:58.086972Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-05-29T15:21:58.086975Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 going to send TEvBlock {TabletId# 72058502699329537 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-05-29T15:21:58.087520Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-05-29T15:21:58.087538Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-05-29T15:21:58.099962Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 going to 
send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:21:58.099996Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:21:58.100876Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-05-29T15:21:58.100894Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-05-29T15:21:59.127813Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-05-29T15:21:59.127847Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:21:59.127856Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:21:59.127862Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-05-29T15:21:59.127868Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:21:59.127873Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:21:59.127878Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-05-29T15:21:59.127882Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:21:59.127888Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 
Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:21:59.139170Z 1 00h01m20.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [72058679074007041:2:23:0:11:2295992:3] barrier# {Soft# {Gen# 2 Step# 19} Hard# {Gen# 2 Step# 4294967295}} 2025-05-29T15:21:59.139529Z 4 00h01m20.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [72058679074007041:2:23:0:11:2295992:6] barrier# {Soft# {Gen# 2 Step# 19} Hard# {Gen# 2 Step# 4294967295}} 2025-05-29T15:21:59.139548Z 8 00h01m20.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [72058679074007041:2:23:0:11:2295992:2] barrier# {Soft# {Gen# 2 Step# 19} Hard# {Gen# 2 Step# 4294967295}} 2025-05-29T15:21:59.139558Z 3 00h01m20.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [72058679074007041:2:23:0:11:2295992:5] barrier# {Soft# {Gen# 2 Step# 19} Hard# {Gen# 2 Step# 4294967295}} 2025-05-29T15:21:59.139567Z 7 00h01m20.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [72058679074007041:2:23:0:11:2295992:1] barrier# {Soft# {Gen# 2 Step# 19} Hard# {Gen# 2 Step# 4294967295}} 2025-05-29T15:21:59.139578Z 2 00h01m20.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [72058679074007041:2:23:0:11:2295992:4] barrier# {Soft# {Gen# 2 Step# 19} Hard# {Gen# 2 Step# 4294967295}} 2025-05-29T15:21:59.140344Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2025-05-29T15:21:59.140361Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2025-05-29T15:21:59.140365Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2025-05-29T15:21:59.140370Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2025-05-29T15:21:59.140374Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} 2025-05-29T15:21:59.140378Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ConfigPipelining [GOOD] Test command err: 2025-05-29T15:21:53.066649Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 
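
The BS_HULLRECS CRIT records in the GroupWriteTest::TwoTables output above fire when a blob being written carries a (generation, step) pair already past the collect barrier the load actor advanced at shutdown. Below is a minimal sketch of that ordering check, assuming a plain lexicographic comparison on (generation, step) as the "Soft#/Hard# {Gen# .. Step# ..}" notation suggests; it is an illustration only, not the actual NKikimr implementation.

# Illustrative sketch (not the NKikimr code): the blob id printed as
# [tabletId:gen:step:channel:cookie:size:part] is compared with the
# barrier's {Gen# .. Step# ..} pair lexicographically.
def beyond_barrier(blob_gen: int, blob_step: int,
                   barrier_gen: int, barrier_step: int) -> bool:
    return (blob_gen, blob_step) > (barrier_gen, barrier_step)

# The CRIT record above: blob [72058679074007041:2:23:...] against the
# soft barrier {Gen# 2 Step# 19} -> (2, 23) > (2, 19), hence
# "putting blob beyond the barrier".
assert beyond_barrier(2, 23, 2, 19)
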
2025-05-29T15:21:53.066708Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.066868Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.066915Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.066944Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.066974Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.074939Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.075086Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.075153Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.075203Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.075260Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.075308Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.075405Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.075441Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.075638Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.075672Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.075699Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.075721Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.075753Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.075785Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.075841Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.103508Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.103768Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.103804Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.104940Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.104969Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 
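
The Deadline: 18446744073709.551615s carried by every TEvGetNode request above is 2^64 - 1 microseconds rendered as seconds, i.e. effectively "no deadline". When sifting through runs of NAMESERVICE records like these, a small parser helps; the record layout below is an assumption inferred from this log (it only covers the "node N :COMPONENT LEVEL:" shape, not every line format in this output), not a documented format.

# A minimal parser for records shaped like
# "<ISO timestamp> node <N> :<COMPONENT> <LEVEL>: <message>".
import re

RECORD = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
    r"node (?P<node>\d+) :(?P<component>\w+) (?P<level>\w+): (?P<msg>.*)"
)

line = ("2025-05-29T15:21:53.066649Z node 2 :NAMESERVICE DEBUG: "
        "dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode "
        "{ NodeId: 2 Deadline: 18446744073709.551615s }")
m = RECORD.match(line)
assert m and m.group("component") == "NAMESERVICE" and m.group("level") == "DEBUG"

# 18446744073709.551615 s == (2**64 - 1) microseconds, i.e. "no deadline".
assert 2**64 - 1 == 18446744073709551615
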
2025-05-29T15:21:53.105014Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.105044Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.105094Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.105120Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.105219Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.105239Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:21:53.105633Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.105749Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.105808Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.105854Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.105907Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.105934Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.106143Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.106341Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.107424Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.107451Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.107474Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.107493Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.108732Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.119813Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.119927Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.120002Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 
18446744073709.551615s } 2025-05-29T15:21:53.120935Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.121241Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.121487Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.121829Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.121950Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.121993Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.122079Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.122111Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.122149Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.122460Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.130797Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.131041Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.131089Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.131343Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.136039Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.143077Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:21:53.195270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:21:53.195303Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-05-29T15:21:53.226568Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:21:53.227133Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:21:53.227221Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:21:53.227467Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:21:53.228254Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:21:53.228282Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:21:53.228358Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:21:53.228377Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:21:53.228383Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:21:53.228399Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:21:53.228481Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:21:53.228488Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:21:53.228493Z node 1 :NODE_BROKER DEBU ... lessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:21:57.529837Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:21:57.529854Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:21:57.529871Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [9:636:2183], Recipient [9:557:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:57.529876Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:57.529890Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:21:57.529895Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-05-29T15:21:57.529908Z node 9 :NODE_BROKER ERROR: node_broker__register_node.cpp:39: Cannot register node host1:1001: ERROR_TEMP: 
No free node IDs 2025-05-29T15:21:57.529999Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [9:647:2228], Recipient [9:557:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:57.530019Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [9:648:2229], Recipient [9:557:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:57.530039Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039946, Sender [9:625:2213], Recipient [9:557:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSetConfigRequest { Config { BannedNodeIds { From: 1024 To: 1024 } BannedNodeIds { From: 1026 To: 1027 } } } 2025-05-29T15:21:57.530044Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:253: StateWork, processing event TEvNodeBroker::TEvSetConfigRequest 2025-05-29T15:21:57.530056Z node 9 :NODE_BROKER DEBUG: node_broker__update_config.cpp:53: TTxUpdateConfig Execute Config { BannedNodeIds { From: 1024 To: 1024 } BannedNodeIds { From: 1026 To: 1027 } } 2025-05-29T15:21:57.530071Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:1286: [DB] Update config in database config=BannedNodeIds { From: 1024 To: 1024 } BannedNodeIds { From: 1026 To: 1027 } 2025-05-29T15:21:57.530118Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [9:625:2213], Recipient [9:557:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:21:57.530122Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:21:57.530132Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:21:57.530156Z node 9 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [9:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:21:57.530168Z node 9 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [9:23:2070], cacheItem# { Subscriber: { Subscriber: [9:637:2224] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:21:57.530190Z node 9 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [9:650:2230], recipient# [9:649:2183], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: 
[72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:21:57.530205Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:21:57.530216Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:21:57.530230Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [9:649:2183], Recipient [9:557:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:57.530234Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:21:57.530239Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:21:57.530243Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-05-29T15:21:57.530270Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v2 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:21:57.530307Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v2 host1:1001 2025-05-29T15:21:57.530314Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2 2025-05-29T15:21:57.530318Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=2 ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR 2025-05-29T15:21:57.554112Z node 9 :NODE_BROKER DEBUG: node_broker__update_config.cpp:85: TTxUpdateConfig Complete 2025-05-29T15:21:57.554173Z node 9 :NODE_BROKER TRACE: node_broker__update_config.cpp:92: TTxUpdateConfig reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvSetConfigResponse { Status { Code: OK } } 2025-05-29T15:21:57.554189Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:21:57.554200Z node 9 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: ERROR_TEMP Reason: "No free node IDs" } 2025-05-29T15:21:57.554213Z node 9 :NODE_BROKER DEBUG: node_broker__update_config.cpp:85: TTxUpdateConfig Complete 2025-05-29T15:21:57.554220Z node 9 :NODE_BROKER TRACE: node_broker__update_config.cpp:92: TTxUpdateConfig reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvSetConfigResponse { Status { Code: OK } } 2025-05-29T15:21:57.554226Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:21:57.554233Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v2 host1:1001 2025-05-29T15:21:57.554242Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-05-29T15:21:57.554248Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v2 host1:1001 to epoch cache 2025-05-29T15:21:57.554269Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v2 to update nodes log 2025-05-29T15:21:57.554292Z node 9 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-0" } 2025-05-29T15:21:57.554443Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [9:654:2234], Recipient [9:557:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:57.554461Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [9:625:2213], Recipient [9:557:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:21:57.554471Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:21:57.554482Z node 9 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:21:57.554547Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [9:656:2236], Recipient [9:557:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:21:57.554565Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [9:625:2213], Recipient [9:557:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-05-29T15:21:57.554570Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:21:57.554589Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { 
DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-0" } } >> KqpQueryPerf::Delete+QueryService-UseSink >> IcebergClusterProcessor::ValidateDdlCreationForHadoopWithS3 [GOOD] >> IcebergClusterProcessor::ValidateConfigurationWithoutWarehouse [GOOD] >> IcebergClusterProcessor::ValidateDdlCreationForHiveWithS3 [GOOD] >> IcebergClusterProcessor::ValidateRiseErrors [GOOD] >> GroupWriteTest::Simple [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] >> EscapingBasics::HideSecretsOverEncloseSecretShouldWork [GOOD] >> EscapingBasics::EscapeStringShouldWork [GOOD] >> Cache::Test4 [GOOD] >> Cache::Test5 |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateConfigurationWithoutWarehouse [GOOD] >> EntityId::Distinct [GOOD] >> EntityId::MinId [GOOD] >> EntityId::MaxId [GOOD] >> BasicUsage::TWriteSession_WriteEncoded [FAIL] >> CompressExecutor::TestExecutorMemUsage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::BrokenCredentialsProvider [FAIL] Test command err: 2025-05-29T15:21:51.254839Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1748532111254831 2025-05-29T15:21:51.406088Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888236709311383:2211];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:51.406148Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000c8b/r3tmp/tmppiW1P6/pdisk_1.dat 2025-05-29T15:21:51.430966Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888234737556367:2221];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:51.431113Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:51.431133Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:21:51.438082Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:21:51.465345Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27209, node 1 2025-05-29T15:21:51.477030Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:21:51.477054Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:21:51.482990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/000c8b/r3tmp/yandexfc1PBu.tmp 2025-05-29T15:21:51.483002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/000c8b/r3tmp/yandexfc1PBu.tmp 2025-05-29T15:21:51.483049Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: 
/home/runner/.ya/build/build_root/ciyv/000c8b/r3tmp/yandexfc1PBu.tmp 2025-05-29T15:21:51.483062Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:51.485298Z INFO: TTestServer started on Port 18532 GrpcPort 27209 TClient is connected to server localhost:18532 PQClient connected to localhost:27209 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:21:51.501546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:51.501593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:51.503092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:51.530486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:51.535673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:51.535702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:51.536946Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:21:51.537327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... 2025-05-29T15:21:51.787123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888236709312130:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.787143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888236709312156:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.787157Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.787893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2025-05-29T15:21:51.793758Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888236709312159:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-05-29T15:21:51.844162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.855528Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888236709312289:2671] txid# 281474976720663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:51.871656Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888236709312308:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:51.872575Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDQ0MTVlN2EtYzdiYzcxNy00OTVkOTM2Ni1kNWZiZTUwMQ==, ActorId: [1:7509888236709312127:2333], ActorState: ExecuteState, TraceId: 01jwea5ad8bkcnm4gpyxttxv4w, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:51.871937Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888234737556555:2313], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:51.872547Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=Nzc1M2UzNzQtZWU0YjkzMTktYWFkMzM4ZDctNDQ2ODg5ZA==, ActorId: [2:7509888234737556465:2303], ActorState: ExecuteState, TraceId: 01jwea5afe6ez1ffx45vcg2wnt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:51.876132Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:51.875562Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:51.897373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.951964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:27209", true, true, 1000); 2025-05-29T15:21:52.078638Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888241004279902:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:52.079655Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGM4ZmRhZWItMTA3MzQyZjYtYzBmZTE5YzQtZTY5NWU3MDM=, ActorId: [1:7509888241004279899:2374], ActorState: ExecuteState, TraceId: 01jwea5anb81fjxwxgmvdgvynx, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13A5CA0C) NUnitTest::NPriv ... e 5 2025-05-29T15:21:54.589294Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/000c8b/r3tmp/yandexpAC9gq.tmp 2025-05-29T15:21:54.589307Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/000c8b/r3tmp/yandexpAC9gq.tmp 2025-05-29T15:21:54.589367Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/000c8b/r3tmp/yandexpAC9gq.tmp 2025-05-29T15:21:54.589421Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:54.594810Z INFO: TTestServer started on Port 9103 GrpcPort 6252 TClient is connected to server localhost:9103 PQClient connected to localhost:6252 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:21:54.618344Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:54.618373Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:54.621362Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:54.651383Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:54.663322Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976720657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:21:54.913754Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509888248296399884:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:54.913777Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509888248296399855:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:54.913813Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:54.915416Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-05-29T15:21:54.930187Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7509888248296399907:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-05-29T15:21:54.983423Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [5:7509888247532005725:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:54.984049Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=5&id=OWZjYWY4YWYtZWM1YWQ3MzYtZDhjMTY3NjUtZjliZGFmNDU=, ActorId: [5:7509888247532005684:2333], ActorState: ExecuteState, TraceId: 01jwea5dh2fx563pszbt52a0p2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:54.984198Z node 5 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:54.996826Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2025-05-29T15:21:55.022287Z node 6 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [6:7509888252591367231:2132] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:55.023528Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:55.027881Z node 6 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [6:7509888252591367246:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:55.027951Z node 6 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=6&id=YTczODE2ZDItNjkyMTE4Y2QtNjI1MDM4YjctZjc3OGVkZmI=, ActorId: [6:7509888248296399853:2305], ActorState: ExecuteState, TraceId: 01jwea5df02k75e2wfvmnhvs9r, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:55.028075Z node 6 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:55.116568Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:6252", true, true, 1000); 2025-05-29T15:21:55.154597Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [5:7509888251826973419:2375], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:55.154817Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=5&id=MWM1NDY5ZjktZDQxZjg4OC02NmZmMTRiOS02N2M0YjgyNA==, ActorId: [5:7509888251826973416:2373], ActorState: ExecuteState, TraceId: 01jwea5dp8drn7mdfjyjr5eanv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13A5CA0C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C148C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x138B3A44) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x138B29A8) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x138B1BF2) NPersQueue::SDKTestSetup::Start(bool, bool)+1450 (0x138A75DA) NPersQueue::SDKTestSetup::SDKTestSetup(TBasicString> const&, bool, TVector> const&, NActors::NLog::EPriority, unsigned int, unsigned long)+675 (0x138A50E3) void std::__y1::allocator::construct[abi:fe200000](NYdb::NPersQueue::NTests::TPersQueueYdbSdkTestSetup*, char const*&)+72 (0x139432F8) NYdb::NPersQueue::NTests::NTestSuiteBasicUsage::TTestCaseBrokenCredentialsProvider::Execute_(NUnitTest::TTestContext&)+155 (0x13932EEB) NYdb::NPersQueue::NTests::NTestSuiteBasicUsage::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13936577) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C1677E) NYdb::NPersQueue::NTests::NTestSuiteBasicUsage::TCurrentTest::Execute()+428 (0x13935F3C) NUnitTest::TTestFactory::Execute()+803 (0x13C16EF3) NUnitTest::RunMain(int, char**)+3021 (0x13C2883D) ??+0 (0x7FCFFAA10D90) __libc_start_main+128 (0x7FCFFAA10E40) _start+41 (0x129B5029) |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EscapeStringShouldWork [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> GroupWriteTest::Simple [GOOD] Test command err: RandomSeed# 17221820544695579039 2025-05-29T15:21:58.355286Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-05-29T15:21:58.359194Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-05-29T15:21:58.359213Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-05-29T15:21:58.359717Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-05-29T15:21:58.369233Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:21:58.369846Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-05-29T15:22:00.001172Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-05-29T15:22:00.001209Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 
PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:22:00.001218Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-05-29T15:22:00.001224Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:22:00.012644Z 1 00h01m30.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:33:0:11:3536693:4] barrier# {Soft# {Gen# 2 Step# 28} Hard# {Gen# 2 Step# 4294967295}} 2025-05-29T15:22:00.013178Z 3 00h01m30.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:33:0:11:3536693:6] barrier# {Soft# {Gen# 2 Step# 28} Hard# {Gen# 2 Step# 4294967295}} 2025-05-29T15:22:00.013195Z 8 00h01m30.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:33:0:11:3536693:3] barrier# {Soft# {Gen# 2 Step# 28} Hard# {Gen# 2 Step# 4294967295}} 2025-05-29T15:22:00.013208Z 7 00h01m30.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:33:0:11:3536693:2] barrier# {Soft# {Gen# 2 Step# 28} Hard# {Gen# 2 Step# 4294967295}} 2025-05-29T15:22:00.013220Z 2 00h01m30.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:33:0:11:3536693:5] barrier# {Soft# {Gen# 2 Step# 28} Hard# {Gen# 2 Step# 4294967295}} 2025-05-29T15:22:00.013232Z 6 00h01m30.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:33:0:11:3536693:1] barrier# {Soft# {Gen# 2 Step# 28} Hard# {Gen# 2 Step# 4294967295}} 2025-05-29T15:22:00.013996Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} 2025-05-29T15:22:00.014017Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK} ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateRiseErrors [GOOD] Test command err: test case: 1 test case: 2 test case: 3 test case: 4 test case: 5 test case: 6 test case: 7 test case: 8 test case: 9 >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] >> SplitterBasic::LimitExceed [GOOD] >> EntityId::Order |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> EntityId::MaxId [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-05-29T15:21:51.224074Z :SpecifyClustersExplicitly INFO: Random seed for debugging is 1748532111224068 2025-05-29T15:21:51.369730Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888235020599873:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:51.369768Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:51.375260Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888234603581206:2210];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:51.375345Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000ca2/r3tmp/tmp0mzJQ4/pdisk_1.dat 2025-05-29T15:21:51.416276Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:21:51.418074Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:21:51.453897Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13205, node 1 2025-05-29T15:21:51.470683Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/000ca2/r3tmp/yandexq2roMs.tmp 2025-05-29T15:21:51.470698Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/000ca2/r3tmp/yandexq2roMs.tmp 2025-05-29T15:21:51.470773Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/000ca2/r3tmp/yandexq2roMs.tmp 2025-05-29T15:21:51.470829Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:51.471007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:51.471029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:51.475772Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:51.477764Z INFO: TTestServer started on Port 4110 GrpcPort 13205 TClient is connected to server localhost:4110 PQClient connected to localhost:13205 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:21:51.515645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:51.515673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:51.517307Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:51.517819Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:51.518717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-05-29T15:21:51.845514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888235020600812:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.845547Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.845667Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888235020600839:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.846551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2025-05-29T15:21:51.857779Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888235020600841:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-05-29T15:21:51.903809Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888234603581382:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:51.903944Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=MTljNGM3ZTAtNTVjNWE0MzktODU3NGEwNzYtYmJjZWZiZWY=, ActorId: [2:7509888234603581341:2305], ActorState: ExecuteState, TraceId: 01jwea5afgfwmt7hap6h41spxh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:51.904575Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:51.904314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.934767Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888235020600998:2700] txid# 281474976720663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:51.938142Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888235020601017:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:51.938318Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjMzZWUxZWItNGMyODM2NDgtNzZjMjMzYTMtYzMyOTQ3ZTA=, ActorId: [1:7509888235020600809:2333], ActorState: ExecuteState, TraceId: 01jwea5af40za75ffzvx104nvq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:51.938434Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:51.987700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.040895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:13205", true, true, 1000); 2025-05-29T15:21:52.167129Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888239315568594:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:52.168038Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTU2MTliMzctMmE1YjQwY2MtODRjNmJmMGYtYjhlODg0NjY=, ActorId: [1:7509888239315568591:2374], ActorState: ExecuteState, TraceId: 01jwea5arp5v8kwrs7f12a0fx5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13A5CA0C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C148C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:53.860580Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=NTQzNDFjMDAtNWVkZDI5YmQtMzllMThiMS0zNzQyMjVlMQ==, ActorId: [3:7509888244813470720:2370], ActorState: ExecuteState, TraceId: 01jwea5cdsc4vesb75ypa5j1qn, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13A5CA0C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C148C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x138B3A44) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x138B29A8) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x138B1BF2) NPersQueue::SDKTestSetup::Start(bool, bool)+1450 (0x138A75DA) NPersQueue::SDKTestSetup::SDKTestSetup(TBasicString> const&, bool, TVector> const&, NActors::NLog::EPriority, unsigned int, unsigned long)+675 (0x138A50E3) NTestSuitePersQueueSdkReadSessionTest::TTestCaseStopResumeReadingData::Execute_(NUnitTest::TTestContext&)+127 (0x13868CBF) NTestSuitePersQueueSdkReadSessionTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x138A4B07) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C1677E) NTestSuitePersQueueSdkReadSessionTest::TCurrentTest::Execute()+481 (0x138A44A1) NUnitTest::TTestFactory::Execute()+803 (0x13C16EF3) NUnitTest::RunMain(int, char**)+3021 (0x13C2883D) ??+0 (0x7FC03A073D90) __libc_start_main+128 (0x7FC03A073E40) _start+41 (0x129B5029) 2025-05-29T15:21:54.411305Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:54.411313Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:54.411318Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:21:54.412111Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:21:54.412267Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:21:54.414021Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:54.414206Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-05-29T15:21:54.416232Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:54.416238Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:54.416242Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:21:54.419328Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:21:54.421021Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:21:54.421095Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:54.421187Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-05-29T15:21:54.421363Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:21:54.427087Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-05-29T15:21:54.427117Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-05-29T15:21:54.427172Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:21:54.427180Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-05-29T15:21:54.427185Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-05-29T15:21:54.427198Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-05-29T15:21:54.436663Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:54.436670Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:54.436675Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:21:54.436759Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:21:54.436898Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:21:54.436969Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:54.438392Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:21:54.438584Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:54.438638Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:21:54.438659Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:21:54.438668Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-05-29T15:21:54.438678Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). 
Partition stream id: 1 2025-05-29T15:21:54.439172Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:54.439176Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:54.439181Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:21:54.439266Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:21:54.439350Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:21:54.439371Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:54.439422Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:21:54.439514Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:21:54.439550Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:21:54.439581Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-05-29T15:21:54.439588Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:21:54.439596Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:21:54.439603Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-05-29T15:21:54.439624Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-05-29T15:21:54.439628Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-05-29T15:21:56.471125Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:56.471133Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:56.471138Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:21:56.471218Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:21:56.471354Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:21:56.471414Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:56.471607Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:21:56.471670Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:21:56.471692Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:21:56.471712Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes
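The DataDecompressionError reported earlier in this block ("(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)") is plain zlib behavior: the payload handed to the decompressor does not begin with a valid zlib header, so inflate() rejects it immediately. A self-contained reproduction against zlib itself (not the YDB SDK wrapper shown in the log) under that assumption:

    // build: g++ demo.cpp -lz
    #include <zlib.h>
    #include <cstdio>
    #include <cstring>

    int main() {
        // Bytes that are not a zlib stream; inflate() rejects them at the header.
        const unsigned char bad[] = "definitely not compressed";
        unsigned char out[64];

        z_stream zs;
        std::memset(&zs, 0, sizeof(zs));           // zalloc/zfree/opaque = Z_NULL
        if (inflateInit(&zs) != Z_OK) return 1;

        zs.next_in = const_cast<unsigned char*>(bad);
        zs.avail_in = sizeof(bad);
        zs.next_out = out;
        zs.avail_out = sizeof(out);

        const int rc = inflate(&zs, Z_FINISH);     // rc == Z_DATA_ERROR (-3)
        std::printf("inflate rc=%d msg=%s\n", rc, zs.msg ? zs.msg : "(null)");
        inflateEnd(&zs);
        return 0;
    }

Note that the read session does not abort on this: the trace above shows the broken message delivered with DataDecompressionError set while the following messages (2-2, 3-3) decompress and arrive normally, which appears to be the behavior ReadSessionImplTest::CommonHandler asserts.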
>> EscapingBasics::HideSecretsShouldWork [GOOD] >> IcebergClusterProcessor::ValidateConfigurationWithoutCatalog >> IcebergClusterProcessor::ValidateConfigurationWithoutCatalog [GOOD] >> EntityId::Order [GOOD] >> EscapingBasics::EncloseSecretShouldWork [GOOD] >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::LimitExceed [GOOD] >> TPartitionTests::ConflictingTxProceedAfterRollback [GOOD] >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches >> Cache::Test5 [GOOD] >> EntityId::CheckId [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateConfigurationWithoutCatalog [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] >> Cache::Test1 [GOOD] >> Cache::Test2 [GOOD] >> Cache::Test3 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> EntityId::CheckId [GOOD] >> TPartitionTests::NonConflictingCommitsBatch [GOOD] >> KqpQueryPerf::ComputeLength+QueryService >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink >> TPartitionTests::GetUsedStorage >> KqpQueryPerf::MultiRead+QueryService >> CompressExecutor::TestExecutorMemUsage [FAIL] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> Cache::Test3 [GOOD] >> KqpQueryPerf::Upsert-QueryService-UseSink >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink >> TPartitionTests::GetUsedStorage [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SecondaryIndexes Test command err: Trying to start YDB, gRPC: 1194, MsgBus: 62101 2025-05-29T15:21:51.652272Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888233233858719:2139];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:51.653256Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00242a/r3tmp/tmpIaCRJ8/pdisk_1.dat 2025-05-29T15:21:51.725259Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:51.725365Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888233233858618:2079] 1748532111651593 != 1748532111651596 TServer::EnableGrpc on GrpcPort 1194, node 1 2025-05-29T15:21:51.745218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:51.745236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:51.745238Z node 1 :NET_CLASSIFIER WARN:
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:51.745286Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62101 2025-05-29T15:21:51.793614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:51.793643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:51.795711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62101 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:51.817445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:51.823447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:51.895631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:51.935920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:51.956455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.059335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888237528827561:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.059362Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.120502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.135652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.155366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.171209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.191891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.220143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.240090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.267387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888237528828216:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.267419Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.267528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888237528828221:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.268429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:52.271977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:21:52.272048Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888237528828223:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:21:52.328380Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888237528828274:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } VERIFY failed (2025-05-29T15:21:52.485325Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:21:52.481812Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888237528828283:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:52.484594Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWFhNTVhMmUtYjg5MDFjNjUtYWU1M2Q0OTAtYjU3MzU1NWI=, ActorId: [1:7509888237528827534:2400], ActorState: ExecuteState, TraceId: 01jwea5awa9qngdy9gdddst8hx, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15E4ED75 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15E45D76 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x15FE8506 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x284B1732 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x284B1032 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x284D254C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x284D254C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x284D254C 8. /-S/util/thread/pool.h:71: Process @ 0x284D254C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15E566F9 10. /-S/util/thread/factory.h:15: Execute @ 0x15E550E9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15E550E9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15E5055C 13. ??:0: ?? @ 0x7F4B7B8DFAC2 14. ??:0: ?? @ 0x7F4B7B97184F Trying to start YDB, gRPC: 23390, MsgBus: 3463 2025-05-29T15:21:56.365040Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888257501194221:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:56.365070Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00242a/r3tmp/tmplRjkSo/pdisk_1.dat 2025-05-29T15:21:56.435415Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:56.435462Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888257501194180:2079] 1748532116364832 != 1748532116364835 TServer::EnableGrpc on GrpcPort 23390, node 1 2025-05-29T15:21:56.456161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:56.456180Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:56.456182Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:56.456231Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:56.466711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:56.466755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:56.467872Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3463 TClient is connected to server localhost:3463 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:56.527754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:56.532186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:56.543695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:56.616838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:56.680549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:56.695788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:56.839798Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888257501195827:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:56.839885Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:56.847604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:56.864399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:56.877793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:56.890404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:56.902523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:56.916555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:56.927760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:56.947379Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888257501196481:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:56.947401Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:56.947413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888257501196486:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:56.948218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:56.955078Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888257501196488:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:21:57.047988Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888261796163835:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } VERIFY failed (2025-05-29T15:21:57.287374Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:21:57.284430Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888261796163844:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:57.284855Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODFjYzRhM2MtMjI1MWRkNDctZmQ2MmZmMGEtOWM3MGQ4MjQ=, ActorId: [1:7509888257501195809:2401], ActorState: ExecuteState, TraceId: 01jwea5fej9p88t3hhr89v9be7, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15E4ED75 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15E45D76 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x15FE8506 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x284B1732 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x284B1032 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x284D254C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x284D254C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x284D254C 8. /-S/util/thread/pool.h:71: Process @ 0x284D254C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15E566F9 10. /-S/util/thread/factory.h:15: Execute @ 0x15E550E9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15E550E9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15E5055C 13. ??:0: ?? @ 0x7F5424995AC2 14. ??:0: ?? @ 0x7F5424A2784F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer Test command err: Trying to start YDB, gRPC: 16378, MsgBus: 16344 2025-05-29T15:21:51.938499Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888233784971344:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:51.938839Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002424/r3tmp/tmp7L51ox/pdisk_1.dat 2025-05-29T15:21:52.014863Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:52.018329Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888233784971318:2079] 1748532111937599 != 1748532111937602 TServer::EnableGrpc on GrpcPort 16378, node 1 2025-05-29T15:21:52.063428Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:52.063444Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:52.063447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:52.063498Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:52.083415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:52.083447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:52.084339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16344 TClient is connected to server localhost:16344 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:52.232977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.243033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:52.255352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.284245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.324395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.357174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.500381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888238079940244:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.500411Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.568318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.586891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.599922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.608884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.623941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.638623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.651682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.671885Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888238079940896:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.671917Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.672018Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888238079940901:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.672858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:52.679972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:21:52.680067Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888238079940903:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:21:52.746163Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888238079940954:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:52.849218Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888238079940970:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:52.850397Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmYyMWRiNmMtMWQxNjhkYmMtNzllMWEwMTktNDlhNzkzNGY=, ActorId: [1:7509888238079940216:2399], ActorState: ExecuteState, TraceId: 01jwea5b8z725j24dwfsz05eb2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:21:52.851143Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15E4ED75 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15E45D76 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x15FE8506 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x284B1732 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x284B1032 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x284D254C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x284D254C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x284D254C 8. /-S/util/thread/pool.h:71: Process @ 0x284D254C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15E566F9 10. /-S/util/thread/factory.h:15: Execute @ 0x15E550E9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15E550E9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15E5055C 13. ??:0: ?? @ 0x7FE10B34CAC2 14. ??:0: ?? @ 0x7FE10B3DE84F Trying to start YDB, gRPC: 18627, MsgBus: 11272 2025-05-29T15:21:57.202138Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888261414356368:2218];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:57.204374Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002424/r3tmp/tmpLsVlnl/pdisk_1.dat 2025-05-29T15:21:57.266842Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:57.267114Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888261414356164:2079] 1748532117147653 != 1748532117147656 TServer::EnableGrpc on GrpcPort 18627, node 1 2025-05-29T15:21:57.314723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:57.314735Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:57.314750Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:57.314797Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:57.316046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:57.316091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:57.316975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11272 TClient is connected to server localhost:11272 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:57.443095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.448466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:57.458230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.480274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.508534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.522703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.690290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888261414357801:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:57.690321Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:57.745757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.761645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.776765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.791023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.804131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.819427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.834194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.857349Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888261414358454:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:57.857374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:57.857524Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888261414358459:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:57.858333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:57.866857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:21:57.867024Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888261414358461:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:21:57.949269Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888261414358512:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:58.062313Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888261414358521:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:58.062884Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmRiOWEyM2MtMmI5YzMzNS01MTNlNjY2OC03ODEwMWIyMw==, ActorId: [1:7509888261414357775:2401], ActorState: ExecuteState, TraceId: 01jwea5gazfey4han8z4x4e5es, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:21:58.063542Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15E4ED75 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15E45D76 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x15FE8506 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x284B1732 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x284B1032 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x284D254C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x284D254C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x284D254C 8. /-S/util/thread/pool.h:71: Process @ 0x284D254C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15E566F9 10. /-S/util/thread/factory.h:15: Execute @ 0x15E550E9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15E550E9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15E5055C 13. ??:0: ?? @ 0x7F036CF69AC2 14. ??:0: ?? @ 0x7F036CFFB84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamOperationTimeout Test command err: Trying to start YDB, gRPC: 14852, MsgBus: 15328 2025-05-29T15:21:51.944728Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888236157306768:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:51.945410Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002420/r3tmp/tmpEcl7Ng/pdisk_1.dat 2025-05-29T15:21:52.046698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:52.046730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:52.049370Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:52.052777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14852, node 1 2025-05-29T15:21:52.088073Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:52.088091Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:52.088094Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:52.088145Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15328 TClient is connected to server localhost:15328 WaitRootIsUp 'Root'... 
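Frames 5-9 of each trace share one execution shape: the setup work runs through library/cpp/threading/future's Async on a util thread pool, so any assertion inside it fires off the unittest thread. A self-contained sketch of that shape, with a toy lambda standing in for the actual CreateSampleTables body:

#include <library/cpp/threading/future/async.h>
#include <util/thread/pool.h>

int main() {
    TThreadPool pool; // the pool machinery seen at frames 8-12
    pool.Start(1);

    // NThreading::Async schedules the lambda on a pool thread and fulfills
    // the promise via SetValue (frames 6-7 in the traces above).
    NThreading::TFuture<int> future = NThreading::Async([] {
        return 42; // anything raised here runs on a non-unittest thread
    }, pool);

    const int value = future.GetValueSync();
    pool.Stop();
    return value == 42 ? 0 : 1;
}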
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:52.247040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.264339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.324454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.363908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.405636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.671505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888240452275506:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.671546Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.733262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.751579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.770388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.795896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.814942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.870969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.886682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.907636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888240452276166:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.907666Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.907761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888240452276171:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.908845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:52.912128Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888240452276173:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:21:53.007306Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888244747243529:3401] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:53.100431Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888244747243542:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:53.101317Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTZhYjdlNDQtNWZhZmQzNGItNjAwYmMyNzYtNTk1ZDA3MjU=, ActorId: [1:7509888240452275479:2400], ActorState: ExecuteState, TraceId: 01jwea5bgbbprr6kh69421azk7, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:21:53.104212Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15E4ED75 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15E45D76 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x15FE8506 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x284B1732 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x284B1032 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x284D254C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x284D254C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x284D254C 8. /-S/util/thread/pool.h:71: Process @ 0x284D254C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15E566F9 10. /-S/util/thread/factory.h:15: Execute @ 0x15E550E9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15E550E9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15E5055C 13. ??:0: ?? @ 0x7F7F21E02AC2 14. ??:0: ?? @ 0x7F7F21E9484F Trying to start YDB, gRPC: 11358, MsgBus: 26760 2025-05-29T15:21:57.073493Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888259676818366:2160];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:57.074104Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002420/r3tmp/tmpyATxLa/pdisk_1.dat 2025-05-29T15:21:57.180891Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11358, node 1 2025-05-29T15:21:57.181866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:57.181902Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:57.185549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:57.214117Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:57.214130Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:57.214132Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:57.214172Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26760 TClient is connected to server localhost:26760 WaitRootIsUp 'Root'... 
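The recurring "Resource pool default not found" WARN blocks are a benign startup race, separate from the compile failure: fetch "default" -> NOT_FOUND -> create it -> "Scheduled retry ... doublechecking" -> a concurrent creator makes the create come back as "path exist, request accepts it", which is accepted. A minimal sketch of that fetch-create-doublecheck pattern; the interface below is invented for illustration and is not ydb's workload-service API:

#include <string>

// Stand-ins for the scheme/pool RPC outcomes visible in the log.
enum class EStatus { Success, NotFound, AlreadyExists };

struct IPoolClient {
    virtual ~IPoolClient() = default;
    virtual EStatus FetchPool(const std::string& name) = 0;   // TPoolFetcherActor's role
    virtual EStatus CreatePool(const std::string& name) = 0;  // TPoolCreatorActor's role
};

// AlreadyExists ("path exist, request accepts it") counts as success because
// another session may have created the default pool concurrently.
inline bool EnsureDefaultPool(IPoolClient& client) {
    if (client.FetchPool("default") == EStatus::Success) {
        return true;
    }
    const EStatus created = client.CreatePool("default");
    if (created == EStatus::Success || created == EStatus::AlreadyExists) {
        // "Scheduled retry ... doublechecking": re-read to confirm visibility.
        return client.FetchPool("default") == EStatus::Success;
    }
    return false;
}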
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:57.327603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.339038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.435439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.480263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.564931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:21:57.592289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.668483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888259676819863:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:57.668523Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:57.739085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.802516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.816667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.833475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.847042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.874540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.888218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.906727Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888259676820518:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:57.906790Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:57.906804Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888259676820523:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:57.907744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:57.913621Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888259676820525:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:21:58.002862Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888263971787872:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:58.094099Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888263971787881:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:58.094252Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTc1NmQ5M2MtNGViMjEzMzItZTc3NWEzYzYtN2UwZmE2ZA==, ActorId: [1:7509888259676819860:2401], ActorState: ExecuteState, TraceId: 01jwea5gcj81ygz7p789m8wc2q, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:21:58.094942Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15E4ED75 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15E45D76 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x15FE8506 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x284B1732 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x284B1032 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x284D254C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x284D254C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x284D254C 8. /-S/util/thread/pool.h:71: Process @ 0x284D254C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15E566F9 10. /-S/util/thread/factory.h:15: Execute @ 0x15E550E9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15E550E9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15E5055C 13. ??:0: ?? @ 0x7F5CE9B93AC2 14. ??:0: ?? @ 0x7F5CE9C2584F >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryDisable Test command err: Trying to start YDB, gRPC: 15365, MsgBus: 9838 2025-05-29T15:21:51.941289Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888236666595100:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:51.941474Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002413/r3tmp/tmpQaR7Sv/pdisk_1.dat 2025-05-29T15:21:52.025998Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15365, node 1 2025-05-29T15:21:52.042977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:52.043004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:52.043659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:52.054957Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:52.054969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:52.054971Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:52.055029Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9838 TClient is connected to server localhost:9838 WaitRootIsUp 'Root'... 
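Every compile failure above carries the same issue pair: "Execution, code: 1060" wrapping "yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1". That message is consistent with a bounds check on expression-node child access tripping during query compilation; a toy illustration of such a guard follows (names invented, not the real yql_expr.h):

#include <memory>
#include <stdexcept>
#include <vector>

struct TExprNodeSketch {
    std::vector<std::shared_ptr<TExprNodeSketch>> Children;

    // A guard like this, hit with an index past Children.size(), would
    // surface as a Fatal issue and fail compilation with INTERNAL_ERROR,
    // matching the ReplyQueryCompileError lines in the log.
    const TExprNodeSketch& Child(size_t index) const {
        if (index >= Children.size()) {
            throw std::out_of_range("index out of range");
        }
        return *Children[index];
    }
};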
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:52.170632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.174163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:52.182435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.266053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:21:52.343726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.377334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.485006Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888240961563871:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.485029Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.536256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.557523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.573253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.636931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.646350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.659256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.671892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.695793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888240961564526:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.695816Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.695944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888240961564531:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.696622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:52.699299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:21:52.699373Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888240961564533:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:21:52.752035Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888240961564584:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:52.865654Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888240961564600:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:52.866824Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjVmMjNmNjUtZDhlYjc5ZjMtNzY0Y2M1YWMtZDk5NzhkNTc=, ActorId: [1:7509888240961563853:2401], ActorState: ExecuteState, TraceId: 01jwea5b9pactcd44tzjgq7w9r, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:21:52.871122Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15E4ED75 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15E45D76 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x15FE8506 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x284B1732 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x284B1032 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x284D254C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x284D254C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x284D254C 8. /-S/util/thread/pool.h:71: Process @ 0x284D254C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15E566F9 10. /-S/util/thread/factory.h:15: Execute @ 0x15E550E9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15E550E9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15E5055C 13. ??:0: ?? @ 0x7FC348918AC2 14. ??:0: ?? @ 0x7FC3489AA84F Trying to start YDB, gRPC: 23135, MsgBus: 15541 2025-05-29T15:21:57.074685Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888260440596208:2206];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:57.074812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002413/r3tmp/tmp9PbLUM/pdisk_1.dat 2025-05-29T15:21:57.154152Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23135, node 1 2025-05-29T15:21:57.174913Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:57.174927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:57.174929Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:57.174966Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:57.179062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:57.179095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:57.183060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15541 TClient is connected to server localhost:15541 WaitRootIsUp 'Root'... 
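Frames 0-1 (util/system/yassert.cpp: InternalPanicImpl, Panic) are the terminal escalation: once the failed assertion cannot be routed to the test harness, the process prints the "VERIFY failed" message and aborts, taking the whole test binary with it. A stripped-down sketch of that escalation path (illustrative macro, not util's exact implementation):

#include <cstdio>
#include <cstdlib>

// Toy VERIFY: report and abort, mirroring the Panic -> InternalPanicImpl
// sequence at the top of every trace in this log.
#define SKETCH_VERIFY(cond, msg)                                      \
    do {                                                              \
        if (!(cond)) {                                                \
            std::fprintf(stderr, "VERIFY failed: %s at %s:%d\n",      \
                         (msg), __FILE__, __LINE__);                  \
            std::abort();                                             \
        }                                                             \
    } while (false)

int main() {
    SKETCH_VERIFY(2 + 2 == 4, "sanity check"); // passes; no abort
    return 0;
}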
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:57.297797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.300915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:57.315799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.390063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.422636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.436742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.590363Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888260440597672:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:57.590391Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:57.659283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.681342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.695563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.710408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.731499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.761435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.801801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.874220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888260440598332:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:57.874242Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:57.875999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888260440598337:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:57.876874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:57.880470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:21:57.880543Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888260440598339:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:21:57.957640Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888260440598390:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:58.071543Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888260440598399:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:58.075204Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2JjOTZjZWUtOWEwMmIyYjItNTU5MjZhYmYtOTk0NWI2NTE=, ActorId: [1:7509888260440597646:2401], ActorState: ExecuteState, TraceId: 01jwea5gbhev00cpp4012zm5qf, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:21:58.076120Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15E4ED75 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15E45D76 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x15FE8506 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x284B1732 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x284B1032 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x284D254C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x284D254C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x284D254C 8. /-S/util/thread/pool.h:71: Process @ 0x284D254C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15E566F9 10. /-S/util/thread/factory.h:15: Execute @ 0x15E550E9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15E550E9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15E5055C 13. ??:0: ?? @ 0x7EFF162F0AC2 14. ??:0: ?? @ 0x7EFF1638284F |59.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |59.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |59.1%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |59.1%| [TA] $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryPerf::IdxLookupJoin+QueryService >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService >> TPartitionTests::ShadowPartitionCounters [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExprYsonAndType Test command err: Trying to start YDB, gRPC: 10199, MsgBus: 9626 2025-05-29T15:21:52.251014Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888239261151898:2081];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:52.251250Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0023ff/r3tmp/tmpgN1ywS/pdisk_1.dat 2025-05-29T15:21:52.398838Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10199, node 1 2025-05-29T15:21:52.442914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:52.442927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:52.442929Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:52.442973Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9626 TClient is connected to server localhost:9626 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:21:52.586936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:52.586960Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:52.591513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:52.591924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.601190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:52.607639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.684218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.708913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.723824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.887311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888239261153477:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:52.887370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.929609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.943673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.953774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.968817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.982868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:53.000479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:53.060245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:53.077592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888243556121428:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:53.077619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:53.077799Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888243556121433:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:53.078720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:21:53.084209Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888243556121435:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:21:53.158573Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888243556121486:3400] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:21:53.253194Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888243556121495:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:21:53.253762Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzRkMzExLTFlOGFmZDU3LTc4MmFhYTQ0LTRiNWJmYTY=, ActorId: [1:7509888239261153459:2401], ActorState: ExecuteState, TraceId: 01jwea5bnn6dnsxzrwa10n7p0q, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:21:53.259018Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15E4ED75
1. /-S/util/system/yassert.cpp:55: Panic @ 0x15E45D76
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x15FE8506
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x284B1732
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x284B1032
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x284D254C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x284D254C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x284D254C
8. /-S/util/thread/pool.h:71: Process @ 0x284D254C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15E566F9
10. /-S/util/thread/factory.h:15: Execute @ 0x15E550E9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15E550E9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15E5055C
13. ??:0: ?? @ 0x7F31FDE17AC2
14. ??:0: ?? @ 0x7F31FDEA984F
Trying to start YDB, gRPC: 1183, MsgBus: 28531
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0023ff/r3tmp/tmp7mBs05/pdisk_1.dat
2025-05-29T15:21:57.522864Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-05-29T15:21:57.576513Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 1183, node 1
2025-05-29T15:21:57.607263Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:21:57.607279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:21:57.607280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:21:57.607321Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
2025-05-29T15:21:57.611616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:21:57.611655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:21:57.614915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:28531
TClient is connected to server localhost:28531
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:57.772526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.782992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:57.799476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:21:57.828870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.870463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.885540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:58.013610Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888265524920057:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:58.013654Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:58.065452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:58.073800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:58.082475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:58.097219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:58.110727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:58.124950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:58.139431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:58.155381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888265524920710:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:58.155419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888265524920715:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:58.155418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:58.156142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:21:58.158545Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888265524920717:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:21:58.214986Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888265524920768:3400] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:21:58.339165Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888265524920784:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:21:58.341512Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTFkYThlMmMtZWE5M2FhMDYtNjlmN2YwYTItZTFjMTQyZmY=, ActorId: [1:7509888261229952743:2401], ActorState: ExecuteState, TraceId: 01jwea5gmb4k8xp2rq8bqmbp4n, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:21:58.346842Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15E4ED75 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15E45D76 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x15FE8506 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x284B1732 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x284B1032 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x284D254C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x284D254C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x284D254C 8. /-S/util/thread/pool.h:71: Process @ 0x284D254C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15E566F9 10. /-S/util/thread/factory.h:15: Execute @ 0x15E550E9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15E550E9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15E5055C 13. ??:0: ?? @ 0x7F02B32A3AC2 14. ??:0: ?? @ 0x7F02B333584F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetUsedStorage [GOOD] Test command err: 2025-05-29T15:21:56.053202Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:21:56.053230Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:21:56.057298Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:180:2194] 2025-05-29T15:21:56.057645Z node 1 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-05-29T15:21:56.000000Z 2025-05-29T15:21:56.057656Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:180:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\240\364\212\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\240\364\212\345\3612" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\240\364\212\345\3612" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\263\222\004" StorageChannel: INLINE } 2025-05-29T15:21:56.709834Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:21:56.709868Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:21:56.713433Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:21:56.713505Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:21:56.713578Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [2:179:2193] 2025-05-29T15:21:56.713758Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:55: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Initializing completed. 2025-05-29T15:21:56.713767Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [2:179:2193] 2025-05-29T15:21:56.713775Z node 2 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition {2, {0, 10}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:21:56.713782Z node 2 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Process pending events. Count 0 2025-05-29T15:21:56.713828Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|88255480-86e45854-8d621b93-f5fedcc2_0 generated for partition {2, {0, 10}, 100001} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2025-05-29T15:21:56.713850Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:35: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: {2, {0, 10}, 100001} 2025-05-29T15:21:56.713920Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 2 partNo 0 2025-05-29T15:21:56.713952Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2025-05-29T15:21:56.713989Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1623: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 D0000100001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 2025-05-29T15:21:56.714008Z node 2 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:21:56.714012Z node 2 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- delete ---------------- 2025-05-29T15:21:56.714015Z node 2 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] [X0000100001, X0000100002) 2025-05-29T15:21:56.714018Z node 2 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- write ----------------- 2025-05-29T15:21:56.714020Z node 2 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] M0000100001pSourceId 2025-05-29T15:21:56.714026Z node 2 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] D0000100001_00000000000000000100_00000_0000000001_00000| 2025-05-29T15:21:56.714028Z node 2 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] J0000100001 2025-05-29T15:21:56.714031Z node 2 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- rename ---------------- 2025-05-29T15:21:56.714034Z node 2 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] =========================== 2025-05-29T15:21:56.767304Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:21:56.767359Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2025-05-29T15:21:56.767380Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 2, partNo: 0, Offset: 100 is stored on disk 2025-05-29T15:21:57.067805Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 4 partNo 0 2025-05-29T15:21:57.067882Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 4 partNo 0 FormedBlobsCount 0 NewHead: Offset 101 PartNo 0 PackedSize 118 count 1 nextOffset 102 batches 1 2025-05-29T15:21:57.067962Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1623: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 101,1 HeadOffset 100 endOffset 101 curOffset 102 D0000100001_00000000000000000101_00000_0000000001_00000| size 104 WTime 1129 2025-05-29T15:21:57.067995Z node 2 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:21:57.068002Z node 2 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- delete ---------------- 2025-05-29T15:21:57.068007Z node 2 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] [X0000100001, X0000100002) 2025-05-29T15:21:57.068013Z node 2 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- write ----------------- 2025-05-29T15:21:57.068018Z node 2 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] M0000100001pSourceId 2025-05-29T15:21:57.068022Z node 2 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] D0000100001_00000000000000000101_00000_0000000001_00000| 2025-05-29T15:21:57.068026Z node 2 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] J0000100001 2025-05-29T15:21:57.068030Z node 2 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- rename ---------------- 2025-05-29T15:21:57.068035Z node 2 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] =========================== 2025-05-29T15:21:57.109224Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:21:57.109275Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2025-05-29T15:21:57.109296Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 4, partNo: 0, Offset: 101 is stored on disk 2025-05-29T15:21:57.360023Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 6 partNo 0 2025-05-29T15:21:57.360100Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 6 partNo 0 FormedBlobsCount 0 NewHead: Offset 102 PartNo 0 PackedSize 118 count 1 nextOffset 103 batches 1 2025-05-29T15:21:57.360167Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1623: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 102,1 HeadOffset 100 endOffset 102 curOffset 103 D0000100001_00000000000000000102_00000_0000000001_00000| size 104 WTime 2130 2025-05-29T15:21:57.360203Z node 2 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] === DumpKeyValue ... ep Got KV request 2025-05-29T15:21:58.928769Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-05-29T15:21:58.928790Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request Got KV request 2025-05-29T15:21:58.928995Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-05-29T15:21:58.929046Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:621: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-05-29T15:21:58.929057Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-05-29T15:21:58.929061Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:21:58.929069Z node 4 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-05-29T15:21:58.000000Z 2025-05-29T15:21:58.929074Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:55: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 
2025-05-29T15:21:58.929081Z node 4 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:179:2193] 2025-05-29T15:21:58.929091Z node 4 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-05-29T15:21:58.929115Z node 4 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:21:58.929136Z node 4 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-2 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:21:58.929143Z node 4 :PERSQUEUE DEBUG: partition_read.cpp:821: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-2 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-05-29T15:21:58.929151Z node 4 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-05-29T15:21:58.929159Z node 4 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 1 startOffset 0 ReadingTimestamp 1 rrg 0 2025-05-29T15:21:58.929233Z node 4 :PERSQUEUE DEBUG: partition_read.cpp:736: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-2 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-05-29T15:21:58.929246Z node 4 :PERSQUEUE DEBUG: partition_read.cpp:936: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-05-29T15:21:58.929253Z node 4 :PERSQUEUE DEBUG: partition_read.cpp:960: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request. 
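Aside on the keys being dumped in these DumpKeyValueRequest blocks: together with data keys like d0000000000_00000000000000000000_00000_0000000050_00000, they expose the tablet's KV naming scheme — a one-character type prefix ('i'/'m'/'I'/'J'/'X' for metadata, 'd'/'D' for data blobs), a zero-padded partition id, and for blobs the start offset, part number, message count and internal-part count. A small illustrative helper, with the field widths read off the dumped examples rather than taken from the YDB sources:

```cpp
// Illustration only: reproduces the key shapes visible in the dumps above,
// e.g. "i0000000003", "m0000000003cclient", and
// "d0000000000_00000000000000000000_00000_0000000050_00000".
// Field widths are inferred from those examples; this is not the real
// ydb/core/persqueue key builder.
#include <cstdint>
#include <cstdio>
#include <string>

// 'i' = partition meta, 'm' = per-consumer meta; in the dump the consumer
// name follows a 'c' or 'u' marker (two flavors of consumer state).
std::string MetaKey(char type, uint32_t partition, const std::string& suffix = {}) {
    char buf[16];
    std::snprintf(buf, sizeof(buf), "%c%010u", type, partition); // "i0000000003"
    return buf + suffix;                                         // "m0000000003cclient"
}

// Data blob key: partition, first offset, part number, message count,
// internal parts. The capitalized 'D' variants in the dump appear to be the
// same layout for a different blob class (assumption).
std::string DataBlobKey(uint32_t partition, uint64_t offset, uint16_t partNo,
                        uint32_t count, uint16_t internalParts) {
    char buf[64];
    std::snprintf(buf, sizeof(buf), "d%010u_%020llu_%05u_%010u_%05u",
                  partition,
                  static_cast<unsigned long long>(offset),
                  static_cast<unsigned>(partNo),
                  count,
                  static_cast<unsigned>(internalParts));
    return buf; // "d0000000000_00000000000000000000_00000_0000000050_00000"
}
```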
2025-05-29T15:21:59.286998Z node 4 :PERSQUEUE DEBUG: partition.cpp:3267: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 session is set to 0 (startOffset 0) session session-client-0 2025-05-29T15:21:59.287063Z node 4 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:21:59.287069Z node 4 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:21:59.287074Z node 4 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:21:59.287079Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:21:59.287083Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-05-29T15:21:59.287086Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-05-29T15:21:59.287090Z node 4 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:21:59.287094Z node 4 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000(\360\203\213\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient-0" Value: "\010\000\020\001\030\001\"\020session-client-0(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient-0" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-client-0" StorageChannel: INLINE } 2025-05-29T15:21:59.319517Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Create distr tx with id = 0 and act no: 1 Created Tx with id 3 as act# 3 Created Tx with id 4 as act# 4 2025-05-29T15:22:00.331081Z node 4 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-05-29T15:22:00.331126Z node 4 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 3 2025-05-29T15:22:00.331132Z node 4 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-05-29T15:22:01.667244Z node 4 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 6 Wait batch completion Wait kv request 2025-05-29T15:22:01.667354Z node 4 :PERSQUEUE DEBUG: partition.cpp:3267: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 5 (startOffset 0) session session-client-0 2025-05-29T15:22:01.667367Z node 4 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2025-05-29T15:22:01.667373Z node 4 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 
72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 3 2025-05-29T15:22:01.667380Z node 4 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 4 2025-05-29T15:22:01.667385Z node 4 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 4 2025-05-29T15:22:01.667391Z node 4 :PERSQUEUE DEBUG: partition.cpp:3267: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 10 (startOffset 0) session session-client-0 2025-05-29T15:22:01.670399Z node 4 :PERSQUEUE DEBUG: partition.cpp:3322: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=incorrect offset range (gap) 2025-05-29T15:22:01.670501Z node 4 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:01.670509Z node 4 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:01.670514Z node 4 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:01.670518Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:01.670522Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-05-29T15:22:01.670526Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-2 2025-05-29T15:22:01.670529Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-2 2025-05-29T15:22:01.670533Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-05-29T15:22:01.670537Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-05-29T15:22:01.670540Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-05-29T15:22:01.670544Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-05-29T15:22:01.670547Z node 4 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:01.670552Z node 4 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait tx committed for tx 3 2025-05-29T15:22:01.681222Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait tx committed for tx 4 Wait immediate tx complete 6 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 6 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } 2025-05-29T15:22:01.846058Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:01.846084Z node 5 
:PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:01.849521Z node 5 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [5:178:2192] 2025-05-29T15:22:01.849822Z node 5 :PERSQUEUE INFO: partition_init.cpp:774: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:22:01.849835Z node 5 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [5:178:2192] >> TPartitionTests::ShadowPartitionCountersFirstClass ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptExplain Test command err: Trying to start YDB, gRPC: 3597, MsgBus: 9308 2025-05-29T15:21:51.958992Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888235528215766:2172];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002403/r3tmp/tmpXBIZNV/pdisk_1.dat 2025-05-29T15:21:52.007177Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:52.035127Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3597, node 1 2025-05-29T15:21:52.065269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:52.065282Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:52.065284Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:52.065329Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9308 2025-05-29T15:21:52.100718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:52.100749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:52.102301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9308 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:52.168240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.171261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:52.207874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:21:52.247852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.300132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.367621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:52.675316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888239823184578:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:52.675348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:52.741653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.763294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.782019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.811758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.831616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.847945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.863886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.883827Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888239823185230:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:52.884009Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888239823185235:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:52.884835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:21:52.887558Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888239823185237:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:21:52.957027Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888239823185288:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:21:53.061959Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888239823185297:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:21:53.062834Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2EwNThkMjUtZGQ3ODliN2QtYmFlMWQzYTEtMTBmMTM1Mjg=, ActorId: [1:7509888239823184560:2401], ActorState: ExecuteState, TraceId: 01jwea5bfk7f8mn68gzt12kzer, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:21:53.064265Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15E4ED75 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15E45D76 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x15FE8506 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x284B1732 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x284B1032 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x284D254C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x284D254C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x284D254C 8. /-S/util/thread/pool.h:71: Process @ 0x284D254C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15E566F9 10. /-S/util/thread/factory.h:15: Execute @ 0x15E550E9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15E550E9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15E5055C 13. ??:0: ?? @ 0x7F5701587AC2 14. ??:0: ?? @ 0x7F570161984F Trying to start YDB, gRPC: 3594, MsgBus: 21195 2025-05-29T15:21:57.082488Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888258715998780:2087];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:57.082776Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002403/r3tmp/tmppS3TGA/pdisk_1.dat 2025-05-29T15:21:57.250906Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3594, node 1 2025-05-29T15:21:57.269293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:57.269318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:57.269930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:57.289762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:57.289774Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:57.289777Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:57.289815Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21195 TClient is connected to server localhost:21195 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:57.499489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.507183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:57.675267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.771431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:21:57.807687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:57.834946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.902020Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888258716000359:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:21:57.902055Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:57.943090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.952462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.959511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.970331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.985061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:57.999288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:58.012906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:58.031162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888263010968308:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:58.031191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888263010968313:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:58.031190Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:58.031949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:58.039529Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888263010968315:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:21:58.119267Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888263010968375:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:58.219856Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888263010968391:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:58.219984Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Zjk5ZjUzMmEtNjUyMDJlYWMtZjUzNTQ1NzctNjkxZTVhNTc=, ActorId: [1:7509888258716000356:2401], ActorState: ExecuteState, TraceId: 01jwea5gge7yjww0d0er826trm, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:21:58.220733Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15E4ED75 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15E45D76 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x15FE8506 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x284B1732 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x284B1032 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x284D254C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x284D254C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x284D254C 8. /-S/util/thread/pool.h:71: Process @ 0x284D254C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15E566F9 10. /-S/util/thread/factory.h:15: Execute @ 0x15E550E9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15E550E9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15E5055C 13. ??:0: ?? @ 0x7F72D5472AC2 14. ??:0: ?? @ 0x7F72D550484F |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> KqpQueryPerf::IndexInsert+QueryService-UseSink >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink >> KqpQueryPerf::UpdateOn+QueryService+UseSink >> KqpQueryPerf::AggregateToScalar+QueryService >> KqpWorkload::KV >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] >> KqpQueryPerf::RangeLimitRead-QueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService-UseSink Test command err: Trying to start YDB, gRPC: 19624, MsgBus: 21194 2025-05-29T15:21:58.889023Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888265138178090:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010a1/r3tmp/tmpSZz1lH/pdisk_1.dat 2025-05-29T15:21:58.920468Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:58.944735Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888265138177881:2079] 1748532118878301 != 1748532118878304 2025-05-29T15:21:58.947901Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19624, node 1 2025-05-29T15:21:58.960027Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:58.960041Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:58.960043Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:58.960082Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21194 TClient is connected to server localhost:21194 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:21:59.020139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:59.020166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:59.021161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:59.025604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.030467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:21:59.095420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.123690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.138440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.271047Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888269433146815:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.271107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.335796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.348148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.362619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.378925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.447055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.466590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.527847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.596057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888269433147480:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.596089Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.596142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888269433147485:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.597097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:59.599863Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888269433147487:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:21:59.682192Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888269433147538:3401] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:59.816855Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888269433147547:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:59.816967Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmRmMWJkOC04ZWFiMmJjMy1jMWI2MjYwYS01M2ZiYzNhNg==, ActorId: [1:7509888269433146787:2399], ActorState: ExecuteState, TraceId: 01jwea5j1bf7qnehdr2h2nhqcz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:21:59.818344Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7FD95B28DAC2 14. ??:0: ?? @ 0x7FD95B31F84F >> KqpQueryPerf::UpdateOn+QueryService-UseSink >> KqpQueryPerf::Replace+QueryService+UseSink >> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] >> KqpQueryPerf::Delete-QueryService+UseSink >> KqpQueryPerf::Insert-QueryService-UseSink >> KqpQueryPerf::RangeRead-QueryService >> KqpQueryPerf::IndexInsert-QueryService+UseSink >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches [GOOD] >> KqpQueryPerf::Delete+QueryService+UseSink >> KqpQueryPerf::IndexReplace+QueryService+UseSink >> KqpQueryPerf::IndexUpsert-QueryService+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService+UseSink Test command err: Trying to start YDB, gRPC: 17362, MsgBus: 23468 2025-05-29T15:21:59.379805Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888270069628192:2214];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010a2/r3tmp/tmp57m8WR/pdisk_1.dat 2025-05-29T15:21:59.418848Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:59.460002Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:59.463431Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888270069627998:2079] 1748532119372911 != 1748532119372914 TServer::EnableGrpc on GrpcPort 17362, node 1 2025-05-29T15:21:59.488230Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:59.488243Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:59.488245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:59.488290Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23468 2025-05-29T15:21:59.521202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:59.521240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:59.523456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23468 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:59.609950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.613102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:59.620736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.702990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.728276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.745647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.869913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888270069629639:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.869954Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.912365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.924894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.938616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.954635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.016188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.070659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.085168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.104154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888274364597593:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.104183Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.104220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888274364597598:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.105063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:00.112745Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888274364597600:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:00.166091Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888274364597651:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:00.272717Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888274364597667:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:00.272877Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjQxNWRiMTctYWVkMzQyYjQtZjc4YTllMGQtZTA2N2IzZWU=, ActorId: [1:7509888270069629636:2401], ActorState: ExecuteState, TraceId: 01jwea5jh7bbnr510j1t7fcvcr, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:00.273826Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F51D7AFBAC2 14. ??:0: ?? @ 0x7F51D7B8D84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService-UseSink Test command err: Trying to start YDB, gRPC: 29004, MsgBus: 5549 2025-05-29T15:21:59.482964Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888271154335234:2142];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:59.518777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010f9/r3tmp/tmp7yNHgX/pdisk_1.dat 2025-05-29T15:21:59.602840Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888271154335119:2079] 1748532119478569 != 1748532119478572 2025-05-29T15:21:59.606022Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29004, node 1 2025-05-29T15:21:59.624152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:59.624165Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:59.624168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:59.624212Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5549 2025-05-29T15:21:59.667828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:59.667868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:59.668674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5549 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:59.697077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.699819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:59.711583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.736901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:21:59.761927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.775878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.924967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888271154336751:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.925006Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.978053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.992772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.003249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.016133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.029344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.047924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.066474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.083649Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888275449304701:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.083674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.083784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888275449304706:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.084495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:00.091098Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888275449304708:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:00.163344Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888275449304759:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:00.299312Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888275449304768:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:00.301741Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzM4NTg4NDktM2FhMGEwOS04Y2I0MDAwLTExOTU5Mjk5, ActorId: [1:7509888271154336748:2401], ActorState: ExecuteState, TraceId: 01jwea5jgkawgazensdchtnj80, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:00.303091Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F41C739FAC2 14. ??:0: ?? @ 0x7F41C743184F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService-UseSink Test command err: Trying to start YDB, gRPC: 31448, MsgBus: 13378 2025-05-29T15:21:59.475826Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888269633124811:2265];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:59.475876Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010e3/r3tmp/tmpoCnXLY/pdisk_1.dat 2025-05-29T15:21:59.553988Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888269633124585:2079] 1748532119474029 != 1748532119474032 2025-05-29T15:21:59.559962Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31448, node 1 2025-05-29T15:21:59.579869Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:59.579885Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:59.579887Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:59.579927Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:59.614278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:59.614308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:13378 2025-05-29T15:21:59.614910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13378 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:59.710552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.713877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:59.725513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.792281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.817496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.833219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.974995Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888269633126223:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.975025Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.026829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.044085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.058940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.072654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.084749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.099131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.156673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.174442Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888273928094178:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.174472Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.174594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888273928094183:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.175548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:00.182290Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888273928094185:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:00.246342Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888273928094236:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:00.345140Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888273928094252:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:00.345248Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjM1YjU0OTMtNjczM2RlYTQtNjNkNTc5OTItMjBmMjY0Zg==, ActorId: [1:7509888269633126220:2401], ActorState: ExecuteState, TraceId: 01jwea5jke8vpjdq3tpxjfw2dq, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:00.345925Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F77B642CAC2 14. ??:0: ?? @ 0x7F77B64BE84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] Test command err: 2025-05-29T15:21:37.778466Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:21:37.783267Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:21:37.783349Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:21:37.784241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:37.784301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.784348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.784371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.784400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:37.784424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:37.784445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:37.784467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:37.784489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:37.784510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:37.784535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.784563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:37.792597Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:21:37.792666Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:21:37.792679Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:21:37.792717Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.792763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:37.792779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:37.792786Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:21:37.792797Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:21:37.792809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:37.792819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:37.792824Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:21:37.792848Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.792858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:37.792866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:37.792872Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:21:37.792895Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:21:37.792904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:37.792914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:37.792919Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:21:37.792935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:37.792944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:37.792949Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:21:37.792961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:37.792972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:37.792977Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:21:37.793008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:37.793019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.793025Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:21:37.793050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.793060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.793065Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:21:37.793082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.793091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.793097Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.793108Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:37.793117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:37.793126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:37.793132Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:21:37.793225Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=13; 2025-05-29T15:21:37.793238Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=8; 2025-05-29T15:21:37.793249Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute ... 
UMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=11414; 2025-05-29T15:22:03.396448Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=2; 2025-05-29T15:22:03.397064Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=608; 2025-05-29T15:22:03.397072Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=12123; 2025-05-29T15:22:03.397103Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=22; 2025-05-29T15:22:03.397275Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=19; 2025-05-29T15:22:03.397288Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=179; 2025-05-29T15:22:03.397310Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=14; 2025-05-29T15:22:03.397327Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=11; 2025-05-29T15:22:03.397400Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=66; 2025-05-29T15:22:03.397451Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=44; 2025-05-29T15:22:03.403541Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=6074; 2025-05-29T15:22:03.409982Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=6407; 2025-05-29T15:22:03.410017Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=10; 2025-05-29T15:22:03.410027Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=4; 2025-05-29T15:22:03.410034Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2025-05-29T15:22:03.410041Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-05-29T15:22:03.410049Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-05-29T15:22:03.410066Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=10; 2025-05-29T15:22:03.410077Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-05-29T15:22:03.410095Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=12; 2025-05-29T15:22:03.410103Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-05-29T15:22:03.410115Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=7; 2025-05-29T15:22:03.410133Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=12; 2025-05-29T15:22:03.410151Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=11; 2025-05-29T15:22:03.410157Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=26393; 2025-05-29T15:22:03.410209Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7217696;raw_bytes=7088450;count=1;records=75200} inactive {blob_bytes=112697216;raw_bytes=109567450;count=218;records=1277000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-05-29T15:22:03.410255Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-05-29T15:22:03.410265Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-05-29T15:22:03.410284Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-05-29T15:22:03.410292Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];process=SwitchToWork;fline=column_engine_logs.cpp:493;event=OnTieringModified;new_count_tierings=0; 2025-05-29T15:22:03.410336Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-05-29T15:22:03.410348Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: 
tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:22:03.410369Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=10; 2025-05-29T15:22:03.410386Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:288;event=StartCleanupStop;snapshot=plan_step=1748531802442;tx_id=18446744073709551615;;current_snapshot_ts=1748532099332; 2025-05-29T15:22:03.410395Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=10;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:22:03.410409Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:22:03.410415Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:22:03.410440Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-05-29T15:22:03.410913Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:22:03.410937Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: self_id=[1:7738:9329];tablet_id=9437184;parent=[1:7621:9220];fline=manager.cpp:85;event=ask_data;request=request_id=137;1={portions_count=219};; 2025-05-29T15:22:03.411336Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-05-29T15:22:03.411406Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:243;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-05-29T15:22:03.411411Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 
2025-05-29T15:22:03.411415Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-05-29T15:22:03.411422Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-05-29T15:22:03.411431Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:22:03.411440Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=10; 2025-05-29T15:22:03.411449Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:288;event=StartCleanupStop;snapshot=plan_step=1748531802442;tx_id=18446744073709551615;;current_snapshot_ts=1748532099332; 2025-05-29T15:22:03.411456Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=10;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:22:03.411464Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:22:03.411469Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:22:03.411483Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:164;event=skip_actualization;waiting=1.000000s; 2025-05-29T15:22:03.411491Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes;
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD]
Test command err: 2025-05-29T15:21:37.835698Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:21:37.840075Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:21:37.840155Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:21:37.840955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-05-29T15:21:37.841006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.841047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.841068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.841090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:37.841115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:37.841135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:37.841156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:37.841177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:37.841196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:37.841219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.841245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:37.851342Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:21:37.851408Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:21:37.851424Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:21:37.851464Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.851507Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:37.851521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:37.851527Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:21:37.851538Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:21:37.851549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:37.851557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:37.851562Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:21:37.851597Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.851606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:37.851614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:37.851619Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:21:37.851632Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:21:37.851639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:37.851648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:37.851653Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:21:37.851668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:37.851676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2025-05-29T15:21:37.851681Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:21:37.851691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:37.851700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:37.851705Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:21:37.851734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:37.851743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.851748Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:21:37.851772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.851781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.851786Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:21:37.851800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.851808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.851813Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.851823Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:37.851832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:37.851840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:37.851846Z node 1 
:TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:21:37.851928Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=12; 2025-05-29T15:21:37.851939Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=7; 2025-05-29T15:21:37.851949Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute ... _COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=9327; 2025-05-29T15:22:03.403233Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-05-29T15:22:03.403609Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=370; 2025-05-29T15:22:03.403617Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9789; 2025-05-29T15:22:03.403639Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=15; 2025-05-29T15:22:03.403755Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=13; 2025-05-29T15:22:03.403764Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=122; 2025-05-29T15:22:03.403777Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=8; 2025-05-29T15:22:03.403787Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=7; 2025-05-29T15:22:03.403834Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=39; 2025-05-29T15:22:03.403858Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=20; 2025-05-29T15:22:03.409522Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5644; 2025-05-29T15:22:03.414835Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5238; 
2025-05-29T15:22:03.414880Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=11; 2025-05-29T15:22:03.414892Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=4; 2025-05-29T15:22:03.414901Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2025-05-29T15:22:03.414908Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-05-29T15:22:03.414915Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-05-29T15:22:03.414934Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=12; 2025-05-29T15:22:03.414945Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=2; 2025-05-29T15:22:03.414962Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=12; 2025-05-29T15:22:03.414970Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-05-29T15:22:03.414983Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=8; 2025-05-29T15:22:03.415002Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=12; 2025-05-29T15:22:03.415021Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=12; 2025-05-29T15:22:03.415026Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=22346; 2025-05-29T15:22:03.415082Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7217696;raw_bytes=7088450;count=1;records=75200} inactive {blob_bytes=112697216;raw_bytes=109567450;count=218;records=1277000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-05-29T15:22:03.415121Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-05-29T15:22:03.415131Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 
2025-05-29T15:22:03.415147Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-05-29T15:22:03.415155Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];process=SwitchToWork;fline=column_engine_logs.cpp:493;event=OnTieringModified;new_count_tierings=0; 2025-05-29T15:22:03.415200Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-05-29T15:22:03.415212Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:22:03.415232Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=10; 2025-05-29T15:22:03.415252Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:288;event=StartCleanupStop;snapshot=plan_step=1748531802498;tx_id=18446744073709551615;;current_snapshot_ts=1748532099388; 2025-05-29T15:22:03.415261Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=10;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:22:03.415272Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:22:03.415277Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:22:03.415302Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-05-29T15:22:03.415831Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:22:03.415866Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: self_id=[1:7738:9329];tablet_id=9437184;parent=[1:7621:9220];fline=manager.cpp:85;event=ask_data;request=request_id=137;1={portions_count=219};; 2025-05-29T15:22:03.416443Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-05-29T15:22:03.416512Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:243;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-05-29T15:22:03.416518Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 
2025-05-29T15:22:03.416523Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-05-29T15:22:03.416530Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-05-29T15:22:03.416540Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:22:03.416550Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=10; 2025-05-29T15:22:03.416560Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:288;event=StartCleanupStop;snapshot=plan_step=1748531802498;tx_id=18446744073709551615;;current_snapshot_ts=1748532099388; 2025-05-29T15:22:03.416568Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=10;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:22:03.416576Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:22:03.416580Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:22:03.416595Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:164;event=skip_actualization;waiting=1.000000s; 2025-05-29T15:22:03.416603Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7621:9220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes;
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches [GOOD]
Test command err: 2025-05-29T15:21:56.376266Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:21:56.376293Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:21:56.380270Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:180:2194] 2025-05-29T15:21:56.380573Z node 1 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed.
Value 2025-05-29T15:21:56.000000Z 2025-05-29T15:21:56.380584Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:180:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\240\364\212\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\240\364\212\345\3612" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-05-29T15:21:57.117998Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:21:57.118035Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:21:57.126655Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:180:2194] 2025-05-29T15:21:57.126995Z node 2 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-05-29T15:21:57.000000Z 2025-05-29T15:21:57.127011Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:180:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\210\374\212\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\001\030\001\"\tsession-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\210\374\212\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\001\030\001\"\tsession-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\210\374\212\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\003\020\001\030\001\"\tsession-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\003\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\210\374\212\345\3612" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\001\020\001\030\001\"\tsession-2(\0000\003@\001" StorageChannel: 
INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\006\020\001\030\001\"\tsession-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\006\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } 2025-05-29T15:21:57.851443Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:21:57.851481Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:21:58.066924Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:21:58.066956Z node 4 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:21:58.071823Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-05-29T15:21:58.071900Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:21:58.071981Z node 4 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:179:2193] 2025-05-29T15:21:58.072190Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-05-29T15:21:58.072248Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-05-29T15:21:58.072272Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request Got KV request 2025-05-29T15:21:58.072294Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-05-29T15:21:58.072338Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:621: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-05-29T15:21:58.072351Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-05-29T15:21:58.072356Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:21:58.072362Z node 4 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-05-29T15:21:58.000000Z 2025-05-29T15:21:58.072366Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:55: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 
2025-05-29T15:21:58.072375Z node 4 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:179:2193] 2025-05-29T15:21:58.072386Z node 4 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-05-29T15:21:58.072396Z node 4 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create immediate tx with id = 4 and act no: 5 2025-05-29T15:21:59.429133Z node 4 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-05-29T15:21:59.429174Z node 4 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-05-29T15:22:00.745106Z node 4 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:00.745151Z node 4 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:00.745161Z node 4 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Wait batch completion Got batch complete: 2 Wait batch completion Wait kv request 2025-05-29T15:22:00.987493Z node 4 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 2 2025-05-29T15:22:00.987516Z node 4 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2 2025-05-29T15:22:00.987529Z node 4 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:00.987535Z node 4 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-05-29T15:22:00.987544Z node 4 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-05-29T15:22:00.987547Z node 4 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:00.987550Z node 4 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-05-29T15:22:00.989921Z node 4 :PERSQUEUE DEBUG: partition.cpp:3322: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-05-29T15:22:00.989989Z node 4 :PERSQUEUE DEBUG: 
partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:00.989996Z node 4 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:00.990001Z node 4 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:00.990004Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc2 2025-05-29T15:22:00.990006Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-05-29T15:22:00.990009Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:00.990011Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197 ... 05-29T15:22:01.167424Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-05-29T15:22:01.167458Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-05-29T15:22:01.167477Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-05-29T15:22:01.167499Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-05-29T15:22:01.167538Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:621: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-05-29T15:22:01.167547Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-05-29T15:22:01.167551Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:22:01.167557Z node 5 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-05-29T15:22:01.000000Z 2025-05-29T15:22:01.167562Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:55: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-05-29T15:22:01.167568Z node 5 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:177:2191] 2025-05-29T15:22:01.167578Z node 5 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-05-29T15:22:01.167585Z node 5 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:22:01.514997Z node 5 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src1|8617a0b8-1b7e8201-1345db84-ea07b42_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-05-29T15:22:01.515063Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:35: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create distr tx with id = 4 and act no: 5 2025-05-29T15:22:02.554716Z node 5 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-05-29T15:22:02.554776Z node 5 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-05-29T15:22:02.554787Z node 5 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-05-29T15:22:03.865476Z node 5 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:03.865520Z node 5 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:03.865529Z node 5 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Wait batch completion 2025-05-29T15:22:03.865575Z node 5 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-05-29T15:22:03.865583Z node 5 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-05-29T15:22:03.865594Z node 5 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:03.865602Z node 5 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 Got batch complete: 2 Wait batch completion Wait for no tx committed 2025-05-29T15:22:04.095552Z node 5 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 4 2025-05-29T15:22:04.095575Z node 5 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 4 2025-05-29T15:22:04.095584Z node 5 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:04.095594Z node 5 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 2025-05-29T15:22:04.095638Z node 5 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:04.095643Z node 5 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: 
StateIdle] --- delete ---------------- 2025-05-29T15:22:04.095648Z node 5 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:04.095653Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-05-29T15:22:04.095657Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:04.095661Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:04.095665Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-05-29T15:22:04.095670Z node 5 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:04.095675Z node 5 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait kv request Wait tx committed for tx 0 2025-05-29T15:22:04.322396Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 2025-05-29T15:22:04.322450Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:1126: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 1. Committed seqNo: 6. Writing seqNo: (NULL). EndOffset: 1. CurOffset: 1. Offset: 60 2025-05-29T15:22:04.322479Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 7 partNo 0 2025-05-29T15:22:04.322528Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 7 partNo 0 FormedBlobsCount 0 NewHead: Offset 70 PartNo 0 PackedSize 84 count 1 nextOffset 71 batches 1 2025-05-29T15:22:04.322537Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:1126: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 7. Committed seqNo: 6. Writing seqNo: 7. EndOffset: 1. CurOffset: 71. 
Offset: 80 2025-05-29T15:22:04.322595Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:1623: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 70,1 HeadOffset 1 endOffset 1 curOffset 71 d0000000000_00000000000000000070_00000_0000000001_00000| size 70 WTime 12140 2025-05-29T15:22:04.322616Z node 5 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:04.322622Z node 5 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:04.322627Z node 5 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-05-29T15:22:04.322633Z node 5 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:04.322638Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-05-29T15:22:04.322642Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000070_00000_0000000001_00000| 2025-05-29T15:22:04.322646Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:04.322651Z node 5 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:04.322656Z node 5 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 3 Got KV request Got KV request Wait tx committed for tx 4 Wait batch completion Wait kv request 2025-05-29T15:22:04.343063Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 17 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:04.343109Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:04.343134Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 1 is already written 2025-05-29T15:22:04.343149Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:04.343178Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 70 is stored on disk 2025-05-29T15:22:04.343185Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-05-29T15:22:04.343196Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 71 is already written >> KqpQueryPerf::KvRead+QueryService >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService >> TSubscriberCombinationsTest::CombinationsRootDomain [GOOD] >> TSubscriberCombinationsTest::CombinationsMigratedPath >> TComputeScheduler::TTotalLimits [GOOD] >> KqpQueryPerf::Update-QueryService+UseSink >> KqpQueryPerf::RangeLimitRead+QueryService >> TKqpScanData::UnboxedValueSize [GOOD] >> KqpOlapAggregations::AggregationAndFilterPushdownOnDiffCols [GOOD] >> TPQTest::TestReserveBytes [GOOD] >> TPQTest::TestSetClientOffset >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [GOOD] >> TPartitionTests::ShadowPartitionCountersFirstClass [GOOD] >> KqpOlapSysView::StatsSysViewBytesDictActualization [GOOD] |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::UnboxedValueSize [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::TTotalLimits [GOOD] Test command err: 1610 1600 1610 1600 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> CompressExecutor::TestExecutorMemUsage [FAIL] Test command err: 2025-05-29T15:21:51.250841Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1748532111250822 2025-05-29T15:21:51.385342Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888236590406216:2218];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000c9b/r3tmp/tmpWROJHi/pdisk_1.dat 2025-05-29T15:21:51.427007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:51.427122Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:21:51.439219Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:21:51.451922Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:21:51.466632Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12129, node 1 2025-05-29T15:21:51.484686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:51.484720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:51.487026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:51.492756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/000c9b/r3tmp/yandexackRml.tmp 2025-05-29T15:21:51.492768Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/000c9b/r3tmp/yandexackRml.tmp 2025-05-29T15:21:51.492824Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/000c9b/r3tmp/yandexackRml.tmp 2025-05-29T15:21:51.492870Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:51.498471Z INFO: TTestServer started on Port 24044 GrpcPort 12129 TClient is connected to server localhost:24044 PQClient connected to localhost:12129 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:21:51.525577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:51.525599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:51.526433Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:21:51.526725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:51.528824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:21:51.550980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976720659, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:51.816862Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888236749566906:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.816888Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888236749566878:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.816913Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:51.818388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-05-29T15:21:51.829482Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888236749566912:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-05-29T15:21:51.902058Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888236590407038:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:51.902947Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmNiNWNhZTYtODNjNjM4OGItOGJhYzkwOTItOGM4YmNmMTA=, ActorId: [1:7509888236590407005:2333], ActorState: ExecuteState, TraceId: 01jwea5af0ckjpnfag3rc1pm6t, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:51.905305Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:51.905530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2025-05-29T15:21:51.908540Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888236749566940:2126] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:51.915727Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888236749566947:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:51.916370Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=YWE3ZjJlYWEtMzg0YTM4MjItMjA2ODdmMDYtMzg5OTRjNQ==, ActorId: [2:7509888236749566873:2305], ActorState: ExecuteState, TraceId: 01jwea5ae6dmzpte6vx87hr8yv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:21:51.916498Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:21:52.016571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:52.099102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:12129", true, true, 1000); 2025-05-29T15:21:52.182591Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888240885374702:2373], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:52.183899Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTMzZDY3ZjItYjU3MTk1ZDEtMWI2ODQxYzktZWZlNTA2ODg=, ActorId: [1:7509888240885374699:2371], ActorState: ExecuteState, TraceId: 01jwea5as6157gvff944wp8n1p, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13A5CA0C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C148C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NY ... net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/000c9b/r3tmp/yandex8iok6n.tmp 2025-05-29T15:22:00.619556Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:00.625840Z INFO: TTestServer started on Port 6711 GrpcPort 4793 TClient is connected to server localhost:6711 PQClient connected to localhost:4793 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:00.688200Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:00.688232Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:00.689813Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:00.693019Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:00.693044Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:00.695095Z node 13 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 14 Cookie 14 2025-05-29T15:22:00.695398Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.695602Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 2025-05-29T15:22:01.020922Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7509888277897646098:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:01.020971Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:01.021113Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7509888277897646148:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:01.022774Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-05-29T15:22:01.037277Z node 13 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [13:7509888277692293152:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:01.037840Z node 13 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=13&id=ZDdhYTgzMC1kNTNkNjk4OC0xZWU0NjEwYy0xOWE5YjI0ZQ==, ActorId: [13:7509888277692293096:2331], ActorState: ExecuteState, TraceId: 01jwea5ke80gkq2y5v3c6es5w2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:01.037966Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:01.038406Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7509888277897646150:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-05-29T15:22:01.039214Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:22:01.104421Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:01.143603Z node 14 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [14:7509888277897646241:2170] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:01.149361Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [14:7509888277897646248:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:01.149592Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=14&id=ZjI0YTFjOTQtNzBmN2EyZmQtMjViOTIwMGItZGFiOWE2Y2U=, ActorId: [14:7509888277897646096:2305], ActorState: ExecuteState, TraceId: 01jwea5kdv4yn09v3vr8ttbcd1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:01.149778Z node 14 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:01.168656Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:4793", true, true, 1000); 2025-05-29T15:22:01.277773Z node 13 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [13:7509888277692293547:2373], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:01.278464Z node 13 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=13&id=NjllMzBiNS1iZDMwZjQ1MC1mODRkNjZhMS01NGQ0ZmNmYQ==, ActorId: [13:7509888277692293544:2371], ActorState: ExecuteState, TraceId: 01jwea5kng0qxdzayg9e8f5stv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13A5CA0C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C148C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x138B3A44) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x138B29A8) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x138B1BF2) NPersQueue::SDKTestSetup::Start(bool, bool)+1450 (0x138A75DA) NPersQueue::SDKTestSetup::SDKTestSetup(TBasicString> const&, bool, TVector> const&, NActors::NLog::EPriority, unsigned int, unsigned long)+675 (0x138A50E3) void std::__y1::allocator::construct[abi:fe200000](NYdb::NPersQueue::NTests::TPersQueueYdbSdkTestSetup*, char const*&)+72 (0x139432F8) NYdb::NPersQueue::NTests::NTestSuiteCompressExecutor::TTestCaseTestExecutorMemUsage::Execute_(NUnitTest::TTestContext&)+195 (0x139459A3) NYdb::NPersQueue::NTests::NTestSuiteCompressExecutor::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1394ACA7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C1677E) NYdb::NPersQueue::NTests::NTestSuiteCompressExecutor::TCurrentTest::Execute()+419 (0x1394A663) NUnitTest::TTestFactory::Execute()+803 (0x13C16EF3) NUnitTest::RunMain(int, char**)+3021 (0x13C2883D) ??+0 (0x7F88D8917D90) __libc_start_main+128 (0x7F88D8917E40) _start+41 (0x129B5029) |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::AggregationAndFilterPushdownOnDiffCols [GOOD] Test command err: Trying to start YDB, gRPC: 8296, MsgBus: 2342 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026b5/r3tmp/tmp2vbxlc/pdisk_1.dat 2025-05-29T15:21:34.380128Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:21:34.459378Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888160893659998:2079] 1748532094234019 != 1748532094234022 2025-05-29T15:21:34.475143Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:34.475362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:34.475373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:34.483884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8296, node 1 2025-05-29T15:21:34.503090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:34.503099Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:34.503101Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:34.503146Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2342 TClient is connected to server localhost:2342 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.698447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.706997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.712315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.734297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160893660701:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.734455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160893660701:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.734501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160893660701:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.734524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160893660701:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.734544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160893660701:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.734566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888160893660701:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.734584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160893660701:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.734604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160893660701:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.734627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160893660701:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.734645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160893660701:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.734663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160893660701:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.734693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888160893660701:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.739121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160893660702:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.739137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160893660702:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.739174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160893660702:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.739191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160893660702:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.739209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160893660702:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.739228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888160893660702:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.739247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160893660702:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.739266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160893660702:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.739293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160893660702:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.739309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160893660702:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.739325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160893660702:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.739343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888160893660702:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.743408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160893660703:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.743424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160893660703:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.743471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160893660703:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.743492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160893660703:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.743517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160893660703:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.743538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888160893660703:2316];ta ... 
ks { TaskId: 1 CpuTimeUs: 161 FinishTimeMs: 1748532118770 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 8 BuildCpuTimeUs: 153 HostName: "ghrun-lxxdcki4qu" NodeId: 3 CreateTimeMs: 1748532118769 CurrentWaitOutputTimeUs: 4501965 UpdateTimeMs: 1748532123271 } MaxMemoryUsage: 1048576 2025-05-29T15:22:03.272074Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:434: ActorId: [3:1919:2934] TxId: 281474976715663. Ctx: { TraceId: 01jwea55p2d7mdpmgz6erh7brg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1922:3042], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 1101 Tasks { TaskId: 1 CpuTimeUs: 161 FinishTimeMs: 1748532118770 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 8 BuildCpuTimeUs: 153 HostName: "ghrun-lxxdcki4qu" NodeId: 3 CreateTimeMs: 1748532118769 CurrentWaitOutputTimeUs: 4501965 UpdateTimeMs: 1748532123271 } MaxMemoryUsage: 1048576 } 2025-05-29T15:22:03.272094Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:645: ActorId: [3:1919:2934] TxId: 281474976715663. Ctx: { TraceId: 01jwea55p2d7mdpmgz6erh7brg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [3:1922:3042], 2025-05-29T15:22:03.576045Z node 4 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[4:1279:2390];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-05-29T15:22:03.576095Z node 4 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[4:1279:2390];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:22:03.900441Z node 4 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[4:1279:2390];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:22:03.967655Z node 3 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [3:1922:3042], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=. CustomerSuppliedId : . TraceId : 01jwea55p2d7mdpmgz6erh7brg. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-05-29T15:22:03.967688Z node 3 :KQP_COMPUTE DEBUG: kqp_compute_scheduler.h:167: SelfId: [3:1922:3042], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=. CustomerSuppliedId : . TraceId : 01jwea55p2d7mdpmgz6erh7brg. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-05-29T15:22:03.967759Z node 3 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2021: SelfId: [3:1922:3042], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=. CustomerSuppliedId : . TraceId : 01jwea55p2d7mdpmgz6erh7brg. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
Send stats to executor actor [3:1919:2934] TaskId: 1 Stats: CpuTimeUs: 1220 Tasks { TaskId: 1 CpuTimeUs: 161 FinishTimeMs: 1748532118770 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 8 BuildCpuTimeUs: 153 HostName: "ghrun-lxxdcki4qu" NodeId: 3 CreateTimeMs: 1748532118769 CurrentWaitOutputTimeUs: 5197700 UpdateTimeMs: 1748532123967 } MaxMemoryUsage: 1048576 2025-05-29T15:22:03.967812Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:434: ActorId: [3:1919:2934] TxId: 281474976715663. Ctx: { TraceId: 01jwea55p2d7mdpmgz6erh7brg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1922:3042], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 1220 Tasks { TaskId: 1 CpuTimeUs: 161 FinishTimeMs: 1748532118770 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 8 BuildCpuTimeUs: 153 HostName: "ghrun-lxxdcki4qu" NodeId: 3 CreateTimeMs: 1748532118769 CurrentWaitOutputTimeUs: 5197700 UpdateTimeMs: 1748532123967 } MaxMemoryUsage: 1048576 } 2025-05-29T15:22:03.967834Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:645: ActorId: [3:1919:2934] TxId: 281474976715663. Ctx: { TraceId: 01jwea55p2d7mdpmgz6erh7brg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [3:1922:3042], 2025-05-29T15:22:04.288343Z node 4 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[4:1279:2390];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-05-29T15:22:04.288387Z node 4 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[4:1279:2390];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:22:04.470980Z node 4 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[4:1279:2390];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:22:04.633528Z node 3 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [3:1922:3042], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=. CustomerSuppliedId : . TraceId : 01jwea55p2d7mdpmgz6erh7brg. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-05-29T15:22:04.633580Z node 3 :KQP_COMPUTE DEBUG: kqp_compute_scheduler.h:167: SelfId: [3:1922:3042], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=. CustomerSuppliedId : . TraceId : 01jwea55p2d7mdpmgz6erh7brg. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-05-29T15:22:04.633669Z node 3 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2021: SelfId: [3:1922:3042], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=. CustomerSuppliedId : . TraceId : 01jwea55p2d7mdpmgz6erh7brg. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
Send stats to executor actor [3:1919:2934] TaskId: 1 Stats: CpuTimeUs: 1363 Tasks { TaskId: 1 CpuTimeUs: 161 FinishTimeMs: 1748532118770 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 8 BuildCpuTimeUs: 153 HostName: "ghrun-lxxdcki4qu" NodeId: 3 CreateTimeMs: 1748532118769 CurrentWaitOutputTimeUs: 5863595 UpdateTimeMs: 1748532124633 } MaxMemoryUsage: 1048576 2025-05-29T15:22:04.633750Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:434: ActorId: [3:1919:2934] TxId: 281474976715663. Ctx: { TraceId: 01jwea55p2d7mdpmgz6erh7brg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1922:3042], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 1363 Tasks { TaskId: 1 CpuTimeUs: 161 FinishTimeMs: 1748532118770 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 8 BuildCpuTimeUs: 153 HostName: "ghrun-lxxdcki4qu" NodeId: 3 CreateTimeMs: 1748532118769 CurrentWaitOutputTimeUs: 5863595 UpdateTimeMs: 1748532124633 } MaxMemoryUsage: 1048576 } 2025-05-29T15:22:04.633777Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:645: ActorId: [3:1919:2934] TxId: 281474976715663. Ctx: { TraceId: 01jwea55p2d7mdpmgz6erh7brg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [3:1922:3042], 2025-05-29T15:22:04.925692Z node 4 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[4:1279:2390];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-05-29T15:22:04.925736Z node 4 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[4:1279:2390];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:22:05.114756Z node 4 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[4:1279:2390];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-29T15:22:05.209292Z node 3 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [3:1922:3042], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=. CustomerSuppliedId : . TraceId : 01jwea55p2d7mdpmgz6erh7brg. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2025-05-29T15:22:05.209317Z node 3 :KQP_COMPUTE DEBUG: kqp_compute_scheduler.h:167: SelfId: [3:1922:3042], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=. CustomerSuppliedId : . TraceId : 01jwea55p2d7mdpmgz6erh7brg. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2025-05-29T15:22:05.209366Z node 3 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2021: SelfId: [3:1922:3042], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=. CustomerSuppliedId : . TraceId : 01jwea55p2d7mdpmgz6erh7brg. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
Send stats to executor actor [3:1919:2934] TaskId: 1 Stats: CpuTimeUs: 1560 Tasks { TaskId: 1 CpuTimeUs: 161 FinishTimeMs: 1748532118770 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 8 BuildCpuTimeUs: 153 HostName: "ghrun-lxxdcki4qu" NodeId: 3 CreateTimeMs: 1748532118769 CurrentWaitOutputTimeUs: 6439322 UpdateTimeMs: 1748532125209 } MaxMemoryUsage: 1048576 2025-05-29T15:22:05.209409Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:434: ActorId: [3:1919:2934] TxId: 281474976715663. Ctx: { TraceId: 01jwea55p2d7mdpmgz6erh7brg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1922:3042], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 1560 Tasks { TaskId: 1 CpuTimeUs: 161 FinishTimeMs: 1748532118770 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 8 BuildCpuTimeUs: 153 HostName: "ghrun-lxxdcki4qu" NodeId: 3 CreateTimeMs: 1748532118769 CurrentWaitOutputTimeUs: 6439322 UpdateTimeMs: 1748532125209 } MaxMemoryUsage: 1048576 } 2025-05-29T15:22:05.209422Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:645: ActorId: [3:1919:2934] TxId: 281474976715663. Ctx: { TraceId: 01jwea55p2d7mdpmgz6erh7brg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTFhOTc1NGMtMTcxYmZkYjAtNjU4ZDE2MjUtZDYyZGFkMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [3:1922:3042], 2025-05-29T15:22:05.414687Z node 4 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[4:1279:2390];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-05-29T15:22:05.414729Z node 4 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=72075186224037888;parent=[4:1279:2390];fline=actor.cpp:33;event=skip_flush_writing; >> KqpQueryPerf::ComputeLength-QueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::ShadowPartitionCountersFirstClass [GOOD] Test command err: 2025-05-29T15:21:57.509706Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:21:57.509738Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:21:57.513842Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:180:2194] 2025-05-29T15:21:57.514179Z node 1 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-05-29T15:21:57.000000Z 2025-05-29T15:21:57.514190Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:180:2194] Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\210\374\212\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\210\374\212\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\210\374\212\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-05-29T15:21:57.983050Z node 1 :PERSQUEUE WARN: partition.cpp:3171: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic Root/PQ/rt3.dc1--account--topic partition 0 client client EndOffset 10 offset 13 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\210\374\212\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\n\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\n\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-05-29T15:21:58.127560Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:21:58.127589Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:21:58.132447Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:180:2194] 2025-05-29T15:21:58.132653Z node 2 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-05-29T15:21:58.000000Z 2025-05-29T15:21:58.132659Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:180:2194] 2025-05-29T15:21:58.646107Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:21:58.646139Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:21:58.650168Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:180:2194] 2025-05-29T15:21:58.650395Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:180:2194] 2025-05-29T15:21:58.650544Z node 3 :PERSQUEUE INFO: partition.cpp:3710: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2025-05-29T15:21:58.650568Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|9a56659c-a9bebef1-a3ab3720-baa15bfc_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send disk status response with cookie: 0 2025-05-29T15:21:59.005055Z node 3 :PERSQUEUE INFO: partition.cpp:3710: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2025-05-29T15:21:59.335903Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:21:59.335936Z node 4 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:21:59.339740Z node 4 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [4:180:2194] 2025-05-29T15:21:59.340646Z node 4 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:21:59.340658Z node 4 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [4:180:2194] 2025-05-29T15:21:59.721306Z node 4 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|5cd91a97-e0461f0f-91e02e7d-7a9dc070_0 generated for partition {0, {0, 1111}, 123} topic 'rt3.dc1--account--topic' owner owner1 Send write: 0 Send write: 1 Send write: 2 Send write: 3 Send write: 4 Send write: 5 Send write: 6 Send write: 7 Send write: 8 Send write: 9 Got write info response. 
Body keys: 0, head: 10, src id info: 1 2025-05-29T15:22:02.793299Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:02.793331Z node 5 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:02.796663Z node 5 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [5:178:2192] 2025-05-29T15:22:02.796919Z node 5 :PERSQUEUE INFO: partition_init.cpp:774: [Root/PQ/rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:22:02.796929Z node 5 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [5:178:2192] 2025-05-29T15:22:03.138789Z node 5 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|22ea78ac-e758f167-b52b3133-a95a5913_0 generated for partition {0, {0, 1111}, 123} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send write: 0 Send write: 1 Send write: 2 Send write: 3 Send write: 4 Send write: 5 Send write: 6 Send write: 7 Send write: 8 Send write: 9 Got write info response. Body keys: 0, head: 10, src id info: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewBytesDictActualization [GOOD] Test command err: Trying to start YDB, gRPC: 9005, MsgBus: 64279 2025-05-29T15:21:36.675037Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888170931862132:2178];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:36.675130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026ab/r3tmp/tmpBTpAAn/pdisk_1.dat 2025-05-29T15:21:36.765621Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:36.766799Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888170931861993:2079] 1748532096674161 != 1748532096674164 TServer::EnableGrpc on GrpcPort 9005, node 1 2025-05-29T15:21:36.779440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:36.783993Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:36.791056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:36.798954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:36.798967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:36.798969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:36.799010Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected 
to server localhost:64279 TClient is connected to server localhost:64279 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:36.882777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:36.886155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:36.896940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:36.919365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170931862698:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:36.919431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170931862698:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:36.919487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170931862698:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:36.919509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170931862698:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:36.919538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170931862698:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:36.919561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170931862698:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:36.919580Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170931862698:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:36.919607Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170931862698:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:36.919629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170931862698:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:36.919649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170931862698:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:36.919673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170931862698:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:36.919694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888170931862698:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:36.931845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888170931862699:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:36.931885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888170931862699:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:36.931976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888170931862699:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:36.932005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888170931862699:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:36.932040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888170931862699:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:36.932067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888170931862699:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:36.932094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;self_id=[1:7509888170931862699:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:36.932122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888170931862699:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:36.932152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888170931862699:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:36.932178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888170931862699:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:36.932206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888170931862699:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:36.932237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888170931862699:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:36.939258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888170931862716:2317];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:36.939287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888170931862716:2317];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:36.939351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888170931862716:2317];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:36.939372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888170931862716:2317];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:36.939393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888170931862716:2317];tablet_id=72075186224037889;process=TTxInitSchema::Execute;flin ... 
7889:1:3:5:300:5408:0] EntityType: COL BlobRangeSize: 5408 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 301 BlobId: [72075186224037889:1:3:5:301:5400:0] EntityType: COL BlobRangeSize: 5400 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 302 BlobId: [72075186224037889:1:3:5:302:5496:0] EntityType: COL BlobRangeSize: 5496 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 303 BlobId: [72075186224037889:1:3:5:303:5496:0] EntityType: COL BlobRangeSize: 5496 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 304 BlobId: [72075186224037889:1:3:5:304:5384:0] EntityType: COL BlobRangeSize: 5384 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 305 BlobId: [72075186224037889:1:3:5:305:5448:0] EntityType: COL BlobRangeSize: 5448 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 306 BlobId: [72075186224037889:1:3:5:306:5408:0] EntityType: COL BlobRangeSize: 5408 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 307 BlobId: [72075186224037889:1:3:5:307:5504:0] EntityType: COL BlobRangeSize: 5504 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 308 BlobId: [72075186224037889:1:3:5:308:5408:0] EntityType: COL BlobRangeSize: 5408 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 309 BlobId: [72075186224037889:1:3:5:309:5416:0] EntityType: COL BlobRangeSize: 5416 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 310 BlobId: [72075186224037889:1:3:5:310:5464:0] EntityType: COL BlobRangeSize: 5464 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 311 BlobId: [72075186224037889:1:3:5:311:5416:0] EntityType: COL BlobRangeSize: 5416 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 312 BlobId: [72075186224037889:1:3:5:312:5464:0] EntityType: COL BlobRangeSize: 5464 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 313 
BlobId: [72075186224037889:1:3:5:313:5376:0] EntityType: COL BlobRangeSize: 5376 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 314 BlobId: [72075186224037889:1:3:5:314:5440:0] EntityType: COL BlobRangeSize: 5440 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 315 BlobId: [72075186224037889:1:3:5:315:5464:0] EntityType: COL BlobRangeSize: 5464 PathId: 3 Rows: 776 RawBytes: 43456 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 316 BlobId: [72075186224037889:1:3:5:316:5432:0] EntityType: COL BlobRangeSize: 5432 PathId: 3 Rows: 778 RawBytes: 43568 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 317 BlobId: [72075186224037889:1:3:5:317:5328:0] EntityType: COL BlobRangeSize: 5328 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 318 BlobId: [72075186224037889:1:3:5:318:5456:0] EntityType: COL BlobRangeSize: 5456 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 319 BlobId: [72075186224037889:1:3:5:319:5360:0] EntityType: COL BlobRangeSize: 5360 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 320 BlobId: [72075186224037889:1:3:5:320:5392:0] EntityType: COL BlobRangeSize: 5392 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 321 BlobId: [72075186224037889:1:3:5:321:5384:0] EntityType: COL BlobRangeSize: 5384 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 322 BlobId: [72075186224037889:1:3:5:322:5464:0] EntityType: COL BlobRangeSize: 5464 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 323 BlobId: [72075186224037889:1:3:5:323:5464:0] EntityType: COL BlobRangeSize: 5464 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 324 BlobId: [72075186224037889:1:3:5:324:5368:0] EntityType: COL BlobRangeSize: 5368 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 325 BlobId: [72075186224037889:1:3:5:325:5424:0] EntityType: COL BlobRangeSize: 5424 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 
InternalEntityId: 1 ChunkIdx: 326 BlobId: [72075186224037889:1:3:5:326:5440:0] EntityType: COL BlobRangeSize: 5440 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 327 BlobId: [72075186224037889:1:3:5:327:5416:0] EntityType: COL BlobRangeSize: 5416 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 328 BlobId: [72075186224037889:1:3:5:328:5424:0] EntityType: COL BlobRangeSize: 5424 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 329 BlobId: [72075186224037889:1:3:5:329:5448:0] EntityType: COL BlobRangeSize: 5448 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 330 BlobId: [72075186224037889:1:3:5:330:5416:0] EntityType: COL BlobRangeSize: 5416 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 331 BlobId: [72075186224037889:1:3:5:331:5472:0] EntityType: COL BlobRangeSize: 5472 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 332 BlobId: [72075186224037889:1:3:5:332:5432:0] EntityType: COL BlobRangeSize: 5432 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 333 BlobId: [72075186224037889:1:3:5:333:5376:0] EntityType: COL BlobRangeSize: 5376 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 334 BlobId: [72075186224037889:1:3:5:334:5360:0] EntityType: COL BlobRangeSize: 5360 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 335 BlobId: [72075186224037889:1:3:5:335:5424:0] EntityType: COL BlobRangeSize: 5424 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 336 BlobId: [72075186224037889:1:3:5:336:5472:0] EntityType: COL BlobRangeSize: 5472 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 337 BlobId: [72075186224037889:1:3:5:337:5264:0] EntityType: COL BlobRangeSize: 5264 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 338 BlobId: [72075186224037889:1:3:5:338:5424:0] EntityType: COL BlobRangeSize: 5424 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED 
EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 339 BlobId: [72075186224037889:1:3:5:339:5432:0] EntityType: COL BlobRangeSize: 5432 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 340 BlobId: [72075186224037889:1:3:5:340:5408:0] EntityType: COL BlobRangeSize: 5408 PathId: 3 Rows: 777 RawBytes: 43512 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 341 BlobId: [72075186224037889:1:3:5:341:5496:0] EntityType: COL BlobRangeSize: 5496 PathId: 3 Rows: 793 RawBytes: 44408 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 342 BlobId: [72075186224037889:1:3:5:342:5592:0] EntityType: COL BlobRangeSize: 5592 PathId: 3 Rows: 795 RawBytes: 44520 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 343 BlobId: [72075186224037889:1:3:5:343:5544:0] EntityType: COL BlobRangeSize: 5544 PathId: 3 5587984/44800000/1 >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead+QueryService Test command err: Trying to start YDB, gRPC: 15829, MsgBus: 23383 2025-05-29T15:22:01.808107Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888278211220327:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:01.808138Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010c2/r3tmp/tmpVctL5i/pdisk_1.dat 2025-05-29T15:22:01.879096Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888278211220303:2079] 1748532121807877 != 1748532121807880 2025-05-29T15:22:01.881853Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15829, node 1 2025-05-29T15:22:01.898190Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:01.898200Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:01.898202Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:01.898243Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:01.910064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:01.910092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:01.911194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23383 TClient is connected to server localhost:23383 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:01.977152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:01.991645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:02.011523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.078588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:02.091681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:02.223739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888282506189243:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.223770Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.272328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.294328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.304202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.323231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.333968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.347410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.362064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.379382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888282506189897:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.379406Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.379454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888282506189902:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.380330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:02.387326Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888282506189904:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:02.450248Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888282506189955:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:02.566001Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888282506189964:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:02.566106Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmY1MWZmYzEtMTI4OGQzYjEtNzkwZjAyMDUtYzQ3YWY3ZDQ=, ActorId: [1:7509888282506189216:2400], ActorState: ExecuteState, TraceId: 01jwea5mra6fkc7wsbtdkvb307, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:02.566671Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F9A617A8AC2 14. ??:0: ?? @ 0x7F9A6183A84F |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink Test command err: Trying to start YDB, gRPC: 23866, MsgBus: 18126 2025-05-29T15:22:02.111479Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888281932638178:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:02.119594Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010bb/r3tmp/tmp8VIdKZ/pdisk_1.dat 2025-05-29T15:22:02.186716Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888281932638018:2079] 1748532122108919 != 1748532122108922 TServer::EnableGrpc on GrpcPort 23866, node 1 2025-05-29T15:22:02.197373Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:22:02.197384Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:22:02.202937Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:02.202951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:02.202953Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:02.202998Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:02.204495Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:18126 2025-05-29T15:22:02.254823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:02.254857Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:02.256059Z 
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18126 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:02.290786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:02.312489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:02.320989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:02.392518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:02.422955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:02.435862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:02.587287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888281932639653:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.587311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.627722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.636702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.649059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.660894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.715716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.723476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.737837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.753354Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888281932640309:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.753386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.753389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888281932640314:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.754195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:02.758302Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888281932640316:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:02.836343Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888281932640367:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:02.954212Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888281932640383:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:02.954867Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjY4MmFhY2EtZGM2MGY5OTQtNzU3YWE2ZTUtMTY2ZDI4Nzc=, ActorId: [1:7509888281932639635:2401], ActorState: ExecuteState, TraceId: 01jwea5n40ed1ht3x9nhf027cy, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:02.960143Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7FF31771EAC2 14. ??:0: ?? @ 0x7FF3177B084F >> KqpWorkload::KV [FAIL] >> KqpQueryPerf::IdxLookupJoin-QueryService >> TComputeScheduler::QueryLimits [GOOD] >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink Test command err: Trying to start YDB, gRPC: 26895, MsgBus: 8797 2025-05-29T15:22:02.320226Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888281315558607:2200];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:02.320299Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010ae/r3tmp/tmplqO0kh/pdisk_1.dat 2025-05-29T15:22:02.403452Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26895, node 1 2025-05-29T15:22:02.405362Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888281315558447:2079] 1748532122319070 != 1748532122319073 2025-05-29T15:22:02.408923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:02.408932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:02.408934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:02.408982Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8797 TClient is connected to server localhost:8797 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:02.461184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:02.461203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:02.462185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:02.472354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:02.474869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:02.477722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:02.497736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:02.559114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:02.572551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:02.721775Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888281315560097:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.721801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.766791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.821853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.876717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.933621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.941618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.955023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.968608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.999665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888281315560756:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.999692Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.999754Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888281315560761:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.000584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:03.002485Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888281315560763:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:03.058180Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888285610528110:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:03.173892Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888285610528126:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:03.174026Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmU4ZGJmNWEtYjNmNmI5NGQtMTA3ODQ3M2EtZmIwM2VjOA==, ActorId: [1:7509888281315560094:2401], ActorState: ExecuteState, TraceId: 01jwea5nbqcsgffavn4yxa10gs, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:03.175189Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F3340E02AC2 14. ??:0: ?? @ 0x7F3340E9484F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService-UseSink Test command err: Trying to start YDB, gRPC: 9270, MsgBus: 26229 2025-05-29T15:22:02.109053Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888281083239724:2197];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:02.109112Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010b7/r3tmp/tmpq0JRjd/pdisk_1.dat 2025-05-29T15:22:02.192365Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888281083239566:2079] 1748532122107628 != 1748532122107631 2025-05-29T15:22:02.195261Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9270, node 1 2025-05-29T15:22:02.210911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:02.210923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:02.210925Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:02.210964Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26229 TClient is connected to server localhost:26229 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:02.260130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:02.260158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:02.261154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:02.274902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:02.291774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.330718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:02.364624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.378520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:02.586665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888281083241220:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.586692Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.633817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.641647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.654123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.669146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.682524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.701209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.709573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.728389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888281083241871:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.728419Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.728485Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888281083241876:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:02.729169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:02.737944Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888281083241878:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:02.803393Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888281083241929:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:02.896198Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888281083241945:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:02.896310Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzA5OTAwOTAtNzZjYjg1ZmUtMmMwNmMwMjctOWVmOGM1ZDY=, ActorId: [1:7509888281083241217:2401], ActorState: ExecuteState, TraceId: 01jwea5n37crd0z9rswwt6k958, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:02.896918Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F48F292BAC2 14. ??:0: ?? @ 0x7F48F29BD84F >> TKqpScanData::EmptyColumns [GOOD] >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] >> TComputeScheduler::ResourceWeight |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::QueryLimits [GOOD] Test command err: 800 800 800 800 >> TComputeScheduler::ResourceWeight [GOOD] >> TKqpScanData::FailOnUnsupportedPgType >> KqpQueryPerf::IndexInsert+QueryService+UseSink >> TKqpScanData::FailOnUnsupportedPgType [GOOD] >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumns [GOOD] |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink >> KqpQueryPerf::RangeRead+QueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::ResourceWeight [GOOD] Test command err: 510 500 1510 1500 990 1000 1000 1000 >> KqpQueryPerf::Upsert+QueryService+UseSink >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::FailOnUnsupportedPgType [GOOD] |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService+UseSink Test command err: Trying to start YDB, gRPC: 24303, MsgBus: 63616 2025-05-29T15:22:03.580737Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888285065331994:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:03.581587Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/ciyv/00108e/r3tmp/tmprZp6L0/pdisk_1.dat 2025-05-29T15:22:03.650266Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888285065331832:2079] 1748532123579748 != 1748532123579751 2025-05-29T15:22:03.653494Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24303, node 1 2025-05-29T15:22:03.673322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:03.673334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:03.673336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:03.673377Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63616 2025-05-29T15:22:03.725709Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:03.725753Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:03.726621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63616 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:03.766160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:03.769588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:22:03.780966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:03.852588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:03.879702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:03.895231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.092413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888289360300767:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.092446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.147439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.155723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.212107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.221394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.235870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.291453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.300126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.314907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888289360301425:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.314932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888289360301430:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.314935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.315672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:04.318327Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888289360301432:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:22:04.397786Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888289360301483:3399] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:04.490525Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888289360301499:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:04.490684Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDI3ZGI4ZWEtYjliNWE4NGItYzQzMTA1MmQtNjU5ODU1ZTA=, ActorId: [1:7509888289360300749:2401], ActorState: ExecuteState, TraceId: 01jwea5pmt7ygk18acm7efrtfr, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:04.492354Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7EFFE7851AC2 14. ??:0: ?? @ 0x7EFFE78E384F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead-QueryService Test command err: Trying to start YDB, gRPC: 5598, MsgBus: 21702 2025-05-29T15:22:03.863051Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888285745161356:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:03.863132Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001083/r3tmp/tmpGC1dbe/pdisk_1.dat 2025-05-29T15:22:03.930376Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:03.932005Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888285745161187:2079] 1748532123860448 != 1748532123860451 TServer::EnableGrpc on GrpcPort 5598, node 1 2025-05-29T15:22:03.947974Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:03.947985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:03.947987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:03.948029Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21702 TClient is connected to server localhost:21702 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:04.002820Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:04.002847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:04.003916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:04.008278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.087197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:04.105968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.172010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.184095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.224220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888290040130132:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.224245Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.274998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.283518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.291709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.306084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.319713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.334392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.347805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.372296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888290040130784:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.372344Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.372369Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888290040130789:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.373561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:04.376424Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888290040130791:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:04.455264Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888290040130842:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:04.538702Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888290040130858:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:04.538852Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YmUzMTZiNGUtZjk3OGE1N2YtOGQyOGNhOTktNTEyYjFlMzA=, ActorId: [1:7509888290040130129:2401], ActorState: ExecuteState, TraceId: 01jwea5ppkafysamgf12qyn4k2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:04.541657Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F6108DA3AC2 14. ??:0: ?? @ 0x7F6108E3584F >> KqpQueryPerf::IndexReplace-QueryService-UseSink >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete-QueryService+UseSink Test command err: Trying to start YDB, gRPC: 17440, MsgBus: 15956 2025-05-29T15:21:59.305130Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888267540907749:2211];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:59.307141Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001103/r3tmp/tmpJ2okmr/pdisk_1.dat 2025-05-29T15:21:59.348490Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888267540907563:2079] 1748532119259816 != 1748532119259819 2025-05-29T15:21:59.350576Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17440, node 1 2025-05-29T15:21:59.360792Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:59.360807Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:59.360809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:59.360857Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15956 2025-05-29T15:21:59.411524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:59.411555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:59.414711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15956 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:59.449549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.452496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:59.455668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.527130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.553882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.569738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.747936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888267540909199:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.747963Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.797625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.807911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.818830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.833245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.847591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.861546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.876280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.897797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888267540909851:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.897836Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.897974Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888267540909856:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.898978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:21:59.901734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:21:59.901806Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888267540909858:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:21:59.967079Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888267540909909:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:00.089141Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888267540909925:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:00.089254Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTJlNTIxZDUtOTJjN2Y0NDEtMmYwODk3MjUtYTMzMGU0NmM=, ActorId: [1:7509888267540909181:2401], ActorState: ExecuteState, TraceId: 01jwea5jasbn6x4kxq1n5m1hsp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:00.091287Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F2E501F0AC2 14. ??:0: ?? @ 0x7F2E5028284F Trying to start YDB, gRPC: 10148, MsgBus: 30812 2025-05-29T15:22:04.067558Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888289881071776:2198];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:04.067680Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001103/r3tmp/tmppZ3gA2/pdisk_1.dat 2025-05-29T15:22:04.136102Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:04.137172Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888289881071617:2079] 1748532124066707 != 1748532124066710 TServer::EnableGrpc on GrpcPort 10148, node 1 2025-05-29T15:22:04.144448Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:04.144466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:04.144468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:04.144517Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30812 TClient is connected to server localhost:30812 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:22:04.196773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:04.196806Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:04.198016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:04.210921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.216957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:04.220925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.285639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.304903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.317383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.500785Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888289881073249:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.500821Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.551992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.563372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.574117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.587616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.599730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.613997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.633154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.649231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888289881073901:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.649260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.649331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888289881073906:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.650009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:04.654891Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888289881073908:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:04.709386Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888289881073959:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:04.817148Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888289881073968:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:04.817269Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzAwMTI2NzMtNDYxNDY1MGMtNGMxMmQxMDktYWU5YTkxZDI=, ActorId: [1:7509888289881073246:2401], ActorState: ExecuteState, TraceId: 01jwea5pz81vqjfgwt9yv5wceg, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:04.818020Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F2499911AC2 14. ??:0: ?? @ 0x7F24999A384F |59.3%| [TA] $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::KV [FAIL] Test command err: Trying to start YDB, gRPC: 27504, MsgBus: 17556 2025-05-29T15:22:03.626038Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888286911325093:2132];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:03.626894Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001084/r3tmp/tmpn0YwGd/pdisk_1.dat 2025-05-29T15:22:03.770815Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888286911325000:2079] 1748532123625067 != 1748532123625070 2025-05-29T15:22:03.776180Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27504, node 1 2025-05-29T15:22:03.798940Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:03.798960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:03.798962Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:03.799008Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:03.822846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:03.822888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:03.823651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17556 TClient is connected to server localhost:17556 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:03.911607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:03.914950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:04.077714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888291206292954:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.077749Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.114122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.202065Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888291206294550:2454], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.202096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888291206294555:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.202101Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.202966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:22:04.204640Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888291206294557:2458], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:22:04.298234Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888291206294608:3323] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:04.314513Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888291206294624:2462], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:04.314609Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2MzNzc0YjAtYWZkN2Q1NjktMTQwMjQ5YmEtM2JkOWI5NWI=, ActorId: [1:7509888291206292936:2326], ActorState: ExecuteState, TraceId: 01jwea5ph9657fxkpjsh0czgvr, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/perf/kqp_workload_ut.cpp:59, void NKikimr::NKqp::Test(const TString &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13AA7E1B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13C5F288 2. /tmp//-S/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp:59: Test @ 0x139A82D0 3. /tmp//-S/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp:92: Execute_ @ 0x139A97F1 4. /tmp//-S/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp:86: operator() @ 0x139AC866 5. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13C6113D 6. /tmp//-S/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp:86: Execute @ 0x139AC22C 7. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13C618B2 8. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13C7345C 9. ??:0: ?? @ 0x7FE41E84BD8F 10. ??:0: ?? @ 0x7FE41E84BE3F 11. ??:0: ?? @ 0x129BC028 >> KqpQueryPerf::DeleteOn-QueryService-UseSink >> KqpQueryPerf::UpdateOn-QueryService+UseSink >> KqpQueryPerf::AggregateToScalar-QueryService >> KqpQueryPerf::MultiRead-QueryService >> KqpQueryPerf::Insert-QueryService+UseSink >> KqpQueryPerf::Upsert-QueryService+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService-UseSink Test command err: Trying to start YDB, gRPC: 30310, MsgBus: 16792 2025-05-29T15:22:04.025531Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888289922491757:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:04.025909Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001077/r3tmp/tmpzDo6fl/pdisk_1.dat 2025-05-29T15:22:04.080124Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:04.080207Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888289922491732:2079] 1748532124025285 != 1748532124025288 TServer::EnableGrpc on GrpcPort 30310, node 1 2025-05-29T15:22:04.090302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:04.090318Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:04.090320Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:04.090365Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16792 TClient is connected to server localhost:16792 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:04.158550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:04.158579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:04.159256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.159648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:04.164164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.228618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:04.246954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.258023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.413899Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888289922493366:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.413926Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.454420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.466173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.480532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.535861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.543673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.558284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.573917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.587863Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888289922494020:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.587888Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.588071Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888289922494025:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.588883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:04.592176Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888289922494027:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:04.654622Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888289922494078:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:04.764400Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888289922494087:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:04.764506Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWEyZjg1MTQtZTFmM2E4YTYtN2EyYWZjNWEtYTgyOTA2YWY=, ActorId: [1:7509888289922493348:2401], ActorState: ExecuteState, TraceId: 01jwea5pxbem1yk2y2f8rhkfr4, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:04.765190Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7FD6B62D8AC2 14. ??:0: ?? @ 0x7FD6B636A84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService+UseSink Test command err: Trying to start YDB, gRPC: 3795, MsgBus: 14145 2025-05-29T15:22:04.127001Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888289626519934:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:04.127971Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001072/r3tmp/tmprumx2M/pdisk_1.dat 2025-05-29T15:22:04.180603Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888289626519774:2079] 1748532124125903 != 1748532124125906 2025-05-29T15:22:04.182324Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3795, node 1 2025-05-29T15:22:04.195689Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:04.195703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:04.195705Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:04.195745Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14145 TClient is connected to server localhost:14145 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:04.259227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:04.259256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:04.260135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:04.262086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.266967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.286485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.357172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.368988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.568460Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888289626521421:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.568487Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.610558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.617218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.628534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.641476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.655732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.670608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.684895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.701644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888289626522074:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.701669Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.701763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888289626522079:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.702440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:04.710765Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888289626522081:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:04.805739Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888289626522132:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:04.913906Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888289626522148:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:04.914826Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzMwYmUwYjMtZGZkZjU3ODUtNzg2ODZkYjctNGU1OTEyMzY=, ActorId: [1:7509888289626521403:2401], ActorState: ExecuteState, TraceId: 01jwea5q0x22a2jtk804avmme7, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:04.915497Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F1AF9809AC2 14. ??:0: ?? @ 0x7F1AF989B84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead-QueryService Test command err: Trying to start YDB, gRPC: 24633, MsgBus: 1693 2025-05-29T15:22:04.259908Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888291814956493:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:04.259936Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00106f/r3tmp/tmpuJ4FLl/pdisk_1.dat 2025-05-29T15:22:04.330688Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888291814956473:2079] 1748532124259770 != 1748532124259773 2025-05-29T15:22:04.333756Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24633, node 1 2025-05-29T15:22:04.350942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:04.350956Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:04.350958Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:04.351003Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1693 TClient is connected to server localhost:1693 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:04.402718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:04.402789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:04.403779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:04.412898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.418941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.440257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.460830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.471810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.649896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888291814958108:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.649922Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.695651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.707386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.718643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.733349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.746506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.760891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.779315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.795022Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888291814958760:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.795054Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.795141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888291814958765:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.796124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:04.804452Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888291814958767:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:04.892701Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888291814958818:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:05.020211Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888291814958827:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:05.020310Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGFiNTQwZTgtNTk0NmMzNWItNmI3MThiZWItMzkwZmQ5MzI=, ActorId: [1:7509888291814958081:2400], ActorState: ExecuteState, TraceId: 01jwea5q3tawcb9v0da62te32r, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:05.020966Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7FDB3A322AC2 14. ??:0: ?? @ 0x7FDB3A3B484F >> KqpOlapCompression::DefaultCompressionViaCSConfig [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace+QueryService+UseSink Test command err: Trying to start YDB, gRPC: 14620, MsgBus: 24619 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010f1/r3tmp/tmp22AXt1/pdisk_1.dat 2025-05-29T15:21:59.573047Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888269796117680:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:59.575638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:59.613269Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888269796117498:2079] 1748532119507656 != 1748532119507659 2025-05-29T15:21:59.614606Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14620, node 1 2025-05-29T15:21:59.641787Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:59.641798Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:59.641800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:59.641844Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24619 2025-05-29T15:21:59.676890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:59.676919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:59.678098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24619 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:59.721127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.727409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:59.739532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:21:59.805859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.832697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.891671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.987202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888269796119144:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.987234Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.037903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.048608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.059914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.070647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.085304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.099084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.113770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.132193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888274091087094:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.132224Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.132233Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888274091087099:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.133188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:00.140449Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888274091087101:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:22:00.237428Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888274091087152:3402] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } VERIFY failed (2025-05-29T15:22:00.359487Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:22:00.358402Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888274091087168:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:00.358558Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjNkYjVhZWEtZDA5N2Q1OTQtNzhiZjM4MWQtMWM2YTZiODE=, ActorId: [1:7509888269796119126:2401], ActorState: ExecuteState, TraceId: 01jwea5jj32zhr8hkv31vvh00a, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F9E8E697AC2 14. ??:0: ?? @ 0x7F9E8E72984F Trying to start YDB, gRPC: 20204, MsgBus: 2308 2025-05-29T15:22:04.552392Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888289531064575:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:04.552618Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010f1/r3tmp/tmpfemHTP/pdisk_1.dat 2025-05-29T15:22:04.625444Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:04.625645Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888289531064409:2079] 1748532124549171 != 1748532124549174 TServer::EnableGrpc on GrpcPort 20204, node 1 2025-05-29T15:22:04.640910Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:04.640922Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:04.640924Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:04.640956Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:04.653570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:04.653601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:04.654651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2308 TClient is connected to server localhost:2308 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:04.704141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.706958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:04.717314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.794071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.821321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.831992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.969772Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888289531066045:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.969794Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:05.028862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.036087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.092823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.147773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.203036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.215733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.229529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.245112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888293826034000:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:05.245130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:05.245145Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888293826034005:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:05.245902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:05.249211Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888293826034007:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:05.332468Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888293826034058:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:05.441215Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888293826034074:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:05.441332Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTM4OTJmMzQtOGJkODIyNTgtYjE2ZDBhMTAtZjVjYTc2M2E=, ActorId: [1:7509888289531066027:2401], ActorState: ExecuteState, TraceId: 01jwea5qhw8kkmzb8d1npakvqc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:22:05.441956Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC
8. /-S/util/thread/pool.h:71: Process @ 0x261237FC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69
10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC
13. ??:0: ?? @ 0x7FBF7F64AAC2
14. ??:0: ?? @ 0x7FBF7F6DC84F
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete+QueryService+UseSink
Test command err:
Trying to start YDB, gRPC: 17615, MsgBus: 6827
2025-05-29T15:21:59.850282Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888270987772640:2198];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:21:59.850428Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010cd/r3tmp/tmpVZzpE6/pdisk_1.dat
2025-05-29T15:21:59.914923Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888270987772481:2079] 1748532119848495 != 1748532119848498
2025-05-29T15:21:59.916714Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 17615, node 1
2025-05-29T15:21:59.929498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:21:59.929510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:21:59.929512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:21:59.929570Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:6827
TClient is connected to server localhost:6827
WaitRootIsUp 'Root'...
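Note: the VERIFY line above prints both the asserted signature, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &), and the failing expression, result.IsSuccess(). A plausible sketch of that helper follows; the body is an assumption, and only the signature and the checked condition come from the log.

#include <library/cpp/testing/unittest/registar.h>
// NYdb::TStatus comes from the YDB C++ SDK status header (path omitted here).

namespace NKikimr::NKqp {

// Sketch of the helper at kqp_ut_common.h:375 (assumed body): attaching the
// issue text makes an INTERNAL_ERROR compile failure readable in the
// assertion message instead of a bare boolean failure.
inline void AssertSuccessResult(const NYdb::TStatus& result) {
    UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
}

} // namespace NKikimr::NKqp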
TClient::Ls request: Root 2025-05-29T15:21:59.985774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:59.985803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:59.987336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:59.999394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:00.001760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:00.003726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:00.077819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:00.103079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:00.116023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:00.237544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888275282741410:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.237583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.284636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.292722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.301203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.356803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.365482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.379573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.393037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.411059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888275282742063:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.411092Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.411151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888275282742068:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.412107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:00.416240Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888275282742070:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:00.471924Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888275282742121:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:00.597135Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888275282742137:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:00.598231Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWY1MjBhNzItMjEyM2ZiNGQtMjhjYWMyYWQtMjk5ZGYwZmE=, ActorId: [1:7509888275282741407:2401], ActorState: ExecuteState, TraceId: 01jwea5jttcssaw5cbj34p1cj2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:22:00.601084Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC
8. /-S/util/thread/pool.h:71: Process @ 0x261237FC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69
10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC
13. ??:0: ?? @ 0x7F23F4AA1AC2
14. ??:0: ?? @ 0x7F23F4B3384F
Trying to start YDB, gRPC: 16882, MsgBus: 4132
2025-05-29T15:22:04.506640Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888290758376521:2061];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:22:04.506663Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010cd/r3tmp/tmpS3zgxI/pdisk_1.dat
2025-05-29T15:22:04.581689Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888290758376502:2079] 1748532124506535 != 1748532124506538
2025-05-29T15:22:04.584757Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 16882, node 1
2025-05-29T15:22:04.600449Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:22:04.600462Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:22:04.600464Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:22:04.600507Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:4132
TClient is connected to server localhost:4132
WaitRootIsUp 'Root'...
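Note: the reason these assertions abort the whole binary instead of failing a single test is visible in frames 0-2 and 4-8 of the trace: RaiseError (registar.cpp:36) requires the unittest thread, but CreateSampleTables runs on a thread-pool thread via NThreading, so the requirement fails and Panic is called. A behavioral sketch with entirely hypothetical names:

#include <util/system/yassert.h>

bool IsUnittestThreadSketch();                 // hypothetical
void MarkCurrentTestFailedSketch(const char*); // hypothetical

// Mirrors only the logged behavior: off the unittest thread there is no
// current test to attribute the failure to, so the process aborts with
// "VERIFY failed ... requirement UnittestThread failed".
void RaiseErrorSketch(const char* message) {
    Y_ABORT_UNLESS(IsUnittestThreadSketch(), "requirement UnittestThread failed");
    MarkCurrentTestFailedSketch(message); // normal path: the run continues
}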
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:04.654141Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:04.654172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:04.655210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:04.668383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.671878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:04.682182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.757828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.781883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.796770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.902231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888290758378136:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.902259Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.948936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.960105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.973371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.030275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.041185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.057370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.070688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.085015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888295053346090:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:05.085052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:05.085083Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888295053346095:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:05.085851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:05.088417Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888295053346097:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:05.165808Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888295053346148:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:05.254831Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888295053346164:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:05.254943Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDMzZjU4NTAtMzQyOTNkNWYtZmVkOGJhYWEtZjhhNzY2NTc=, ActorId: [1:7509888290758378133:2401], ActorState: ExecuteState, TraceId: 01jwea5qcwcv32wnnqjkqv3z0w, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:22:05.255600Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC
8. /-S/util/thread/pool.h:71: Process @ 0x261237FC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69
10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC
13. ??:0: ?? @ 0x7FC89B491AC2
14. ??:0: ?? @ 0x7FC89B52384F
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert-QueryService+UseSink
Test command err:
Trying to start YDB, gRPC: 1434, MsgBus: 20528
2025-05-29T15:21:59.461996Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888269050908351:2220];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010d6/r3tmp/tmp3OGIpJ/pdisk_1.dat
2025-05-29T15:21:59.497400Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-05-29T15:21:59.536883Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:21:59.541162Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888269050908140:2079] 1748532119451031 != 1748532119451034
TServer::EnableGrpc on GrpcPort 1434, node 1
2025-05-29T15:21:59.555937Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:21:59.555952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:21:59.555954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:21:59.556001Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:20528
2025-05-29T15:21:59.596433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:21:59.596475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:21:59.598150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:20528
WaitRootIsUp 'Root'...
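Note: the "Resource pool default not found" warnings, the "Transaction 281474976715669 completed, doublechecking" retry, and the TX_PROXY "path exist, request accepts it" message form one benign first-use sequence, separate from the failure: several actors race to create /Root/.metadata/workload_manager/pools/default, and losing the race is treated as success. A sketch of that idempotent pattern with hypothetical types (this is not the actual workload-service code):

#include <string>

// Hypothetical minimal interfaces, for illustration only.
struct TStatusSketch {
    bool Success = false;
    bool PathExists = false;
};
struct ISchemeClientSketch {
    virtual TStatusSketch CreateResourcePool(const std::string& path) = 0;
    virtual ~ISchemeClientSketch() = default;
};

enum class EEnsurePool { Created, AlreadyExisted, Retry };

EEnsurePool EnsureDefaultPoolSketch(ISchemeClientSketch& client) {
    const auto st = client.CreateResourcePool(
        "/Root/.metadata/workload_manager/pools/default");
    if (st.Success) {
        return EEnsurePool::Created;
    }
    if (st.PathExists) {
        // Lost the creation race: the pool exists either way, matching
        // "path exist, request accepts it" in the log above.
        return EEnsurePool::AlreadyExisted;
    }
    return EEnsurePool::Retry; // "Scheduled retry for error", as logged
}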
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:59.636971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.643258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:59.652992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.687641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.712426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:21:59.730768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:21:59.974609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888269050909772:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:59.974634Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.039163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.047373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.059210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.071911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.086092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.099966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.115219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.133574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888273345877722:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.133594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888273345877727:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.133629Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.134458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:00.139792Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888273345877729:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:00.202166Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888273345877780:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:00.323865Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888273345877796:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:00.323987Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzQxODQ4NzUtYzBhOWNkNDAtMWNjYWRiNWEtZDQ2ZDdhYjQ=, ActorId: [1:7509888269050909754:2401], ActorState: ExecuteState, TraceId: 01jwea5jj57vkvd90cjky8d7th, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:22:00.324629Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC
8. /-S/util/thread/pool.h:71: Process @ 0x261237FC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69
10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC
13. ??:0: ?? @ 0x7F46ED346AC2
14. ??:0: ?? @ 0x7F46ED3D884F
Trying to start YDB, gRPC: 20690, MsgBus: 61939
2025-05-29T15:22:04.338091Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888291191306487:2062];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:22:04.338116Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010d6/r3tmp/tmp4FefIP/pdisk_1.dat
2025-05-29T15:22:04.401557Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888291191306467:2079] 1748532124337932 != 1748532124337935
2025-05-29T15:22:04.402093Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 20690, node 1
2025-05-29T15:22:04.418890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:22:04.418904Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:22:04.418905Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:22:04.418942Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:61939
TClient is connected to server localhost:61939
WaitRootIsUp 'Root'...
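Note: frames 3-5 of every trace point at the shared harness: CreateSampleTables (kqp_ut_common.cpp:365) runs on a pool thread through the future set at kqp_ut_common.cpp:581. The failing setup presumably has roughly the shape below; the helper semantics are inferred from the stack trace, not verified:

#include <ydb/core/kqp/ut/common/kqp_ut_common.h>

void SetupSketch() {
    // Starts the single-node server whose "Trying to start YDB, gRPC: ..."
    // lines appear above.
    NKikimr::NKqp::TKikimrRunner kikimr;
    auto db = kikimr.GetTableClient();
    auto session = db.CreateSession().GetValueSync().GetSession();
    // CreateSampleTables then issues the ESchemeOpCreateTable operations in
    // the log and runs a query; when compilation returns INTERNAL_ERROR,
    // its AssertSuccessResult call is what aborts the run.
}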
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:04.473449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:04.473473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:04.474603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:04.487425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.491714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.558366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.590084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.606244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.759337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888291191308106:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.759368Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.807639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.817352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.831244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.886937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.901575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.918145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.929140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.989736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888291191308767:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.989763Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.989802Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888291191308772:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.990709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:04.997912Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888291191308774:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:05.063650Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888295486276122:3402] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:05.176029Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888295486276138:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:05.176159Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGRiZjBkNjAtZjIzMDliNzEtYWE4NzBkMjYtYjY1NzczZDM=, ActorId: [1:7509888291191308079:2400], ActorState: ExecuteState, TraceId: 01jwea5q9x8z63y42mz6fagc50, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:22:05.177001Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC
8. /-S/util/thread/pool.h:71: Process @ 0x261237FC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69
10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC
13. ??:0: ?? @ 0x7F609EE51AC2
14. ??:0: ?? @ 0x7F609EEE384F
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert-QueryService+UseSink
Test command err:
Trying to start YDB, gRPC: 15150, MsgBus: 5351
2025-05-29T15:21:59.643959Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888270501098422:2156];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010cc/r3tmp/tmpoHb2Z5/pdisk_1.dat
2025-05-29T15:21:59.689389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-05-29T15:21:59.728756Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888270501098277:2079] 1748532119635538 != 1748532119635541
2025-05-29T15:21:59.728760Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 15150, node 1
2025-05-29T15:21:59.743508Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:21:59.743524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:21:59.743526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:21:59.743580Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:5351
2025-05-29T15:21:59.787351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:21:59.787380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:21:59.788291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:5351
WaitRootIsUp 'Root'...
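Note: on the client side, ReplyQueryCompileError surfaces as a failed status whose issues carry the Fatal lines seen throughout this log. A sketch against the public NYdb C++ SDK (not the test's exact code; the include path is an assumption):

#include <ydb-cpp-sdk/client/table/table.h> // include path is an assumption
#include <util/generic/string.h>
#include <util/stream/output.h>

void RunQuerySketch(NYdb::NTable::TSession& session, const TString& query) {
    auto result = session.ExecuteDataQuery(
        query,
        NYdb::NTable::TTxControl::BeginTx(
            NYdb::NTable::TTxSettings::SerializableRW()).CommitTx()
    ).GetValueSync();
    if (!result.IsSuccess()) {
        // For the failures above this prints the Fatal issues, including
        // "yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1".
        Cerr << result.GetIssues().ToString() << Endl;
    }
}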
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:59.810955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.814159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:59.872409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.936983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:59.962346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:00.024138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:00.122059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888274796067229:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.122082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.182298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.191552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.203567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.219092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.232762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.247779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.260519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:00.279645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888274796067880:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.279684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.279944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888274796067885:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:00.280801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:00.287273Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888274796067887:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:00.344331Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888274796067938:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:00.459986Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888274796067954:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:00.460130Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjQ5ZjNjNmMtMzcxYmUyOWMtODA2YzhhYWUtODM2M2ExZGM=, ActorId: [1:7509888274796067211:2401], ActorState: ExecuteState, TraceId: 01jwea5jpq7m73jdg9m7sy2kmc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:00.463172Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F7BD816FAC2 14. ??:0: ?? @ 0x7F7BD820184F Trying to start YDB, gRPC: 15400, MsgBus: 65079 2025-05-29T15:22:04.491796Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888292244985934:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:04.491820Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010cc/r3tmp/tmpywMTVK/pdisk_1.dat 2025-05-29T15:22:04.560258Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888292244985911:2079] 1748532124491578 != 1748532124491581 2025-05-29T15:22:04.563628Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15400, node 1 2025-05-29T15:22:04.578941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:04.578954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:04.578956Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:04.578995Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:65079 TClient is connected to server localhost:65079 WaitRootIsUp 'Root'... 
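A recurring pattern in these runs: the workload service first fails to fetch the resource pool "default" (NOT_FOUND), a TPoolCreatorActor then creates it, and the follow-up doublecheck may report "path exist, request accepts it" when a concurrent creator wins the race. That is a benign create-if-missing bootstrap. A generic sketch of the idea, with all names hypothetical rather than YDB API:

    #include <functional>
    #include <iostream>

    enum class EStatus { Success, NotFound, AlreadyExists };

    // Fetch first; create on NOT_FOUND; treat ALREADY_EXISTS from the create
    // (a concurrent creator won the race) as success.
    EStatus EnsureDefaultPool(const std::function<EStatus()>& fetch,
                              const std::function<EStatus()>& create) {
        if (fetch() == EStatus::Success) {
            return EStatus::Success;
        }
        const EStatus st = create();
        return st == EStatus::AlreadyExists ? EStatus::Success : st;
    }

    int main() {
        bool exists = false;
        auto fetch = [&]() { return exists ? EStatus::Success : EStatus::NotFound; };
        auto create = [&]() {
            if (exists) return EStatus::AlreadyExists;
            exists = true;
            return EStatus::Success;
        };
        std::cout << (EnsureDefaultPool(fetch, create) == EStatus::Success) << "\n"; // 1
    }

Under this reading, the TX_PROXY "path exist, request accepts it" errors scattered through the log are expected noise from parallel pool creation, not test failures.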
TClient::Ls request: Root 2025-05-29T15:22:04.637410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:04.637446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-29T15:22:04.638756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:04.650781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.653617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:04.667855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:04.684594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.710809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.768267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:04.896263Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888292244987547:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.896318Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.904862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.963201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.980122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.994519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.006230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.063202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.075420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.094303Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888296539955501:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:05.094327Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:05.094348Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888296539955506:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:05.095194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:05.102703Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888296539955508:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:05.181524Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888296539955560:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:05.259331Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888296539955576:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:05.259428Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjZhNDQ3MzUtOWI3YWEwNjUtYWIyZTQ4ZDEtOGFiNWM4NWU=, ActorId: [1:7509888292244987529:2401], ActorState: ExecuteState, TraceId: 01jwea5qd535hd4tny7h0kmz4e, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:05.260145Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F118B18CAC2 14. ??:0: ?? @ 0x7F118B21E84F >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink >> KqpQueryPerf::Insert+QueryService-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapCompression::DefaultCompressionViaCSConfig [GOOD] Test command err: Trying to start YDB, gRPC: 20083, MsgBus: 2260 2025-05-29T15:21:36.937590Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888170662365673:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:36.937744Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026a9/r3tmp/tmpJqDO1s/pdisk_1.dat 2025-05-29T15:21:37.006670Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888170662365496:2079] 1748532096934488 != 1748532096934491 2025-05-29T15:21:37.008112Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20083, node 1 2025-05-29T15:21:37.034926Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:37.034937Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:37.034939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:37.034990Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2260 2025-05-29T15:21:37.080404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:37.080432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:37.081079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2260 WaitRootIsUp 'Root'... 
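The TX_COLUMNSHARD output below walks an ordered chain of schema normalizers: each is registered during TTxInitSchema (normalizer_register) and then drained one by one during TTxUpdateSchema (normalizer_switched followed by normalizer_finished). Only the class names in the sketch come from the log; the driver loop is illustrative, not the tablet code.

    #include <iostream>
    #include <string>
    #include <vector>

    // One normalizer fixes up one aspect of persisted tablet state on startup.
    struct TNormalizer {
        std::string ClassName;
        void Apply() const { /* the real ones rewrite local on-disk structures */ }
    };

    int main() {
        // Registration order copied from the TTxInitSchema events in the log.
        const std::vector<TNormalizer> chain = {
            {"Granules"}, {"Chunks"}, {"TablesCleaner"}, {"CleanGranuleId"},
            {"CleanInsertionDedup"}, {"GCCountersNormalizer"},
            {"SyncPortionFromChunks"}, {"RestoreV1Chunks_V2"}, {"RestoreV2Chunks"},
            {"CleanDeprecatedSnapshot"}, {"RestoreV0ChunksMeta"},
        };
        for (const auto& n : chain) {
            std::cout << "normalizer_switched;CLASS_NAME=" << n.ClassName << "\n";
            n.Apply();
            std::cout << "normalizer_finished;CLASS_NAME=" << n.ClassName << "\n";
        }
    }

Note that every normalizer in the passing olap test finishes cleanly; the WARN severity on these lines is routine startup logging, unrelated to the kqp/ut/perf failures above.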
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:21:37.119899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (pk_int Uint64 NOT NULL, PRIMARY KEY (pk_int)) PARTITION BY HASH(pk_int) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:21:37.335996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888174957333455:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.336021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:37.377799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:37.389549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888174957333531:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:37.389617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888174957333531:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.389660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888174957333531:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.389688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888174957333531:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.389715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888174957333531:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:37.389742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888174957333531:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:37.389774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888174957333531:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:37.389802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888174957333531:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:37.389828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888174957333531:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:37.389853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888174957333531:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:37.389879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888174957333531:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.389903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888174957333531:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:37.391247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:37.391267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:37.391281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:37.391287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:37.391318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:37.391323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:37.391335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:37.391341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:37.391351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:37.391356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:37.391363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:37.391368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:37.391391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:37.391408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.391431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.391437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.391451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.391456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnap ... rocess=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:53.519143Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888241869695166:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:53.519168Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888241869695166:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:53.519190Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888241869695166:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:53.519213Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888241869695166:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:53.519238Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888241869695166:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:53.519263Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888241869695166:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:53.519285Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888241869695166:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:53.519336Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888241869695166:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:53.520732Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:53.520756Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:53.520772Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:53.520780Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:53.520817Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:53.520832Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:53.520845Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:53.520861Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:53.520875Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:53.520886Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:53.520897Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:53.520910Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:53.520939Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:53.520975Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:53.521008Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:53.521022Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:53.521037Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:53.521050Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:53.521076Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:53.521089Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:53.521097Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:53.521246Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:21:53.521258Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:21:53.527758Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=800144;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=800144;columns=1; 2025-05-29T15:21:53.565482Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;parent_id=[2:7509888241869695166:2335];path_id=2;entity_id=1;size=102408;limit=10240;r_count=100000;fline=column_info.h:130;sizes=9309,9309,9310,9310,9310,9310,9310,9310,9310,9310,9310;s_splitted=10072,9448,9448,9448,9448,9448,9448,9448,9448,9448,9448;r_splitted=9090,9090,9091,9091,9091,9091,9091,9091,9091,9091,9092; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-05-29T15:21:58.120041Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7509888241869694484:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:58.120085Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-05-29T15:22:03.602819Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888284819368323:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.602889Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.603130Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888284819368328:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.604146Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:22:03.607204Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:22:03.607278Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888284819368330:2400], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:22:03.659854Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888284819368381:2447] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:03.733750Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532123703, txId: 281474976715661] shutting down Wait changes: 104552/800000 2025-05-29T15:22:08.139823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:22:08.139843Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:08.805375Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532128780, txId: 281474976715663] shutting down >> KqpWorkload::STOCK >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink >> KqpQueryPerf::IndexUpsert+QueryService-UseSink >> KqpQueryPerf::Replace-QueryService+UseSink >> KqpQueryPerf::KvRead-QueryService >> KqpQueryPerf::Update-QueryService-UseSink >> KqpQueryPerf::DeleteOn+QueryService-UseSink >> KqpQueryPerf::Replace-QueryService-UseSink >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test >> GroupWriteTest::ByTableName >> KqpQueryPerf::UpdateOn-QueryService-UseSink >> TPartitionTests::Batching ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead+QueryService Test command err: Trying to start YDB, gRPC: 22108, MsgBus: 16977 2025-05-29T15:22:05.809409Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888296761828998:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:05.809536Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00104b/r3tmp/tmpfqOpkL/pdisk_1.dat 2025-05-29T15:22:05.859414Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888296761828836:2079] 1748532125808491 != 1748532125808494 2025-05-29T15:22:05.861646Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22108, node 1 2025-05-29T15:22:05.870270Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:05.870279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:05.870280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:05.870328Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to 
server localhost:16977 2025-05-29T15:22:05.911668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:05.911705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:05.912773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16977 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:05.935911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:05.940078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:06.004796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:06.069034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:06.082290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:06.251943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888301056797797:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:06.251987Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:06.290393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:06.298319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:06.307856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:06.321810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:06.336073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:06.350208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:06.364064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:06.380381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888301056798449:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:06.380420Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888301056798454:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:06.380422Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:06.381327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:06.383887Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888301056798456:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:06.472763Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888301056798507:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:06.567580Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888301056798523:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:06.567719Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2FiM2M0MzEtZTAzYWZhMzUtMjkxNzRkN2QtYzg1N2Q0MjE=, ActorId: [1:7509888301056797794:2401], ActorState: ExecuteState, TraceId: 01jwea5rnb57n3t838kpqxg6zw, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:06.568465Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7FB7893ECAC2 14. ??:0: ?? @ 0x7FB78947E84F >> TPQTest::TestWritePQCompact >> TPartitionTests::Batching [GOOD] >> TPartitionTests::CommitOffsetRanges >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig [GOOD] >> TPartitionTests::ShadowPartitionCountersRestore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService+UseSink Test command err: Trying to start YDB, gRPC: 63308, MsgBus: 21461 2025-05-29T15:22:05.790377Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888296433197751:2200];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:05.790488Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001049/r3tmp/tmporus4G/pdisk_1.dat 2025-05-29T15:22:05.852307Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:05.852394Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888296433197590:2079] 1748532125789573 != 1748532125789576 TServer::EnableGrpc on GrpcPort 63308, node 1 2025-05-29T15:22:05.863163Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:05.863174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:05.863175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:05.863217Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21461 2025-05-29T15:22:05.892659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:05.892685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:05.893798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21461 WaitRootIsUp 'Root'... 
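Every failing perf test in this section dies at the same point: yql/essentials/ast/yql_expr.h:1874, "index out of range", i.e. a child access on an expression node past its arity during query compilation. The accessor itself is not reproduced here; the sketch below only illustrates the invariant such a check enforces, with all names and the abort path assumed for the example.

    #include <cstdio>
    #include <cstdlib>
    #include <vector>

    struct TExprNode {
        std::vector<const TExprNode*> Children;

        // Bounds-checked child access: asking for a child the node does not
        // have is a fatal internal error, mirroring "index out of range".
        const TExprNode& Child(std::size_t index) const {
            if (index >= Children.size()) {
                std::fprintf(stderr, "index out of range\n");
                std::abort();
            }
            return *Children[index];
        }
    };

    int main() {
        TExprNode leaf;
        TExprNode root{{&leaf}};
        (void)root.Child(0); // fine: the node has one child
        (void)root.Child(1); // reproduces the failure mode: aborts
    }

Because the same INTERNAL_ERROR appears across independent node startups with identical stack traces, a single compiler-side defect tripped by the shared CreateSampleTables setup is the most plausible common cause.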
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:05.924786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:05.933611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:05.997164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:06.016877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:06.028424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:06.160669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888300728166521:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:06.160705Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:06.197999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:22:06.211519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:22:06.266191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:22:06.280161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:22:06.293958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:22:06.307838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:22:06.321926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:22:06.338265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888300728167176:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:06.338292Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:06.338296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888300728167181:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:06.339047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:22:06.342193Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888300728167183:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:22:06.394470Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888300728167234:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:22:06.509430Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888300728167250:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:06.509573Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTUyNGM1ZGUtZGNkNTM3OGMtYTIxY2Y2M2YtODY4ODg0ODQ=, ActorId: [1:7509888300728166518:2401], ActorState: ExecuteState, TraceId: 01jwea5rm1ahkthap3mgw2c58t, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:22:06.511244Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC
8. /-S/util/thread/pool.h:71: Process @ 0x261237FC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69
10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC
13. ??:0: ?? @ 0x7F4AB4222AC2
14. ??:0: ?? @ 0x7F4AB42B484F
>> TPartitionTests::ShadowPartitionCountersRestore [GOOD]
>> TPartitionTests::CommitOffsetRanges [GOOD]
>> TPartitionTests::TestNonConflictingActsBatchOk
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink
Test command err: Trying to start YDB, gRPC: 10082, MsgBus: 8171
2025-05-29T15:22:01.813029Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888277002253780:2073];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:22:01.813127Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010cb/r3tmp/tmpns3scH/pdisk_1.dat
2025-05-29T15:22:01.869889Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 10082, node 1
2025-05-29T15:22:01.894636Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:22:01.894654Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:22:01.894656Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:22:01.894710Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:8171
2025-05-29T15:22:01.914185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:22:01.914227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:22:01.915320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:8171
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
2025-05-29T15:22:01.958890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:01.972771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.038962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:02.063424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:02.076177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:02.289711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888281297222691:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:02.289739Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:02.344190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.357409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.366543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.380489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.394963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.451547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.467393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.484828Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888281297223348:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:02.484853Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:02.484974Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888281297223353:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:02.486073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:22:02.492077Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888281297223355:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:22:02.545969Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888281297223407:3400] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:22:02.677799Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888281297223423:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:02.678340Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTNmMWYwYTMtNzA2MmM2ZjYtZTNkMDRiZC1iYjBlMGU0Mg==, ActorId: [1:7509888281297222673:2401], ActorState: ExecuteState, TraceId: 01jwea5mvmb4veby2gxwxqr47z, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:22:02.679165Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC
8. /-S/util/thread/pool.h:71: Process @ 0x261237FC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69
10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC
13. ??:0: ?? @ 0x7FB49B7BDAC2
14. ??:0: ?? @ 0x7FB49B84F84F
Trying to start YDB, gRPC: 26723, MsgBus: 20167
2025-05-29T15:22:06.534624Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888300795691445:2265];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:22:06.534655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010cb/r3tmp/tmpGgP7OA/pdisk_1.dat
2025-05-29T15:22:06.594438Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888300795691219:2079] 1748532126533464 != 1748532126533467
2025-05-29T15:22:06.595776Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 26723, node 1
2025-05-29T15:22:06.609033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:22:06.609048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:22:06.609050Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:22:06.609107Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:20167
TClient is connected to server localhost:20167
WaitRootIsUp 'Root'...
TClient::Ls request: Root
2025-05-29T15:22:06.667225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:22:06.667254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:22:06.669327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:22:06.692776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:06.697928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-05-29T15:22:06.712585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:06.792915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:06.828941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:06.863257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:07.034061Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888305090660152:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:07.034093Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:07.083881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:22:07.098169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:22:07.154032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:22:07.209489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:22:07.217679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:22:07.232254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:22:07.245889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:22:07.264586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888305090660809:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:07.264615Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:07.264670Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888305090660814:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:07.265535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:22:07.272759Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888305090660816:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:22:07.373537Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888305090660867:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:22:07.454197Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888305090660883:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:07.454313Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Mjg4ZmM5YTYtNWFlMTRmNDYtOWVjMThjN2UtMzYyODAxN2Q=, ActorId: [1:7509888305090660134:2401], ActorState: ExecuteState, TraceId: 01jwea5sh0btjkesgve30thw0s, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:22:07.455024Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC
8. /-S/util/thread/pool.h:71: Process @ 0x261237FC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69
10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC
13. ??:0: ?? @ 0x7F0D33184AC2
14. ??:0: ?? @ 0x7F0D3321684F
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength-QueryService
Test command err: Trying to start YDB, gRPC: 7385, MsgBus: 15139
2025-05-29T15:22:01.781989Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888279401883452:2203];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:22:01.782161Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010bd/r3tmp/tmpC3Rfn7/pdisk_1.dat
2025-05-29T15:22:01.866086Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:22:01.866612Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888279401883287:2079] 1748532121779158 != 1748532121779161
TServer::EnableGrpc on GrpcPort 7385, node 1
2025-05-29T15:22:01.884064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:22:01.884087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:22:01.884090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:22:01.884140Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:15139
TClient is connected to server localhost:15139
2025-05-29T15:22:01.930853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:22:01.930882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
WaitRootIsUp 'Root'...
TClient::Ls request: Root
2025-05-29T15:22:01.932006Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:22:01.945832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:01.948414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-05-29T15:22:01.953028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:02.018396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-05-29T15:22:02.056450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.071198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:02.200858Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888283696852216:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:02.200886Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:02.255526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.265294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.333863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.347118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.359317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.368285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.381308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.399538Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888283696852872:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:02.399562Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:02.399681Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888283696852877:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:02.400588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-05-29T15:22:02.407998Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888283696852879:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-05-29T15:22:02.465665Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888283696852930:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:22:02.580372Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888283696852946:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:02.582534Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=N2FkZWQ4Ni00MjI2YjVkMS04ZjVhNjlkMi1mNGI5MGUx, ActorId: [1:7509888283696852198:2401], ActorState: ExecuteState, TraceId: 01jwea5mry22fpfy8s5pc3qzqm, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:22:02.583839Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC
8. /-S/util/thread/pool.h:71: Process @ 0x261237FC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69
10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC
13. ??:0: ?? @ 0x7F7A801FDAC2
14. ??:0: ?? @ 0x7F7A8028F84F
Trying to start YDB, gRPC: 26641, MsgBus: 2710
2025-05-29T15:22:06.502508Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888299572588275:2062];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:22:06.502543Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010bd/r3tmp/tmpIfEvZF/pdisk_1.dat
2025-05-29T15:22:06.566584Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888299572588255:2079] 1748532126502379 != 1748532126502382
2025-05-29T15:22:06.568789Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 26641, node 1
2025-05-29T15:22:06.579809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:22:06.579826Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:22:06.579828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:22:06.579879Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:2710
2025-05-29T15:22:06.640873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:22:06.640911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TClient is connected to server localhost:2710
2025-05-29T15:22:06.643280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
2025-05-29T15:22:06.671807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-05-29T15:22:06.677946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:06.736159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:06.775377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:06.792146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:06.986983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888299572589888:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:06.987020Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:07.024113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:22:07.031860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:22:07.088507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:22:07.099733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:22:07.113042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:22:07.127502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:22:07.140512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:22:07.156845Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888303867557837:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:07.156865Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:07.156913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888303867557842:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:07.157691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:22:07.160012Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888303867557844:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:22:07.235788Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888303867557895:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:22:07.316284Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888303867557911:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:07.316387Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjM5NmJmMDAtNGY1NWFkMDgtODlhNDc3ZGYtMzFlOTk3OGU=, ActorId: [1:7509888299572589885:2401], ActorState: ExecuteState, TraceId: 01jwea5sdm93n3qxck6h7arjxz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:22:07.317084Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC
8. /-S/util/thread/pool.h:71: Process @ 0x261237FC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69
10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC
13. ??:0: ?? @ 0x7F34A233FAC2
14. ??:0: ?? @ 0x7F34A23D184F
>> TPartitionTests::ChangeConfig
>> KqpOlapSparsed::SwitchingStandalone [GOOD]
>> TSourceIdTests::SourceIdWriterAddMessage [GOOD]
>> TSourceIdTests::SourceIdWriterClean [GOOD]
>> TSourceIdTests::SourceIdWriterFormCommand [GOOD]
>> TTypeCodecsTest::TestBoolCodec [GOOD]
>> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD]
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test [FAIL]
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test
>> TPartitionTests::ChangeConfig [GOOD]
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test
>> TPartitionTests::ConflictingActsInSeveralBatches
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD]
Test command err: Size: 128 Create chunk: 0.000012s Read by index: 0.000008s Iterate: 0.000006s
Size: 252 Create chunk: 0.000022s Read by index: 0.000009s Iterate: 0.000008s
Size: 1887 Create chunk: 0.000022s Read by index: 0.000029s Iterate: 0.000007s
Size: 1658 Create chunk: 0.000027s Read by index: 0.000025s Iterate: 0.000009s
Size: 1889 Create chunk: 0.000020s Read by index: 0.000021s Iterate: 0.000008s
Size: 1660 Create chunk: 0.000022s Read by index: 0.000023s Iterate: 0.000009s
>> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD]
>> TFetchRequestTests::HappyWay
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin-QueryService
Test command err: Trying to start YDB, gRPC: 27616, MsgBus: 17231
2025-05-29T15:22:02.519776Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888281343392144:2061];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:22:02.519793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010ad/r3tmp/tmpD5RbWk/pdisk_1.dat
2025-05-29T15:22:02.585724Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888281343392125:2079] 1748532122519658 != 1748532122519661
2025-05-29T15:22:02.585902Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 27616, node 1
2025-05-29T15:22:02.600085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:22:02.600101Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:22:02.600103Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:22:02.600146Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:17231
TClient is connected to server localhost:17231
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:22:02.661321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:22:02.661359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:22:02.662330Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:22:02.662517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:02.667745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:02.689416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:02.711710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:02.724031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:02.898623Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888281343393757:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:02.898653Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:02.937148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.949676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.961188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.975305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:22:02.991868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:22:03.005690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:22:03.017921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:22:03.032979Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888285638361706:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:03.032984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888285638361711:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:03.033003Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:03.033658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:22:03.037394Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888285638361713:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:22:03.107762Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888285638361764:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:22:03.224208Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888285638361780:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:03.224350Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjcwZmM5NTQtYWI4NGZiNmEtYjA2MWRhMDEtZjhiYjJiNTc=, ActorId: [1:7509888281343393739:2401], ActorState: ExecuteState, TraceId: 01jwea5ncr4mg2q32wpa9j8kwm, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:03.225227Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F8C0E2EFAC2 14. ??:0: ?? @ 0x7F8C0E38184F Trying to start YDB, gRPC: 12601, MsgBus: 25964 2025-05-29T15:22:07.130421Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888302057952075:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:07.130444Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010ad/r3tmp/tmp9oAXX7/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12601, node 1 2025-05-29T15:22:07.191308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:07.191324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:07.191326Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:07.191372Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:07.192187Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:25964 2025-05-29T15:22:07.231794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:07.231819Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:07.232885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25964 WaitRootIsUp 'Root'... 
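
The VERIFY in the dumps above fires inside the test helper whose signature appears verbatim in the trace: void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &) at ydb/core/kqp/ut/common/kqp_ut_common.h:375. Below is a minimal self-contained sketch of that pattern with a stand-in TStatus so it compiles outside the YDB tree; the stand-in type and the printing details are assumptions, not the real implementation.

    #include <cassert>
    #include <iostream>
    #include <string>
    #include <vector>

    // Stand-in for NYdb::TStatus (assumption): the real class carries a
    // status code and an issue tree rather than a plain string list.
    struct TStatus {
        bool Success;
        std::vector<std::string> Issues;   // e.g. "Fatal: Execution, code: 1060"
        bool IsSuccess() const { return Success; }
    };

    // Mirrors the helper named in the trace: dump the issues, then assert.
    // A failing assert here is what produces the
    // "assertion failed ... (result.IsSuccess())" text seen in the log.
    void AssertSuccessResult(const TStatus& result) {
        if (!result.IsSuccess()) {
            for (const auto& issue : result.Issues) {
                std::cerr << issue << '\n';
            }
        }
        assert(result.IsSuccess());
    }

    int main() {
        AssertSuccessResult(TStatus{true, {}});   // passes silently
    }

In the failing runs above, CreateSampleTables passes a compile-failed query result into this helper, so the assert trips in a worker thread and the unittest framework raises the "non-unittest thread" panic that follows.
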
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:07.262464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:07.272916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:07.292451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:07.313289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:07.324901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:07.508427Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888302057953669:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:07.508455Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:07.551132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.558160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.571786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.582285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.596159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.611119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.623798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.641235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888302057954325:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:07.641282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:07.641368Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888302057954330:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:07.643135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:07.650800Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888302057954332:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:07.701772Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888302057954383:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:07.827075Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888302057954399:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:07.827213Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmIxMzdjNzAtZmNmOGNmNDYtY2ViZjczODAtOWZiOGYzMjM=, ActorId: [1:7509888302057953651:2401], ActorState: ExecuteState, TraceId: 01jwea5swrcv8m4gx44j9jvkb2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:07.827992Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F3F0EDCEAC2 14. ??:0: ?? @ 0x7F3F0EE6084F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert+QueryService+UseSink Test command err: Trying to start YDB, gRPC: 28010, MsgBus: 2673 2025-05-29T15:22:03.078785Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888287848891989:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:03.078820Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001091/r3tmp/tmpkxSRgb/pdisk_1.dat 2025-05-29T15:22:03.139324Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888287848891970:2079] 1748532123078645 != 1748532123078648 2025-05-29T15:22:03.141536Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28010, node 1 2025-05-29T15:22:03.153207Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:03.153219Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:03.153221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:03.153264Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2673 TClient is connected to server localhost:2673 WaitRootIsUp 'Root'... 
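
Every compile failure in this section carries the same issue, ": Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1", i.e. a checked child access on a YQL expression node was given an index the node does not have. The snippet below only illustrates that failure mode with hypothetical names (TNode, Child); it is not the actual yql_expr.h code.

    #include <memory>
    #include <stdexcept>
    #include <vector>

    // Hypothetical expression node; YQL's real TExprNode is far richer.
    struct TNode {
        std::vector<std::shared_ptr<TNode>> Children;

        // Checked accessor: asking for a child that does not exist raises
        // the kind of "index out of range" error reported in the log.
        const TNode& Child(size_t index) const {
            if (index >= Children.size()) {
                throw std::out_of_range("index out of range");
            }
            return *Children[index];
        }
    };

    int main() {
        TNode leaf;                 // no children
        try {
            leaf.Child(0);          // out of range -> throws
        } catch (const std::out_of_range&) {
            return 0;               // expected path for this demo
        }
        return 1;
    }

Because the error surfaces with code 1060 (Execution) during compilation, the session replies ReplyQueryCompileError with INTERNAL_ERROR, which is the status the test helper then asserts on.
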
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:03.211896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:03.211933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:03.213030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:03.218604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:03.220479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:03.266395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:03.285710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:03.305547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.318415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:03.419017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888287848893602:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.419055Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.465587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.478531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.488092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.501157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.515150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.528939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.543454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.571892Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888287848894254:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.571927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.572032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888287848894259:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.573048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:03.578975Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888287848894261:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:03.656008Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888287848894312:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:03.806998Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888287848894321:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:03.808930Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzYwYTk2YTEtOWI5MzE0Yy1mZjFiYjIzNi03NGQ3NzMxOQ==, ActorId: [1:7509888287848893574:2399], ActorState: ExecuteState, TraceId: 01jwea5nxj2m74tz7mvvq6gyvq, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:03.811134Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F6063BE7AC2 14. ??:0: ?? @ 0x7F6063C7984F Trying to start YDB, gRPC: 30067, MsgBus: 4416 2025-05-29T15:22:07.574850Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888302213252734:2200];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:07.575809Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001091/r3tmp/tmpTOYeix/pdisk_1.dat 2025-05-29T15:22:07.632347Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:07.632465Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888302213252573:2079] 1748532127573914 != 1748532127573917 TServer::EnableGrpc on GrpcPort 30067, node 1 2025-05-29T15:22:07.645446Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:07.645458Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:07.645460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:07.645498Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4416 2025-05-29T15:22:07.679334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:07.679362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:07.680402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4416 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:07.710140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:07.724052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:07.749036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:07.771781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:07.782552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.941269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888302213254214:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:07.941295Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:07.975213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.985721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.043905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.057811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.072503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.086195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.142112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.157870Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888306508222168:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.157902Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.157905Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888306508222173:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.158588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:08.161365Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888306508222175:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:08.260545Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888306508222226:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:08.382503Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888306508222242:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:08.382630Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmY1MDM3MS0zMzgzMzU4Ny00NjBmMGM3Zi1lM2M3M2RiZQ==, ActorId: [1:7509888302213254196:2401], ActorState: ExecuteState, TraceId: 01jwea5tcxcbf8k7rk33e2ry1x, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:08.383308Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F7FDA9E1AC2 14. ??:0: ?? @ 0x7F7FDAA7384F >> TPQTest::TestUserInfoCompatibility ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService Test command err: Trying to start YDB, gRPC: 5574, MsgBus: 11468 2025-05-29T15:22:02.592487Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888281908570129:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:02.592524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010a3/r3tmp/tmpAnC1z4/pdisk_1.dat 2025-05-29T15:22:02.653293Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888281908570110:2079] 1748532122592337 != 1748532122592340 2025-05-29T15:22:02.655529Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5574, node 1 2025-05-29T15:22:02.672331Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:02.672349Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:02.672351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:02.672398Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11468 TClient is connected to server localhost:11468 WaitRootIsUp 'Root'... 
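
Each run also shows the same benign bootstrap sequence for the workload-manager pool: the pool fetcher reports NOT_FOUND for /Root/.metadata/workload_manager/pools/default, a creator actor issues ESchemeOpCreateResourcePool, a concurrent creator gets "path exist, request accepts it", and the service schedules a retry ("doublechecking") until the fetch succeeds. Here is a sketch of that create-if-missing loop with invented names (TPoolStore, Fetch, Create, EStatus); the real actors are TPoolFetcherActor and TPoolCreatorActor in the KQP workload service, and their protocol is richer than this.

    #include <iostream>

    // Invented status codes standing in for the scheme/KQP statuses in the log.
    enum class EStatus { Ok, NotFound, AlreadyExists };

    // Hypothetical storage with one pool slot, modelling the "default" pool.
    struct TPoolStore {
        bool Exists = false;
        EStatus Fetch() const { return Exists ? EStatus::Ok : EStatus::NotFound; }
        EStatus Create() {
            if (Exists) return EStatus::AlreadyExists; // "path exist, request accepts it"
            Exists = true;
            return EStatus::Ok;
        }
    };

    int main() {
        TPoolStore store;
        // First fetch fails, create runs, then the retry ("doublechecking") succeeds.
        for (int attempt = 0; store.Fetch() != EStatus::Ok; ++attempt) {
            std::cout << "attempt " << attempt << ": pool default NOT_FOUND, creating\n";
            if (store.Create() == EStatus::AlreadyExists) {
                std::cout << "pool already exists, treating as success\n";
            }
        }
        std::cout << "pool default is ready\n";
    }

The TX_PROXY ERROR lines for txid 281474976715670 are this race losing on one side and being accepted as success, so they do not indicate the test failure; the failure is the compile error and assertion described above.
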
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:02.727010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:02.730642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:22:02.731612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:02.731654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:02.732268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:02.792464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:02.812134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:02.869900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:03.020189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888286203539039:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.020223Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.066173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.074597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.087891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.102052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.115724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.129947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.145842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.164005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888286203539691:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.164029Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.164067Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888286203539696:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.164898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:03.170708Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888286203539698:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:22:03.224187Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888286203539749:3392] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:03.347496Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888286203539765:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:03.347658Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjYwNTY5MWEtZmYzNWVjOGQtYzU4OWFlYzEtZWM5NmU4NjE=, ActorId: [1:7509888286203539036:2401], ActorState: ExecuteState, TraceId: 01jwea5ngv7zjve5xpxmj63n2r, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:03.348377Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7FF114579AC2 14. ??:0: ?? @ 0x7FF11460B84F Trying to start YDB, gRPC: 16731, MsgBus: 61927 2025-05-29T15:22:07.331154Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888301839319684:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:07.331190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010a3/r3tmp/tmpaDpvKb/pdisk_1.dat 2025-05-29T15:22:07.395909Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:07.397278Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888301839319664:2079] 1748532127331012 != 1748532127331015 TServer::EnableGrpc on GrpcPort 16731, node 1 2025-05-29T15:22:07.409951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:07.409968Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:07.409970Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:07.410019Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:61927 TClient is connected to server localhost:61927 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:07.473571Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:07.473616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:07.474317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.474497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:07.485720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:07.513265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:07.587488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:07.608839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:07.702890Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888301839321318:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:07.702919Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:07.751099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.758923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.771082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.784201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.791328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.805698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.820034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.837365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888301839321971:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:07.837398Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:07.837463Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888301839321976:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:07.838407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:07.848971Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888301839321978:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:07.913574Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888301839322029:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:08.044942Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888301839322045:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:08.045117Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTY2ZTU2ZDEtM2I1M2NmZTgtOWY1ZjcwZjUtOGQ2YjZmZjY=, ActorId: [1:7509888301839321315:2401], ActorState: ExecuteState, TraceId: 01jwea5t2wcm60hjzx6bat3byj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:08.046931Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F03A8A0AAC2 14. ??:0: ?? @ 0x7F03A8A9C84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService+UseSink Test command err: Trying to start YDB, gRPC: 5779, MsgBus: 32719 2025-05-29T15:22:07.944024Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888302639520061:2265];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:07.944066Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001039/r3tmp/tmpomApAA/pdisk_1.dat 2025-05-29T15:22:08.012763Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:08.012811Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888302639519835:2079] 1748532127943003 != 1748532127943006 TServer::EnableGrpc on GrpcPort 5779, node 1 2025-05-29T15:22:08.027291Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:08.027305Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:08.027306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:08.027342Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:32719 TClient is connected to server localhost:32719 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:08.093504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:08.093537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:08.094499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:08.100149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:08.112041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.174030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.191535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.202843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.371550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888306934488767:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.371586Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.422834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.432290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.442800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.457695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.473026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.485340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.499717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.560475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888306934489428:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.560504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.560509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888306934489433:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.561398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:08.568658Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888306934489435:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:08.633315Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888306934489486:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:08.714439Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888306934489502:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:08.714561Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzJhYmI1MmQtZDc5OWFkM2QtMzUxOTgwYjktZTJlZDg1MDI=, ActorId: [1:7509888306934488749:2401], ActorState: ExecuteState, TraceId: 01jwea5tsgfpwpk75n3jqs1hhc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:08.715510Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F2284A44AC2 14. ??:0: ?? @ 0x7F2284AD684F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSparsed::SwitchingStandalone [GOOD] Test command err: Trying to start YDB, gRPC: 31877, MsgBus: 28992 2025-05-29T15:21:34.108235Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888160293941387:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:34.108536Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026af/r3tmp/tmpsCSYRd/pdisk_1.dat 2025-05-29T15:21:34.194290Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:34.197304Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888160293941222:2079] 1748532094007108 != 1748532094007111 2025-05-29T15:21:34.214232Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:34.214254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 31877, node 1 2025-05-29T15:21:34.219270Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:34.219282Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:34.219284Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:34.219320Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:34.223288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28992 TClient is connected to server localhost:28992 WaitRootIsUp 'Root'... 
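Note: every failing compile in this log reports the same issue, yql/essentials/ast/yql_expr.h:1874: index out of range, meaning an expression-tree node was asked for a child index it does not have. Conceptually the check is of the following shape (an illustrative sketch, not the actual TExprNode code):

    // Illustrative bounds check of the kind that raises "index out of range"
    // at yql/essentials/ast/yql_expr.h:1874; names and layout are not the
    // real implementation.
    #include <util/generic/vector.h>
    #include <util/generic/yexception.h>

    struct TExprNodeSketch {
        TVector<TExprNodeSketch*> Children;

        TExprNodeSketch* Child(size_t index) const {
            // When an optimizer or peephole rule addresses a child that was
            // never attached, this check fails and surfaces in the log as
            // "index out of range, code: 1" under "Fatal: Execution, code: 1060".
            Y_ENSURE(index < Children.size(), "index out of range");
            return Children[index];
        }
    };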
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.379154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.395298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.711218Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888160293941880:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.711239Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.772216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.929934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037912;self_id=[1:7509888160293942455:2368];tablet_id=72075186224037912;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.929991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037912;self_id=[1:7509888160293942455:2368];tablet_id=72075186224037912;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.930031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037912;self_id=[1:7509888160293942455:2368];tablet_id=72075186224037912;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.930052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037912;self_id=[1:7509888160293942455:2368];tablet_id=72075186224037912;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.930070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037912;self_id=[1:7509888160293942455:2368];tablet_id=72075186224037912;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.930088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037912;self_id=[1:7509888160293942455:2368];tablet_id=72075186224037912;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.930108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037912;self_id=[1:7509888160293942455:2368];tablet_id=72075186224037912;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.930126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037912;self_id=[1:7509888160293942455:2368];tablet_id=72075186224037912;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.930144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037912;self_id=[1:7509888160293942455:2368];tablet_id=72075186224037912;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.930163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037912;self_id=[1:7509888160293942455:2368];tablet_id=72075186224037912;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.930187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037912;self_id=[1:7509888160293942455:2368];tablet_id=72075186224037912;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.930212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037912;self_id=[1:7509888160293942455:2368];tablet_id=72075186224037912;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.936211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037926;self_id=[1:7509888160293942407:2338];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.936255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037926;self_id=[1:7509888160293942407:2338];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.936312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037926;self_id=[1:7509888160293942407:2338];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.936338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037926;self_id=[1:7509888160293942407:2338];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.936358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037926;self_id=[1:7509888160293942407:2338];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.936378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037926;self_id=[1:7509888160293942407:2338];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.936396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037926;self_id=[1:7509888160293942407:2338];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.936414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037926;self_id=[1:7509888160293942407:2338];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.936437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037926;self_id=[1:7509888160293942407:2338];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.936455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037926;self_id=[1:7509888160293942407:2338];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.936474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037926;self_id=[1:7509888160293942407:2338];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.936493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037926;self_id=[1:7509888160293942407:2338];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.942101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037910;self_id=[1:7509888160293942461:2374];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.942128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037910;self_id=[1:7509888160293942461:2374];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.942169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037910;self_id=[1:7509888160293942461:2374];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_r ... 5407 pk_int: 5410 pk_int: 5421 pk_int: 5424 pk_int: 5445 pk_int: 5457 pk_int: 5467 pk_int: 5475 pk_int: 5481 pk_int: 5505 pk_int: 5514 pk_int: 5516 pk_int: 5517 pk_int: 5518 pk_int: 5526 pk_int: 5532 pk_int: 5545 pk_int: 5553 pk_int: 5555 pk_int: 5558 pk_int: 5569 pk_int: 5579 pk_int: 5585 pk_int: 5604 pk_int: 5614 pk_int: 5626 pk_int: 5628 pk_int: 5633 pk_int: 5638 pk_int: 5650 pk_int: 5663 pk_int: 5684 pk_int: 5711 pk_int: 5719 pk_int: 5728 pk_int: 5738 pk_int: 5756 pk_int: 5758 pk_int: 5769 pk_int: 5782 pk_int: 5783 pk_int: 5786 pk_int: 5810 pk_int: 5812 pk_int: 5824 pk_int: 5838 pk_int: 5844 pk_int: 5846 pk_int: 5862 pk_int: 5871 pk_int: 5884 pk_int: 5887 pk_int: 5895 pk_int: 5904 pk_int: 5953 pk_int: 5966 pk_int: 5980 pk_int: 5987 pk_int: 6002 pk_int: 6005 pk_int: 6020 pk_int: 6029 pk_int: 6033 pk_int: 6037 pk_int: 6043 pk_int: 6046 pk_int: 6052 pk_int: 6110 pk_int: 6112 pk_int: 6113 pk_int: 6133 pk_int: 6168 pk_int: 6179 pk_int: 6200 pk_int: 6211 pk_int: 6232 pk_int: 6235 pk_int: 6237 pk_int: 6241 pk_int: 6266 pk_int: 6270 pk_int: 6283 pk_int: 6285 pk_int: 6327 pk_int: 6347 pk_int: 6349 pk_int: 6394 pk_int: 6400 pk_int: 6405 pk_int: 6407 pk_int: 6410 pk_int: 6421 pk_int: 6424 pk_int: 6445 pk_int: 6457 pk_int: 6467 pk_int: 6475 pk_int: 6481 pk_int: 6505 pk_int: 6514 pk_int: 6516 pk_int: 6517 pk_int: 6518 pk_int: 6526 pk_int: 6532 pk_int: 6545 pk_int: 6553 pk_int: 6555 pk_int: 6558 pk_int: 6569 pk_int: 6579 pk_int: 6585 pk_int: 6604 pk_int: 6614 pk_int: 6626 pk_int: 6628 pk_int: 6633 pk_int: 6638 pk_int: 6650 pk_int: 6663 pk_int: 6684 pk_int: 6711 pk_int: 6719 pk_int: 6728 pk_int: 6738 pk_int: 6756 pk_int: 6758 pk_int: 6769 pk_int: 6782 pk_int: 6783 pk_int: 6786 pk_int: 6810 pk_int: 6812 pk_int: 6824 pk_int: 6838 pk_int: 6844 pk_int: 6846 pk_int: 6862 pk_int: 6871 pk_int: 6884 pk_int: 6887 pk_int: 6895 pk_int: 6904 pk_int: 6953 pk_int: 6966 pk_int: 6980 pk_int: 6987 pk_int: 7002 pk_int: 7005 pk_int: 7020 pk_int: 7029 pk_int: 7033 pk_int: 7037 pk_int: 7043 pk_int: 7046 pk_int: 7052 pk_int: 7110 pk_int: 7112 pk_int: 7113 pk_int: 7133 pk_int: 7168 pk_int: 7179 pk_int: 7200 pk_int: 7211 pk_int: 7232 pk_int: 7235 pk_int: 7237 pk_int: 7241 pk_int: 7266 pk_int: 7270 pk_int: 7283 pk_int: 7285 pk_int: 7327 pk_int: 7347 pk_int: 7349 pk_int: 7394 pk_int: 7400 pk_int: 7405 pk_int: 7407 pk_int: 7410 pk_int: 7421 pk_int: 7424 pk_int: 7445 pk_int: 7457 pk_int: 7467 pk_int: 7475 pk_int: 7481 pk_int: 7505 pk_int: 7514 pk_int: 7516 pk_int: 7517 pk_int: 7518 pk_int: 7526 pk_int: 7532 pk_int: 7545 pk_int: 7553 pk_int: 7555 pk_int: 7558 
pk_int: 7569 pk_int: 7579 ... (TRUNCATED: several hundred repetitive pk_int values, 7585 through 13475, elided) ... pk_int:
13481 pk_int: 13505 pk_int: 13514 pk_int: 13516 pk_int: 13517 pk_int: 13518 pk_int: 13526 pk_int: 13532 pk_int: 13545 pk_int: 13553 pk_int: 13555 pk_int: 13558 pk_int: 13569 pk_int: 13579 pk_int: 13585 pk_int: 13604 pk_int: 13614 pk_int: 13626 pk_int: 13628 pk_int: 13633 pk_int: 13638 pk_int: 13650 pk_int: 13663 pk_int: 13684 pk_int: 13711 pk_int: 13719 pk_int: 13728 pk_int: 13738 pk_int: 13756 pk_int: 13758 pk_int: 13769 pk_int: 13782 pk_int: 13783 pk_int: 13786 pk_int: 13810 pk_int: 13812 pk_int: 13824 pk_int: 13838 pk_int: 13844 pk_int: 13846 pk_int: 13862 pk_int: 13871 pk_int: 13884 pk_int: 13887 pk_int: 13895 pk_int: 13904 pk_int: 13953 pk_int: 13966 pk_int: 13980 pk_int: 13987 ==================================== QUERY: SELECT count(*) as count, FROM `/Root/olapTable` WHERE field == 'abcde' RESULT: 2025-05-29T15:22:11.066292Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532131000, txId: 18446744073709551615] shutting down count: 12721 ==================================== QUERY: SELECT count(*) as count, FROM `/Root/olapTable` RESULT: 2025-05-29T15:22:11.203687Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532131003, txId: 18446744073709551615] shutting down count: 14000 Timing: checkTable took 1 seconds Timing: wait took 0 seconds >> GroupWriteTest::ByTableName [GOOD] >> KqpWorkload::STOCK [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink Test command err: Trying to start YDB, gRPC: 62749, MsgBus: 61079 2025-05-29T15:22:03.129400Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888288305514224:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:03.129429Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001097/r3tmp/tmpBTHhGU/pdisk_1.dat 2025-05-29T15:22:03.204564Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:03.204890Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888288305514200:2079] 1748532123129217 != 1748532123129220 TServer::EnableGrpc on GrpcPort 62749, node 1 2025-05-29T15:22:03.220708Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:03.220717Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:03.220718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:03.220749Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:03.232208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:03.232249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:03.233431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TClient is connected to server localhost:61079 TClient is connected to server localhost:61079 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:03.277422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:03.280980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:03.300260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:03.321816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:03.333538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:03.591999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888288305515841:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.592035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.646396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.664504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.677463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.690711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.705618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.720271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.736621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:03.759884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888288305516491:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.759934Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.760129Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888288305516498:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:03.761085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:03.767469Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888288305516500:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:22:03.832267Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888288305516551:3395] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:03.954601Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888288305516560:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:03.954761Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2JhNGEyY2MtM2ZjMGI2ZWMtYmJiNWU0YTMtZTc2OGU3NTc=, ActorId: [1:7509888288305515823:2401], ActorState: ExecuteState, TraceId: 01jwea5p3ff1sxxj8apftwgz0n, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:03.957790Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F941B2A0AC2 14. ??:0: ?? @ 0x7F941B33284F Trying to start YDB, gRPC: 11027, MsgBus: 12091 2025-05-29T15:22:07.872961Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888305729835432:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:07.872981Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001097/r3tmp/tmpgwu0dy/pdisk_1.dat 2025-05-29T15:22:07.943659Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888305729835407:2079] 1748532127872725 != 1748532127872728 2025-05-29T15:22:07.944121Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11027, node 1 2025-05-29T15:22:07.962498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:07.962512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:07.962515Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:07.962563Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:07.974907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:07.974942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:07.976290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12091 TClient is connected to server localhost:12091 WaitRootIsUp 'Root'... 
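Note: the recurring "Resource pool default not found or you don't have access permissions" warnings followed by "Scheduled retry for error: Transaction ... completed, doublechecking" are bootstrap noise rather than test failures: the first fetch of /Root/.metadata/workload_manager/pools/default races with its creation and is retried. The retry has roughly this shape (a generic sketch; FetchPool and EStatus are stand-ins, not the kqp_workload_service implementation):

    // Generic NOT_FOUND retry with backoff, mirroring the "Scheduled retry"
    // lines above. The callback and status enum are assumed stand-ins.
    #include <chrono>
    #include <functional>
    #include <thread>

    enum class EStatus { Success, NotFound, Error };

    inline EStatus FetchPoolWithRetry(const std::function<EStatus()>& fetchPool,
                                      int maxAttempts = 5) {
        using namespace std::chrono_literals;
        auto delay = 100ms;
        for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
            const EStatus status = fetchPool();
            if (status != EStatus::NotFound) {
                return status;  // success, or a hard error worth reporting
            }
            // NOT_FOUND may just mean the pool is still being created
            // ("completed, doublechecking"), so back off and try again.
            std::this_thread::sleep_for(delay);
            delay *= 2;
        }
        return EStatus::NotFound;
    }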
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:22:08.032718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.037751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.058567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.087208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.143847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.280884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888310024804340:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.280923Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.324352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.333802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.344782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.399924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.455854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.463482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.490894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.506056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888310024804994:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.506086Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.506137Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888310024804999:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.507057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:08.511430Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888310024805001:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:08.592919Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888310024805052:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:08.713383Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888310024805068:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:08.713504Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTEzYTI2MTUtZmZlMzdhZDItYjIxYjNjMmEtY2QyNDQxMzU=, ActorId: [1:7509888310024804337:2401], ActorState: ExecuteState, TraceId: 01jwea5tqsdz370ncsbgpb0frb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:08.714198Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F86C43A9AC2 14. ??:0: ?? @ 0x7F86C443B84F >> KqpQueryPerf::IndexReplace-QueryService+UseSink >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> GroupWriteTest::ByTableName [GOOD] Test command err: RandomSeed# 3776212119695056965 2025-05-29T15:22:10.363481Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058428954028033 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-05-29T15:22:10.367020Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-05-29T15:22:10.367037Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 going to send TEvBlock {TabletId# 72058428954028033 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-05-29T15:22:10.367455Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-05-29T15:22:10.376630Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:22:10.377217Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-05-29T15:22:11.187222Z 5 00h01m11.610512s :BS_LOGCUTTER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 1086 2025-05-29T15:22:12.324869Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-05-29T15:22:12.324900Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 
IsMonitored# 1} 2025-05-29T15:22:12.324908Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-05-29T15:22:12.324913Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:22:12.332929Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} 2025-05-29T15:22:12.332951Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead+QueryService Test command err: Trying to start YDB, gRPC: 64308, MsgBus: 9265 2025-05-29T15:22:07.887019Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888301706087387:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:07.887037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00103c/r3tmp/tmpYiHJ3D/pdisk_1.dat 2025-05-29T15:22:07.961082Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:07.961160Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888301706087365:2079] 1748532127886881 != 1748532127886884 TServer::EnableGrpc on GrpcPort 64308, node 1 2025-05-29T15:22:07.980129Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:07.980143Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:07.980145Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:07.980189Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9265 2025-05-29T15:22:08.030905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:08.030948Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:08.031807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9265 WaitRootIsUp 'Root'... 
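Note: the GroupWriteTest excerpt above runs the full load-tablet lifecycle against distributed storage: TEvDiscover, TEvBlock on the previous generation, writes, then final hard TEvCollectGarbage barriers at CollectStep# 4294967295. The parameters printed in those records map onto roughly this layout (field names copied from the log; the struct is a sketch, not the real event definition):

    // Sketch of the collect-garbage barrier parameters printed above.
    // Field names mirror the log output; this is not the actual event type.
    #include <cstdint>

    struct TCollectGarbageParamsSketch {
        uint64_t TabletId;              // 72058428954028033
        uint32_t RecordGeneration;      // generation issuing the request (2)
        uint32_t PerGenerationCounter;  // 1 at start of work, 32/33 at shutdown
        uint32_t Channel;               // blob channel the barrier applies to (0)
        bool     Collect;               // true: advance the barrier
        uint32_t CollectGeneration;     // collect blobs up to this generation (2)
        uint32_t CollectStep;           // ... and this step; 4294967295 = final cleanup
        bool     Hard;                  // hard barrier: blobs below it are dropped
    };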
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:08.050189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.052483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:08.055627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.123043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.145977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.157059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.285981Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888306001056299:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.286007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.333048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.388386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.444380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.456756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.470686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.485823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.499803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.516013Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888306001056955:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.516044Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.516163Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888306001056960:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.517351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:08.527039Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888306001056962:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:08.605264Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888306001057013:3401] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:08.684869Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888306001057029:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:08.684989Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzM2ZDk0MTktZWRkODAzMWYtOTExYjc0NGYtMTI5YTQzMDM=, ActorId: [1:7509888306001056281:2401], ActorState: ExecuteState, TraceId: 01jwea5tr30ng8jzpacm7w8b9h, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:08.685917Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7FE00BE83AC2 14. ??:0: ?? @ 0x7FE00BF1584F >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test [FAIL] >> TPartitionGraphTest::BuildGraph [GOOD] >> TPartitionTests::AfterRestart_1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] Test command err: 2025-05-29T15:21:44.801765Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:21:44.806043Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:21:44.806120Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:21:44.806970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:44.807027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:44.807065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:44.807089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:44.807116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:44.807138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:44.807157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:44.807179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:44.807199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:44.807227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:44.807247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:44.807271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:44.816319Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:21:44.816386Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:21:44.816398Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:21:44.816434Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:44.816474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:44.816488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:44.816495Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:21:44.816506Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:21:44.816517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:44.816526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:44.816531Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:21:44.816551Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:44.816560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:44.816569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:44.816574Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:21:44.816586Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:21:44.816594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:44.816603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:44.816609Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:21:44.816622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:44.816631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:44.816636Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:21:44.816646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:44.816656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:44.816661Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:21:44.816688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:44.816697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:44.816702Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:21:44.816726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:44.816735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:44.816740Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:21:44.816754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:44.816762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:44.816767Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:21:44.816777Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:44.816786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:44.816794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:44.816799Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:21:44.816876Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=10; 2025-05-29T15:21:44.816887Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-05-29T15:21:44.816896Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute ... 
COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=7868; 2025-05-29T15:22:11.747754Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-05-29T15:22:11.748084Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=325; 2025-05-29T15:22:11.748088Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=8284; 2025-05-29T15:22:11.748117Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=24; 2025-05-29T15:22:11.748249Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=15; 2025-05-29T15:22:11.748260Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=139; 2025-05-29T15:22:11.748275Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=10; 2025-05-29T15:22:11.748290Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=11; 2025-05-29T15:22:11.748340Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=46; 2025-05-29T15:22:11.748367Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=20; 2025-05-29T15:22:11.752078Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=3697; 2025-05-29T15:22:11.755709Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=3592; 2025-05-29T15:22:11.755753Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=11; 2025-05-29T15:22:11.755760Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=2; 2025-05-29T15:22:11.755766Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2025-05-29T15:22:11.755770Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=0; 2025-05-29T15:22:11.755775Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-05-29T15:22:11.755788Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=9; 2025-05-29T15:22:11.755794Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-05-29T15:22:11.755810Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=12; 2025-05-29T15:22:11.755815Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-05-29T15:22:11.755824Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=5; 2025-05-29T15:22:11.755838Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=10; 2025-05-29T15:22:11.755854Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=12; 2025-05-29T15:22:11.755858Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=17281; 2025-05-29T15:22:11.755908Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7590912;raw_bytes=7453340;count=1;records=75200} inactive {blob_bytes=119333840;raw_bytes=115763580;count=218;records=1277000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-05-29T15:22:11.755953Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:7822:9419];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-05-29T15:22:11.755969Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:7822:9419];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-05-29T15:22:11.755987Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7822:9419];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-05-29T15:22:11.755994Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7822:9419];process=SwitchToWork;fline=column_engine_logs.cpp:493;event=OnTieringModified;new_count_tierings=0; 2025-05-29T15:22:11.756048Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-05-29T15:22:11.756061Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: 
tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:22:11.756078Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=10; 2025-05-29T15:22:11.756095Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:288;event=StartCleanupStop;snapshot=plan_step=1748531809575;tx_id=18446744073709551615;;current_snapshot_ts=1748532106357; 2025-05-29T15:22:11.756101Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=10;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:22:11.756111Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:22:11.756115Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:22:11.756134Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-05-29T15:22:11.756651Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:22:11.756674Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: self_id=[1:7945:9534];tablet_id=9437184;parent=[1:7822:9419];fline=manager.cpp:85;event=ask_data;request=request_id=135;1={portions_count=219};; 2025-05-29T15:22:11.757163Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7822:9419];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-05-29T15:22:11.757223Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7822:9419];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:243;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-05-29T15:22:11.757226Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 
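The reboot trace above is dominated by TTxUpdateSchema::Execute stepping through an ordered chain of schema normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, SyncPortionFromChunks, RestoreV1Chunks_V2, RestoreV2Chunks, CleanDeprecatedSnapshot, RestoreV0ChunksMeta): each one is registered, run ("0 chunks found" on a healthy tablet), and reported finished before the next is switched in. A minimal self-contained C++ sketch of that run-in-registration-order pattern follows; everything in it except the CLASS_NAME strings quoted from the log is hypothetical and is not YDB's real normalizer API.

    #include <initializer_list>
    #include <iostream>
    #include <memory>
    #include <string>
    #include <vector>

    // Hypothetical stand-in for a schema normalizer; the real YDB interface differs.
    struct INormalizer {
        virtual ~INormalizer() = default;
        virtual std::string ClassName() const = 0;
        // Returns how many chunks needed repair (the log prints "N chunks found").
        virtual std::size_t Run() = 0;
    };

    struct TNoopNormalizer : INormalizer {
        std::string Name;
        explicit TNoopNormalizer(std::string name) : Name(std::move(name)) {}
        std::string ClassName() const override { return Name; }
        std::size_t Run() override { return 0; } // healthy tablet: nothing to fix
    };

    int main() {
        // Registration order mirrors the CLASS_NAMEs in the trace above.
        std::vector<std::unique_ptr<INormalizer>> chain;
        for (const char* name : {"Granules", "Chunks", "TablesCleaner", "CleanGranuleId",
                                 "CleanInsertionDedup", "GCCountersNormalizer",
                                 "SyncPortionFromChunks", "RestoreV1Chunks_V2",
                                 "RestoreV2Chunks", "CleanDeprecatedSnapshot",
                                 "RestoreV0ChunksMeta"}) {
            chain.push_back(std::make_unique<TNoopNormalizer>(name));
        }
        // Run strictly in registration order, logging start and result like the trace.
        for (auto& n : chain) {
            std::cout << "normalizer_switched;description=CLASS_NAME=" << n->ClassName() << "\n";
            std::cout << n->ClassName() << ";message=" << n->Run() << " chunks found\n";
        }
    }

The point of the strict ordering is visible in the log itself: a normalizer is only "switched" in after the previous one is "finished", so a tablet never serves reads until the whole chain has passed.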
2025-05-29T15:22:11.757229Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-05-29T15:22:11.757233Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7822:9419];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-05-29T15:22:11.757243Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7822:9419];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:22:11.757251Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7822:9419];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=10; 2025-05-29T15:22:11.757259Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7822:9419];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:288;event=StartCleanupStop;snapshot=plan_step=1748531809575;tx_id=18446744073709551615;;current_snapshot_ts=1748532106357; 2025-05-29T15:22:11.757264Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7822:9419];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=10;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:22:11.757271Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7822:9419];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:22:11.757274Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7822:9419];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:22:11.757287Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7822:9419];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:164;event=skip_actualization;waiting=1.000000s; 2025-05-29T15:22:11.757292Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:7822:9419];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; >> KqpQueryPerf::DeleteOn-QueryService+UseSink >> TSourceIdTests::SourceIdStorageAdd [GOOD] >> TSourceIdTests::ProtoSourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageComplexDelete [GOOD] >> TSourceIdTests::HeartbeatEmitter [GOOD] >> TSourceIdTests::SourceIdMinSeqNo [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar-QueryService Test command err: Trying to start YDB, gRPC: 15749, MsgBus: 12224 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001089/r3tmp/tmpID4JCY/pdisk_1.dat 2025-05-29T15:22:03.605158Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888287129475304:2273];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:03.605231Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:03.679159Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:03.679297Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888287129475045:2079] 1748532123602279 != 1748532123602282 TServer::EnableGrpc on GrpcPort 15749, node 1 2025-05-29T15:22:03.700798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:03.700830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:03.701828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:03.702936Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:03.702945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:03.702948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:03.702994Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12224 TClient is connected to server localhost:12224 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:03.850410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:03.854950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:03.864952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:03.884579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
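Three warnings recur in every test bootstrap in this run: "Failed to fetch pool default ... NOT_FOUND", then "Check failed: path ... error: path exist, request accepts it", then "Scheduled retry for error: Transaction ... completed, doublechecking". Together they describe a benign fetch-or-create race: several sessions start at once, each tries to fetch the default resource pool, creates it on NOT_FOUND, treats "already exists" as success, and rechecks before proceeding. These warnings are startup noise, not the cause of the failing tests. A toy C++ model of that flow is sketched below; TSchemeStore, EnsureDefaultPool, and all behavior shown are illustrative stand-ins, not YDB code.

    #include <iostream>
    #include <mutex>
    #include <set>
    #include <string>
    #include <thread>
    #include <vector>

    // Toy scheme store standing in for schemeshard.
    class TSchemeStore {
        std::mutex Lock;
        std::set<std::string> Paths;
    public:
        bool Exists(const std::string& path) {
            std::lock_guard<std::mutex> g(Lock);
            return Paths.count(path) > 0;
        }
        // Returns false if the path already existed (the "path exist" error in the log).
        bool Create(const std::string& path) {
            std::lock_guard<std::mutex> g(Lock);
            return Paths.insert(path).second;
        }
    };

    // Fetch-or-create: NOT_FOUND triggers a create; losing the create race is
    // accepted, and the caller rechecks ("doublechecking") before proceeding.
    void EnsureDefaultPool(TSchemeStore& store, int session) {
        const std::string path = "/Root/.metadata/workload_manager/pools/default";
        if (!store.Exists(path)) {
            std::cout << "session " << session << ": pool not found, creating\n";
            if (!store.Create(path)) {
                std::cout << "session " << session << ": path exist, request accepts it\n";
            }
        }
        if (store.Exists(path)) { // the doublecheck
            std::cout << "session " << session << ": pool ready\n";
        }
    }

    int main() {
        TSchemeStore store;
        std::vector<std::thread> sessions;
        for (int i = 0; i < 3; ++i) {
            sessions.emplace_back(EnsureDefaultPool, std::ref(store), i);
        }
        for (auto& t : sessions) t.join();
        // Output may interleave; the "path exist" branch fires only when two
        // sessions actually race between Exists and Create, as in the log.
    }

Accepting "already exists" as success is the design choice that makes lazy creation safe here: whichever session wins the race, all of them converge on the same pool.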
2025-05-29T15:22:03.942539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:03.952613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.042066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888291424443980:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.042099Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.097267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.105571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.160914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.172418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.187029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.201906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.217999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:04.231173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888291424444636:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.231211Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.231334Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888291424444641:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:04.232221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:04.234711Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888291424444643:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:04.297237Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888291424444694:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:04.416681Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888291424444710:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:04.416800Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTQ5ZjE5YzYtMzc3Mzc3OGItN2RjNjYxMGYtNTJiZjhjMTk=, ActorId: [1:7509888291424443962:2401], ActorState: ExecuteState, TraceId: 01jwea5pj6cmq833ftpdf7eh02, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:04.417309Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F3EA550EAC2 14. ??:0: ?? @ 0x7F3EA55A084F Trying to start YDB, gRPC: 30027, MsgBus: 19693 2025-05-29T15:22:08.808512Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888308749159452:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:08.808618Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001089/r3tmp/tmpOskPDe/pdisk_1.dat 2025-05-29T15:22:08.858628Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888308749159433:2079] 1748532128808383 != 1748532128808386 2025-05-29T15:22:08.865013Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30027, node 1 2025-05-29T15:22:08.875166Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:08.875181Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:08.875184Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:08.875232Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19693 TClient is connected to server localhost:19693 WaitRootIsUp 'Root'... 
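The failure shared by every red KqpQueryPerf test in this run is the compile-time error quoted above, ": Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1". Judging by the file and line, a bounds check inside the expression AST trips while the sample-table query is compiled; the session replies ReplyQueryCompileError, and the test helper AssertSuccessResult then escalates the unsuccessful result into the VERIFY panic and identical backtrace repeated throughout this section. A speculative C++ sketch of that shape of failure follows; the node class and assert helper are guesses modeled on the names in the trace, not YDB's implementation.

    #include <cstddef>
    #include <cstdio>
    #include <cstdlib>
    #include <stdexcept>
    #include <vector>

    // Illustrative expression node with a bounds-checked child accessor, loosely
    // modeled on the yql_expr.h failure above; not the real class.
    struct TExprNodeLike {
        std::vector<const TExprNodeLike*> Children;
        const TExprNodeLike& Child(std::size_t index) const {
            if (index >= Children.size()) {
                throw std::out_of_range("yql_expr-like check: index out of range");
            }
            return *Children[index];
        }
    };

    // Test-side helper in the spirit of AssertSuccessResult: a failed result is
    // not a soft test failure but a hard stop of the whole process.
    void AssertSuccessLike(bool success, const char* what) {
        if (!success) {
            std::fprintf(stderr, "assertion failed: %s\n", what);
            std::abort(); // the unittest framework then prints a backtrace like the one above
        }
    }

    int main() {
        TExprNodeLike leaf;
        TExprNodeLike root;
        root.Children.push_back(&leaf);
        bool ok = true;
        try {
            root.Child(1); // one past the end: "index out of range"
        } catch (const std::out_of_range& e) {
            std::fprintf(stderr, "%s\n", e.what());
            ok = false;
        }
        AssertSuccessLike(ok, "(result.IsSuccess())"); // aborts, mirroring the VERIFY failed line
    }

That escalation explains why one compile error fans out into so many identical panics here: the assert fires inside CreateSampleTables, so every test that shares that fixture dies the same way before its own body runs.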
TClient::Ls request: Root 2025-05-29T15:22:08.911379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:08.911417Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-29T15:22:08.914655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:08.938344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.943009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:08.955389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.024281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.046311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:09.057502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.261168Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888313044128362:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.261240Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.316386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.324327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.341718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.353542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.366667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.380617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.396866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.410676Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888313044129017:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.410715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888313044129022:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.410714Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.411628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:09.414932Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888313044129024:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:09.474205Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888313044129075:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:09.583016Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888313044129091:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:09.583147Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGM3Mjg1MzUtMjE0NmVhM2MtMzNiNmMwNmEtODFjMWIzY2M=, ActorId: [1:7509888313044128359:2401], ActorState: ExecuteState, TraceId: 01jwea5vm26ev1y6jk9pya23vx, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:09.583856Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC
8. /-S/util/thread/pool.h:71: Process @ 0x261237FC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69
10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC
13. ??:0: ?? @ 0x7F6B9A1C9AC2
14. ??:0: ?? @ 0x7F6B9A25B84F
>> TPQTest::TestUserInfoCompatibility [GOOD]
>> TPQTest::TestWaitInOwners
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test
>> TPartitionTests::AfterRestart_1 [GOOD]
>> TListAllTopicsTests::PlainList
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test [FAIL]
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test
>> TPartitionTests::AfterRestart_2
|59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdMinSeqNo [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert-QueryService+UseSink
Test command err:
Trying to start YDB, gRPC: 3364, MsgBus: 14037
2025-05-29T15:22:04.344748Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888289234414052:2200];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:22:04.344836Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00106e/r3tmp/tmpRWfnNv/pdisk_1.dat
2025-05-29T15:22:04.404507Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888289234413891:2079] 1748532124343870 != 1748532124343873
2025-05-29T15:22:04.407452Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 3364, node 1
2025-05-29T15:22:04.417558Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:22:04.417571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:22:04.417573Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:22:04.417621Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:14037
2025-05-29T15:22:04.446715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:22:04.446762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:22:04.447813Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:14037
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:22:04.468788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:04.483689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:04.507170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:04.535491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:04.546640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:04.724873Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888289234415531:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:04.724897Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:04.769914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-05-29T15:22:04.827850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-05-29T15:22:04.837058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-05-29T15:22:04.852000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-05-29T15:22:04.866005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-05-29T15:22:04.880363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-05-29T15:22:04.900633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-05-29T15:22:04.925642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888289234416183:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:04.925679Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:04.925934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888289234416188:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:04.927287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-05-29T15:22:04.935673Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888289234416190:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-05-29T15:22:04.998161Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888289234416241:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:22:05.094450Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888289234416257:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:05.094555Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWM4NWJhZGQtZWY3N2QzOGEtNTFlZDY0LTM1OWVmYTI=, ActorId: [1:7509888289234415513:2401], ActorState: ExecuteState, TraceId: 01jwea5q7w673sq70c4b3vy9r5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:22:05.095158Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC
8. /-S/util/thread/pool.h:71: Process @ 0x261237FC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69
10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC
13. ??:0: ?? @ 0x7FF3C6223AC2
14. ??:0: ?? @ 0x7FF3C62B584F
Trying to start YDB, gRPC: 24594, MsgBus: 2555
2025-05-29T15:22:08.935451Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888308596690483:2265];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:22:08.935479Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00106e/r3tmp/tmpTqSQsd/pdisk_1.dat
2025-05-29T15:22:08.986787Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:22:08.988958Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888308596690257:2079] 1748532128934677 != 1748532128934680
TServer::EnableGrpc on GrpcPort 24594, node 1
2025-05-29T15:22:09.002238Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:22:09.002250Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:22:09.002252Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:22:09.002297Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:2555
TClient is connected to server localhost:2555
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:22:09.063902Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:22:09.063933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
waiting...
2025-05-29T15:22:09.064997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.065007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:22:09.069438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.131831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.158243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.172202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.271664Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888312891659188:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.271686Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.313019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.320035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.331262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.345824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.360368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.374870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.388602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.405918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888312891659840:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.405952Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888312891659845:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.405955Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.406855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:22:09.416285Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888312891659847:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:22:09.513317Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888312891659898:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:22:09.598416Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888312891659914:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:09.598520Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OGUxMWU1ZWQtYTUwNTI0OTktYzI2YTRhNGEtZTNhMjQ3NjM=, ActorId: [1:7509888312891659162:2401], ActorState: ExecuteState, TraceId: 01jwea5vkx6gkbtmennr307m7p, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:22:09.599308Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC
8. /-S/util/thread/pool.h:71: Process @ 0x261237FC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69
10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC
13. ??:0: ?? @ 0x7FE2222E6AC2
14. ??:0: ?? @ 0x7FE22237884F
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead-QueryService
Test command err:
Trying to start YDB, gRPC: 31378, MsgBus: 11426
2025-05-29T15:22:08.926101Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888309999404304:2062];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:22:08.926130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000ff3/r3tmp/tmpCRlpWV/pdisk_1.dat
2025-05-29T15:22:09.002237Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:22:09.002278Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888309999404284:2079] 1748532128925927 != 1748532128925930
TServer::EnableGrpc on GrpcPort 31378, node 1
2025-05-29T15:22:09.017181Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:22:09.017197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:22:09.017198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:22:09.017229Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
2025-05-29T15:22:09.028522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:22:09.028549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:22:09.029484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:11426
TClient is connected to server localhost:11426
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:22:09.081120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.134888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.207889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.269754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.281177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.337568Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888314294373218:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.337601Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.380212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.388743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.402728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.459356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.471512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.485361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.492672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.508663Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888314294373873:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.508688Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.508712Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888314294373878:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.509405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:22:09.514969Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888314294373880:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:22:09.589161Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888314294373931:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:22:09.705148Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888314294373947:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:09.705276Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjE0YzQ5YTgtZGQxNjBkYTQtNzg2YWEzMGUtOWI5NzI1Y2Q=, ActorId: [1:7509888314294373200:2401], ActorState: ExecuteState, TraceId: 01jwea5vq42d3fvj8y3sk5s9yb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:22:09.705922Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC
8. /-S/util/thread/pool.h:71: Process @ 0x261237FC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69
10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC
13. ??:0: ?? @ 0x7F63198FAAC2
14. ??:0: ?? @ 0x7F631998C84F
>> TPQTest::DirectReadBadSessionOrPipe
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService+UseSink
Test command err:
Trying to start YDB, gRPC: 24521, MsgBus: 21216
2025-05-29T15:22:08.929979Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888307531347949:2141];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000fe6/r3tmp/tmpSGTTDS/pdisk_1.dat
2025-05-29T15:22:08.971272Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-05-29T15:22:08.990392Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:22:08.990508Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888307531347845:2079] 1748532128926908 != 1748532128926911
TServer::EnableGrpc on GrpcPort 24521, node 1
2025-05-29T15:22:09.013789Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:22:09.013804Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:22:09.013805Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:22:09.013839Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:21216
TClient is connected to server localhost:21216
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:22:09.063182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:22:09.063226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:22:09.063907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.064284Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
waiting...
waiting...
2025-05-29T15:22:09.071238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.135437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.159720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.172450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.393451Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888311826316793:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.393486Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.432869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.439836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.453991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.464306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.482447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.492756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.507111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.522936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888311826317448:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.522964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.522966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888311826317453:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.523745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:22:09.526315Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888311826317455:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:22:09.587752Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888311826317506:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:22:09.708064Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888311826317522:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:09.708611Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWE0ZGRhNTQtYmI3NTdjOTItODI4YTZmNmItMjRkMjQzMjk=, ActorId: [1:7509888311826316780:2399], ActorState: ExecuteState, TraceId: 01jwea5vqjagc0yzrpc7reg3kf, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:22:09.709214Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC
8. /-S/util/thread/pool.h:71: Process @ 0x261237FC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69
10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC
13. ??:0: ?? @ 0x7FDDB1C91AC2
14. ??:0: ?? @ 0x7FDDB1D2384F
>> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions
>> TPQTest::TestPQPartialRead
>> KqpQueryPerf::Insert+QueryService+UseSink
>> TMeteringSink::FlushThroughputV1 [GOOD]
>> TMeteringSink::UsedStorageV1 [GOOD]
>> TMicrosecondsSlidingWindow::Basic [GOOD]
>> TMultiBucketCounter::InsertAndUpdate [GOOD]
>> TMultiBucketCounter::ManyCounters [GOOD]
>> TPQRBDescribes::PartitionLocations
>> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink
>> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService+UseSink
Test command err:
Trying to start YDB, gRPC: 17555, MsgBus: 64644
2025-05-29T15:22:08.795752Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888306959862786:2061];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:22:08.795771Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000ffe/r3tmp/tmpxVD7tk/pdisk_1.dat
2025-05-29T15:22:08.849945Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:22:08.849989Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888306959862767:2079] 1748532128795617 != 1748532128795620
TServer::EnableGrpc on GrpcPort 17555, node 1
2025-05-29T15:22:08.861364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:22:08.861382Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:22:08.861384Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:22:08.861438Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:64644
TClient is connected to server localhost:64644
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:22:08.928396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:22:08.928431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
waiting...
2025-05-29T15:22:08.929243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-05-29T15:22:08.931004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:22:08.951010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.022232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.083525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.095700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.152437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888311254831712:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.152471Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.185054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.194653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.205574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.220072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.275101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.290382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.305227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:22:09.319359Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888311254832368:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.319387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888311254832373:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.319392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:09.320240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:22:09.323652Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888311254832375:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:22:09.383509Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888311254832426:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:22:09.502485Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888311254832435:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:09.502628Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjYwYTdiNGUtNjE2Yzc0NmEtZjgyYzFiOWItZGUyNzVjMjc=, ActorId: [1:7509888311254831694:2401], ActorState: ExecuteState, TraceId: 01jwea5vh7271cjggfzamq32m9, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:22:09.503249Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC
8. /-S/util/thread/pool.h:71: Process @ 0x261237FC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69
10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC
13. ??:0: ?? @ 0x7FC53A25AAC2
14. ??:0: ?? @ 0x7FC53A2EC84F
>> TFetchRequestTests::HappyWay [FAIL]
>> TFetchRequestTests::BadTopicName
>> TPartitionTests::AfterRestart_2 [GOOD]
>> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService
>> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match
>> TPQTabletTests::UpdateConfig_1
>> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match [GOOD]
>> TPQTest::TestSourceIdDropByUserWrites
>> TSourceIdTests::SourceIdStorageParseAndAdd [GOOD]
>> TSourceIdTests::SourceIdStorageMinDS [GOOD]
>> TSourceIdTests::SourceIdStorageTestClean [GOOD]
>> TSourceIdTests::SourceIdStorageDeleteByMaxCount [GOOD]
>> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD]
>> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep
>> KqpQueryPerf::IndexUpsert+QueryService+UseSink
>> TPQTabletTests::UpdateConfig_1 [GOOD]
>> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD]
>> TPQTabletTests::UpdateConfig_2
>> TTypeCodecsTest::TestFixedLenCodec [GOOD]
>> TTypeCodecsTest::TestVarLenCodec [GOOD]
>> TTypeCodecsTest::TestVarIntCodec [GOOD]
>> TTypeCodecsTest::TestZigZagCodec [GOOD]
>> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD]
>> TPQTest::DirectReadBadSessionOrPipe [GOOD]
>> TPQTest::DirectReadOldPipe
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::STOCK [FAIL]
Test command err:
Trying to start YDB, gRPC: 1839, MsgBus: 21472
2025-05-29T15:22:09.372335Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888310197396537:2062];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:22:09.372360Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000fb3/r3tmp/tmpArWWoI/pdisk_1.dat
2025-05-29T15:22:09.429306Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888310197396517:2079] 1748532129372227 != 1748532129372230
2025-05-29T15:22:09.430453Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 1839, node 1
2025-05-29T15:22:09.442927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:22:09.442941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:22:09.442944Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:22:09.442997Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:21472
TClient is connected to server localhost:21472
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:22:09.508249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:22:09.508274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:22:09.509429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:22:09.509879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:22:09.515262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-05-29T15:22:09.712407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888310197397178:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.712436Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.747903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.767253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.831115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.882456Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888310197401036:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.882490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.882501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888310197401041:2631], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.883509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-05-29T15:22:09.886268Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888310197401043:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-05-29T15:22:09.939244Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888310197401094:4826] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:09.967311Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888310197401110:2636], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:09.967433Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzU3MjkwM2UtYzZmZmY5ODItYTYwMGRhMS1kZWU0NTRhYQ==, ActorId: [1:7509888310197397175:2326], ActorState: ExecuteState, TraceId: 01jwea5w2ta5pg7tgj4ahxpr7r, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/perf/kqp_workload_ut.cpp:59, void NKikimr::NKqp::Test(const TString &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13AA7E1B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13C5F288 2. /tmp//-S/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp:59: Test @ 0x139A82D0 3. /tmp//-S/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp:88: Execute_ @ 0x139A9764 4. /tmp//-S/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp:86: operator() @ 0x139AC866 5. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13C6113D 6. /tmp//-S/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp:86: Execute @ 0x139AC22C 7. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13C618B2 8. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13C7345C 9. ??:0: ?? @ 0x7F1903931D8F 10. ??:0: ?? @ 0x7F1903931E3F 11. ??:0: ?? @ 0x129BC028 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService+UseSink Test command err: Trying to start YDB, gRPC: 15062, MsgBus: 6118 2025-05-29T15:22:09.736340Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888313485454508:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:09.736360Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000f94/r3tmp/tmpL65saG/pdisk_1.dat 2025-05-29T15:22:09.799890Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888313485454485:2079] 1748532129736140 != 1748532129736143 2025-05-29T15:22:09.803328Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15062, node 1 2025-05-29T15:22:09.811746Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:09.811757Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:09.811759Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:09.811797Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6118 2025-05-29T15:22:09.838362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:09.838388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:09.839438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6118 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:09.864124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.888373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.953608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.974561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.991715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.074867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888317780423416:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.074888Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.121909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.132336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.144087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.158104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.172102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.186100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.200040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.216080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888317780424068:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.216109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.216204Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888317780424073:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.217128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:10.226858Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888317780424075:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:10.299976Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888317780424126:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:10.415925Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888317780424142:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:10.418383Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTZjNjg5OTQtNjJjZmIxZDEtYzMwZmQxOWItMjM0MDkxZDg=, ActorId: [1:7509888317780423398:2401], ActorState: ExecuteState, TraceId: 01jwea5wd71z3whmbnnac71py8, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:10.421246Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F89C3117AC2 14. ??:0: ?? @ 0x7F89C31A984F |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::KvRead-QueryService Test command err: Trying to start YDB, gRPC: 16785, MsgBus: 12807 2025-05-29T15:22:05.167200Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888293069142845:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:05.167233Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001060/r3tmp/tmpfTo3IB/pdisk_1.dat 2025-05-29T15:22:05.221537Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:05.221677Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888293069142820:2079] 1748532125167016 != 1748532125167019 TServer::EnableGrpc on GrpcPort 16785, node 1 2025-05-29T15:22:05.233227Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:05.233241Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:05.233243Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:05.233286Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12807 2025-05-29T15:22:05.268628Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:05.268659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:05.269728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12807 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:05.299651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:05.304239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:05.369485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:05.386795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:05.398684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:05.603803Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888293069144463:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:05.603829Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:05.648032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.659116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.714125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.726356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.740896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.754643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.768889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:05.791986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888293069145118:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:05.792013Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:05.792056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888293069145123:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:05.792831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:05.795794Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888293069145125:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:05.849236Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888293069145176:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:05.945308Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888293069145192:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:05.945454Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzM1NWFmNC0zNTllY2M1Zi04MDk4ZmU3My03MDhlNmE3, ActorId: [1:7509888293069144460:2401], ActorState: ExecuteState, TraceId: 01jwea5r2z6yd1pbbf8pnp8vsy, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:05.946187Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F89357C6AC2 14. ??:0: ?? @ 0x7F893585884F Trying to start YDB, gRPC: 4130, MsgBus: 26754 2025-05-29T15:22:09.881775Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888311098073254:2265];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:09.881824Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001060/r3tmp/tmpPRsV65/pdisk_1.dat 2025-05-29T15:22:09.934320Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888311098073028:2079] 1748532129880794 != 1748532129880797 2025-05-29T15:22:09.936348Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4130, node 1 2025-05-29T15:22:09.944682Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:09.944697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:09.944699Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:09.944746Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26754 2025-05-29T15:22:09.983785Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:09.983824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:26754 2025-05-29T15:22:09.984787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:10.014067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.016706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.026813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.092433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.110059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.124427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.234298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888315393041957:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.234325Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.284172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.292212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.304909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.362773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.373737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.386013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.400633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.463815Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888315393042615:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.463845Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.463916Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888315393042620:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.464827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:10.467302Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888315393042622:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:10.519581Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888315393042673:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:10.605709Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888315393042682:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:10.605871Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODgxMjc0MTMtODY0YmE5NDEtMmVkNzgxNDgtMzgyMTY0MGI=, ActorId: [1:7509888315393041931:2401], ActorState: ExecuteState, TraceId: 01jwea5wmz0ra2rtryneqgfnew, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:10.606477Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F902B69EAC2 14. ??:0: ?? @ 0x7F902B73084F >> TPQTabletTests::UpdateConfig_2 [GOOD] >> TListAllTopicsTests::PlainList [FAIL] >> TListAllTopicsTests::RecursiveList >> KqpOlap::SelectLimit1ManyShards [GOOD] >> KqpQueryPerf::DeleteOn+QueryService+UseSink >> TPQTabletTests::Read_TEvTxCommit_After_Restart >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [FAIL] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients [GOOD] >> TPQTabletTests::Multiple_PQTablets_1 >> TPQTestInternal::TestBatchPacking [GOOD] >> TPQTestInternal::TestKeyRange [GOOD] >> TPQTestInternal::TestAsInt [GOOD] >> TPQTestInternal::TestAsIntWide [GOOD] >> TPQTestInternal::StoreKeys [GOOD] >> TPQTabletTests::DropTablet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService-UseSink Test command err: Trying to start YDB, gRPC: 16463, MsgBus: 9721 2025-05-29T15:22:10.292586Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888315457445282:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:10.292611Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000f91/r3tmp/tmpNjhTAx/pdisk_1.dat 2025-05-29T15:22:10.367298Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:10.367447Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888315457445263:2079] 1748532130292478 != 1748532130292481 TServer::EnableGrpc on GrpcPort 16463, node 1 2025-05-29T15:22:10.380257Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:10.380268Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:10.380270Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-05-29T15:22:10.380316Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9721 TClient is connected to server localhost:9721 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:10.435816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:10.435848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:10.437629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:10.446387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.454006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.520336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.541787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.556705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.703107Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888315457446916:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.703134Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.736875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.793869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.802062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.815817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.870891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.879932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.893146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.911446Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888315457447574:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.911484Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.911547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888315457447579:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.912393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:10.919879Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888315457447581:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:11.002557Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888319752414929:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } VERIFY failed (2025-05-29T15:22:11.131214Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:22:11.128124Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888319752414945:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:11.130435Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWU1NWZiMWMtNzkyMGVmZDgtYjk2NmRhYzktNGQ4Yzc1OWE=, ActorId: [1:7509888315457446898:2401], ActorState: ExecuteState, TraceId: 01jwea5x2y3hw8fqaap1jhe9eh, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F575BE5FAC2 14. ??:0: ?? @ 0x7F575BEF184F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] Test command err: Size: 8002 Create chunk: 0.000017s Read by index: 0.000007s Iterate: 0.000006s Size: 8256 Create chunk: 0.000037s Read by index: 0.000010s Iterate: 0.000010s Size: 8532 Create chunk: 0.000017s Read by index: 0.000005s Iterate: 0.000004s Size: 7769 Create chunk: 0.000022s Read by index: 0.000006s Iterate: 0.000005s Size: 2853 Create chunk: 0.000012s Read by index: 0.000019s Iterate: 0.000007s Size: 2419 Create chunk: 0.000017s Read by index: 0.000019s Iterate: 0.000009s Size: 2929 Create chunk: 0.000011s Read by index: 0.000012s Iterate: 0.000006s Size: 2472 Create chunk: 0.000016s Read by index: 0.000016s Iterate: 0.000008s Size: 2407 Create chunk: 0.000034s Read by index: 0.000019s Iterate: 0.000011s Size: 2061 Create chunk: 0.000021s Read by index: 0.000022s Iterate: 0.000013s >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test [FAIL] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test >> TPQTabletTests::TEvReadSet_Is_Not_Sent_Ahead_Of_Time >> TPQTestInternal::TestPartitionedBlobSimpleTest [GOOD] >> TPQTestInternal::TestPartitionedBigTest >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink >> TPQTabletTests::Multiple_PQTablets_1 [GOOD] >> TPQTabletTests::DropTablet [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService-UseSink Test command err: Trying to start YDB, gRPC: 23946, MsgBus: 25340 2025-05-29T15:22:09.953195Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888312633055785:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:09.953212Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000f92/r3tmp/tmpb8Y1NS/pdisk_1.dat 2025-05-29T15:22:10.019965Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888312633055765:2079] 1748532129953073 != 1748532129953076 2025-05-29T15:22:10.022804Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23946, node 1 2025-05-29T15:22:10.034154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:10.034168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:10.034169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:10.034208Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25340 TClient is connected to server localhost:25340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:10.094841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:10.094864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:10.095920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:10.096492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.143577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.211642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:10.234305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.248735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.327539Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888316928024712:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.327565Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.384320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.394098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.404377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.418254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.472810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.486427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.500171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.517075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888316928025365:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.517101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888316928025370:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.517104Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.517876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:10.520106Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888316928025372:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:10.586339Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888316928025423:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:10.731630Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888316928025439:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:10.731732Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDhjMWRkZjQtYmU3MDdmOWUtNDc3NzdiOTctYjZkMDVlZDc=, ActorId: [1:7509888316928024709:2401], ActorState: ExecuteState, TraceId: 01jwea5wpmby62swz7mn1pph90, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:10.733640Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7EFF79553AC2 14. ??:0: ?? @ 0x7EFF795E584F >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients [GOOD] >> TPQTabletTests::DropTablet_And_Tx [GOOD] >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction [GOOD] >> TPartitionTests::CorrectRange_Multiple_Transactions >> TPQRBDescribes::PartitionLocations [FAIL] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients >> TPQTestInternal::TestPartitionedBigTest [GOOD] >> TPQTestInternal::TestToHex [GOOD] >> TPQUserInfoTest::UserDataDeprecatedSerializaion [GOOD] >> TPQUtilsTest::TLastCounter [GOOD] >> TPQTest::DirectReadOldPipe [GOOD] >> TPQTest::TestAccountReadQuota ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService-UseSink Test command err: Trying to start YDB, gRPC: 1322, MsgBus: 7797 2025-05-29T15:22:10.184574Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888314538295306:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:10.184604Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000e6a/r3tmp/tmpzR6tqe/pdisk_1.dat 2025-05-29T15:22:10.267958Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888314538295283:2079] 1748532130184446 != 1748532130184449 2025-05-29T15:22:10.270076Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1322, node 1 2025-05-29T15:22:10.276209Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:10.276221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:10.276223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:10.276259Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:10.287114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-05-29T15:22:10.287153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:10.288335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7797 TClient is connected to server localhost:7797 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:10.337535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.340134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.345727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.420409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.443966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.454781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.650507Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888314538296916:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.650538Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.702532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.710613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.724721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.739284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.794292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.804602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.815864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.831757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888314538297572:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.831791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.831804Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888314538297577:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.832581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:10.835379Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888314538297579:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:22:10.933507Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888314538297630:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:11.060088Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888314538297646:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:11.060216Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjA4OTUyOTktYjhkM2IzOS1jYjE4MzlkMy0zMzc2Zjlh, ActorId: [1:7509888314538296913:2401], ActorState: ExecuteState, TraceId: 01jwea5x0fc98c7tbkmgxg30kw, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:11.061148Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F6B8DEFAAC2 14. ??:0: ?? @ 0x7F6B8DF8C84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService Test command err: Trying to start YDB, gRPC: 16039, MsgBus: 17536 2025-05-29T15:22:05.651507Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888296812750812:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:05.651525Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00105e/r3tmp/tmpj8UmC4/pdisk_1.dat 2025-05-29T15:22:05.717732Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888296812750792:2079] 1748532125651195 != 1748532125651198 2025-05-29T15:22:05.718686Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16039, node 1 2025-05-29T15:22:05.729946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:05.729960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:05.729961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:05.730002Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17536 2025-05-29T15:22:05.754218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:05.754242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:05.755352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17536 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:05.797391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:05.812377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:05.832396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:05.852740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:05.865204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:06.035124Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888301107719720:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:06.035153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:06.089095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:06.096909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:06.152692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:06.161340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:06.217133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:06.231054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:06.245079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:06.261087Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888301107720377:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:06.261129Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:06.261137Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888301107720382:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:06.261909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:06.264458Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888301107720384:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:06.333342Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888301107720435:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:06.422590Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888301107720451:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:06.422679Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTRkOWYzYmEtZDgzODNiNy0xM2NiOWM5OC1hNGM4OWQ0Ng==, ActorId: [1:7509888301107719708:2400], ActorState: ExecuteState, TraceId: 01jwea5rhm4w9qwevm8ybkd0pz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:06.423551Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F5E3CF4DAC2 14. ??:0: ?? @ 0x7F5E3CFDF84F Trying to start YDB, gRPC: 10518, MsgBus: 5556 2025-05-29T15:22:10.174074Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888315992611679:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:10.174096Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00105e/r3tmp/tmpsCUjO6/pdisk_1.dat 2025-05-29T15:22:10.233814Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:10.234254Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888315992611660:2079] 1748532130173957 != 1748532130173960 TServer::EnableGrpc on GrpcPort 10518, node 1 2025-05-29T15:22:10.254570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:10.254584Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:10.254585Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:10.254630Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5556 TClient is connected to server localhost:5556 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:10.307604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:10.307646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:10.308734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:10.319189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.327298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.345800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.375539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.396391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.543391Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888315992613294:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.543417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.588496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.596669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.605879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.620073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.633823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.648171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.662472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.680467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888315992613948:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.680505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.680632Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888315992613953:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.681637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:10.690758Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888315992613955:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:10.773717Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888315992614006:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:10.888895Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888315992614022:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:10.889006Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDk5YzEwNmUtN2JiYWFiMjItMzA2ZjAyZi0zOTNkNWU1MA==, ActorId: [1:7509888315992613291:2401], ActorState: ExecuteState, TraceId: 01jwea5wvp8tdpc5we4z24z8ct, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:10.889634Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F3A7C49BAC2 14. ??:0: ?? @ 0x7F3A7C52D84F |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTestInternal::StoreKeys [GOOD] >> TFetchRequestTests::BadTopicName [FAIL] >> TFetchRequestTests::CheckAccess >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients [GOOD] >> TPQTabletTests::DropTablet_Before_Write >> TPQTabletTests::Cancel_Tx >> TListAllTopicsTests::RecursiveList [FAIL] >> TListAllTopicsTests::ListLimitAndPaging >> TPQTabletTests::DropTablet_Before_Write [GOOD] >> TPQTabletTests::Cancel_Tx [GOOD] >> TPQTabletTests::Config_TEvTxCommit_After_Restart >> TPartitionTests::ConflictingActsInSeveralBatches [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] >> TPartitionTests::CorrectRange_Multiple_Transactions [GOOD] >> TPQTabletTests::TEvReadSet_Is_Not_Sent_Ahead_Of_Time [GOOD] >> TPartitionChooserSuite::TBoundaryChooserTest [GOOD] >> TPartitionChooserSuite::TBoundaryChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::THashChooserTest [GOOD] >> TPartitionChooserSuite::THashChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test >> CacheEviction::DeleteKeys [GOOD] >> PQCountersLabeled::Partition >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction [GOOD] >> TPQTabletTests::Config_TEvTxCommit_After_Restart [GOOD] >> TPartitionTests::After_TEvGetWriteInfoError_Comes_TEvTxCalcPredicateResult [GOOD] |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQUtilsTest::TLastCounter [GOOD] >> TPQTest::TestDirectReadHappyWay >> TPQTest::TestPartitionTotalQuota >> TPartitionTests::CorrectRange_Rollback >> TPQTabletTests::TEvReadSet_For_A_Non_Existent_Tablet [GOOD] >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches >> TPQTabletTests::All_New_Partitions_In_Another_Tablet >> TPQTabletTests::Huge_ProposeTransacton >> TPartitionTests::UserActCount >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test [FAIL] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] >> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test >> TPartitionTests::CorrectRange_Rollback [GOOD] >> 
TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State [GOOD] >> TPQTabletTests::Multiple_PQTablets_2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] Test command err: 2025-05-29T15:22:13.940374Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:22:13.941288Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:22:13.941352Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037927937] doesn't have tx info 2025-05-29T15:22:13.941365Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:22:13.941370Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-05-29T15:22:13.941377Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:22:13.941386Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:13.941396Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:13.945650Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:179:2193], now have 1 active actors on pipe 2025-05-29T15:22:13.945687Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:22:13.948527Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:22:13.949320Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:22:13.949346Z node 1 :PERSQUEUE NOTICE: 
pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:13.949600Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:22:13.949642Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:22:13.949654Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:22:13.949736Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:13.949822Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:187:2199] 2025-05-29T15:22:13.950020Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [topic:0:Initializer] Initializing completed. 2025-05-29T15:22:13.950028Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:187:2199] 2025-05-29T15:22:13.950038Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:22:13.950182Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:22:13.950199Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-05-29T15:22:13.950205Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-05-29T15:22:13.950231Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:13.950235Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:13.950240Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:13.950244Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:13.950248Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-05-29T15:22:13.950252Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-05-29T15:22:13.950257Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:13.950261Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:22:13.950283Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:13.950309Z node 1 :PERSQUEUE DEBUG: read.h:262: CacheProxy. Passthrough write request to KV 2025-05-29T15:22:13.950379Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:13.950408Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:189:2201] 2025-05-29T15:22:13.950553Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [topic:1:Initializer] Initializing completed. 2025-05-29T15:22:13.950559Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:189:2201] 2025-05-29T15:22:13.950565Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:22:13.950614Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:22:13.950622Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-05-29T15:22:13.950627Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-05-29T15:22:13.950639Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:13.950643Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:13.950647Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-05-29T15:22:13.950651Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-05-29T15:22:13.950655Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-05-29T15:22:13.950659Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-05-29T15:22:13.950663Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:13.950667Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-05-29T15:22:13.950674Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:13.950703Z node 1 :PERSQUEUE DEBUG: read.h:262: CacheProxy. Passthrough write request to KV 2025-05-29T15:22:13.951619Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:13.951694Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:13.951761Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:202:2210], now have 1 active actors on pipe 2025-05-29T15:22:13.951877Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:205:2212], now have 1 active actors on pipe 2025-05-29T15:22:13.952095Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3222: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 178 RawX2: 4294969488 } TxId: 67890 Config { TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { Partitio ... 
node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARED 2025-05-29T15:22:15.158910Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State PREPARED 2025-05-29T15:22:15.158914Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 2025-05-29T15:22:15.158920Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3798: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2025-05-29T15:22:15.158928Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3818: [PQ: 72057594037927937] write key for TxId 67890 2025-05-29T15:22:15.158953Z node 6 :PERSQUEUE DEBUG: transaction.cpp:374: [TxId: 67890] save tx TxId: 67890 State: PLANNED MinStep: 137 MaxStep: 30137 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 25769805966 } Partitions { } 2025-05-29T15:22:15.158965Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3635: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-05-29T15:22:15.159953Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1231: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-05-29T15:22:15.159968Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PLANNING 2025-05-29T15:22:15.159973Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State PLANNING 2025-05-29T15:22:15.159978Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState PLANNED 2025-05-29T15:22:15.159983Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67890 moved from PLANNING to PLANNED 2025-05-29T15:22:15.159988Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4406: [PQ: 72057594037927937] TxQueue.size 1 2025-05-29T15:22:15.159993Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:836: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-05-29T15:22:15.160002Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2025-05-29T15:22:15.160006Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67890 moved from PLANNED to CALCULATING 2025-05-29T15:22:15.160019Z node 6 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2025-05-29T15:22:15.160057Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3489: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2025-05-29T15:22:15.160062Z node 6 :PERSQUEUE DEBUG: transaction.cpp:218: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-05-29T15:22:15.160068Z node 6 :PERSQUEUE DEBUG: transaction.cpp:267: [TxId: 67890] Partition responses 1/1 2025-05-29T15:22:15.160072Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-05-29T15:22:15.160077Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-05-29T15:22:15.160081Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2025-05-29T15:22:15.160086Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4422: [PQ: 72057594037927937] Received 1, Expected 1 2025-05-29T15:22:15.160093Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 
2025-05-29T15:22:15.160097Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2025-05-29T15:22:15.160102Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3818: [PQ: 72057594037927937] write key for TxId 67890 2025-05-29T15:22:15.160134Z node 6 :PERSQUEUE DEBUG: transaction.cpp:374: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 137 MaxStep: 30137 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 25769805966 } Partitions { } 2025-05-29T15:22:15.160146Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3635: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-05-29T15:22:15.161869Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1231: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-05-29T15:22:15.161886Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-05-29T15:22:15.161891Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-05-29T15:22:15.161896Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 67890 2025-05-29T15:22:15.161901Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-05-29T15:22:15.161907Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-05-29T15:22:15.161915Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3970: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 2 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 
2025-05-29T15:22:15.161919Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3980: [PQ: 72057594037927937] Send TEvReadSet to tablet 33334 2025-05-29T15:22:15.161935Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3980: [PQ: 72057594037927937] Send TEvReadSet to tablet 33333 2025-05-29T15:22:15.161940Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4461: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-05-29T15:22:15.161947Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-05-29T15:22:15.161949Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-05-29T15:22:15.161952Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4491: [PQ: 72057594037927937] Received 0, Expected 1 2025-05-29T15:22:15.161976Z node 6 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-05-29T15:22:15.161983Z node 6 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 67890 2025-05-29T15:22:15.162002Z node 6 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:15.162005Z node 6 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:15.162008Z node 6 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:15.162010Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:15.162012Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-05-29T15:22:15.162015Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-05-29T15:22:15.162017Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-05-29T15:22:15.162019Z node 6 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:15.162022Z node 6 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:22:15.162035Z node 6 :PERSQUEUE DEBUG: read.h:262: CacheProxy. 
Passthrough write request to KV 2025-05-29T15:22:15.162436Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2915: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-05-29T15:22:15.162445Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2920: [PQ: 72057594037927937] Connected to tablet 33334 2025-05-29T15:22:15.162713Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2915: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-05-29T15:22:15.162719Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2920: [PQ: 72057594037927937] Connected to tablet 33333 2025-05-29T15:22:15.163007Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:15.163026Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3535: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-05-29T15:22:15.163030Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-05-29T15:22:15.163033Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-05-29T15:22:15.163036Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-05-29T15:22:15.163039Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4491: [PQ: 72057594037927937] Received 1, Expected 1 2025-05-29T15:22:15.163044Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4164: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-05-29T15:22:15.163048Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4495: [PQ: 72057594037927937] complete TxId 67890 2025-05-29T15:22:15.163051Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4513: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-05-29T15:22:15.163054Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-05-29T15:22:15.163057Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-05-29T15:22:15.163060Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3818: [PQ: 72057594037927937] write key for TxId 67890 2025-05-29T15:22:15.163094Z node 6 :PERSQUEUE DEBUG: transaction.cpp:374: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 137 MaxStep: 30137 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 25769805966 } Partitions { } 2025-05-29T15:22:15.163104Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3635: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-05-29T15:22:15.163656Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1231: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-05-29T15:22:15.163664Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-05-29T15:22:15.163666Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-05-29T15:22:15.163669Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-05-29T15:22:15.163673Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3989: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-05-29T15:22:15.163676Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 
67890, NewState WAIT_RS_ACKS 2025-05-29T15:22:15.163678Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-05-29T15:22:15.163682Z node 6 :PERSQUEUE DEBUG: transaction.cpp:366: [TxId: 67890] PredicateAcks: 0/2 2025-05-29T15:22:15.163684Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4539: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-05-29T15:22:15.163686Z node 6 :PERSQUEUE DEBUG: transaction.cpp:366: [TxId: 67890] PredicateAcks: 0/2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::After_TEvGetWriteInfoError_Comes_TEvTxCalcPredicateResult [GOOD] Test command err: 2025-05-29T15:22:10.435949Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:10.435978Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:10.439910Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:180:2194] 2025-05-29T15:22:10.440121Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:180:2194] Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\002\030\003\"\014session-id-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-2" Value: "\010\000\020\004\030\005\"\014session-id-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-2" Value: "\000\000\000\000\000\000\000\000\004\000\000\000\005\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-3" Value: "\010\000\020\006\030\007\"\014session-id-3(\0000\004@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-3" Value: "\000\000\000\000\000\000\000\000\006\000\000\000\007\000\000\000session-id-3" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\010\030\t\"\014session-id-2(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\010\000\000\000\t\000\000\000session-id-2" StorageChannel: INLINE } 2025-05-29T15:22:10.674648Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:10.674673Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:10.678201Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:180:2194] 2025-05-29T15:22:10.678491Z node 2 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-05-29T15:22:10.000000Z 2025-05-29T15:22:10.678500Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:180:2194] Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\320\341\213\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\320\341\213\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\320\341\213\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\004\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\004\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-05-29T15:22:11.322731Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:11.322793Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:11.325473Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [3:180:2194] 2025-05-29T15:22:11.325899Z node 3 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-05-29T15:22:11.000000Z 2025-05-29T15:22:11.325913Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [3:180:2194] Send change config Wait cmd write (initial) Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\270\351\213\345\3612" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } Wait commit 1 done Wait cmd write (change config) Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-2" IncludeFrom: true To: "m0000000003cclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-2" IncludeFrom: true To: "m0000000003uclient-2" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\270\351\213\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-3" Value: "\010\000\020\000\030\000\"\000(\0000\007@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-3" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-1@\000H\000\252\002\016\n\010client-3@\007H\000" StorageChannel: INLINE } Wait config changed 2025-05-29T15:22:11.967614Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:11.967644Z node 4 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:11.971124Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-05-29T15:22:11.971176Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:11.971222Z node 4 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:179:2193] 2025-05-29T15:22:11.971427Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-05-29T15:22:11.971454Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-05-29T15:22:11.971473Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request Got KV request 2025-05-29T15:22:11.971493Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: 
[Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-05-29T15:22:11.971532Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:621: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-05-29T15:22:11.971543Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-05-29T15:22:11.971548Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:22:11.971554Z node 4 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-05-29T15:22:11.000000Z 2025-05-29T15:22:11.971558Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:55: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-05-29T15:22:11.971564Z node 4 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:179:2193] 2025-05-29T15:22:11.971577Z node 4 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-05-29T15:22:11.971585Z node 4 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:22:12.308787Z node 4 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src1|a4b7a87c-2cfefeaf-3e364897-c54c3347_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-05-29T15:22:12.308848Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:35: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 2025-05-29T15:22:12.308890Z node 4 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src4|5172b65-2d37cf84-c427f565-d199a829_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src4 2025-05-29T15:22:12.308900Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:35: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create distr tx with id = 4 and act no: 5 Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 11 and act no: 12 2025-05-29T15:22:13.299882Z node 4 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-05-29T15:22:13.299907Z node 4 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-05-29T15:22:13.299917Z node 4 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-05-29T15:22:13.299947Z node 4 :PERSQUEUE DEBUG: partition.cpp:1127: [P ... 
ssing sourceId 'src4' seqNo 7 partNo 0 2025-05-29T15:22:15.086365Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1295: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob sourceId 'src4' seqNo 7 partNo 0 result is x0000000000_00000000000000000070_00000_0000000002_00000 size 121 2025-05-29T15:22:15.086375Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1049: [PQ: 72057594037927937, Partition: 0, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 old key x0000000000_00000000000000000070_00000_0000000002_00000 new key d0000000000_00000000000000000070_00000_0000000002_00000 size 121 WTime 12141 2025-05-29T15:22:15.086404Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 7 partNo 0 FormedBlobsCount 1 NewHead: Offset 100 PartNo 0 PackedSize 84 count 1 nextOffset 101 batches 1 2025-05-29T15:22:15.086412Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 8 partNo 0 2025-05-29T15:22:15.086419Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 8 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 136 count 2 nextOffset 102 batches 1 2025-05-29T15:22:15.086425Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 9 partNo 0 2025-05-29T15:22:15.086432Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 188 count 3 nextOffset 103 batches 1 2025-05-29T15:22:15.086438Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 10 partNo 0 2025-05-29T15:22:15.086444Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 240 count 4 nextOffset 104 batches 1 2025-05-29T15:22:15.086450Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 11 partNo 0 2025-05-29T15:22:15.086458Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 11 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 292 count 5 nextOffset 105 batches 1 2025-05-29T15:22:15.086465Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 
'src4' seqNo 12 partNo 0 2025-05-29T15:22:15.086472Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 12 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 344 count 6 nextOffset 106 batches 1 2025-05-29T15:22:15.086478Z node 4 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-05-29T15:22:15.086483Z node 4 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:15.086488Z node 4 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 70 PartNo 0 PackedSize 121 count 2 nextOffset 72 batches 1, NewHead=Offset 100 PartNo 0 PackedSize 344 count 6 nextOffset 106 batches 1 2025-05-29T15:22:15.089389Z node 4 :PERSQUEUE DEBUG: partition.cpp:3322: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-05-29T15:22:15.089484Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1623: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 100,6 HeadOffset 70 endOffset 72 curOffset 106 d0000000000_00000000000000000100_00000_0000000006_00000| size 211 WTime 12141 2025-05-29T15:22:15.089514Z node 4 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:15.089519Z node 4 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:15.089524Z node 4 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-05-29T15:22:15.089529Z node 4 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:15.089533Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000070_00000_0000000002_00000 2025-05-29T15:22:15.089537Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc4 2025-05-29T15:22:15.089542Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000100_00000_0000000006_00000| 2025-05-29T15:22:15.089546Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:15.089565Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:15.089569Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-05-29T15:22:15.089573Z node 4 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:15.089578Z node 4 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 7 Got KV request Got KV request Wait kv request Wait tx committed for tx 8 
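The key names in the DumpKeyValueRequest blocks above follow a recognizable pattern: "i"/"I" and "m..." keys carry partition and consumer metadata, while data blobs use fixed-width numeric fields, e.g. d0000000000_00000000000000000100_00000_0000000006_00000. As far as this trace shows, an "x" prefix marks a blob before it is renamed into place as "d", and a trailing "|" appears on the fresh, not-yet-compacted head blob. The following sketch rebuilds that blob-key format; MakeBlobKey is a hypothetical helper, and the field meanings (partition, start offset, part number, message count, internal parts) are inferred from the log, not taken from the YDB sources.

    #include <cstdio>
    #include <string>

    // Inferred layout: <prefix><part:10>_<offset:20>_<partNo:5>_<count:10>_<internal:5>
    std::string MakeBlobKey(char prefix, unsigned partition,
                            unsigned long long offset, unsigned partNo,
                            unsigned count, unsigned internalParts) {
        char buf[64];
        std::snprintf(buf, sizeof(buf), "%c%010u_%020llu_%05u_%010u_%05u",
                      prefix, partition, offset, partNo, count, internalParts);
        return buf;
    }

    int main() {
        // The 6-message batch written at offset 100 in the trace above:
        std::printf("%s\n", MakeBlobKey('d', 0, 100, 0, 6, 0).c_str());
        // The temporary 'x' key that was renamed to its 'd' twin:
        std::printf("%s\n", MakeBlobKey('x', 0, 70, 0, 2, 0).c_str());
        return 0;
    }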
2025-05-29T15:22:15.120120Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 102 WriteNewSizeFromSupportivePartitions# 2 2025-05-29T15:22:15.120142Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:15.120156Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 72 is already written 2025-05-29T15:22:15.120161Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:15.120166Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 72 is already written 2025-05-29T15:22:15.120168Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:15.120173Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 72 is already written 2025-05-29T15:22:15.120176Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:15.120179Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 72 is already written 2025-05-29T15:22:15.120182Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:15.120186Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 11, partNo: 0, Offset: 72 is already written 2025-05-29T15:22:15.120189Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-05-29T15:22:15.120193Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 12, partNo: 0, Offset: 72 is already written 2025-05-29T15:22:15.120236Z node 4 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:15.120239Z node 4 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:15.120243Z node 4 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000070_00000_0000000002_00000|, d0000000000_00000000000000000070_00000_0000000002_00000|] 2025-05-29T15:22:15.120246Z node 4 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:15.120249Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:15.120251Z node 4 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:15.120254Z node 4 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait immediate tx complete 11 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 11 2025-05-29T15:22:15.240337Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.240360Z node 5 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:15.243819Z node 5 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [5:178:2192] 2025-05-29T15:22:15.244016Z node 5 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [5:178:2192] >> TPQTest::TestCmdReadWithLastOffset >> TListAllTopicsTests::ListLimitAndPaging [FAIL] >> TMeteringSink::FlushPutEventsV1 [GOOD] >> TMeteringSink::FlushResourcesReservedV1 [GOOD] >> TMeteringSink::FlushStorageV1 [GOOD] >> TPartitionTests::DataTxCalcPredicateOk >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test [FAIL] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test >> TPartitionTests::DifferentWriteTxBatchingOptions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SelectLimit1ManyShards [GOOD] Test command err: Trying to start YDB, gRPC: 22809, MsgBus: 1536 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026ae/r3tmp/tmpZOd6zX/pdisk_1.dat 2025-05-29T15:21:34.268544Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888161195291771:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:34.270680Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:34.327916Z 
node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888161195291589:2079] 1748532094193145 != 1748532094193148 2025-05-29T15:21:34.336175Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22809, node 1 2025-05-29T15:21:34.358903Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:34.358912Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:34.358915Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:34.358952Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1536 2025-05-29T15:21:34.374483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:34.374508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:34.379180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1536 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.499490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:21:34.507027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.551736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.582554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161195292287:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.582636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161195292287:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.582683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161195292287:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.582701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161195292287:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.582724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161195292287:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.588369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161195292287:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.588413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161195292287:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.588434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161195292287:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.588454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161195292287:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.588485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161195292287:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.588504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888161195292287:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.588530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888161195292287:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.592765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161195292288:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.592783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161195292288:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.592822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161195292288:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.592841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161195292288:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.592860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161195292288:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.592878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161195292288:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.592896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161195292288:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.592915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161195292288:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.592935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161195292288:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.592952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161195292288:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.592970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888161195292288:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.592987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888161195292288:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.613258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161195292289:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.613292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161195292289:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.613353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161195292289:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.613374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161195292289:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.613397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888161195292289:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline ... ryUsage: 1048576 } 2025-05-29T15:22:13.613443Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:688: TxId: 281474976715662. Ctx: { TraceId: 01jwea5f2yahdks1jred6qzj5w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWZmOGRiZWQtY2JmZTlkNWYtYjljNDVjMjYtM2I2MWY1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:3266:2920] 2025-05-29T15:22:13.613453Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:645: ActorId: [2:3069:3717] TxId: 281474976715662. Ctx: { TraceId: 01jwea5f2yahdks1jred6qzj5w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWZmOGRiZWQtY2JmZTlkNWYtYjljNDVjMjYtM2I2MWY1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [3:3268:2922], CA [3:3267:2921], 2025-05-29T15:22:13.613478Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 72075186224037893 2025-05-29T15:22:13.613639Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:434: ActorId: [2:3069:3717] TxId: 281474976715662. Ctx: { TraceId: 01jwea5f2yahdks1jred6qzj5w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWZmOGRiZWQtY2JmZTlkNWYtYjljNDVjMjYtM2I2MWY1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [3:3267:2921], task: 127, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 230 Tasks { TaskId: 127 CpuTimeUs: 84 Tables { TablePath: "/Root/selectStore/selectTable" } ComputeCpuTimeUs: 5 BuildCpuTimeUs: 79 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-lxxdcki4qu" NodeId: 3 CreateTimeMs: 1748532121596 CurrentWaitInputTimeUs: 237280 UpdateTimeMs: 1748532130874 } MaxMemoryUsage: 1048576 } 2025-05-29T15:22:13.613651Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:688: TxId: 281474976715662. Ctx: { TraceId: 01jwea5f2yahdks1jred6qzj5w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWZmOGRiZWQtY2JmZTlkNWYtYjljNDVjMjYtM2I2MWY1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:3267:2921] 2025-05-29T15:22:13.613661Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:645: ActorId: [2:3069:3717] TxId: 281474976715662. Ctx: { TraceId: 01jwea5f2yahdks1jred6qzj5w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWZmOGRiZWQtY2JmZTlkNWYtYjljNDVjMjYtM2I2MWY1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [3:3268:2922], 2025-05-29T15:22:13.613681Z node 2 :TX_COLUMNSHARD_SCAN WARN: actor.cpp:153: Scan [2:3408:3963] undelivered event: 271646932 txId: 281474976715662 scanId: 2 gen: 1 tablet: 72075186224037895 reason: ActorUnknown description: init failed 2025-05-29T15:22:13.613686Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:410: Scan [2:3408:3963] finished for tablet 72075186224037895 2025-05-29T15:22:13.613755Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[2:3408:3963];TabletId=72075186224037895;ScanId=2;TxId=281474976715662;ScanGen=1;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[3:3269:2923];stats={"p":[{"events":["f_bootstrap"],"t":0.005},{"events":["l_bootstrap","f_ProduceResults"],"t":0.006},{"events":["f_processing","f_task_result"],"t":0.344},{"events":["l_ProduceResults","l_task_result"],"t":0.948},{"events":["l_processing"],"t":2.982}],"full":{"a":1748532130631596,"name":"_full_task","f":1748532130631596,"d_finished":0,"c":0,"l":1748532133613695,"d":2982099},"events":[{"name":"bootstrap","f":1748532130637584,"d_finished":459,"c":1,"l":1748532130638043,"d":459},{"a":1748532133613673,"name":"processing","f":1748532130976578,"d_finished":297436,"c":7,"l":1748532131579767,"d":297458},{"name":"ProduceResults","f":1748532130637943,"d_finished":297221,"c":8,"l":1748532131579728,"d":297221},{"name":"task_result","f":1748532130976586,"d_finished":297400,"c":7,"l":1748532131579766,"d":297400}],"id":"72075186224037895::10"};iterator=ready_results:(count:1;records_count:1930;schema=level: int32 message: string resource_id: string timestamp: timestamp[us] uid: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=1,2,3,4,5;column_names=level,message,resource_id,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5;column_names=level,message,resource_id,timestamp,uid;);;;); 2025-05-29T15:22:13.613777Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: 
fline=stats.cpp:8;event=statistic;begin=2025-05-29T15:22:10.631388Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=5;filter_columns=0;additional_columns=0;compacted_portions_bytes=72576;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=72576;selected_rows=0; 2025-05-29T15:22:13.613813Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=1,2,3,4,5;column_names=level,message,resource_id,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5;column_names=level,message,resource_id,timestamp,uid;);;; 2025-05-29T15:22:13.613897Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 72075186224037895 2025-05-29T15:22:13.614000Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:434: ActorId: [2:3069:3717] TxId: 281474976715662. Ctx: { TraceId: 01jwea5f2yahdks1jred6qzj5w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWZmOGRiZWQtY2JmZTlkNWYtYjljNDVjMjYtM2I2MWY1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:3268:2922], task: 128, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 337 Tasks { TaskId: 128 CpuTimeUs: 74 Tables { TablePath: "/Root/selectStore/selectTable" } ComputeCpuTimeUs: 5 BuildCpuTimeUs: 69 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-lxxdcki4qu" NodeId: 3 CreateTimeMs: 1748532121596 CurrentWaitInputTimeUs: 237208 UpdateTimeMs: 1748532130874 } MaxMemoryUsage: 1048576 } 2025-05-29T15:22:13.614014Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:688: TxId: 281474976715662. Ctx: { TraceId: 01jwea5f2yahdks1jred6qzj5w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWZmOGRiZWQtY2JmZTlkNWYtYjljNDVjMjYtM2I2MWY1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:3268:2922] 2025-05-29T15:22:13.614063Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:2151: ActorId: [2:3069:3717] TxId: 281474976715662. Ctx: { TraceId: 01jwea5f2yahdks1jred6qzj5w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWZmOGRiZWQtY2JmZTlkNWYtYjljNDVjMjYtM2I2MWY1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-05-29T15:22:13.614078Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:839: ActorId: [2:3069:3717] TxId: 281474976715662. Ctx: { TraceId: 01jwea5f2yahdks1jred6qzj5w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWZmOGRiZWQtY2JmZTlkNWYtYjljNDVjMjYtM2I2MWY1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 3.384345s ReadRows: 2 ReadBytes: 2151 ru: 2256 rate limiter was not found force flag: 1 2025-05-29T15:22:13.715239Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01jwea5f2yahdks1jred6qzj5w", SessionId: ydb://session/3?node_id=2&id=YWZmOGRiZWQtY2JmZTlkNWYtYjljNDVjMjYtM2I2MWY1ZmU=, Slow query, duration: 17.140355s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT * FROM `/Root/selectStore/selectTable` LIMIT 1;", parameters: 0b 2025-05-29T15:22:13.717357Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 18446744073709551615] shutting down FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=FO::ACCESSORS;event=free;usage=1008;delta=252; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=GLOBAL;event=free;usage=12177740;delta=252; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=7;stage=FO::ACCESSORS; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=FO::ACCESSORS;event=free;usage=756;delta=252; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=GLOBAL;event=free;usage=12177488;delta=252; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=8;stage=FO::ACCESSORS; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=FO::ACCESSORS;event=free;usage=504;delta=252; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=GLOBAL;event=free;usage=12177236;delta=252; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=9;stage=FO::ACCESSORS; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=FO::FETCHING;event=free;usage=9766893;delta=2409839; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=GLOBAL;event=free;usage=9767397;delta=2409839; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=10;stage=FO::FETCHING; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=FO::FETCHING;event=free;usage=7433409;delta=2333484; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=GLOBAL;event=free;usage=7433913;delta=2333484; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=11;stage=FO::FETCHING; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=FO::FETCHING;event=free;usage=4926897;delta=2506512; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=GLOBAL;event=free;usage=4927401;delta=2506512; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=12;stage=FO::FETCHING; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=FO::FETCHING;event=free;usage=2470786;delta=2456111; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=GLOBAL;event=free;usage=2471290;delta=2456111; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=14;stage=FO::FETCHING; 
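One readable invariant in the FALLBACK_ACTOR_LOGGING lines above and below: each free event reports the stage's usage after the release, so consecutive entries for a stage must satisfy previous_usage - delta == usage. A small illustrative self-check over the GLOBAL-stage values copied from the trace:

    #include <cassert>
    #include <cstdio>

    int main() {
        // GLOBAL-stage (usage-after-free, delta) pairs from the lines above,
        // starting from the first reported usage of 12177740.
        long long usage = 12177740;
        struct { long long after, delta; } frees[] = {
            {12177488, 252},     {12177236, 252},
            {9767397, 2409839},  {7433913, 2333484},
            {4927401, 2506512},  {2471290, 2456111},
        };
        for (const auto& f : frees) {
            assert(usage - f.delta == f.after);  // usage drops by exactly delta
            usage = f.after;
        }
        std::printf("GLOBAL accounting consistent, final usage=%lld\n", usage);
        return 0;
    }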
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=FO::ACCESSORS;event=free;usage=252;delta=252; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=GLOBAL;event=free;usage=2471038;delta=252; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=15;stage=FO::ACCESSORS; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=FO::FETCHING;event=free;usage=0;delta=2470786; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=GLOBAL;event=free;usage=252;delta=2470786; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=17;stage=FO::FETCHING; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=FO::ACCESSORS;event=free;usage=0;delta=252; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:79;name=GLOBAL;event=free;usage=0;delta=252; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=13;stage=FO::ACCESSORS; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::TEvReadSet_For_A_Non_Existent_Tablet [GOOD] Test command err: 2025-05-29T15:22:13.558165Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:22:13.559204Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:22:13.559278Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037927937] doesn't have tx info 2025-05-29T15:22:13.559291Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:22:13.559296Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-05-29T15:22:13.559303Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:22:13.559312Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:13.559322Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:13.562939Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:179:2193], now have 1 active actors on pipe 2025-05-29T15:22:13.562966Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:22:13.565567Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } 
AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:22:13.566273Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:22:13.566298Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:13.566505Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:22:13.566537Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:22:13.566553Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:22:13.566628Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:13.566705Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:187:2199] 2025-05-29T15:22:13.566933Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [topic:0:Initializer] Initializing completed. 2025-05-29T15:22:13.566942Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:187:2199] 2025-05-29T15:22:13.566951Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:22:13.567044Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:22:13.567060Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-05-29T15:22:13.567065Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-05-29T15:22:13.567094Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:13.567099Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:13.567103Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:13.567107Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:13.567111Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-05-29T15:22:13.567114Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-05-29T15:22:13.567118Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:13.567122Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:22:13.567143Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:13.567166Z node 1 :PERSQUEUE DEBUG: read.h:262: CacheProxy. Passthrough write request to KV 2025-05-29T15:22:13.567230Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:13.567255Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:189:2201] 2025-05-29T15:22:13.567395Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [topic:1:Initializer] Initializing completed. 2025-05-29T15:22:13.567400Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:189:2201] 2025-05-29T15:22:13.567405Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:22:13.567459Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:22:13.567468Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-05-29T15:22:13.567472Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-05-29T15:22:13.567482Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:13.567486Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:13.567489Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-05-29T15:22:13.567492Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-05-29T15:22:13.567495Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-05-29T15:22:13.567498Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-05-29T15:22:13.567501Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:13.567505Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-05-29T15:22:13.567511Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:13.567535Z node 1 :PERSQUEUE DEBUG: read.h:262: CacheProxy. Passthrough write request to KV 2025-05-29T15:22:13.568406Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:13.568468Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:13.568531Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:202:2210], now have 1 active actors on pipe 2025-05-29T15:22:13.568618Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:205:2212], now have 1 active actors on pipe 2025-05-29T15:22:13.568779Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3222: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 178 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Immediate: false } 2025-05-29T15:22:13.568791Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3383: [PQ: 72057594037927937] distributed transaction 2025-05-29T15:22:13.568805Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3697: [PQ: 72057594037927937] Prop ... 
DEBUG: pq_impl.cpp:836: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-05-29T15:22:15.401564Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2025-05-29T15:22:15.401569Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67890 moved from PLANNED to CALCULATING 2025-05-29T15:22:15.401582Z node 6 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2025-05-29T15:22:15.401616Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3489: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2025-05-29T15:22:15.401621Z node 6 :PERSQUEUE DEBUG: transaction.cpp:218: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-05-29T15:22:15.401626Z node 6 :PERSQUEUE DEBUG: transaction.cpp:267: [TxId: 67890] Partition responses 1/1 2025-05-29T15:22:15.401630Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-05-29T15:22:15.401634Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-05-29T15:22:15.401639Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2025-05-29T15:22:15.401644Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4422: [PQ: 72057594037927937] Received 1, Expected 1 2025-05-29T15:22:15.401649Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2025-05-29T15:22:15.401653Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2025-05-29T15:22:15.401662Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3818: [PQ: 72057594037927937] write key for TxId 67890 2025-05-29T15:22:15.401688Z node 6 :PERSQUEUE DEBUG: transaction.cpp:374: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 133 MaxStep: 30133 PredicatesReceived { TabletId: 72057594037950158 Predicate: true } PredicateRecipients: 72057594037950158 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 25769805966 } Partitions { } 2025-05-29T15:22:15.401699Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3635: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-05-29T15:22:15.403012Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1231: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-05-29T15:22:15.403025Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-05-29T15:22:15.403033Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-05-29T15:22:15.403039Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 67890 2025-05-29T15:22:15.403044Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-05-29T15:22:15.403049Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-05-29T15:22:15.403056Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3970: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 
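The "TxId ... moved from X to Y" lines around here walk a transaction through the PQ tablet's lifecycle: PLANNED, CALCULATING, CALCULATED, WAIT_RS, EXECUTING, EXECUTED, WAIT_RS_ACKS, DELETING. A simplified sketch of that forward path, with the state names taken from the log but the transition table itself being an illustrative reading rather than YDB's actual code (which also handles aborts and restarts, as the EXECUTED-state replay further down shows):

// Hedged sketch of the transaction state machine observed in the log.
#include <iostream>
#include <map>
#include <stdexcept>

enum class ETxState {
    Planned, Calculating, Calculated, WaitRs,
    Executing, Executed, WaitRsAcks, Deleting,
};

// The forward path observed in the "moved from X to Y" lines.
const std::map<ETxState, ETxState> NextState = {
    {ETxState::Planned,     ETxState::Calculating},
    {ETxState::Calculating, ETxState::Calculated},
    {ETxState::Calculated,  ETxState::WaitRs},
    {ETxState::WaitRs,      ETxState::Executing},
    {ETxState::Executing,   ETxState::Executed},
    {ETxState::Executed,    ETxState::WaitRsAcks},
    {ETxState::WaitRsAcks,  ETxState::Deleting},
};

ETxState Advance(ETxState current) {
    auto it = NextState.find(current);
    if (it == NextState.end()) {
        throw std::logic_error("no forward transition from this state");
    }
    return it->second;
}

int main() {
    ETxState s = ETxState::Planned;
    while (s != ETxState::Deleting) {
        s = Advance(s); // mirrors "TxId 67890 moved from X to Y"
    }
    std::cout << "transaction reached DELETING\n";
}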
2025-05-29T15:22:15.403061Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3980: [PQ: 72057594037927937] Send TEvReadSet to tablet 72057594037950158 2025-05-29T15:22:15.403076Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4461: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-05-29T15:22:15.403084Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-05-29T15:22:15.403089Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-05-29T15:22:15.403093Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4491: [PQ: 72057594037927937] Received 0, Expected 1 2025-05-29T15:22:15.403114Z node 6 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-05-29T15:22:15.403123Z node 6 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 67890 2025-05-29T15:22:15.403146Z node 6 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:15.403150Z node 6 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:15.403156Z node 6 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:15.403160Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:15.403165Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-05-29T15:22:15.403169Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-05-29T15:22:15.403173Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-05-29T15:22:15.403178Z node 6 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:15.403183Z node 6 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:22:15.403196Z node 6 :PERSQUEUE DEBUG: read.h:262: CacheProxy. 
Passthrough write request to KV 2025-05-29T15:22:15.403270Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2915: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-05-29T15:22:15.403278Z node 6 :PERSQUEUE DEBUG: transaction.cpp:324: [TxId: 67890] Predicate acks 1/1 2025-05-29T15:22:15.403284Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-05-29T15:22:15.403288Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-05-29T15:22:15.403293Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-05-29T15:22:15.403297Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4491: [PQ: 72057594037927937] Received 0, Expected 1 2025-05-29T15:22:15.403302Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4306: [PQ: 72057594037927937] TxId 67890 status has not changed 2025-05-29T15:22:15.403374Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2915: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-05-29T15:22:15.403380Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2920: [PQ: 72057594037927937] Connected to tablet 72057594037950158 2025-05-29T15:22:15.403852Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:15.403870Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3535: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-05-29T15:22:15.403876Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-05-29T15:22:15.403880Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-05-29T15:22:15.403885Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-05-29T15:22:15.403889Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4491: [PQ: 72057594037927937] Received 1, Expected 1 2025-05-29T15:22:15.403897Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4164: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-05-29T15:22:15.403903Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4495: [PQ: 72057594037927937] complete TxId 67890 2025-05-29T15:22:15.403908Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4513: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-05-29T15:22:15.403913Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-05-29T15:22:15.403918Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-05-29T15:22:15.403922Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3818: [PQ: 72057594037927937] write key for TxId 67890 2025-05-29T15:22:15.403952Z node 6 :PERSQUEUE DEBUG: transaction.cpp:374: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 133 MaxStep: 30133 PredicatesReceived { TabletId: 72057594037950158 Predicate: true } PredicateRecipients: 72057594037950158 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 25769805966 } Partitions { } 2025-05-29T15:22:15.403964Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3635: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-05-29T15:22:15.404670Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1231: [PQ: 
72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-05-29T15:22:15.404681Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-05-29T15:22:15.404686Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-05-29T15:22:15.404691Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-05-29T15:22:15.404696Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3989: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-05-29T15:22:15.404704Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3991: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 72057594037950158 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-05-29T15:22:15.404710Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-05-29T15:22:15.404714Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-05-29T15:22:15.404720Z node 6 :PERSQUEUE DEBUG: transaction.cpp:366: [TxId: 67890] PredicateAcks: 1/1 2025-05-29T15:22:15.404724Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4539: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-05-29T15:22:15.404731Z node 6 :PERSQUEUE DEBUG: transaction.cpp:366: [TxId: 67890] PredicateAcks: 1/1 2025-05-29T15:22:15.404736Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4600: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2025-05-29T15:22:15.404742Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState DELETING 2025-05-29T15:22:15.404748Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3834: [PQ: 72057594037927937] delete key for TxId 67890 2025-05-29T15:22:15.404755Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3635: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-05-29T15:22:15.405650Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1231: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-05-29T15:22:15.405663Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state DELETING 2025-05-29T15:22:15.405668Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State DELETING 2025-05-29T15:22:15.405673Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4550: [PQ: 72057594037927937] delete TxId 67890 >> TPQTabletTests::Parallel_Transactions_1 >> TPQTabletTests::Parallel_Transactions_1 [GOOD] >> TPQTest::TestDirectReadHappyWay [GOOD] >> TPQTest::TestMessageNo >> TFetchRequestTests::CheckAccess [FAIL] >> PQCountersSimple::PartitionWriteQuota >> TPQTabletTests::Parallel_Transactions_2 >> TNodeBrokerTest::TestRandomActions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State [GOOD] Test command err: 2025-05-29T15:22:14.493399Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:22:14.494623Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:22:14.494700Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037927937] doesn't have tx info 2025-05-29T15:22:14.494713Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:22:14.494718Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-05-29T15:22:14.494724Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:22:14.494733Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:14.494760Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:14.498801Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:179:2193], now have 1 active actors on pipe 2025-05-29T15:22:14.498836Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:22:14.501888Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:22:14.502687Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:22:14.502714Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:14.502903Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:22:14.502934Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:22:14.503025Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:14.503109Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:187:2199] 2025-05-29T15:22:14.503430Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [topic:0:Initializer] Initializing completed. 2025-05-29T15:22:14.503439Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:187:2199] 2025-05-29T15:22:14.503449Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:22:14.503583Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:22:14.503601Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-05-29T15:22:14.503607Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-05-29T15:22:14.503629Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:14.503634Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:14.503638Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:14.503643Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:14.503647Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-05-29T15:22:14.503651Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-05-29T15:22:14.503655Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:14.503659Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:22:14.503678Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:14.503707Z node 1 :PERSQUEUE DEBUG: read.h:262: CacheProxy. 
Passthrough write request to KV 2025-05-29T15:22:14.504392Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:14.504478Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:194:2204], now have 1 active actors on pipe 2025-05-29T15:22:14.504547Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:197:2206], now have 1 active actors on pipe 2025-05-29T15:22:14.504553Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1711: [PQ: 72057594037927937] Handle TEvPersQueue::TEvDropTablet 2025-05-29T15:22:14.725291Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:22:14.726442Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:22:14.726513Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037927937] doesn't have tx info 2025-05-29T15:22:14.726520Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:22:14.726525Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-05-29T15:22:14.726530Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:22:14.726539Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:14.726549Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:14.730298Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [2:179:2193], now have 1 active actors on pipe 2025-05-29T15:22:14.730328Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:22:14.730393Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037927937] Config update version 2(current 0) received from actor [2:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-05-29T15:22:14.730954Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" 
YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-05-29T15:22:14.730975Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:14.731162Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 2 actor [2:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { ... 2025-05-29T15:22:15.720553Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3635: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-05-29T15:22:15.722332Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1231: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-05-29T15:22:15.722348Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-05-29T15:22:15.722352Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67891, State CALCULATED 2025-05-29T15:22:15.722358Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67891 State CALCULATED FrontTxId 67891 2025-05-29T15:22:15.722364Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67891, NewState WAIT_RS 2025-05-29T15:22:15.722369Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67891 moved from CALCULATED to WAIT_RS 2025-05-29T15:22:15.722376Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3970: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-05-29T15:22:15.722381Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3980: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-05-29T15:22:15.722391Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4461: [PQ: 72057594037927937] HaveParticipantsDecision 0 2025-05-29T15:22:15.723569Z node 6 :PERSQUEUE DEBUG: pqtablet_mock.cpp:87: Client pipe to tablet 72057594037927937 from 22222 is reset 2025-05-29T15:22:15.728615Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:22:15.729232Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:22:15.729480Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:741: [PQ: 72057594037927937] has a tx info 2025-05-29T15:22:15.729494Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037927937] PlanStep 110, PlanTxId 67891, ExecStep 110, ExecTxId 67891 2025-05-29T15:22:15.729522Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:997: [PQ: 72057594037927937] ReadRange pair. 
Key tx_00000000000000067890, Status 0 2025-05-29T15:22:15.729545Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1006: [PQ: 72057594037927937] Restore Tx. TxId: 67890, Step: 100, State: EXECUTED, WriteId: 2025-05-29T15:22:15.729574Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:997: [PQ: 72057594037927937] ReadRange pair. Key tx_00000000000000067891, Status 0 2025-05-29T15:22:15.729581Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1006: [PQ: 72057594037927937] Restore Tx. TxId: 67891, Step: 110, State: CALCULATED, WriteId: 2025-05-29T15:22:15.729585Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1009: [PQ: 72057594037927937] Fix tx state 2025-05-29T15:22:15.729592Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037927937] Txs.size=2, PlannedTxs.size=2 2025-05-29T15:22:15.729598Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4897: [PQ: 72057594037927937] top tx queue (100, 67890) 2025-05-29T15:22:15.729605Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4917: [PQ: 72057594037927937] TxsOrder: 67890 EXECUTED 0 2025-05-29T15:22:15.729610Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4917: [PQ: 72057594037927937] TxsOrder: 67891 PLANNED 0 2025-05-29T15:22:15.729697Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.729704Z node 6 :PERSQUEUE INFO: pq_impl.cpp:787: [PQ: 72057594037927937] has a tx writes info 2025-05-29T15:22:15.729726Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:22:15.729797Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:15.729843Z node 6 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [6:361:2338] 2025-05-29T15:22:15.730009Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitDiskStatusStep 2025-05-29T15:22:15.730233Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitMetaStep 2025-05-29T15:22:15.730290Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-05-29T15:22:15.730395Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-05-29T15:22:15.730438Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitDataStep 2025-05-29T15:22:15.730445Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:22:15.730450Z node 6 :PERSQUEUE INFO: partition_init.cpp:774: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:22:15.730455Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:55: [topic:0:Initializer] Initializing completed. 
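The "[topic:0:Initializer] Start initializing step ..." lines just above run the partition through a fixed sequence of steps (TInitConfigStep, TInitInternalFieldsStep, TInitDiskStatusStep, TInitMetaStep, TInitInfoRangeStep, TInitDataRangeStep, TInitDataStep, TInitEndWriteTimestampStep) before reporting "Initializing completed." A minimal sketch of such a step pipeline, assuming only what the log shows — the step names and that a step may be skipped when its work is already done:

// Hedged sketch of a sequential initializer; the runner is illustrative.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct TInitStep {
    std::string Name;
    std::function<bool()> Run; // returns false if the step was skipped
};

void RunInitializer(const std::vector<TInitStep>& steps) {
    for (const auto& step : steps) {
        std::cout << "Start initializing step " << step.Name << "\n";
        if (!step.Run()) {
            std::cout << step.Name << " skipped because already initialized.\n";
        }
    }
    std::cout << "Initializing completed.\n";
}

int main() {
    bool endWriteTimestampKnown = true; // pretend it was restored from disk
    RunInitializer({
        {"TInitConfigStep",            [] { return true; }},
        {"TInitInternalFieldsStep",    [] { return true; }},
        {"TInitDiskStatusStep",        [] { return true; }},
        {"TInitMetaStep",              [] { return true; }},
        {"TInitInfoRangeStep",         [] { return true; }},
        {"TInitDataRangeStep",         [] { return true; }},
        {"TInitDataStep",              [] { return true; }},
        {"TInitEndWriteTimestampStep", [&] { return !endWriteTimestampKnown; }},
    });
}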
2025-05-29T15:22:15.730462Z node 6 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [6:361:2338] 2025-05-29T15:22:15.730475Z node 6 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:22:15.730484Z node 6 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:22:15.730499Z node 6 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 6 2025-05-29T15:22:15.730528Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3970: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-05-29T15:22:15.730533Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3980: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-05-29T15:22:15.730549Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-05-29T15:22:15.730554Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-05-29T15:22:15.730559Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-05-29T15:22:15.730564Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3989: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-05-29T15:22:15.730569Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-05-29T15:22:15.730574Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-05-29T15:22:15.730579Z node 6 :PERSQUEUE DEBUG: transaction.cpp:366: [TxId: 67890] PredicateAcks: 0/1 2025-05-29T15:22:15.730583Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4539: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-05-29T15:22:15.730587Z node 6 :PERSQUEUE DEBUG: transaction.cpp:366: [TxId: 67890] PredicateAcks: 0/1 2025-05-29T15:22:15.730592Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PLANNED 2025-05-29T15:22:15.730597Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67891, State PLANNED 2025-05-29T15:22:15.730601Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67891 State PLANNED FrontTxId 67891 2025-05-29T15:22:15.730606Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4406: [PQ: 72057594037927937] TxQueue.size 1 2025-05-29T15:22:15.730610Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:836: [PQ: 72057594037927937] New ExecStep 110, ExecTxId 67891 2025-05-29T15:22:15.730619Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67891, NewState CALCULATING 2025-05-29T15:22:15.730624Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67891 moved from PLANNED to CALCULATING 2025-05-29T15:22:15.730680Z node 6 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 110, TxId 67891 2025-05-29T15:22:15.730791Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3489: [PQ: 72057594037927937] Handle 
TEvPQ::TEvTxCalcPredicateResult Step 110, TxId 67891, Partition 0, Predicate 1 2025-05-29T15:22:15.730801Z node 6 :PERSQUEUE DEBUG: transaction.cpp:218: [TxId: 67891] Handle TEvTxCalcPredicateResult 2025-05-29T15:22:15.730806Z node 6 :PERSQUEUE DEBUG: transaction.cpp:267: [TxId: 67891] Partition responses 1/1 2025-05-29T15:22:15.730810Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-05-29T15:22:15.730815Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67891, State CALCULATING 2025-05-29T15:22:15.730821Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67891 State CALCULATING FrontTxId 67891 2025-05-29T15:22:15.730826Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4422: [PQ: 72057594037927937] Received 1, Expected 1 2025-05-29T15:22:15.730831Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67891, NewState CALCULATED 2025-05-29T15:22:15.730836Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67891 moved from CALCULATING to CALCULATED 2025-05-29T15:22:15.730842Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3818: [PQ: 72057594037927937] write key for TxId 67891 2025-05-29T15:22:15.730886Z node 6 :PERSQUEUE DEBUG: transaction.cpp:374: [TxId: 67891] save tx TxId: 67891 State: CALCULATED MinStep: 152 MaxStep: 30152 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Step: 110 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 25769805966 } Partitions { } 2025-05-29T15:22:15.730901Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3635: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-05-29T15:22:15.730912Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2915: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-05-29T15:22:15.730917Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2920: [PQ: 72057594037927937] Connected to tablet 22222 2025-05-29T15:22:15.731717Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1231: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-05-29T15:22:15.731731Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-05-29T15:22:15.731736Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67891, State CALCULATED 2025-05-29T15:22:15.731740Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67891 State CALCULATED FrontTxId 67891 2025-05-29T15:22:15.731745Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67891, NewState WAIT_RS 2025-05-29T15:22:15.731750Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67891 moved from CALCULATED to WAIT_RS 2025-05-29T15:22:15.731757Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3970: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 
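The readset exchange in these lines ("Send TEvReadSet to 1 receivers. Wait TEvReadSet from 1 senders.", "PredicateAcks: 1/1", "Received 1, Expected 1") is plain expected-vs-received counting: the tablet can decide the transaction only once every participant's readset has arrived, and can finish only once every recipient has acknowledged. A small sketch of that bookkeeping, with counter names that are illustrative rather than YDB's:

// Hedged sketch of readset/ack counting for a distributed transaction.
#include <cstddef>
#include <iostream>

struct TReadSetTracker {
    size_t ExpectedReadSets = 0;  // readsets we wait for from other tablets
    size_t ReceivedReadSets = 0;
    size_t ExpectedAcks = 0;      // acks for readsets we sent out
    size_t ReceivedAcks = 0;

    bool HaveParticipantsDecision() const {
        return ReceivedReadSets == ExpectedReadSets;
    }
    bool HaveAllRecipientsReceived() const {
        return ReceivedAcks == ExpectedAcks;
    }
};

int main() {
    TReadSetTracker tx;          // e.g. TxId 67890
    tx.ExpectedReadSets = 1;     // "Wait TEvReadSet from 1 senders"
    tx.ExpectedAcks = 1;         // "Send TEvReadSet to 1 receivers"

    tx.ReceivedReadSets++;       // peer's predicate arrives
    std::cout << "decision ready: " << tx.HaveParticipantsDecision() << "\n"; // 1

    tx.ReceivedAcks++;           // peer confirms it saw our readset
    std::cout << "all acked: " << tx.HaveAllRecipientsReceived() << "\n";     // 1
}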
2025-05-29T15:22:15.731762Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3980: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-05-29T15:22:15.731770Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4461: [PQ: 72057594037927937] HaveParticipantsDecision 0 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [FAIL] >> TPQTabletTests::Parallel_Transactions_2 [GOOD] >> PQCountersLabeled::Partition [GOOD] >> PQCountersLabeled::PartitionFirstClass >> TPQTabletTests::PQTablet_Send_RS_With_Abort >> TPartitionTests::TestNonConflictingActsBatchOk [GOOD] >> TPartitionTests::TestBatchingWithChangeConfig >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-true >> TPQTabletTests::PQTablet_Send_RS_With_Abort [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace-QueryService+UseSink Test command err: Trying to start YDB, gRPC: 6008, MsgBus: 11494 2025-05-29T15:22:08.481543Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888308905696682:2198];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:08.481689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001026/r3tmp/tmpb6jyCG/pdisk_1.dat 2025-05-29T15:22:08.544386Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:08.545022Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888308905696523:2079] 1748532128479646 != 1748532128479649 TServer::EnableGrpc on GrpcPort 6008, node 1 2025-05-29T15:22:08.570200Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:08.570216Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:08.570218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:08.570257Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11494 TClient is connected to server localhost:11494 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:22:08.620652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:08.620687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-29T15:22:08.621944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:08.634759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.639773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:08.649252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.715997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.737960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.751025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.902017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888308905698156:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.902040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.939162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.950149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.960726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.975452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.988391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.005699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.017035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.075831Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888313200666112:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.075862Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.075971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888313200666117:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.076997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:09.086128Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888313200666119:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:09.151873Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888313200666170:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:09.279045Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888313200666183:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:09.279165Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWFhOGZjOGUtN2JkYjI1ZjctNWYyMzljNmYtOTA0NDk0Zg==, ActorId: [1:7509888308905698153:2401], ActorState: ExecuteState, TraceId: 01jwea5v9kbnamtme5zryndqjv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:09.280078Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F5AF92F6AC2 14. ??:0: ?? @ 0x7F5AF938884F Trying to start YDB, gRPC: 22992, MsgBus: 26923 2025-05-29T15:22:12.667398Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888325876221798:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:12.667447Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001026/r3tmp/tmpJD1OF8/pdisk_1.dat 2025-05-29T15:22:12.736945Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888325876221775:2079] 1748532132667200 != 1748532132667203 2025-05-29T15:22:12.737286Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22992, node 1 2025-05-29T15:22:12.749386Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:12.749395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:12.749397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:12.749436Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26923 2025-05-29T15:22:12.770376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:12.770406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:12.771540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26923 WaitRootIsUp 'Root'... 
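The VERIFY failure and stack trace above come from AssertSuccessResult checking (result.IsSuccess()) on a query result and raising a unittest error with the attached issues when the check fails. A simplified sketch of that assertion pattern, where TStatus is a stand-in struct rather than the real NYdb::TStatus and aborting stands in for the harness's error reporting:

// Hedged sketch of an assert-success helper like the failing frame above.
#include <cstdlib>
#include <iostream>
#include <string>

struct TStatus {
    bool Success = false;
    std::string Issues; // flattened issue tree, e.g. "Fatal: Execution, code: 1060"
    bool IsSuccess() const { return Success; }
};

void AssertSuccessResult(const TStatus& result) {
    if (!result.IsSuccess()) {
        // The real harness raises a unittest error and prints a stack trace;
        // printing the issues and aborting is the simplified equivalent.
        std::cerr << "assertion failed: (result.IsSuccess())\n"
                  << result.Issues << "\n";
        std::abort();
    }
}

int main() {
    AssertSuccessResult({true, ""}); // passes silently
    // AssertSuccessResult({false, "Fatal: Execution, code: 1060"}); // would abort
}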
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:12.812060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:12.818103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:12.840375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:12.862466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:12.874908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:13.024571Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888330171190704:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.024594Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.076048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.084891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.098115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.153159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.167294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.181897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.195527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.212491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888330171191359:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.212507Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888330171191364:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.212515Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.213214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:13.222810Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888330171191366:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:13.277913Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888330171191417:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:13.355377Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888330171191433:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:13.355511Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjI2MTZmZjEtOWJmODFlODUtYWJlMzc2NzQtNjg4YjgzMzI=, ActorId: [1:7509888330171190701:2401], ActorState: ExecuteState, TraceId: 01jwea5zawbkyrmzwp11xf657r, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:13.356440Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F223DAD9AC2 14. ??:0: ?? @ 0x7F223DB6B84F >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test [FAIL] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test >> TPQTabletTests::One_Tablet_For_All_Partitions >> TNodeBrokerTest::NodesMigrationExpireActive >> TPQTest::TestMessageNo [GOOD] >> TPQTest::TestLowWatermark >> TPQTabletTests::One_Tablet_For_All_Partitions [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-false ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink Test command err: Trying to start YDB, gRPC: 10030, MsgBus: 10002 2025-05-29T15:22:08.020692Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888306914013538:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:08.020783Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00102d/r3tmp/tmpBRtjSV/pdisk_1.dat 2025-05-29T15:22:08.083860Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:08.083923Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888306914013373:2079] 1748532128018764 != 1748532128018767 TServer::EnableGrpc on GrpcPort 10030, node 1 2025-05-29T15:22:08.109283Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:08.109293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:08.109295Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:08.109330Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:08.122297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:08.122338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-05-29T15:22:08.123611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10002 TClient is connected to server localhost:10002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:08.158813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.167809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.229498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.246458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.256751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.487348Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888306914015005:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.487392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.536529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.543234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.556063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.612205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.624781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.641253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.653449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.672104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888306914015661:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.672140Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.672217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888306914015666:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.673245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:08.679825Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888306914015668:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:08.737010Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888306914015719:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:08.867933Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888306914015735:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:08.868069Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTAwMjc0YTctNzFmODJiYzItZGQ2MGEwOTItNTE3NzZjNTI=, ActorId: [1:7509888306914015002:2401], ActorState: ExecuteState, TraceId: 01jwea5twz7xxk1zraqmd5qey6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:08.868676Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7FAB80AF4AC2 14. ??:0: ?? @ 0x7FAB80B8684F Trying to start YDB, gRPC: 28542, MsgBus: 16947 2025-05-29T15:22:12.696136Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888323771421462:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:12.696166Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00102d/r3tmp/tmpIeXmse/pdisk_1.dat 2025-05-29T15:22:12.745214Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:12.745261Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888323771421439:2079] 1748532132695972 != 1748532132695975 TServer::EnableGrpc on GrpcPort 28542, node 1 2025-05-29T15:22:12.762775Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:12.762787Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:12.762790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:12.762845Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16947 TClient is connected to server localhost:16947 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:12.821914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:12.821942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:12.823062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:12.827153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:12.834514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:12.897785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:12.916726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:12.930582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:13.088031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888328066390366:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.088062Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.126591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.134566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.189603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.202080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.256929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.264959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.271957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.289032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888328066391021:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.289056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888328066391026:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.289060Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.289748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:13.292210Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888328066391028:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:22:13.362525Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888328066391079:3395] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:13.456244Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888328066391095:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:13.456364Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzU1ZDczNy01Mzc0ZWEzZi01YjBkZDA0OS1iNTJjM2UyOA==, ActorId: [1:7509888328066390347:2401], ActorState: ExecuteState, TraceId: 01jwea5zd86z9wvn5acz1pwqgz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:13.456976Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F44C3289AC2 14. ??:0: ?? @ 0x7F44C331B84F >> TNodeBrokerTest::MinDynamicNodeIdShifted >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test [FAIL] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test [FAIL] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::AfterRestart_2 [GOOD] Test command err: 2025-05-29T15:22:10.367806Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888317136897547:2077];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:10.368006Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:10.372211Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888314819655379:2210];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:10.372253Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000b54/r3tmp/tmplWQHm2/pdisk_1.dat 2025-05-29T15:22:10.421912Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:10.427943Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:10.445305Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8366, node 1 2025-05-29T15:22:10.462981Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/000b54/r3tmp/yandexA0Qcx9.tmp 2025-05-29T15:22:10.462994Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/000b54/r3tmp/yandexA0Qcx9.tmp 2025-05-29T15:22:10.463043Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/000b54/r3tmp/yandexA0Qcx9.tmp 2025-05-29T15:22:10.463086Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:10.464567Z INFO: TTestServer started on Port 1454 GrpcPort 8366 2025-05-29T15:22:10.467853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:10.467895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:10.469305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1454 PQClient connected to localhost:8366 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:10.517449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:10.517479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:10.519056Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:22:10.519379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:10.520620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:10.528893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:10.768433Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888314819655609:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.768451Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888314819655598:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.768470Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.769602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-05-29T15:22:10.774048Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888314819655612:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710657 completed, doublechecking } 2025-05-29T15:22:10.837687Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888317136898609:2341], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:10.837687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.837780Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTJkZGE1OTEtZWYxYjkzNGQtNzhlNDRkMTktOGEzOGYxNjc=, ActorId: [1:7509888317136898568:2334], ActorState: ExecuteState, TraceId: 01jwea5wz3722121yrpafnjek9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:10.838240Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:10.871656Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888314819655639:2167] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:10.877002Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888314819655653:2318], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:10.877128Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=ZjI4M2ZmZTctZTMwNzE2ZjYtYjRmZTY4NzAtODIxZTI1Y2I=, ActorId: [2:7509888314819655596:2309], ActorState: ExecuteState, TraceId: 01jwea5wyf3fyw5kj3v40ckqyx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:10.877284Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:10.898064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.926488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:10.958175Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888317136899002:2376], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:10.958275Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWRkNWRiYTEtODIwMTYwMjUtYTFmM2QwMzUtODJlZDJkNjQ=, ActorId: [1:7509888317136898999:2374], ActorState: ExecuteState, TraceId: 01jwea5x3xb8asjw5qtpnrcnzj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C16BFC) NUnitTest::NPrivate::RaiseError(ch ... d: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:12.089530Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509888326104793448:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:12.089558Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509888326104793465:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:12.089565Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:12.090665Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-05-29T15:22:12.095405Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7509888326104793469:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976720657 completed, doublechecking } 2025-05-29T15:22:12.140654Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:7509888326527016946:2341], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:12.141739Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=Y2FiZmFjMTYtZDBhY2IxMTYtNDUzMjU2ZGItNGM0NmZhNTk=, ActorId: [3:7509888326527016920:2334], ActorState: ExecuteState, TraceId: 01jwea5y9739a92mx1pcewzh55, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:12.141899Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:12.142660Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:12.191761Z node 4 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [4:7509888326104793497:2166] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:12.196035Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [4:7509888326104793511:2318], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:12.196108Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=4&id=NTM0MzgyMDYtY2U3NDdhYjctNGVkMjM4M2ItMWJlYjQ0ZjI=, ActorId: [4:7509888326104793439:2308], ActorState: ExecuteState, TraceId: 01jwea5y7scz25ya72zsgv81xt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:12.196238Z node 4 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:12.201915Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:12.222759Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:12.250989Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:7509888326527017341:2376], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:12.251097Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=ZGU2ZTQ1ZWEtNzRmNmU2Y2UtZDdjMjI1M2MtOTM3YmVjOTE=, ActorId: [3:7509888326527017338:2374], ActorState: ExecuteState, TraceId: 01jwea5ycecfw8e0h6zz8z1j7r, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C16BFC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DCA7A9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x13965684) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x1394D528) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139741A2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139720D7) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x13970D08) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x13969A6B) NPersQueue::TTestServer::TTestServer(bool)+134 (0x139502B6) NTestSuiteTPartitionChooserSuite::CreateServer()+24 (0x13950118) NTestSuiteTPartitionChooserSuite::TTestCaseTPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test::Execute_(NUnitTest::TTestContext&)+32 (0x139538C0) NTestSuiteTPartitionChooserSuite::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13962767) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13DCC65E) NTestSuiteTPartitionChooserSuite::TCurrentTest::Execute()+433 (0x13962121) NUnitTest::TTestFactory::Execute()+803 (0x13DCCDD3) NUnitTest::RunMain(int, char**)+3021 (0x13DDE97D) ??+0 (0x7F7E09135D90) __libc_start_main+128 (0x7F7E09135E40) _start+41 (0x12A48029) 2025-05-29T15:22:12.908187Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:12.908233Z node 5 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:12.912331Z node 5 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [5:178:2192] 2025-05-29T15:22:12.912769Z node 5 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-05-29T15:22:12.000000Z 2025-05-29T15:22:12.912780Z node 5 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [5:178:2192] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\240\361\213\345\3612" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\316\255\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\004\020\000\030\000\"\007session(\0000\000@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session" StorageChannel: INLINE } 2025-05-29T15:22:13.360373Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:13.360401Z node 6 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:13.364214Z node 6 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [6:178:2192] 2025-05-29T15:22:13.364618Z node 6 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-05-29T15:22:13.000000Z 2025-05-29T15:22:13.364628Z node 6 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [6:178:2192] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService+UseSink Test command err: Trying to start YDB, gRPC: 24929, MsgBus: 8705 2025-05-29T15:22:08.738407Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888306427394214:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:08.738441Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00100d/r3tmp/tmp362Wq4/pdisk_1.dat 2025-05-29T15:22:08.810889Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888306427394194:2079] 1748532128738265 != 1748532128738268 2025-05-29T15:22:08.813293Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24929, node 1 2025-05-29T15:22:08.825003Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:08.825013Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:08.825014Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:08.825047Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8705 TClient is connected to server localhost:8705 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:08.880513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:08.880545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:08.881602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:08.882623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.904968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.932065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.955043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.969847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.139638Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888310722363122:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.139675Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.192838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.202777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.213262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.227443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.240661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.255178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.270609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.285721Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888310722363776:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.285768Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.285865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888310722363781:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.287031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:09.296048Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888310722363783:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:09.379027Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888310722363834:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:09.509693Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888310722363843:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:09.509819Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmVlODdhYjMtZWU5M2U4OWYtNDE3YWQzMjMtNDlhYzdmZDc=, ActorId: [1:7509888310722363104:2401], ActorState: ExecuteState, TraceId: 01jwea5vg5bnrbbz4yy434esss, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:09.510526Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F5CB3F19AC2 14. ??:0: ?? @ 0x7F5CB3FAB84F Trying to start YDB, gRPC: 11803, MsgBus: 12882 2025-05-29T15:22:13.011431Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888327565402398:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:13.011453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00100d/r3tmp/tmpGMGGgf/pdisk_1.dat 2025-05-29T15:22:13.064983Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888327565402378:2079] 1748532133011217 != 1748532133011220 2025-05-29T15:22:13.065166Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11803, node 1 2025-05-29T15:22:13.073937Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:13.073950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:13.073952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:13.074001Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12882 TClient is connected to server localhost:12882 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:22:13.114239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:13.114275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:13.115194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:13.141924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:13.144500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:13.154646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:13.220255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:13.236721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:13.245587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:13.423417Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888327565404011:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.423454Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.472171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.479887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.534860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.545504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.551971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.567103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.580824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.596399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888327565404666:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.596426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.596429Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888327565404671:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.597056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:13.601117Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888327565404673:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:13.685234Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888327565404724:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:13.802609Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888327565404740:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 VERIFY failed (2025-05-29T15:22:13.804350Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:22:13.802727Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Yjk0OTJlZWEtOTk0NDJmZTctOWM1Yjc5YmQtMWZmYTAzMg==, ActorId: [1:7509888327565404008:2401], ActorState: ExecuteState, TraceId: 01jwea5zpwb90mrabzm4zd2mn1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7FD58DD3FAC2 14. ??:0: ?? @ 0x7FD58DDD184F >> TPQTabletTests::One_New_Partition_In_Another_Tablet >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-false >> TPQTest::TestCmdReadWithLastOffset [GOOD] >> TPQTest::TestDescribeBalancer >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true >> TLocalTests::TestAlterTenant >> TLocalTests::TestAlterTenant [GOOD] >> TLocalTests::TestAddTenantWhileResolving >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] >> PQCountersSimple::PartitionWriteQuota [GOOD] >> PQCountersSimple::PartitionFirstClass ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert+QueryService+UseSink Test command err: Trying to start YDB, gRPC: 27239, MsgBus: 4141 2025-05-29T15:22:09.180207Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888313855511159:2142];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:09.181484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000fd1/r3tmp/tmpJZ5D9q/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27239, node 1 2025-05-29T15:22:09.251566Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:09.251654Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888313855511044:2079] 1748532129179178 != 1748532129179181 2025-05-29T15:22:09.253284Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:09.253295Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:09.253297Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:09.253337Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4141 2025-05-29T15:22:09.281802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:09.281827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:09.282905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4141 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:09.322111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.338464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.355364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.374344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.386024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.513803Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888313855512705:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.513824Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.552684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.559698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.614926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.626417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.639702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.655098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.667465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.687335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888313855513360:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.687366Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.687434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888313855513365:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.688244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:09.694471Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888313855513367:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:09.795273Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888313855513418:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:09.894201Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888313855513434:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:09.894304Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTVjYTZkMjMtMzQ1YzZhZGYtNDBkYmZjZWItMjlmZmI1ZmI=, ActorId: [1:7509888313855512687:2401], ActorState: ExecuteState, TraceId: 01jwea5vwp2434pj9zpe22k4tp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:09.896757Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F766E1F2AC2 14. ??:0: ?? @ 0x7F766E28484F Trying to start YDB, gRPC: 22081, MsgBus: 24427 2025-05-29T15:22:13.611800Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888328740806750:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:13.611825Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000fd1/r3tmp/tmpcUbBJQ/pdisk_1.dat 2025-05-29T15:22:13.672023Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:13.672119Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888328740806729:2079] 1748532133611664 != 1748532133611667 TServer::EnableGrpc on GrpcPort 22081, node 1 2025-05-29T15:22:13.685338Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:13.685353Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:13.685355Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:13.685420Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24427 2025-05-29T15:22:13.714528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:13.714563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:13.715635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24427 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:13.747816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:13.754936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:13.768428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:13.838606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:13.861703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:13.874699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.968364Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888328740808363:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.968392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.005478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.060523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.070346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.084530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.098101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.112848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.126413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.142762Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888333035776312:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.142805Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.142858Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888333035776317:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.143950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:14.146624Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888333035776319:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:14.219097Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888333035776370:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:14.318005Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888333035776386:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:14.318109Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWFmZjI2ZTgtNGE5ZDdkMjctN2I4YmY0OGItZGRhMzBkODc=, ActorId: [1:7509888328740808345:2401], ActorState: ExecuteState, TraceId: 01jwea607y5ghcdjxvbzf35sds, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:14.318684Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7FD72FBF5AC2 14. ??:0: ?? @ 0x7FD72FC8784F >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TNodeBrokerTest::LoadStateMoveEpoch ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] Test command err: 2025-05-29T15:22:15.780376Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:22:15.781178Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:22:15.781228Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037927937] doesn't have tx info 2025-05-29T15:22:15.781236Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:22:15.781239Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-05-29T15:22:15.781243Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:22:15.781249Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.781255Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:15.786272Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:207:2212], now have 1 active actors on pipe 2025-05-29T15:22:15.786295Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:22:15.788441Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer-1" ImportantClientId: "consumer-2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 
ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer-1" Generation: 1 Important: true } Consumers { Name: "consumer-2" Generation: 1 Important: true } 2025-05-29T15:22:15.789152Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer-1" ImportantClientId: "consumer-2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer-1" Generation: 1 Important: true } Consumers { Name: "consumer-2" Generation: 1 Important: true } 2025-05-29T15:22:15.789175Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.789294Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer-1" ImportantClientId: "consumer-2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer-1" Generation: 1 Important: true } Consumers { Name: "consumer-2" Generation: 1 Important: true } 2025-05-29T15:22:15.789319Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:22:15.789390Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:15.789444Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:215:2218] 2025-05-29T15:22:15.789589Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [topic:0:Initializer] Initializing completed. 
2025-05-29T15:22:15.789595Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:215:2218] 2025-05-29T15:22:15.789600Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:22:15.789719Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:22:15.789729Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-05-29T15:22:15.789732Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-05-29T15:22:15.789736Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer-1 reinit request with generation 1 2025-05-29T15:22:15.789739Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer-1 reinit with generation 1 done 2025-05-29T15:22:15.789742Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer-2 reinit request with generation 1 2025-05-29T15:22:15.789744Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer-2 reinit with generation 1 done 2025-05-29T15:22:15.789763Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:15.789765Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:15.789768Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:15.789771Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:15.789773Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-05-29T15:22:15.789777Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-05-29T15:22:15.789779Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cconsumer-2 2025-05-29T15:22:15.789781Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uconsumer-2 2025-05-29T15:22:15.789783Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cconsumer-1 2025-05-29T15:22:15.789785Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uconsumer-1 2025-05-29T15:22:15.789788Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:15.789790Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: 
[PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:22:15.789805Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:15.789808Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer-2 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:15.789812Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:15.789834Z node 1 :PERSQUEUE DEBUG: read.h:262: CacheProxy. Passthrough write request to KV 2025-05-29T15:22:15.790379Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:15.790446Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:222:2223], now have 1 active actors on pipe 2025-05-29T15:22:15.790514Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:225:2225], now have 1 active actors on pipe 2025-05-29T15:22:15.790631Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3222: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 178 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer-1" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-05-29T15:22:15.790637Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3383: [PQ: 72057594037927937] distributed transaction 2025-05-29T15:22:15.790648Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3697: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-05-29T15:22:15.790652Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-05-29T15:22:15.790655Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-05-29T15:22:15.790659Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3922: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-05-29T15:22:15.790662Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-05-29T15:22:15.790667Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3818: [PQ: 72057594037927937] write key for TxId 67890 2025-05-29T15:22:15.790691Z node 1 :PERSQUEUE DEBUG: transaction.cpp:374: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer-1 ... 
15:22:17.093956Z node 6 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-3 reinit with generation 2 done 2025-05-29T15:22:17.093959Z node 6 :PERSQUEUE DEBUG: partition.cpp:3219: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user drop done 2025-05-29T15:22:17.093988Z node 6 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:17.093993Z node 6 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:17.093999Z node 6 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000cuser, m0000000000cuser] 2025-05-29T15:22:17.094005Z node 6 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000uuser, m0000000000uuser] 2025-05-29T15:22:17.094007Z node 6 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:17.094010Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:17.094012Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-05-29T15:22:17.094014Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-05-29T15:22:17.094016Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-05-29T15:22:17.094019Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-3 2025-05-29T15:22:17.094021Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-3 2025-05-29T15:22:17.094023Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-05-29T15:22:17.094025Z node 6 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:17.094028Z node 6 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:22:17.094034Z node 6 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-05-29T15:22:17.094051Z node 6 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:17.094054Z node 6 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:17.094056Z node 6 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-05-29T15:22:17.094058Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-05-29T15:22:17.094061Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] I0000000001 2025-05-29T15:22:17.094063Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: 
StateIdle] m0000000001cclient-1 2025-05-29T15:22:17.094065Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uclient-1 2025-05-29T15:22:17.094067Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cclient-3 2025-05-29T15:22:17.094070Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uclient-3 2025-05-29T15:22:17.094072Z node 6 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] _config_1 2025-05-29T15:22:17.094074Z node 6 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:17.094076Z node 6 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-05-29T15:22:17.094096Z node 6 :PERSQUEUE DEBUG: read.h:262: CacheProxy. Passthrough write request to KV 2025-05-29T15:22:17.094288Z node 6 :PERSQUEUE DEBUG: read.h:262: CacheProxy. Passthrough write request to KV 2025-05-29T15:22:17.094529Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2915: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-05-29T15:22:17.094540Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2920: [PQ: 72057594037927937] Connected to tablet 22222 2025-05-29T15:22:17.095037Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:17.095141Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3535: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-05-29T15:22:17.095148Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-05-29T15:22:17.095152Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-05-29T15:22:17.095171Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-05-29T15:22:17.095175Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4491: [PQ: 72057594037927937] Received 1, Expected 2 2025-05-29T15:22:17.095180Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4306: [PQ: 72057594037927937] TxId 67890 status has not changed 2025-05-29T15:22:17.095574Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:17.095602Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3535: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 1 2025-05-29T15:22:17.095607Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-05-29T15:22:17.095611Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-05-29T15:22:17.095615Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-05-29T15:22:17.095620Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4491: [PQ: 72057594037927937] Received 2, Expected 2 2025-05-29T15:22:17.095627Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4164: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-05-29T15:22:17.095632Z node 6 :PERSQUEUE DEBUG: 
pq_impl.cpp:4495: [PQ: 72057594037927937] complete TxId 67890 2025-05-29T15:22:17.095685Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037927937] Apply new config PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ChildPartitionIds: 1 ChildPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 0 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ParentPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } 2025-05-29T15:22:17.095694Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:17.095704Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4513: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-05-29T15:22:17.095709Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-05-29T15:22:17.095713Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-05-29T15:22:17.095732Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3818: [PQ: 72057594037927937] write key for TxId 67890 2025-05-29T15:22:17.095779Z node 6 :PERSQUEUE DEBUG: transaction.cpp:374: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 133 MaxStep: 18446744073709551615 PredicateRecipients: 22222 Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ChildPartitionIds: 1 ChildPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 0 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ParentPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 176 RawX2: 25769805966 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2025-05-29T15:22:17.095815Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3635: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-05-29T15:22:17.096472Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1231: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-05-29T15:22:17.096481Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-05-29T15:22:17.096484Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-05-29T15:22:17.096487Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-05-29T15:22:17.096490Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3989: [PQ: 72057594037927937] 
TPersQueue::SendEvReadSetAckToSenders 2025-05-29T15:22:17.096493Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-05-29T15:22:17.096495Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-05-29T15:22:17.096499Z node 6 :PERSQUEUE DEBUG: transaction.cpp:366: [TxId: 67890] PredicateAcks: 0/1 2025-05-29T15:22:17.096501Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4539: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-05-29T15:22:17.096503Z node 6 :PERSQUEUE DEBUG: transaction.cpp:366: [TxId: 67890] PredicateAcks: 0/1 2025-05-29T15:22:17.097250Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [6:364:2335], now have 1 active actors on pipe 2025-05-29T15:22:17.097278Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3423: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-05-29T15:22:17.097282Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3451: [PQ: 72057594037927937] send TEvReadSetAck to 22222 2025-05-29T15:22:17.097288Z node 6 :PERSQUEUE DEBUG: pqtablet_mock.cpp:72: Connected to tablet 72057594037927937 from tablet 22222 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert+QueryService+UseSink Test command err: Trying to start YDB, gRPC: 5551, MsgBus: 24706 2025-05-29T15:22:09.541039Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888314340357978:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:09.541074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000fb2/r3tmp/tmpenzIBI/pdisk_1.dat 2025-05-29T15:22:09.608595Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888314340357955:2079] 1748532129540885 != 1748532129540888 2025-05-29T15:22:09.610887Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5551, node 1 2025-05-29T15:22:09.624204Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:09.624218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:09.624219Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:09.624271Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:09.642780Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:09.642808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:09.643870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24706 TClient is connected to server localhost:24706 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:09.690689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.695291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:22:09.705830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.725922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.745234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.755403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.950794Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888314340359585:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.950829Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.989883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.045107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.052228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.066613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.121401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.130244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.148491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.207237Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888318635327541:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.207281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.207280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888318635327546:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.208114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:10.213161Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888318635327548:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:22:10.273760Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888318635327599:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:10.378062Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888318635327608:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:10.379783Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2NmZmMwMmUtNzIwMzNmMzgtZWZiOTE1ZDgtY2Q3YTU2Mzg=, ActorId: [1:7509888314340359582:2401], ActorState: ExecuteState, TraceId: 01jwea5wcy13whqz180h2aeqb0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:10.380601Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F83C831DAC2 14. ??:0: ?? @ 0x7F83C83AF84F Trying to start YDB, gRPC: 27474, MsgBus: 14753 2025-05-29T15:22:14.068600Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888332657124052:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:14.068624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000fb2/r3tmp/tmpIBxfLR/pdisk_1.dat 2025-05-29T15:22:14.117148Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:14.117218Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888332657124031:2079] 1748532134068487 != 1748532134068490 TServer::EnableGrpc on GrpcPort 27474, node 1 2025-05-29T15:22:14.132927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:14.132946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:14.132949Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:14.132997Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14753 TClient is connected to server localhost:14753 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:14.193899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:14.193924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:14.195017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.195055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:14.208635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.271950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.293844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.307188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.408458Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888332657125681:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.408482Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.449140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.456551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.511568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.525730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.539747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.554260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.568103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.584246Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888332657126335:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.584285Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.584326Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888332657126340:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.585218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:14.587757Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888332657126342:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:14.657711Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888332657126394:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:14.758087Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888332657126410:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:14.758210Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2Q0OTQxOGEtY2VmNmRjMmQtZDJjZWUyNTMtMzU1OGZjMDQ=, ActorId: [1:7509888332657125663:2401], ActorState: ExecuteState, TraceId: 01jwea60nq5nrgvnfdv6ez22t4, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:14.758932Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F6A3716FAC2 14. ??:0: ?? @ 0x7F6A3720184F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink Test command err: Trying to start YDB, gRPC: 9364, MsgBus: 7850 2025-05-29T15:22:09.170404Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888311914008874:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:09.170454Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000fe0/r3tmp/tmppAGQht/pdisk_1.dat 2025-05-29T15:22:09.245125Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888311914008709:2079] 1748532129166892 != 1748532129166895 2025-05-29T15:22:09.247815Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9364, node 1 2025-05-29T15:22:09.257747Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:09.257763Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:09.257765Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:09.257808Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7850 TClient is connected to server localhost:7850 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:09.308059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.310249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:22:09.318784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:09.318814Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:09.320430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:09.334629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:09.407707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.428667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.440430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.618072Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888311914010345:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.618140Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.668143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.675918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.689277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.702976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.758698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.773684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.787548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.804081Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888311914011001:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.804103Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.804136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888311914011006:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.804867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:09.813562Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888311914011008:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:22:09.889519Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888311914011060:3395] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:09.974156Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888311914011076:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:09.974247Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTBiOTY3MjgtNWY1ZGUwODAtM2U1NTJhNS05NjQ2YTI1ZQ==, ActorId: [1:7509888311914010342:2401], ActorState: ExecuteState, TraceId: 01jwea5w0bbbfg6y7ctbacs2wx, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:09.974805Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F7C33FE9AC2 14. ??:0: ?? @ 0x7F7C3407B84F Trying to start YDB, gRPC: 19560, MsgBus: 8283 2025-05-29T15:22:13.630146Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888329110194916:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:13.630308Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000fe0/r3tmp/tmpopL9c4/pdisk_1.dat 2025-05-29T15:22:13.679557Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888329110194891:2079] 1748532133629871 != 1748532133629874 2025-05-29T15:22:13.680006Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19560, node 1 2025-05-29T15:22:13.690954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:13.690965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:13.690967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:13.691001Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8283 TClient is connected to server localhost:8283 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:13.759758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:13.759794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:13.760639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.762616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:13.764611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:13.770883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:13.796105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.817277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:13.828369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.026467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888333405163819:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.026508Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.062424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.116972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.126050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.180618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.189068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.203787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.217680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.234243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888333405164474:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.234275Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.234288Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888333405164479:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.235181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:14.245059Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888333405164481:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:14.320822Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888333405164532:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:14.432434Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888333405164548:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:14.432574Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTM2MDc2N2MtOGM5NTJkOGItYWNmMzliMjktMzU2OTJiZDM=, ActorId: [1:7509888333405163801:2401], ActorState: ExecuteState, TraceId: 01jwea60asdz2acnndb5ynppn5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:14.433321Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F8524159AC2 14. ??:0: ?? @ 0x7F85241EB84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2025-05-29T15:22:16.952408Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.952665Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.955675Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.955720Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.979083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:16.979105Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:16.982638Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:16.983062Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:16.983126Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:16.983337Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:16.983978Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:16.984044Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:16.984094Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 
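The NODE_BROKER records just below print the broker's first epoch as "#1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z": an id.version pair followed by three timestamps, the epoch start, the start of the next epoch one hour later, and a third boundary one further hour out (the point after which the log reports entries as expired). A minimal standalone sketch of that window arithmetic, assuming only what the log itself shows (hour-long epochs, a test clock that begins 24 ms past 1970-01-01); the Epoch struct and every name below are illustrative, not the real NKikimr types:

    // Illustrative reconstruction of the epoch window printed in the log below.
    // Assumptions (read off the log, not taken from the node broker sources):
    //   - an epoch lasts one hour,
    //   - the test clock starts 24 ms past 1970-01-01T00:00:00Z.
    #include <chrono>
    #include <cstdio>

    struct Epoch {                          // hypothetical; the real type lives in ydb/core/mind
        unsigned Id;                        // "#1"
        unsigned Version;                   // ".1"
        std::chrono::milliseconds Start;    // epoch becomes current
        std::chrono::milliseconds End;      // next epoch starts
        std::chrono::milliseconds Expire;   // entries of this epoch count as expired
    };

    int main() {
        using namespace std::chrono;
        const milliseconds bootTime{24};    // 1970-01-01T00:00:00.024000Z
        const auto lease = hours(1);        // epoch duration seen in the log
        const Epoch first{1, 1, bootTime, bootTime + lease, bootTime + 2 * lease};
        std::printf("#%u.%u start=%lldms end=%lldms expire=%lldms\n",
                    first.Id, first.Version,
                    static_cast<long long>(first.Start.count()),
                    static_cast<long long>(first.End.count()),
                    static_cast<long long>(first.Expire.count()));
        return 0;
    }

Run standalone, this prints the same three boundaries the tablet derives before scheduling its next epoch update at 1970-01-01T01:00:00.024000Z.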
2025-05-29T15:22:16.984110Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:16.984115Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:16.984129Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:16.984146Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:16.984151Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:16.984156Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:16.984162Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:16.984178Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1 2025-05-29T15:22:16.984184Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes 2025-05-29T15:22:17.005518Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:17.005574Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.024000Z 2025-05-29T15:22:17.005587Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-05-29T15:22:17.005597Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z nodes=0 expired=0 removed=0 2025-05-29T15:22:17.046295Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:201:2197], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:17.046360Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:201:2197] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } ... 
waiting for nameservers are connected (done) 2025-05-29T15:22:17.046820Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2025-05-29T15:22:17.046830Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:17.046839Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:17.046873Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:205:2201], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:17.046898Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:203:2199], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:22:17.046902Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:17.046908Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:22:17.046947Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:17.046958Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:16:2063], path# /dc-1, domainOwnerId# 72057594046678944 2025-05-29T15:22:17.051414Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-05-29T15:22:17.051474Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:207:2202] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:22:17.051528Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:16:2063], cacheItem# { Subscriber: { Subscriber: [1:207:2202] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:17.051606Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:214:2203], recipient# [1:206:2176], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:17.051622Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: 
false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:17.051637Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <7205759404667894 ... tPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [2:199:2195] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } ... waiting for nameservers are connected (done) 2025-05-29T15:22:17.275870Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [2:18:2065], Recipient [2:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 0 SeqNo: 0 } 2025-05-29T15:22:17.275876Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:17.275883Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [2:18:2065], seqNo: 0, version: 0, server pipe id: [2:199:2195] 2025-05-29T15:22:17.275892Z node 2 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v0 -> v1 to [2:18:2065] ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 2025-05-29T15:22:17.275921Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [2:203:2199], Recipient [2:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:17.275946Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [2:201:2197], Recipient [2:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:22:17.275950Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:17.275957Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:22:17.275995Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:17.276010Z node 2 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [2:16:2063], path# /dc-1, domainOwnerId# 72057594046678944 2025-05-29T15:22:17.276266Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [2:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: 
Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-05-29T15:22:17.276298Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [2:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [2:205:2200] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:22:17.276327Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [2:16:2063], cacheItem# { Subscriber: { Subscriber: [2:205:2200] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { 
Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:17.276369Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [2:212:2201], recipient# [2:204:2176], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:17.276382Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:17.276391Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:17.276401Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [2:204:2176], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:17.276405Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:17.276425Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:17.276428Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:17.276456Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.host1.host1 address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:17.276501Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v2 host1:1001 2025-05-29T15:22:17.276508Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2 2025-05-29T15:22:17.276511Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=2 2025-05-29T15:22:17.287235Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:17.287255Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-05-29T15:22:17.287263Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-05-29T15:22:17.287267Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: 
Add node #1024.v2 host1:1001 to epoch cache 2025-05-29T15:22:17.287296Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-05-29T15:22:17.287338Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } ... waiting for cache miss 2025-05-29T15:22:17.287398Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:525: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1024 Deadline: 1.105512s } 2025-05-29T15:22:17.287409Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:626: New cache miss: nodeId# 1024, deadline# 1.105512s 2025-05-29T15:22:17.287411Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:630: Schedule wakeup for new earliest deadline 1.105512s ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) 2025-05-29T15:22:17.297583Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:17.297599Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:17.297609Z node 2 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v1 -> v2 to [2:18:2065] ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 2025-05-29T15:22:17.359189Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:873: HandleWakeup at 1.106512s 2025-05-29T15:22:17.359226Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:134: Cache miss failed: nodeId=1024, error=Deadline exceeded >> TLocalTests::TestAddTenantWhileResolving [GOOD] >> PQCountersSimple::PartitionFirstClass [GOOD] >> PQCountersSimple::SupportivePartitionCountersPersist >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test [FAIL] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQRBDescribes::PartitionLocations [FAIL] Test command err: Bucket: 100 elems count: 97 Bucket: 200 elems count: 104 Bucket: 500 elems count: 288 Bucket: 1000 elems count: 528 Bucket: 2000 elems count: 1008 Bucket: 5000 elems count: 2976 2025-05-29T15:22:13.712308Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888331353388073:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:13.712580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:13.715678Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888328627958827:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:13.715703Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:13.739514Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:13.740018Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000a8b/r3tmp/tmpcPZRKa/pdisk_1.dat 2025-05-29T15:22:13.771307Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24674, node 1 2025-05-29T15:22:13.792124Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/000a8b/r3tmp/yandexix2Vv6.tmp 2025-05-29T15:22:13.792139Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/000a8b/r3tmp/yandexix2Vv6.tmp 2025-05-29T15:22:13.792196Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/000a8b/r3tmp/yandexix2Vv6.tmp 2025-05-29T15:22:13.792252Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:13.799431Z INFO: TTestServer started on Port 7531 GrpcPort 24674 2025-05-29T15:22:13.812371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:13.812398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:7531 2025-05-29T15:22:13.815171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:24674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:13.844997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:13.850010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:13.850033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:13.851213Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:22:13.851512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:13.865079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:14.079013Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888335648356231:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.079053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888335648356222:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.079068Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.079596Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888335648356261:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.079628Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.080015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:14.084599Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888335648356236:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2025-05-29T15:22:14.117908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.139199Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888335648356423:2803] txid# 281474976720664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:14.150709Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888335648356442:2354], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:14.150829Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDNkMmQ4MzgtZGI3NDYxYjAtODIwYTJjMzEtYWE4NjJmMGM=, ActorId: [1:7509888335648356219:2334], ActorState: ExecuteState, TraceId: 01jwea605wf2etp9rq5mz5nyat, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:14.151271Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:14.177198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.185575Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888332922926508:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:14.186193Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=NGRlZTM3OTgtNTRjMmQzYzctOWY2Yjk2Yy02NzU5Yzk3NA==, ActorId: [2:7509888332922926469:2309], ActorState: ExecuteState, TraceId: 01jwea6092d61tfwb7gvkb2dwm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:14.186330Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:14.199103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:14.231852Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888335648356727:2381], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:14.232210Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2Q4ZWEzOTAtYmU0MjE1YzYtMWNlYzMwYzItY2M1ODI1Mjk=, ActorId: [1:7509888335648356724:2379], ActorState: ExecuteState, TraceId: 01jwea60a6dvv6ttg78rx7jftv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C16BFC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DCA7A9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x13965684) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x1394D528) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139741A2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139720D7) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x13970D08) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x13969A6B) NPersQueue::TTestServer::TTestServer(bool)+134 (0x139502B6) NKikimr::NPQ::NTestSuiteTPQRBDescribes::TTestCasePartitionLocations::Execute_(NUnitTest::TTestContext&)+37 (0x13AE7965) NKikimr::NPQ::NTestSuiteTPQRBDescribes::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13AEC9F7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13DCC65E) NKikimr::NPQ::NTestSuiteTPQRBDescribes::TCurrentTest::Execute()+436 (0x13AEC3B4) NUnitTest::TTestFactory::Execute()+803 (0x13DCCDD3) NUnitTest::RunMain(int, char**)+3021 (0x13DDE97D) ??+0 (0x7F2CFDC88D90) __libc_start_main+128 (0x7F2CFDC88E40) _start+41 (0x12A48029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-false [GOOD] Test command err: 2025-05-29T15:22:16.674735Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.675015Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.677697Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.677726Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.699144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:16.699165Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-05-29T15:22:16.703325Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:16.703798Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:16.703869Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:16.704067Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:16.704702Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:16.704863Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:16.704907Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:16.704919Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:16.704922Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:16.704935Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:16.704967Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:16.704971Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:16.704974Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:16.704979Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:16.704994Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1 2025-05-29T15:22:16.704999Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes 2025-05-29T15:22:16.726253Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:16.726292Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-05-29T15:22:16.726301Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-05-29T15:22:16.726310Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z nodes=0 expired=0 removed=0 2025-05-29T15:22:16.767260Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:201:2197], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:16.767308Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:201:2197] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } ... 
waiting for nameservers are connected (done) 2025-05-29T15:22:16.767795Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:18:2065], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 0 SeqNo: 0 } 2025-05-29T15:22:16.767805Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:16.767811Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:18:2065], seqNo: 0, version: 0, server pipe id: [1:201:2197] 2025-05-29T15:22:16.767822Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v0 -> v1 to [1:18:2065] ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 2025-05-29T15:22:16.767855Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:205:2201], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:16.767884Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:203:2199], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:22:16.767888Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:16.767895Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:22:16.767953Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:16.767966Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:16:2063], path# /dc-1, domainOwnerId# 72057594046678944 2025-05-29T15:22:16.773348Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-05-29T15:22:16.773452Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:207:2202] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:22:16.773515Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:16:2063], cacheItem# { Subscriber: { Subscriber: [1:207:2202] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:16.773650Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:214:2203], recipient# [1:206:2176], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:16.773668Z node 1 :NODE_BROKER TRACE: 
node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:16.773694Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finish ... indUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:17.208131Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [2:212:2201], recipient# [2:204:2176], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:17.208149Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:17.208166Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:17.208184Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [2:204:2176], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:17.208189Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:17.208208Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:17.208213Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:17.208248Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.host1.host1 address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:17.208307Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v2 host1:1001 
2025-05-29T15:22:17.208315Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2 2025-05-29T15:22:17.208320Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=2 2025-05-29T15:22:17.219325Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:17.219353Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-05-29T15:22:17.219366Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-05-29T15:22:17.219373Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v2 host1:1001 to epoch cache 2025-05-29T15:22:17.219404Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-05-29T15:22:17.219460Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } 2025-05-29T15:22:17.219588Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [2:216:2205], Recipient [2:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:17.219629Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [2:201:2197], Recipient [2:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:22:17.219636Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:17.219648Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:22:17.219697Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:17.219730Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [2:16:2063], cacheItem# { Subscriber: { Subscriber: [2:205:2200] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:17.219787Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [2:218:2206], 
recipient# [2:217:2176], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:17.219805Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:17.219819Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:17.219834Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [2:217:2176], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:17.219840Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:17.219856Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:17.219860Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:17.219889Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v3 host2:1001 to database state=Active resolvehost=host2.host2.host2 address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-05-29T15:22:17.219947Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v3 host2:1001 2025-05-29T15:22:17.219955Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 2 to 3 2025-05-29T15:22:17.219962Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=3 2025-05-29T15:22:17.230881Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:17.230907Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v3 host2:1001 2025-05-29T15:22:17.230916Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 2 to 3 2025-05-29T15:22:17.230921Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v3 host2:1001 to epoch cache 2025-05-29T15:22:17.230946Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v3 to update 
nodes log 2025-05-29T15:22:17.230998Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200023000 Name: "slot-1" } ... waiting for cache miss 2025-05-29T15:22:17.231086Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:525: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1024 Deadline: 2.107024s } 2025-05-29T15:22:17.231108Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:626: New cache miss: nodeId# 1024, deadline# 2.107024s 2025-05-29T15:22:17.231113Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:630: Schedule wakeup for new earliest deadline 2.107024s 2025-05-29T15:22:17.231122Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:525: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1025 Deadline: 1.107024s } 2025-05-29T15:22:17.231128Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:626: New cache miss: nodeId# 1025, deadline# 1.107024s 2025-05-29T15:22:17.231132Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:630: Schedule wakeup for new earliest deadline 1.107024s ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) 2025-05-29T15:22:17.292672Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:873: HandleWakeup at 1.108024s 2025-05-29T15:22:17.292693Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:134: Cache miss failed: nodeId=1025, error=Deadline exceeded 2025-05-29T15:22:17.333683Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:873: HandleWakeup at 2.108024s 2025-05-29T15:22:17.333709Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:134: Cache miss failed: nodeId=1024, error=Deadline exceeded ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService Test command err: Trying to start YDB, gRPC: 22538, MsgBus: 9486 2025-05-29T15:22:08.552415Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888308592303896:2136];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:08.552526Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001025/r3tmp/tmpzYub6l/pdisk_1.dat 2025-05-29T15:22:08.619650Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:08.619709Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888308592303799:2079] 1748532128551401 != 1748532128551404 TServer::EnableGrpc on GrpcPort 22538, node 1 2025-05-29T15:22:08.638445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:08.638463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:08.638465Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:08.638515Z node 
1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9486 TClient is connected to server localhost:9486 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:08.695864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:08.695894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:08.696980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:08.707182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.721525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.788633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.811284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.825289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:08.912169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888308592305437:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.912188Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:08.959201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.967560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.978017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:08.991035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.005215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.019142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.032024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.047036Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888312887273385:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.047071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.047084Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888312887273390:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.047973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:09.050624Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888312887273392:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:09.113414Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888312887273443:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:09.222912Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888312887273459:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:09.223042Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODkxN2VlYjAtN2Q5NTY0MDgtNGY5YzM5OWUtNDI3YmM2M2Q=, ActorId: [1:7509888308592305434:2401], ActorState: ExecuteState, TraceId: 01jwea5v8pbbzhs9b1egxcdhd1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:09.224510Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F12393A7AC2 14. ??:0: ?? @ 0x7F123943984F Trying to start YDB, gRPC: 7572, MsgBus: 25615 2025-05-29T15:22:13.758141Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888329571144057:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:13.758170Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001025/r3tmp/tmp8uJJYs/pdisk_1.dat 2025-05-29T15:22:13.815780Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:13.815864Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888329571144037:2079] 1748532133758001 != 1748532133758004 TServer::EnableGrpc on GrpcPort 7572, node 1 2025-05-29T15:22:13.832160Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:13.832173Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:13.832175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:13.832219Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25615 TClient is connected to server localhost:25615 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:13.889104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:13.889126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:13.890104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:13.905123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:13.910963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:13.917998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:13.984268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.006449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.015809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.210592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888333866112986:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.210621Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.264154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.272130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.327168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.336021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.343378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.357225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.364369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.381284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888333866113639:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.381310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888333866113644:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.381315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.382094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:14.384567Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888333866113646:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:14.438805Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888333866113697:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:14.548639Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888333866113713:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:14.548757Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzcyMDQ5NTctYmMyZDBiMGEtODI2M2JlYmItYjhkMGYyODI=, ActorId: [1:7509888333866112968:2401], ActorState: ExecuteState, TraceId: 01jwea60fc0377rjf08cbjtwr4, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:14.549387Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F7A9F18AAC2 14. ??:0: ?? @ 0x7F7A9F21C84F >> TSlotIndexesPoolTest::Expansion [GOOD] >> TNodeBrokerTest::Test1001NodesSubscribers >> TPQTest::TestDescribeBalancer [GOOD] >> TNodeBrokerTest::RegistrationPipeliningNodeName >> TPQTest::TestCheckACL ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] Test command err: 2025-05-29T15:22:17.260315Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [1:117:2151] Bootstrap 2025-05-29T15:22:17.283422Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [1:117:2151] Become StateWork (SchemeCache [1:123:2157]) 2025-05-29T15:22:17.290772Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:17.292014Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:17.292047Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:22:17.292297Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:17.292400Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:22:17.292473Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:22:17.292478Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:22:17.292496Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:22:17.294832Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:22:17.294897Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:22:17.294906Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:22:17.294924Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:22:17.294935Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:22:17.294946Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:22:17.316780Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:22:17.316827Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:22:17.327600Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:22:17.327642Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:22:17.327654Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:22:17.327662Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:22:17.327683Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:22:17.327690Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:22:17.327695Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:22:17.327700Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:22:17.338487Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:22:17.338529Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:22:17.349360Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:22:17.349419Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:22:17.349589Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:22:17.349595Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:22:17.350955Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:22:17.350970Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:22:17.351199Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 
PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-05-29T15:22:17.351454Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ciyv/002573/r3tmp/tmppddBQm/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2025-05-29T15:22:17.351536Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/ciyv/002573/r3tmp/tmppddBQm/pdisk_1.dat 2025-05-29T15:22:17.351750Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-05-29T15:22:17.351788Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-05-29T15:22:17.351800Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-05-29T15:22:17.351829Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-05-29T15:22:17.351850Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-05-29T15:22:17.352284Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-05-29T15:22:17.352318Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-05-29T15:22:17.363380Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-05-29T15:22:17.363519Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-05-29T15:22:17.363577Z node 1 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:22:17.363584Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-05-29T15:22:17.363613Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:22:17.363629Z node 1 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:22:17.363735Z node 1 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:22:17.363745Z node 1 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:22:17.363750Z node 1 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:22:17.363769Z node 1 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:331:2301] 
2025-05-29T15:22:17.364261Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:22:17.364271Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:325:2297] 2025-05-29T15:22:17.364407Z node 1 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:331:2301]} 2025-05-29T15:22:17.364415Z node 1 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:22:17.364422Z node 1 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:22:17.364424Z node 1 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:22:17.378884Z node 1 :LOCAL DEBUG: local.cpp:1207: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2025-05-29T15:22:17.378908Z node 1 :LOCAL DEBUG: local.cpp:1066: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2025-05-29T15:22:17.383240Z node 1 :LOCAL DEBUG: local.cpp:1234: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2025-05-29T15:22:17.383301Z node 1 :LOCAL DEBUG: local.cpp:1172: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 5 Memory: 5 Network: 1) 2025-05-29T15:22:17.383408Z node 1 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:22:17.383413Z node 1 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:22:17.383427Z node 1 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:387:2337] 2025-05-29T15:22:17.383872Z node 1 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:387:2337]} 2025-05-29T15:22:17.383926Z node 1 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:22:17.383933Z node 1 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:22:17.383936Z node 1 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:22:17.386037Z node 1 :LOCAL DEBUG: local.cpp:1407: TDomainLocal(dc-1): Alter tenant /dc-1/users/tenant-1 2025-05-29T15:22:17.386082Z node 1 :LOCAL DEBUG: local.cpp:726: Updated resoure limit: CPU: 10 Memory: 10 Network: 10 2025-05-29T15:22:17.386088Z node 1 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendSt ... 
1@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:22:17.509231Z node 2 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:22:17.509265Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:22:17.509277Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:22:17.509410Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:22:17.509422Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:22:17.509448Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:22:17.531645Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:22:17.531688Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:22:17.542599Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:22:17.542651Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:22:17.542666Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:22:17.542676Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:22:17.542699Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:22:17.542705Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:22:17.542709Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:22:17.542714Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:22:17.553506Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:22:17.553560Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:22:17.564349Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:22:17.564395Z node 2 :BS_CONTROLLER DEBUG: 
{BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:22:17.564517Z node 2 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:22:17.564522Z node 2 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:22:17.564596Z node 2 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:22:17.564602Z node 2 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:22:17.564766Z node 2 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-05-29T15:22:17.564849Z node 2 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ciyv/002573/r3tmp/tmpRh8HGk/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2025-05-29T15:22:17.564894Z node 2 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 2:1 Path# /home/runner/.ya/build/build_root/ciyv/002573/r3tmp/tmpRh8HGk/pdisk_1.dat 2025-05-29T15:22:17.565012Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 2 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-05-29T15:22:17.565051Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 1 VSlotId: 0 } } 2025-05-29T15:22:17.565065Z node 2 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 2 PDiskId: 1 VSlotId: 0 } 2025-05-29T15:22:17.565104Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-05-29T15:22:17.565173Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-05-29T15:22:17.565421Z node 2 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 2 PDiskId: 1 VSlotId: 0 } Success: true } 2025-05-29T15:22:17.565449Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 1 VSlotId: 0 } } 2025-05-29T15:22:17.576188Z node 2 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-05-29T15:22:17.576292Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-05-29T15:22:17.576357Z node 2 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:22:17.576371Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 
2025-05-29T15:22:17.576397Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:22:17.576414Z node 2 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:22:17.576496Z node 2 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:22:17.576506Z node 2 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:22:17.576510Z node 2 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:22:17.576526Z node 2 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:333:2303] 2025-05-29T15:22:17.576852Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:22:17.576859Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:327:2299] 2025-05-29T15:22:17.576986Z node 2 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:333:2303]} 2025-05-29T15:22:17.576994Z node 2 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:22:17.577001Z node 2 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:22:17.577004Z node 2 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:22:17.593091Z node 2 :LOCAL DEBUG: local.cpp:1207: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2025-05-29T15:22:17.593110Z node 2 :LOCAL DEBUG: local.cpp:1066: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2025-05-29T15:22:17.593133Z node 2 :LOCAL DEBUG: local.cpp:1066: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-2 to schemeshard 72057594046578944 2025-05-29T15:22:17.593209Z node 2 :LOCAL DEBUG: local.cpp:1234: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2025-05-29T15:22:17.593222Z node 2 :LOCAL DEBUG: local.cpp:1172: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:22:17.593272Z node 2 :LOCAL DEBUG: local.cpp:1234: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-2" PathDescription { Self { Name: "/dc-1/users/tenant-2" PathId: 101 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 101 DomainKey { SchemeShard: 72057594046578944 PathId: 101 } } } 2025-05-29T15:22:17.593277Z node 2 :LOCAL DEBUG: local.cpp:1172: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-2 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:22:17.593306Z node 2 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:22:17.593311Z node 2 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:22:17.593321Z node 2 :LOCAL DEBUG: 
local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:418:2360] 2025-05-29T15:22:17.593337Z node 2 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:22:17.593340Z node 2 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:22:17.593344Z node 2 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:419:2362] 2025-05-29T15:22:17.593464Z node 2 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:418:2360]} 2025-05-29T15:22:17.593492Z node 2 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:22:17.593497Z node 2 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:22:17.593500Z node 2 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:22:17.593508Z node 2 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:419:2362]} 2025-05-29T15:22:17.593525Z node 2 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:22:17.593530Z node 2 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:22:17.593532Z node 2 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test [FAIL] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [FAIL] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test |59.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Expansion [GOOD] >> TPQTest::TestAccountReadQuota [GOOD] >> TPQTest::TestAlreadyWritten ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn+QueryService+UseSink Test command err: Trying to start YDB, gRPC: 18266, MsgBus: 28224 2025-05-29T15:22:10.176361Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888317246867038:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:10.176381Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000f6d/r3tmp/tmpqtVe9t/pdisk_1.dat 2025-05-29T15:22:10.245238Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:10.245309Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888317246867018:2079] 1748532130176263 != 1748532130176266 TServer::EnableGrpc on GrpcPort 18266, node 1 2025-05-29T15:22:10.257345Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:10.257358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:10.257359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:10.257406Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28224 2025-05-29T15:22:10.278672Z 
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:10.278703Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:10.279776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28224 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:10.319383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.328073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.346622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.368059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.387702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:10.561725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888317246868658:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.561745Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.599935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.608109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.620069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.633733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.690905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.703709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.718166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.734130Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888317246869313:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.734152Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.734161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888317246869318:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.734798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:10.737269Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888317246869320:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:10.809733Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888317246869371:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:10.924139Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888317246869387:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:10.924265Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzM3OTViNDYtY2I0ZmI5ZDctZWNhNTEyYzYtN2E3Y2QzNTc=, ActorId: [1:7509888317246868655:2401], ActorState: ExecuteState, TraceId: 01jwea5wxd5px4c20dd3qa19ez, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:10.924942Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F335C377AC2 14. ??:0: ?? @ 0x7F335C40984F Trying to start YDB, gRPC: 4573, MsgBus: 17689 2025-05-29T15:22:14.289856Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888335446948057:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:14.289903Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000f6d/r3tmp/tmpJfH9Bo/pdisk_1.dat 2025-05-29T15:22:14.346441Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888335446948036:2079] 1748532134289703 != 1748532134289706 2025-05-29T15:22:14.348697Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4573, node 1 2025-05-29T15:22:14.358151Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:14.358164Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:14.358166Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:14.358201Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17689 2025-05-29T15:22:14.392953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:14.392979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:14.393962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17689 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:14.423148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.437194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.454643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.514517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.526826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.718614Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888335446949672:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.718643Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.766202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.773359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.828728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.883872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.896821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.910453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.924826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.940538Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888335446950328:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.940566Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.940622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888335446950333:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.941464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:14.944272Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888335446950335:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:15.008940Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888339741917682:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:15.096863Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888339741917698:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:15.096972Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NGJmMWE4MjQtYmIxMTczOGYtZDg1YmIzYzYtNDBjMjMwNA==, ActorId: [1:7509888335446949646:2401], ActorState: ExecuteState, TraceId: 01jwea610wdwq27rxn93jxfs6g, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:15.097737Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7FF0CC0F1AC2 14. ??:0: ?? @ 0x7FF0CC18384F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink Test command err: Trying to start YDB, gRPC: 4866, MsgBus: 31256 2025-05-29T15:22:09.514035Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888313252832306:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:09.514115Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000fa3/r3tmp/tmp5jDKay/pdisk_1.dat 2025-05-29T15:22:09.571405Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:09.571730Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888313252832133:2079] 1748532129512453 != 1748532129512456 TServer::EnableGrpc on GrpcPort 4866, node 1 2025-05-29T15:22:09.580825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:09.580840Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:09.580842Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:09.580895Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31256 TClient is connected to server localhost:31256 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:09.643742Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:09.643779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:09.644751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:09.644904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:09.651567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.715480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.736833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.747471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:09.950852Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888313252833767:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.950878Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:09.989827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.045107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.052634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.066440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.080511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.095301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.108702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.124554Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888317547801720:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.124577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888317547801725:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.124581Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:10.125288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:10.128443Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888317547801727:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:10.213954Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888317547801778:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:10.331409Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888317547801794:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:10.331542Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NGIyNjNiM2MtODA1OWE2NzgtMTM4ZDg5NzMtZWQxYTk0ZA==, ActorId: [1:7509888313252833749:2401], ActorState: ExecuteState, TraceId: 01jwea5wac25m55vcq2bgwwyb6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:10.334986Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7FEF4CD23AC2 14. ??:0: ?? @ 0x7FEF4CDB584F Trying to start YDB, gRPC: 21439, MsgBus: 23441 2025-05-29T15:22:14.534949Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888335241321787:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:14.534979Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000fa3/r3tmp/tmpH7d4of/pdisk_1.dat 2025-05-29T15:22:14.589736Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888335241321767:2079] 1748532134534807 != 1748532134534810 2025-05-29T15:22:14.592257Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21439, node 1 2025-05-29T15:22:14.601797Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:14.601807Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:14.601809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:14.601858Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23441 TClient is connected to server localhost:23441 2025-05-29T15:22:14.637494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:14.637519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:22:14.638556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:14.669040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.679407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.744565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.766104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.777722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:14.866548Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888335241323400:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.866570Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.900659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.907430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.917636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.930926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.937948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.945275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.959655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.976578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888335241324054:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.976611Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.976617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888335241324059:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:14.977505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:14.986639Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888335241324061:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:15.055315Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888339536291408:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:15.131015Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888339536291424:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:15.131122Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjY4ODRjNzAtZDYzYzc4NGMtZGRjNDdhYzYtOWJlNWU5ZGM=, ActorId: [1:7509888335241323382:2401], ActorState: ExecuteState, TraceId: 01jwea61207s5ybgb09g843ffh, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:15.131821Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AC65E5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ABD5E6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C5F376 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x261025A2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26101EA2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x261237FC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x261237FC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x261237FC 8. /-S/util/thread/pool.h:71: Process @ 0x261237FC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13ACDF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13ACC959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13ACC959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AC7DCC 13. ??:0: ?? @ 0x7F2B01CCEAC2 14. ??:0: ?? @ 0x7F2B01D6084F >> TNodeBrokerTest::ShiftIdRangeRemoveActive >> TNodeBrokerTest::UpdateNodesLogEmptyEpoch >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-true >> TNodeBrokerTest::NodesMigrationExpireActive [GOOD] >> TNodeBrokerTest::TestListNodes >> KqpOlapBlobsSharing::SplitEmpty [GOOD] >> TPQTest::TestWritePQCompact [GOOD] >> TPQTest::TestWriteSplit >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches [GOOD] >> TNodeBrokerTest::UpdateEpochPipelining >> TNodeBrokerTest::ExtendLeaseSetLocationInOneRegistration >> TNodeBrokerTest::NodesMigrationReuseID >> TNodeBrokerTest::NodesMigrationRemoveActive ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TMeteringSink::FlushStorageV1 [GOOD] Test command err: 2025-05-29T15:22:13.315611Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888327848827476:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:13.315639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:13.347639Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000aa6/r3tmp/tmpqK5bV1/pdisk_1.dat 2025-05-29T15:22:13.377866Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:13.378005Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888327848827455:2079] 1748532133315462 != 1748532133315465 TServer::EnableGrpc on GrpcPort 23861, node 1 2025-05-29T15:22:13.391282Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/000aa6/r3tmp/yandexvwO7KM.tmp 2025-05-29T15:22:13.391297Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/000aa6/r3tmp/yandexvwO7KM.tmp 2025-05-29T15:22:13.391365Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/000aa6/r3tmp/yandexvwO7KM.tmp 2025-05-29T15:22:13.391427Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:13.396089Z INFO: TTestServer started on Port 19373 GrpcPort 23861 TClient is connected to server localhost:19373 PQClient connected to localhost:23861 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:13.418298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:13.418326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:13.419377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:13.424867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:13.433321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:13.732262Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888327848828265:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.732283Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888327848828276:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.732291Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.732982Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888327848828306:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.733005Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.733101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:13.735184Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888327848828279:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-05-29T15:22:13.770261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.777653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.799801Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888327848828485:2525] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:13.810240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.818324Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888327848828494:2360], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:13.819170Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2IyM2U3OGYtNzM2NWIxYTYtZGJmMzMzNGItOTUzYTg4ZWU=, ActorId: [1:7509888327848828262:2333], ActorState: ExecuteState, TraceId: 01jwea5zv26mw0fm47k7jwvaf4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:13.819633Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:13.839614Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888327848828600:2383], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:13.839754Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDI1YzVjNWUtZjI1MDc4OGItNjdhMjZmZmMtMjE5MzNjMzA=, ActorId: [1:7509888327848828597:2381], ActorState: ExecuteState, TraceId: 01jwea5zxy6zcszbfvgdhqpnn7, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C16BFC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DCA7A9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x13965684) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x1394D528) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139741A2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139720D7) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x13970D08) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x13969A6B) NKikimr::NPersQueueTests::NTestSuiteTList ... or;event=undelivered;self_id=[3:7509888332571567872:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:14.993249Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000aa6/r3tmp/tmpo5VaSx/pdisk_1.dat 2025-05-29T15:22:14.999800Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created TServer::EnableGrpc on GrpcPort 31400, node 3 2025-05-29T15:22:15.010717Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:15.011128Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888332571567854:2079] 1748532134993118 != 1748532134993121 2025-05-29T15:22:15.015819Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/000aa6/r3tmp/yandexNgta2Q.tmp 2025-05-29T15:22:15.015829Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/000aa6/r3tmp/yandexNgta2Q.tmp 2025-05-29T15:22:15.015885Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/000aa6/r3tmp/yandexNgta2Q.tmp 2025-05-29T15:22:15.015928Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:15.019414Z INFO: TTestServer started on Port 4354 GrpcPort 31400 TClient is connected to server localhost:4354 PQClient connected to localhost:31400 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:15.097736Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:15.097770Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:15.098045Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:15.098712Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:15.105708Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:15.293145Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509888336866535956:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:15.293166Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509888336866535968:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:15.293173Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:15.293857Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509888336866535998:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:15.293879Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:15.293954Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:15.295834Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7509888336866535970:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:22:15.297702Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:15.309467Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:15.371976Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:15.383910Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:7509888336866536277:2577] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:15.397430Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:7509888336866536275:2378], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:15.398442Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=OWQ4NDYwYzctNTE2NTU5NWEtNjYyZjIwMTQtZjg1ZDFhMDg=, ActorId: [3:7509888336866536272:2376], ActorState: ExecuteState, TraceId: 01jwea61epcex8xjaxbxt9gfs8, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:22:15.419440Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 22 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C16BFC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DCA7A9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x13965684) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x1394D528) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139741A2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139720D7) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x13970D08) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x13969A6B) NKikimr::NPersQueueTests::NTestSuiteTListAllTopicsTests::TTestCaseListLimitAndPaging::Execute_(NUnitTest::TTestContext&)+329 (0x13B0EA49) NKikimr::NPersQueueTests::NTestSuiteTListAllTopicsTests::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13B140A7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13DCC65E) NKikimr::NPersQueueTests::NTestSuiteTListAllTopicsTests::TCurrentTest::Execute()+426 (0x13B13A6A) NUnitTest::TTestFactory::Execute()+803 (0x13DCCDD3) NUnitTest::RunMain(int, char**)+3021 (0x13DDE97D) ??+0 (0x7F81ED806D90) __libc_start_main+128 (0x7F81ED806E40) _start+41 (0x12A48029) >> TPQTest::TestSetClientOffset [GOOD] >> TPQTest::TestReadSessions >> TNodeBrokerTest::SyncNodes >> TPartitionTests::ConflictingSrcIdForTxWithHead >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [FAIL] >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireActive [GOOD] Test command err: 2025-05-29T15:22:16.810770Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.810820Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.810840Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.810867Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.810889Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.810904Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.816478Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.816578Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.816616Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.816650Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.816685Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.816712Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.816774Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.816795Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.816992Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.817010Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.817023Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.817035Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.817049Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.817062Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.817089Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.821181Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.821227Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.821251Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.822002Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.822032Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.822056Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.822122Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.822137Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.822153Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.822169Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.822182Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.822320Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.822339Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 
18446744073709.551615s } 2025-05-29T15:22:16.822354Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.822375Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.822392Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.822454Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.822518Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.822550Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.823234Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.823276Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.823290Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.823304Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.823317Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.823328Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.823340Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.826686Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.826730Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.826779Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.826793Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.827309Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.827346Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.827558Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.827783Z node 1 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.827990Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.828028Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.828123Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.828212Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.828300Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.828884Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.829061Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.829589Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.829771Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.853850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:16.853871Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:16.857322Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:16.857628Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:16.857669Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:16.857833Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:16.858373Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:16.858389Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:16.858429Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 
2025-05-29T15:22:16.858441Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:16.858444Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:16.858457Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:16.858493Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:16.858497Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:16.858501Z node 1 :NODE_BROKER DEBU ... -29T15:22:17.146066Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1214: [DB] Migrating changed node #1024.v4 { NodeId: 1024, State: Expired, Version: 4, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:17.146074Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:17.146098Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:17.146103Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 1, NewVersionUpdateNodes left 0 2025-05-29T15:22:17.146109Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v4 host1:1001 to database state=Expired resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:17.146145Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:17.146149Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #3.4 2025-05-29T15:22:17.168095Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:17.168131Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:17.168139Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z, approximate epoch start #3.4 nodes=0 expired=1 2025-05-29T15:22:17.168163Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z nodes=0 expired=1 removed=0 2025-05-29T15:22:17.168168Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v4 to update nodes log 2025-05-29T15:22:17.168291Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:687:2244], Recipient [1:679:2238]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:17.168324Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:688:2245], Recipient [1:679:2238]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:17.168374Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected 
{ TabletId: 72057594037936129 Status: OK ServerId: [1:687:2244] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:17.168390Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:690:2247], Recipient [1:679:2238]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:17.168395Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:690:2247] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:17.168404Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:691:2248] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:17.168411Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:693:2250] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:17.168436Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:688:2245] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:17.168443Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:691:2248], Recipient [1:679:2238]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:17.168460Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:693:2250], Recipient [1:679:2238]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:17.168506Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:690:2247] 2025-05-29T15:22:17.168511Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:17.168519Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:17.168532Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:691:2248] 2025-05-29T15:22:17.168534Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:17.168538Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:17.168551Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:687:2244] 2025-05-29T15:22:17.168553Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:17.168556Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:17.168590Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:688:2245] 2025-05-29T15:22:17.168593Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:17.168597Z node 1 :NODE_BROKER TRACE: 
node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:17.168601Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:693:2250] 2025-05-29T15:22:17.168604Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:17.168607Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:17.168724Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:721:2272], Recipient [1:679:2238]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:17.168745Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:627:2213], Recipient [1:679:2238]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:17.168750Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:17.168756Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:17.168813Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:723:2274], Recipient [1:679:2238]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:17.168826Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:627:2213], Recipient [1:679:2238]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:17.168828Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:17.168831Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:17.168868Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:725:2276], Recipient [1:679:2238]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:17.168878Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:627:2213], Recipient [1:679:2238]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:17.168881Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:17.168884Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:17.168938Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:727:2278], Recipient [1:679:2238]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:17.168957Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:627:2213], Recipient [1:679:2238]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 4 } 2025-05-29T15:22:17.168962Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:17.168967Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send 
TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:17.169016Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:729:2280], Recipient [1:679:2238]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:17.169026Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:627:2213], Recipient [1:679:2238]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 4 SeqNo: 2 } 2025-05-29T15:22:17.169029Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:17.169035Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2213], seqNo: 2, version: 4, server pipe id: [1:729:2280] 2025-05-29T15:22:17.169039Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v4 -> v4 to [1:627:2213] 2025-05-29T15:22:17.169076Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:729:2280], Recipient [1:679:2238]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:17.169079Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2213], seqNo: 2, server pipe id: [1:729:2280] 2025-05-29T15:22:17.169095Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:731:2282], Recipient [1:679:2238]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:17.169109Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:627:2213], Recipient [1:679:2238]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:17.169113Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:17.169125Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [FAIL] Test command err: 2025-05-29T15:22:15.260644Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888335893770440:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:15.260685Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:15.264530Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888338594191693:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:15.264618Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0009d9/r3tmp/tmpoFrjpB/pdisk_1.dat 2025-05-29T15:22:15.298371Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:15.300320Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 
2025-05-29T15:22:15.321536Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19939, node 1 2025-05-29T15:22:15.333647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/0009d9/r3tmp/yandexn2iqul.tmp 2025-05-29T15:22:15.333664Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/0009d9/r3tmp/yandexn2iqul.tmp 2025-05-29T15:22:15.333720Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/0009d9/r3tmp/yandexn2iqul.tmp 2025-05-29T15:22:15.333776Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:15.339865Z INFO: TTestServer started on Port 12006 GrpcPort 19939 TClient is connected to server localhost:12006 PQClient connected to localhost:19939 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:15.361159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:15.361192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:15.362783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:15.392618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:15.392646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:15.393470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:15.393652Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:22:15.393881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:22:15.408471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
waiting... waiting... 2025-05-29T15:22:15.586621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888335893771430:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:15.586645Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:15.586704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888335893771457:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:15.587384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888335893771484:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:15.587398Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:15.587463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:15.591594Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888335893771459:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:22:15.617922Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888338594192077:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:15.618017Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=NjcxN2RiZjctYTUxYjAyNDItNGE4OGRjYi1iMjBlN2FlMQ==, ActorId: [2:7509888338594192038:2309], ActorState: ExecuteState, TraceId: 01jwea61nc1h7nhdp23wznhf0b, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:15.618332Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:15.618361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:15.662285Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888335893771637:2799] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:15.665519Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888335893771656:2354], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:15.665601Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjE4N2UzNDYtM2NiMzMwMmEtNjE2ZDkwZDctNTgzNDYxMjE=, ActorId: [1:7509888335893771427:2334], ActorState: ExecuteState, TraceId: 01jwea61n202a0thc09w4j0yzf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:15.665725Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:15.676612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:15.694585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:15.727916Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888335893771941:2381], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:15.728048Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjMyZTEzMjMtYTM1NmNkNjktNWJjZWE1ZjgtZTExZTVjZTI=, ActorId: [1:7509888335893771938:2379], ActorState: ExecuteState, TraceId: 01jwea61rzbpb90xyxp6vwdvm0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C16BFC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DCA7A9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x13965684) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x1394D528) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139741A2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139720D7) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x13970D08) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x13969A6B) NPersQueue::TTestServer::TTestServer(bool)+134 (0x139502B6) NTestSuiteTPartitionChooserSuite::CreateServer()+24 (0x13950118) NTestSuiteTPartitionChooserSuite::TTestCaseTPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test::Execute_(NUnitTest::TTestContext&)+30 (0x1395D70E) NTestSuiteTPartitionChooserSuite::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13962767) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13DCC65E) NTestSuiteTPartitionChooserSuite::TCurrentTest::Execute()+433 (0x13962121) NUnitTest::TTestFactory::Execute()+803 (0x13DCCDD3) NUnitTest::RunMain(int, char**)+3021 (0x13DDE97D) ??+0 (0x7FE14E8FBD90) __libc_start_main+128 (0x7FE14E8FBE40) _start+41 (0x12A48029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::SplitEmpty [GOOD] Test command err: Trying to start YDB, gRPC: 31676, MsgBus: 12216 2025-05-29T15:21:34.129644Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888161226626493:2196];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:34.129763Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026b1/r3tmp/tmpevPZw4/pdisk_1.dat 2025-05-29T15:21:34.318840Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888161226626336:2079] 1748532094127283 != 1748532094127286 2025-05-29T15:21:34.322837Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:34.323268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:34.323282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:34.331728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31676, node 1 2025-05-29T15:21:34.351223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken 
or outdated, will use file: (empty maybe) 2025-05-29T15:21:34.351232Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:34.351234Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:34.351269Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12216 TClient is connected to server localhost:12216 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.491634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.499254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.820699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:35.848306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888165521597030:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:35.848364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888165521597030:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:35.852314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888165521597030:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:35.852364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888165521597030:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:35.852386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888165521597030:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:35.852420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888165521597030:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:35.853161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888165521597030:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:35.853219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888165521597030:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:35.853257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888165521597030:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:35.853283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888165521597030:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:35.853329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888165521597030:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:35.853365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888165521597030:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:35.854368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;self_id=[1:7509888165521597033:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:35.854404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;self_id=[1:7509888165521597033:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:35.854442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;self_id=[1:7509888165521597033:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:35.854460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;self_id=[1:7509888165521597033:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:35.854477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037895;self_id=[1:7509888165521597033:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:35.854494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;self_id=[1:7509888165521597033:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:35.854513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;self_id=[1:7509888165521597033:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:35.854535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;self_id=[1:7509888165521597033:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:35.854553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;self_id=[1:7509888165521597033:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:35.854568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;self_id=[1:7509888165521597033:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:35.854585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;self_id=[1:7509888165521597033:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:35.854604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;self_id=[1:7509888165521597033:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:35.857482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888165521596746:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:35.857502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888165521596746:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:35.857532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888165521596746:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:35.857550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888165521596746:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:35.857583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888165521596746:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fl ... 
8.380815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720731:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-05-29T15:22:09.382908Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720731;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720731; 2025-05-29T15:22:09.385082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720732:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.392886Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720732;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720732; 2025-05-29T15:22:09.395915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720733:0, at schemeshard: 72057594046644480 2025-05-29T15:22:09.399575Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720733;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720733; 2025-05-29T15:22:09.403167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720734:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2025-05-29T15:22:10.360555Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720734;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720734; 2025-05-29T15:22:10.363472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720735:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.366419Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720735;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720735; 2025-05-29T15:22:10.369140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720736:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.372859Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720736;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720736; 2025-05-29T15:22:10.375714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720737:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.649121Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720737;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720737; 2025-05-29T15:22:10.652729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720738:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.659728Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720738;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720738; 2025-05-29T15:22:10.662426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720739:0, at schemeshard: 72057594046644480 2025-05-29T15:22:10.666767Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720739;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720739; 2025-05-29T15:22:10.669468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720740:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2025-05-29T15:22:11.536287Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720740;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720740; 2025-05-29T15:22:11.539056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720741:0, at schemeshard: 72057594046644480 2025-05-29T15:22:11.541709Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720741;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720741; 2025-05-29T15:22:11.543863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720742:0, at schemeshard: 72057594046644480 2025-05-29T15:22:11.548957Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720742;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720742; 2025-05-29T15:22:11.551225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720743:0, at schemeshard: 72057594046644480 2025-05-29T15:22:11.583418Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720743;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720743; 2025-05-29T15:22:11.585103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720744:0, at schemeshard: 72057594046644480 2025-05-29T15:22:11.591248Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037916;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720744;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720744; 2025-05-29T15:22:11.593794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720745:0, at schemeshard: 72057594046644480 2025-05-29T15:22:11.597446Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720745;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720745; 2025-05-29T15:22:11.599247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720746:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... 
() 2025-05-29T15:22:13.602095Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720746;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720746; 2025-05-29T15:22:13.607401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720747:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.614147Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720747;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720747; 2025-05-29T15:22:13.617767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720748:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.620552Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720748;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720748; 2025-05-29T15:22:13.622572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720749:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2025-05-29T15:22:14.599610Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720749;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720749; 2025-05-29T15:22:14.603245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720750:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.607762Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037918;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720750;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720750; 2025-05-29T15:22:14.613761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720751:0, at schemeshard: 72057594046644480 2025-05-29T15:22:14.622099Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720751;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720751; 2025-05-29T15:22:14.624765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720752:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... 
() 2025-05-29T15:22:16.363566Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720752;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720752; 2025-05-29T15:22:16.365732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720753:0, at schemeshard: 72057594046644480 2025-05-29T15:22:16.371933Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976720753;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976720753; RESHARDING_WAIT_FINISHED... () RESHARDING_FINISHED 2025-05-29T15:22:17.390347Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532137000, txId: 18446744073709551615] shutting down [[0u]] >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] >> TNodeBrokerTest::NodesMigrationExtendLeaseThenExpire >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test [FAIL] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test >> TNodeBrokerTest::NodesMigrationExtendLease >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] >> TNodeBrokerTest::NodesMigration2000Nodes >> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD] >> TNodeBrokerTest::Test1000NodesSubscribers ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] Test command err: 2025-05-29T15:22:17.024549Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.024601Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.024625Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.024655Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.024680Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.024698Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.032058Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.032203Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.032251Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.032288Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.032334Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.032369Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.032443Z 
node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.032483Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.032723Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.032751Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.032769Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.032783Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.032802Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.032818Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.032853Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.036867Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.036926Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.036956Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.037833Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.037873Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.037899Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.037987Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.038004Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.038021Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.038043Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.038091Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.038106Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.038124Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.038140Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.038468Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.038521Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.038675Z node 8 
:NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.038839Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.038914Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.039768Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.039790Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.039802Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.039814Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.039831Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.040050Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.045614Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.045658Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.045674Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.045745Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.045783Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.046632Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.047308Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.047810Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.047960Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.048088Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.048179Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.048404Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.048684Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.049130Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.049373Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.049733Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.078364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:17.078388Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:17.083665Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:17.084272Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:17.084352Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:17.084597Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:17.085497Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:17.085519Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:17.085589Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:17.085600Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:17.085603Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:17.085614Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:17.085631Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:17.085635Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:17.085638Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:17.085642Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:17.085657Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330 ... 
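[Editor's aside] The trace above illustrates the Node Broker's epoch/version bookkeeping: dynamic node IDs are allocated from 1024 upward, every registration or lease extension bumps the current epoch's version (the ".v2", ".v5" suffixes in "#1024.v5"), and an extension pushes a node's expiry to the end of the *next* epoch while incrementing the per-node lease counter ("lease 2"). Below is a minimal, self-contained C++ sketch of that bookkeeping as inferred from the log; all type and function names are my own, not YDB's, and the epoch boundaries are assumptions read off the timestamps above. Note the trace's second node is #1026 rather than #1025 because this test deliberately shifts the minimum dynamic node ID; the sketch allocates consecutively.

// Sketch only: models the id/version/lease scheme visible in the trace,
// not YDB's actual NodeBroker implementation.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>

struct Epoch {
    uint64_t Id = 1;                     // epoch number: the "#2" in "#2.4"
    uint64_t Version = 1;                // bumped on every change: the ".4"
    uint64_t EndUs = 3600025000ULL;      // current epoch end (microseconds)
    uint64_t NextEndUs = 7200025000ULL;  // end of the following epoch
};

struct NodeRecord {
    std::string Host;
    uint16_t Port = 0;
    uint64_t ExpireUs = 0;  // "Expire: 7200025000" in the replies above
    uint32_t Lease = 1;     // "(lease 2)" after one extension
};

class NodeBrokerModel {
public:
    uint32_t Register(const std::string& host, uint16_t port) {
        uint32_t id = NextId++;  // dynamic IDs start at 1024
        Nodes[id] = {host, port, Current.NextEndUs, 1};
        ++Current.Version;       // every change bumps the epoch version
        std::cout << "register #" << id << ".v" << Current.Version << " "
                  << host << ":" << port
                  << " expire=" << Nodes[id].ExpireUs << "\n";
        return id;
    }

    bool ExtendLease(uint32_t id) {
        auto it = Nodes.find(id);
        if (it == Nodes.end())
            return false;  // maps to WRONG_REQUEST "Unknown node" in the trace
        it->second.ExpireUs = Current.NextEndUs;  // push expiry one epoch out
        ++it->second.Lease;
        ++Current.Version;
        std::cout << "extend #" << id << ".v" << Current.Version
                  << " (lease " << it->second.Lease << ")\n";
        return true;
    }

private:
    Epoch Current;
    uint32_t NextId = 1024;
    std::map<uint32_t, NodeRecord> Nodes;
};

int main() {
    NodeBrokerModel broker;
    uint32_t id = broker.Register("host1", 1001);  // register #1024.v2
    broker.Register("host2", 1001);                // register #1025.v3
    broker.ExtendLease(id);                        // extend #1024.v4 (lease 2)
}

Run as-is, this prints the same version progression seen in the trace (each mutation advancing the epoch version by one), with expiry set to the current epoch's NextEnd exactly as in the TEvExtendLeaseResponse messages above.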
dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:725:2265] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:17.452057Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [2:53:2072], Recipient [1:722:2262] 2025-05-29T15:22:17.452063Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:17.452069Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:17.452081Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:723:2263] 2025-05-29T15:22:17.452083Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:17.452086Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:17.452096Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:724:2264] 2025-05-29T15:22:17.452098Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:17.452101Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:17.452104Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:725:2265] 2025-05-29T15:22:17.452106Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:17.452108Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:17.631470Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435072, Sender [1:662:2225], Recipient [1:662:2225]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2025-05-29T15:22:17.631498Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:256: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2025-05-29T15:22:17.631522Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:20: TTxUpdateEpoch Execute 2025-05-29T15:22:17.631536Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Dirty] Move to new epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z, approximate epoch start #2.4 2025-05-29T15:22:17.631544Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:17.631575Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #2.4 2025-05-29T15:22:18.110570Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:31: TTxUpdateEpoch Complete 2025-05-29T15:22:18.110605Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Committed] Move to new epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z, approximate epoch start #2.4 2025-05-29T15:22:18.110626Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:18.110636Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z, approximate 
epoch start #2.4 nodes=2 expired=0 2025-05-29T15:22:18.110671Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z nodes=2 expired=0 removed=0 2025-05-29T15:22:18.110679Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-05-29T15:22:18.110689Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1026.v3 to update nodes log 2025-05-29T15:22:18.110704Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:18.110715Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:18.110722Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:18.110729Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:18.110752Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:18.110759Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:18.110768Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:18.110776Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:18.131734Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:736:2275], Recipient [1:662:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:18.131781Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:662:2225]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:18.131788Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:18.131804Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:18.131884Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:738:2277], Recipient [1:662:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:18.131901Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:662:2225]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:18.131906Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:18.131912Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 
1970-01-01T03:00:00.025000Z 2025-05-29T15:22:18.131980Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:740:2279], Recipient [1:662:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:18.132006Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039939, Sender [1:626:2214], Recipient [1:662:2225]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1024 } 2025-05-29T15:22:18.132013Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2025-05-29T15:22:18.132036Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:44: TTxExtendLease Execute node #1024 2025-05-29T15:22:18.132055Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:314: [Dirty] Extended lease of #1024.v5 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-05-29T15:22:18.132076Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v5 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=2 expire=Thu, 01 Jan 1970 03:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:18.132162Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 4 to 5 2025-05-29T15:22:18.132168Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=5 2025-05-29T15:22:18.143162Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:78: TTxExtendLease Complete 2025-05-29T15:22:18.143240Z node 1 :NODE_BROKER TRACE: node_broker__extend_lease.cpp:82: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1024 Expire: 10800025000 Epoch { Id: 2 Version: 5 Start: 3600025000 End: 7200025000 NextEnd: 10800025000 } } 2025-05-29T15:22:18.143262Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:314: [Committed] Extended lease of #1024.v5 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-05-29T15:22:18.143269Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 4 to 5 2025-05-29T15:22:18.143275Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v5 host1:1001 to epoch cache 2025-05-29T15:22:18.143299Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v5 to update nodes log 2025-05-29T15:22:18.143440Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:744:2283], Recipient [1:662:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:18.143474Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039939, Sender [1:626:2214], Recipient [1:662:2225]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1026 } 2025-05-29T15:22:18.143480Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2025-05-29T15:22:18.143496Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:44: TTxExtendLease Execute node #1026 2025-05-29T15:22:18.143505Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:314: [Dirty] Extended lease of #1026.v6 host2:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-05-29T15:22:18.143520Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1026.v6 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ 
lease=2 expire=Thu, 01 Jan 1970 03:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-05-29T15:22:18.143575Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 5 to 6 2025-05-29T15:22:18.143580Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=6 2025-05-29T15:22:18.154487Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:78: TTxExtendLease Complete 2025-05-29T15:22:18.154547Z node 1 :NODE_BROKER TRACE: node_broker__extend_lease.cpp:82: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1026 Expire: 10800025000 Epoch { Id: 2 Version: 6 Start: 3600025000 End: 7200025000 NextEnd: 10800025000 } } 2025-05-29T15:22:18.154570Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:314: [Committed] Extended lease of #1026.v6 host2:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-05-29T15:22:18.154576Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 5 to 6 2025-05-29T15:22:18.154582Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1026.v6 host2:1001 to epoch cache 2025-05-29T15:22:18.154606Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1026.v6 to update nodes log >> TNodeBrokerTest::NodesV2BackMigrationManyNodesInterrupted |59.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |59.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |59.7%| [TA] {RESULT} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.7%| [TA] {RESULT} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TNodeBrokerTest::ExtendLeaseBumpVersion |59.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD] Test command err: 2025-05-29T15:22:18.052635Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.052676Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.052698Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.052729Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.052751Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.052768Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.059297Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.059419Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.059458Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.059491Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.059529Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.059559Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.059615Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.059638Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.059867Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.059887Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.059900Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.059913Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.059931Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.059945Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.059975Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.063874Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 
2025-05-29T15:22:18.063934Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.063964Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.064832Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.064869Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.064893Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.064972Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.064988Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.065005Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.065022Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.065059Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.065077Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.065093Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.065115Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.065280Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.065300Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.065367Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.065446Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.065482Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.066124Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.066147Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.066155Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.066161Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.066168Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 
2025-05-29T15:22:18.069417Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.069616Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.069634Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.069648Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.069660Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.070390Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.070843Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.070982Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.071050Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.071155Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.071199Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.071522Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.071776Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.071810Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.071860Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.072308Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.072372Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.072610Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.073120Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.073237Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.073559Z node 3 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.098373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:18.098392Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:18.103768Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:18.104333Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:18.104410Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:18.104618Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:18.105388Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:18.105410Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:18.105460Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:18.105473Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:18.105477Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:18.105489Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Compl ... roker_impl.h:242: StateWork, received event# 269877761, Sender [1:741:2305], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:18.347827Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:688:2270], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:18.347831Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:18.347847Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2025-05-29T15:22:18.347891Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:743:2307], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:18.347901Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:688:2270], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-05-29T15:22:18.347904Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:18.347909Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2025-05-29T15:22:18.347946Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:745:2309], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:18.347958Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender 
[1:688:2270], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1026 } 2025-05-29T15:22:18.347961Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:18.347965Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2025-05-29T15:22:18.348005Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:747:2311], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:18.348015Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:688:2270], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1027 } 2025-05-29T15:22:18.348018Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:18.348022Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR 2025-05-29T15:22:18.348647Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:18.348660Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-05-29T15:22:18.348667Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-05-29T15:22:18.348672Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v2 host1:1001 to epoch cache 2025-05-29T15:22:18.348689Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-05-29T15:22:18.348712Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } 2025-05-29T15:22:18.348721Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:18.348725Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v3 host2:1001 2025-05-29T15:22:18.348728Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 2 to 3 2025-05-29T15:22:18.348731Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v3 host2:1001 to epoch cache 2025-05-29T15:22:18.348735Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v3 to update nodes log 2025-05-29T15:22:18.348744Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-1" } 2025-05-29T15:22:18.348749Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:18.348757Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" 
Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } 2025-05-29T15:22:18.348761Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:18.348764Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1026.v4 host3:1001 2025-05-29T15:22:18.348767Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 3 to 4 2025-05-29T15:22:18.348770Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1026.v4 host3:1001 to epoch cache 2025-05-29T15:22:18.348774Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1026.v4 to update nodes log 2025-05-29T15:22:18.348782Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1026 Host: "host3" Port: 1001 ResolveHost: "host3.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-1" } 2025-05-29T15:22:18.348800Z node 1 :NODE_BROKER DEBUG: node_broker__graceful_shutdown.cpp:50: TTxGracefulShutdown Complete 2025-05-29T15:22:18.348807Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:18.348810Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1027.v5 host4:1001 2025-05-29T15:22:18.348812Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 4 to 5 2025-05-29T15:22:18.348815Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1027.v5 host4:1001 to epoch cache 2025-05-29T15:22:18.348819Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1027.v5 to update nodes log 2025-05-29T15:22:18.348827Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1027 Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-1" } 2025-05-29T15:22:18.348940Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:751:2315], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:18.348951Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:688:2270], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:18.348954Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:18.348960Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.5 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:18.349017Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:753:2317], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:18.349027Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:688:2270], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:18.349030Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:18.349041Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: 
NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } } 2025-05-29T15:22:18.349086Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:755:2319], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:18.349094Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:688:2270], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-05-29T15:22:18.349096Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:18.349104Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } } 2025-05-29T15:22:18.349144Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:757:2321], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:18.349154Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:688:2270], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1026 } 2025-05-29T15:22:18.349157Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:18.349165Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1026 Host: "host3" Port: 1001 ResolveHost: "host3.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 } } 2025-05-29T15:22:18.349218Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:759:2323], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:18.349229Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:688:2270], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1027 } 2025-05-29T15:22:18.349231Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:18.349239Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1027 Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-1" } } >> TPQTest::TestWaitInOwners [GOOD] >> TPQTest::TestWritePQBigMessage >> TPartitionTests::TestBatchingWithChangeConfig [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test [FAIL] >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test >> TPartitionTests::TestBatchingWithProposeConfig |59.7%| [TA] 
$(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} >> TNodeBrokerTest::ExtendLeasePipelining >> TNodeBrokerTest::SyncNodes [GOOD] >> TNodeBrokerTest::NodesMigrationReuseID [GOOD] >> TNodeBrokerTest::NodesMigrationRemoveActive [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] Test command err: 2025-05-29T15:22:17.700100Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.700145Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.700168Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.700194Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.700218Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.700236Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.706131Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.706249Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.706287Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.706320Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.706355Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.706385Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.706449Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.706472Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.706711Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.706733Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.706771Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.706785Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.706802Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.706819Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.706854Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.711187Z node 2 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.711265Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.711309Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.712557Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.712609Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.712644Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.712763Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.712789Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.712817Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.712852Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.712913Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.712942Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.712972Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.713000Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:17.713178Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.713209Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.713321Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.713432Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.713495Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.714263Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.714290Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.714302Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.714314Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.714324Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.719560Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.719736Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.719779Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.720686Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.720928Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.720958Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.721211Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.721481Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.721542Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.721717Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.721757Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.722301Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.722854Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.724739Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.725042Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.725203Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.725307Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.727599Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.727761Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:17.753799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:17.753824Z node 1 
:IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:17.759142Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:17.759668Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:17.759735Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:17.759975Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:17.761145Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:17.761179Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:17.761248Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:17.761266Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:17.761270Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:17.761284Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:17.761314Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:17.761318Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:17.761321Z node 1 :NODE_BROKER DEBU ... 
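[Editor's aside] Below the framework's "..." truncation, this dump shows the restart path: TTxLoadState reloads the persisted node records (here #1024 in State: Expired), TTxMigrateState finalizes the schema migration, and the broker then jumps from the epoch it had on disk to the one implied by the clock, discarding every node whose lease ended before the new epoch begins ("[Dirty] Remove node #1024.v4 ... Move to new epoch #4.5"). The following is a minimal sketch of that catch-up step, assuming one-hour epochs and using names of my own; this is behavior inferred from the log, not YDB's actual code.

#include <cstdint>
#include <iostream>
#include <map>
#include <string>

struct Loaded {
    std::string Host;
    uint64_t ExpireUs;  // persisted lease expiry, e.g. 02:00 for #1024 above
};

constexpr uint64_t kEpochUs = 3600ULL * 1000 * 1000;  // one-hour epochs (assumed)

// Epoch N covers [(N-1)*kEpochUs, N*kEpochUs); epoch #4 starts at 03:00.
uint64_t EpochForTime(uint64_t nowUs) { return nowUs / kEpochUs + 1; }

// Catch up after a restart: drop nodes whose leases ended before the new
// epoch starts, then advance the epoch id and bump the version once
// (mirroring the single step to "#4.5" in the trace).
void MoveToCurrentEpoch(std::map<uint32_t, Loaded>& nodes,
                        uint64_t& epochId, uint64_t& version, uint64_t nowUs) {
    uint64_t target = EpochForTime(nowUs);
    if (target <= epochId)
        return;  // state on disk is already current
    uint64_t newStartUs = (target - 1) * kEpochUs;
    for (auto it = nodes.begin(); it != nodes.end();) {
        if (it->second.ExpireUs <= newStartUs) {
            std::cout << "remove node #" << it->first << " (lease expired)\n";
            it = nodes.erase(it);
        } else {
            ++it;
        }
    }
    epochId = target;
    ++version;
    std::cout << "moved to epoch #" << epochId << "." << version << "\n";
}

int main() {
    // One node persisted as expiring at 02:00; the tablet wakes up after 03:00.
    std::map<uint32_t, Loaded> nodes{{1024, {"host1", 2 * kEpochUs}}};
    uint64_t epochId = 1, version = 4;
    MoveToCurrentEpoch(nodes, epochId, version, 3 * kEpochUs + 1);
    // prints: remove node #1024 (lease expired) / moved to epoch #4.5
}

The "Update approx epoch start in database" rows in the trace are the persisted counterpart of the epochId/version pair tracked here; the sketch keeps only one copy for brevity.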
15:22:18.657861Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1113: [DB] Loaded node #1024.v0 { NodeId: 1024, State: Expired, Version: 0, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:18.657883Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1024.v4 { NodeId: 1024, State: Expired, Version: 4, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:18.657890Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1239: [DB] Node #1024.v4 is already migrated 2025-05-29T15:22:18.657905Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:18.657940Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:18.657946Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:18.657950Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:18.657955Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:530: [Dirty] Remove node #1024.v4 host1:1001 2025-05-29T15:22:18.657964Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Dirty] Move to new epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z, approximate epoch start #4.5 2025-05-29T15:22:18.657973Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:830: [DB] Removing node #1024.v5 from database 2025-05-29T15:22:18.657993Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:18.658003Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #4.5 2025-05-29T15:22:19.000934Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:19.000967Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:19.000975Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z, approximate epoch start #4.5 nodes=0 expired=0 2025-05-29T15:22:19.000983Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z nodes=0 expired=0 removed=1 2025-05-29T15:22:19.000987Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v5 to update nodes log 2025-05-29T15:22:19.001078Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:774:2299] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-05-29T15:22:19.001096Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:774:2299], Recipient [1:740:2272]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.001162Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:748:2278], Recipient [1:740:2272]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.001171Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:782:2303] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-05-29T15:22:19.001179Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:748:2278] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-05-29T15:22:19.001186Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:749:2279], Recipient [1:740:2272]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.001196Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:779:2300] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-05-29T15:22:19.001203Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:780:2301] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-05-29T15:22:19.001210Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:781:2302] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-05-29T15:22:19.001217Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:749:2279] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-05-29T15:22:19.001237Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:779:2300], Recipient [1:740:2272]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.001253Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:780:2301], Recipient [1:740:2272]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.001262Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:781:2302], Recipient [1:740:2272]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.001287Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:782:2303], Recipient [1:740:2272]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.001321Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:740:2272]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2025-05-29T15:22:19.001328Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.001334Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.001351Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:748:2278] 2025-05-29T15:22:19.001354Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.001359Z node 1 :NODE_BROKER TRACE: 
node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.001366Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:749:2279] 2025-05-29T15:22:19.001369Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.001373Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.001390Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:781:2302] 2025-05-29T15:22:19.001393Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.001397Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.001405Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:782:2303] 2025-05-29T15:22:19.001408Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.001411Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.001425Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [2:53:2072], Recipient [1:779:2300] 2025-05-29T15:22:19.001428Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.001431Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.001438Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:780:2301] 2025-05-29T15:22:19.001440Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.001444Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.001507Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:804:2314], Recipient [1:740:2272]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.001519Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:627:2213], Recipient [1:740:2272]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.001522Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.001525Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.001585Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:806:2316], Recipient [1:740:2272]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.001593Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:627:2213], Recipient [1:740:2272]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.001595Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.001599Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.001636Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:808:2318], Recipient [1:740:2272]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.001650Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:627:2213], Recipient [1:740:2272]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:19.001654Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:19.001667Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test [FAIL] Test command err: 2025-05-29T15:22:11.960401Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888322742373253:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:11.960432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:11.963851Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888318910636471:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:11.963871Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000ac1/r3tmp/tmpJgLT8z/pdisk_1.dat 2025-05-29T15:22:11.996874Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:11.999884Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:12.025425Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7426, node 1 2025-05-29T15:22:12.036329Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/000ac1/r3tmp/yandexLWoTO4.tmp 2025-05-29T15:22:12.036342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/000ac1/r3tmp/yandexLWoTO4.tmp 2025-05-29T15:22:12.036386Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: 
/home/runner/.ya/build/build_root/ciyv/000ac1/r3tmp/yandexLWoTO4.tmp 2025-05-29T15:22:12.036430Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:12.042853Z INFO: TTestServer started on Port 8346 GrpcPort 7426 TClient is connected to server localhost:8346 PQClient connected to localhost:7426 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:22:12.060664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:12.060697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:12.062340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:12.092107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:12.092134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:12.093044Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:22:12.093307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:12.093430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:12.104424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:12.324522Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888323205604114:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:12.324542Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888323205604127:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:12.324550Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:12.325685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-05-29T15:22:12.329697Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888323205604143:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-05-29T15:22:12.432695Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888323205604171:2165] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:12.468259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:12.468278Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888327037341634:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:12.468390Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjQxODdlMzItYTE3NzFjOTItOTM4NjZlODctMzQ3MGVkNDY=, ActorId: [1:7509888327037341593:2334], ActorState: ExecuteState, TraceId: 01jwea5yj31w0qppgeh6tkgcwa, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:12.468867Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:12.468208Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888323205604178:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:12.468825Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=M2U0OGZmMjAtOTIxNTBkMjctY2Q5ZDhhYjItZDg3MTk4ZTA=, ActorId: [2:7509888323205604112:2309], ActorState: ExecuteState, TraceId: 01jwea5yf360rx95hmzecx9ccr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:12.468937Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:12.528498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:12.550389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:12.581352Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888327037342024:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:12.581456Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTVjMzE4YzUtYmU1YTUwZDAtN2M4NGU0ZDQtZjBhMDVhZjY=, ActorId: [1:7509888327037342021:2374], ActorState: ExecuteState, TraceId: 01jwea5ypp037djd0q8sy8ek7d, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C16BFC) NUnitTest::NPrivate::RaiseError(ch ... " EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:17.168874Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:17.168906Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:17.170400Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:17.172594Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:17.173193Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:17.173212Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:17.174044Z node 9 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2025-05-29T15:22:17.174245Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:17.186386Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:17.419661Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7509888344727571659:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:17.419758Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7509888344727571645:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:17.419774Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:17.420739Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-05-29T15:22:17.425001Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7509888344727571666:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-05-29T15:22:17.487240Z node 10 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [10:7509888344727571693:2166] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:17.490825Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [10:7509888344727571707:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:17.490929Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=10&id=NTI2ZjcxOWUtNGY0NGUwN2MtMzFhYjczNWMtMTUwNmNkZjM=, ActorId: [10:7509888344727571634:2309], ActorState: ExecuteState, TraceId: 01jwea63ebft55xmshmfdetxds, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:17.491080Z node 10 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:17.491375Z node 9 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [9:7509888344530166368:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:17.491444Z node 9 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=9&id=MzUyNTRjZmQtMWE5ZDdmNWMtZTI2NTJiZDktMmMxYzNhYjQ=, ActorId: [9:7509888344530166341:2334], ActorState: ExecuteState, TraceId: 01jwea63gfbya84m5gv0n2syt8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:17.491550Z node 9 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:17.492185Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:17.550674Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:17.570335Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:17.592780Z node 9 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [9:7509888344530166752:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:17.592877Z node 9 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=9&id=ODQxZjE0YWUtYWFmZWYxYjktOTQ1YTczZmYtOGFlZTUyNWI=, ActorId: [9:7509888344530166749:2374], ActorState: ExecuteState, TraceId: 01jwea63ke3fw8rt165w63ehty, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C16BFC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DCA7A9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x13965684) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x1394D528) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139741A2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139720D7) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x13970D08) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x13969A6B) NPersQueue::TTestServer::TTestServer(bool)+134 (0x139502B6) NTestSuiteTPartitionChooserSuite::CreateServer()+24 (0x13950118) NTestSuiteTPartitionChooserSuite::TTestCaseTPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test::Execute_(NUnitTest::TTestContext&)+32 (0x139561E0) NTestSuiteTPartitionChooserSuite::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13962767) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13DCC65E) NTestSuiteTPartitionChooserSuite::TCurrentTest::Execute()+433 (0x13962121) NUnitTest::TTestFactory::Execute()+803 (0x13DCCDD3) NUnitTest::RunMain(int, char**)+3021 (0x13DDE97D) ??+0 (0x7FA1FAB8AD90) __libc_start_main+128 (0x7FA1FAB8AE40) _start+41 (0x12A48029) >> TPartitionTests::DataTxCalcPredicateOk [GOOD] >> TNodeBrokerTest::UpdateNodesLogEmptyEpoch [GOOD] >> TPartitionTests::DataTxCalcPredicateError >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SyncNodes [GOOD] Test command err: 2025-05-29T15:22:18.951854Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.951900Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.951924Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.951953Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.951992Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.952011Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.957782Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.957897Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.957940Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.957976Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.958015Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.958047Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.958107Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.958130Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.958340Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.958360Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.958373Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.958385Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.958400Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.958414Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.958444Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.962173Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.962230Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.962261Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.963113Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.963151Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.963182Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.963258Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.963274Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.963292Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.963312Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.963327Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.963482Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.963503Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 
18446744073709.551615s } 2025-05-29T15:22:18.963518Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.963541Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.963560Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.963625Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.963696Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.963731Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.964349Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.964372Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.964379Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.964386Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.964392Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.964399Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.964406Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.967424Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.967519Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.967535Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.967559Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.967569Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.967592Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.968154Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.968262Z node 1 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.968528Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.968744Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.968815Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.968948Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.969094Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.969339Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.969703Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.969739Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.969915Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.969958Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.970145Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.993142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:18.993161Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:18.997456Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:18.997889Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:18.997948Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:18.998128Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:18.998873Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:18.998902Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:18.998970Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 
2025-05-29T15:22:18.998987Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:18.998992Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:18.999010Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Compl ... :81: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:19.212459Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:19.212513Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v2 host1:1001 2025-05-29T15:22:19.212519Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2 2025-05-29T15:22:19.212522Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=2 2025-05-29T15:22:19.223306Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:19.223325Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-05-29T15:22:19.223332Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-05-29T15:22:19.223336Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v2 host1:1001 to epoch cache 2025-05-29T15:22:19.223353Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-05-29T15:22:19.223384Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } 2025-05-29T15:22:19.223482Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:646:2223], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.223504Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 0 SeqNo: 1 } 2025-05-29T15:22:19.223508Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:19.223513Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:630:2214], seqNo: 1, version: 0, server pipe id: [1:646:2223] 2025-05-29T15:22:19.223519Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v0 -> v2 to [1:630:2214] 2025-05-29T15:22:19.223531Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:647:2224], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.223540Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.223543Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: 
StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.223553Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:19.223612Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:649:2226], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.223625Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:22:19.223628Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:19.223635Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:22:19.223672Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:19.223693Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:634:2217] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:19.223736Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:651:2227], recipient# [1:650:2184], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:19.223747Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 
72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:19.223755Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:19.223779Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:650:2184], Recipient [1:562:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:19.223782Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:19.223791Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:19.223794Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:19.223811Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v3 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-05-29T15:22:19.223843Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v3 host2:1001 2025-05-29T15:22:19.223847Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 2 to 3 2025-05-29T15:22:19.223850Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=3 2025-05-29T15:22:19.234648Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:19.234671Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v3 host2:1001 2025-05-29T15:22:19.234681Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 2 to 3 2025-05-29T15:22:19.234685Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v3 host2:1001 to epoch cache 2025-05-29T15:22:19.234706Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v3 to update nodes log 2025-05-29T15:22:19.234763Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-1" } 2025-05-29T15:22:19.234874Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:655:2231], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.234886Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.234891Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.234899Z node 1 :NODE_BROKER 
TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:19.234936Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039952, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest { SeqNo: 1 } 2025-05-29T15:22:19.234940Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:255: StateWork, processing event TEvNodeBroker::TEvSyncNodesRequest 2025-05-29T15:22:19.234946Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:630:2214] 2025-05-29T15:22:19.234994Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:657:2233], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.235005Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.235008Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.235012Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:19.235033Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039952, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest { SeqNo: 1 } 2025-05-29T15:22:19.235035Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:255: StateWork, processing event TEvNodeBroker::TEvSyncNodesRequest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseID [GOOD] Test command err: 2025-05-29T15:22:18.845867Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.845916Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.845950Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.845988Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.846018Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.846041Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.853532Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.853666Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.853736Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.853787Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.853839Z node 6 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.853883Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.853958Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.853989Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.854276Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.854309Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.854333Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.854354Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.854380Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.854405Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.854451Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.858520Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.858576Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.858609Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.859451Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.859483Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.859507Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.859578Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.859594Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.859611Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.859629Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.859645Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.859811Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.859831Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.859846Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.859870Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 
18446744073709.551615s } 2025-05-29T15:22:18.859890Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.859955Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.860019Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.860053Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.860650Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.860670Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.860677Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.860682Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.860688Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.860695Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.860703Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.863638Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.863709Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.863740Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.863757Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.863774Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.864294Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.864321Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.864499Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.864659Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.864858Z node 1 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.864931Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.865023Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.865103Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.865498Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.865577Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.865616Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.865760Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.865821Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.865864Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.866492Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.866628Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.867342Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.867633Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.891306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:18.891324Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:18.895022Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:18.895464Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:18.895521Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:18.895725Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:18.896370Z node 1 :NODE_BROKER DEBUG: node_ ... 
Certificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:19.276802Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1024.v2 { NodeId: 1024, State: Active, Version: 2, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:19.276807Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1214: [DB] Migrating changed node #1024.v7 { NodeId: 1024, State: Active, Version: 7, Host: host2, Port: 1001, ResolveHost: host2.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 05:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:19.276812Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 6 to 7 2025-05-29T15:22:19.276819Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:19.276863Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:19.276868Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 1 2025-05-29T15:22:19.276871Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.276883Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #4.6 2025-05-29T15:22:19.276890Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v7 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:19.276935Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:19.288323Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:19.288361Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:19.288369Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z, approximate epoch start #4.6 nodes=1 expired=0 2025-05-29T15:22:19.288409Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z nodes=1 expired=0 removed=0 2025-05-29T15:22:19.288414Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v7 to update nodes log 2025-05-29T15:22:19.288503Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:709:2259], Recipient [1:699:2253]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.288515Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:711:2261], Recipient [1:699:2253]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.288569Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender 
[1:712:2262], Recipient [1:699:2253]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.288589Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:709:2259] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.288603Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:711:2261] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.288611Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:714:2264], Recipient [1:699:2253]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.288615Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:712:2262] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.288637Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:714:2264] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.288692Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:709:2259] 2025-05-29T15:22:19.288697Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.288704Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.288712Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:711:2261] 2025-05-29T15:22:19.288714Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.288718Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.288746Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:712:2262] 2025-05-29T15:22:19.288749Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.288752Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.288770Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:714:2264] 2025-05-29T15:22:19.288772Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.288777Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.288893Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:741:2286], Recipient [1:699:2253]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.288915Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 
272039936, Sender [1:634:2214], Recipient [1:699:2253]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.288917Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.288921Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.288964Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:743:2288], Recipient [1:699:2253]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.288974Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:634:2214], Recipient [1:699:2253]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.288976Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.288980Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.289017Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:745:2290], Recipient [1:699:2253]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.289027Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:634:2214], Recipient [1:699:2253]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.289030Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.289033Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.289075Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:747:2292], Recipient [1:699:2253]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.289090Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:634:2214], Recipient [1:699:2253]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2025-05-29T15:22:19.289093Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.289096Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.289135Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:749:2294], Recipient [1:699:2253]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.289146Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:634:2214], Recipient [1:699:2253]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 6 SeqNo: 2 } 2025-05-29T15:22:19.289150Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:19.289154Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:634:2214], seqNo: 2, version: 6, server pipe id: [1:749:2294] 2025-05-29T15:22:19.289160Z node 1 :NODE_BROKER TRACE: 
node_broker.cpp:730: Send TEvUpdateNodes v6 -> v7 to [1:634:2214] 2025-05-29T15:22:19.289200Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:749:2294], Recipient [1:699:2253]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:19.289204Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:634:2214], seqNo: 2, server pipe id: [1:749:2294] 2025-05-29T15:22:19.289221Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:751:2296], Recipient [1:699:2253]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.289233Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:634:2214], Recipient [1:699:2253]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:19.289237Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:19.289254Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18000025000 Name: "slot-0" } }
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemoveActive [GOOD]
Test command err:
2025-05-29T15:22:18.850795Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.850841Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.850868Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.850906Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.850936Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.850958Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.856893Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.857005Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.857052Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.857087Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.857132Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.857168Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.857228Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.857255Z node 1 :NAMESERVICE
DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.857464Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.857486Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.857500Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.857512Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.857527Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.857542Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.857589Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.861560Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.861615Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.861643Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.862451Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.862482Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.862507Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.862581Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.862597Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.862615Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.862640Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.862676Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.862694Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.862710Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.862847Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.862870Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.862888Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.862956Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.863029Z node 2 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.863066Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.863600Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.863610Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.863616Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.863623Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.863629Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.866778Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.866802Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.866852Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.866880Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.867477Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.867964Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.868111Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.868139Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.868252Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.868298Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.868367Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.869327Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.869363Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.869559Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.869660Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.869696Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.869913Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.891826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:18.891848Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:18.895288Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:18.895594Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:18.895636Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:18.895793Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:18.896363Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:18.896382Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:18.896444Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:18.896454Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:18.896457Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:18.896468Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:18.896483Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:18.896487Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:18.896490Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:18.896494Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:18.896506Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330 ... tateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:19.195505Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:19.195562Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:19.195619Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:19.195707Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 
2025-05-29T15:22:19.195719Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:988: [DB] Loaded current epoch: #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.195724Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1012: [DB] Approximate epoch start is changed: #4.5 2025-05-29T15:22:19.195729Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1034: [DB] Loaded main nodes table: Nodes 2025-05-29T15:22:19.195775Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1024.v2 { NodeId: 1024, State: Active, Version: 2, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:19.195781Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:300: [Dirty] Added removed node #1024.v5 2025-05-29T15:22:19.195787Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1230: [DB] Migrating removed node #1024.v5 2025-05-29T15:22:19.195800Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:19.195832Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:19.195838Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 1, NewVersionUpdateNodes left 0 2025-05-29T15:22:19.195841Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:830: [DB] Removing node #1024.v5 from database 2025-05-29T15:22:19.195855Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:19.195858Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #4.5 2025-05-29T15:22:19.217363Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:19.217394Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:19.217402Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z, approximate epoch start #4.5 nodes=0 expired=0 2025-05-29T15:22:19.217408Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z nodes=0 expired=0 removed=1 2025-05-29T15:22:19.217412Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v5 to update nodes log 2025-05-29T15:22:19.217488Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:688:2247], Recipient [1:679:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.217504Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:689:2248], Recipient [1:679:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.217575Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:691:2250], Recipient [1:679:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.217593Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:688:2247] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.217606Z node 4 
:NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:689:2248] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.217614Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:691:2250] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.217632Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:692:2251], Recipient [1:679:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.217637Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:692:2251] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.217687Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:691:2250] 2025-05-29T15:22:19.217691Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.217699Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.217725Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:688:2247] 2025-05-29T15:22:19.217728Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.217731Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.217746Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:689:2248] 2025-05-29T15:22:19.217748Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.217752Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.217763Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:692:2251] 2025-05-29T15:22:19.217765Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.217768Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.217882Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:721:2274], Recipient [1:679:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.217901Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:679:2241]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.217904Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.217907Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 
1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.217949Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:723:2276], Recipient [1:679:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.217960Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:679:2241]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.217962Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.217965Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.218006Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:725:2278], Recipient [1:679:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.218025Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:679:2241]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.218030Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.218034Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.218072Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:727:2280], Recipient [1:679:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.218084Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:679:2241]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 5 } 2025-05-29T15:22:19.218087Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.218090Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.218133Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:729:2282], Recipient [1:679:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.218147Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:626:2214], Recipient [1:679:2241]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 5 SeqNo: 2 } 2025-05-29T15:22:19.218151Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:19.218156Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:626:2214], seqNo: 2, version: 5, server pipe id: [1:729:2282] 2025-05-29T15:22:19.218160Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v5 -> v5 to [1:626:2214] 2025-05-29T15:22:19.218194Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:729:2282], Recipient [1:679:2241]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:19.218198Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed 
[1:626:2214], seqNo: 2, server pipe id: [1:729:2282] 2025-05-29T15:22:19.218219Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:731:2284], Recipient [1:679:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.218232Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:626:2214], Recipient [1:679:2241]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:19.218236Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:19.218247Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } }
>> TLocalTests::TestRemoveTenantWhileResolving
>> TNodeBrokerTest::NodesMigrationExtendLeaseThenExpire [GOOD]
>> TNodeBrokerTest::NodesMigrationExtendLease [GOOD]
>> TNodeBrokerTest::ShiftIdRangeRemoveActive [GOOD]
>> TNodeBrokerTest::ExtendLeaseSetLocationInOneRegistration [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove [GOOD]
Test command err:
2025-05-29T15:22:19.097150Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.097199Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.097223Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.097251Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.097275Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.097294Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.105423Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.105581Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.105641Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.105689Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.105743Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.105789Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.105868Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.105903Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.106212Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle
NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.106247Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.106269Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.106289Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.106314Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.106337Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.106385Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.110793Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.110871Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.110911Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.112241Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.112287Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.112321Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.112441Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.112467Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.112493Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.112527Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.112586Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.112612Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.112639Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.112665Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.112846Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.112877Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.112984Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.113089Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.113153Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.113939Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.113956Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.113966Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.113977Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.113992Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.114122Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.119520Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.119582Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.119595Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.120139Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.120166Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.120252Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.120508Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.120581Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.120791Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.120834Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.120901Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.121209Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.121462Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.121716Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 
18446744073709.551615s } 2025-05-29T15:22:19.121881Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.121911Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.121955Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.122151Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.122188Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.122260Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.122273Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.122501Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.123823Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.124109Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.124698Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.124812Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.145373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:19.145391Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:19.149559Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:19.149909Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTab ... tateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:19.480464Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:19.480527Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:19.480585Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:19.480667Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 
2025-05-29T15:22:19.480681Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:988: [DB] Loaded current epoch: #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:19.480688Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1012: [DB] Approximate epoch start is changed: #5.6 2025-05-29T15:22:19.480693Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1034: [DB] Loaded main nodes table: Nodes 2025-05-29T15:22:19.480739Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1024.v2 { NodeId: 1024, State: Active, Version: 2, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:19.480747Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:300: [Dirty] Added removed node #1024.v6 2025-05-29T15:22:19.480754Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1230: [DB] Migrating removed node #1024.v6 2025-05-29T15:22:19.480770Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:19.480823Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:19.480829Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 1, NewVersionUpdateNodes left 0 2025-05-29T15:22:19.480834Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:830: [DB] Removing node #1024.v6 from database 2025-05-29T15:22:19.480852Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:19.480858Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #5.6 2025-05-29T15:22:19.492341Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:19.492373Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.492380Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z, approximate epoch start #5.6 nodes=0 expired=0 2025-05-29T15:22:19.492387Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z nodes=0 expired=0 removed=1 2025-05-29T15:22:19.492392Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v6 to update nodes log 2025-05-29T15:22:19.492466Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:705:2253], Recipient [1:692:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.492479Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:701:2249], Recipient [1:692:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.492495Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:702:2250], Recipient [1:692:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.492538Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:703:2251], Recipient [1:692:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.492552Z node 8 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:705:2253] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.492565Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:701:2249] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.492581Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:703:2251] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.492597Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:702:2250] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.492647Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [2:53:2072], Recipient [1:701:2249] 2025-05-29T15:22:19.492651Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.492659Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:19.492668Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:705:2253] 2025-05-29T15:22:19.492670Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.492674Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:19.492697Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:703:2251] 2025-05-29T15:22:19.492700Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.492703Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:19.492714Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:702:2250] 2025-05-29T15:22:19.492716Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.492719Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:19.492843Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:732:2275], Recipient [1:692:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.492857Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:635:2213], Recipient [1:692:2242]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.492860Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.492864Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for 
epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:19.492907Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:734:2277], Recipient [1:692:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.492917Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:635:2213], Recipient [1:692:2242]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.492920Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.492923Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:19.492966Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:736:2279], Recipient [1:692:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.492980Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:635:2213], Recipient [1:692:2242]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.492983Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.492986Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:19.493021Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:738:2281], Recipient [1:692:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.493036Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:635:2213], Recipient [1:692:2242]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2025-05-29T15:22:19.493039Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.493042Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:19.493082Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:740:2283], Recipient [1:692:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.493093Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:635:2213], Recipient [1:692:2242]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 6 SeqNo: 2 } 2025-05-29T15:22:19.493097Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:19.493101Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:635:2213], seqNo: 2, version: 6, server pipe id: [1:740:2283] 2025-05-29T15:22:19.493107Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v6 -> v6 to [1:635:2213] 2025-05-29T15:22:19.493144Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:740:2283], Recipient [1:692:2242]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:19.493148Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed 
[1:635:2213], seqNo: 2, server pipe id: [1:740:2283] 2025-05-29T15:22:19.493174Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:742:2285], Recipient [1:692:2242]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.493184Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:635:2213], Recipient [1:692:2242]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:19.493189Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:19.493201Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } }
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenExpire [GOOD]
Test command err:
2025-05-29T15:22:19.348611Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.348654Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.348672Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.348694Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.348715Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.348730Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.353952Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.354033Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.354063Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.354087Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.354116Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.354139Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.354184Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.354200Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.354382Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.354397Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.354408Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:19.354419Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.354434Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.354446Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.354470Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.358007Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.358056Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.358080Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.358789Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.358819Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.358842Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.358910Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.358924Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.358940Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.358951Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.358965Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.359083Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.359101Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.359114Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.359129Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.359147Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.359204Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.359264Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.359295Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.359759Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.359768Z node 4 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.359775Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.359784Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.359790Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.359899Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.362713Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.362840Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.362855Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.362880Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.362898Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.363500Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.363722Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.363802Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.363854Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.363909Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.363965Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.364073Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.364195Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.364344Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.364465Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.364647Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.364998Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.365215Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.365988Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.366208Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.385587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:19.385606Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:19.388937Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:19.389216Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:19.389255Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:19.389403Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:19.390075Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:19.390092Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:19.390130Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:19.390140Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:19.390144Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:19.390154Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Compl ... 
oker.cpp:1034: [DB] Loaded main nodes table: Nodes 2025-05-29T15:22:19.692182Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:291: [Dirty] Added expired node #1024.v0 host1:1001 2025-05-29T15:22:19.692200Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1113: [DB] Loaded node #1024.v0 { NodeId: 1024, State: Expired, Version: 0, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 2, Expire: Thu, 01 Jan 1970 03:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:19.692212Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1024.v2 { NodeId: 1024, State: Active, Version: 2, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:19.692219Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1214: [DB] Migrating changed node #1024.v5 { NodeId: 1024, State: Expired, Version: 5, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 2, Expire: Thu, 01 Jan 1970 03:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:19.692227Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:19.692244Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:19.692248Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 1, NewVersionUpdateNodes left 0 2025-05-29T15:22:19.692254Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v5 host1:1001 to database state=Expired resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=2 expire=Thu, 01 Jan 1970 03:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:19.692286Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:19.692289Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #4.5 2025-05-29T15:22:19.714183Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:19.714228Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:19.714239Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z, approximate epoch start #4.5 nodes=0 expired=1 2025-05-29T15:22:19.714264Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z nodes=0 expired=1 removed=0 2025-05-29T15:22:19.714270Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v5 to update nodes log 2025-05-29T15:22:19.714377Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:692:2247], Recipient [1:683:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.714393Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:693:2248], Recipient [1:683:2241]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.714502Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:695:2250], Recipient [1:683:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.714525Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:692:2247] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.714546Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:693:2248] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.714558Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:695:2250] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.714587Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:696:2251], Recipient [1:683:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.714595Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:696:2251] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.714676Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:695:2250] 2025-05-29T15:22:19.714681Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.714692Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.714716Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:692:2247] 2025-05-29T15:22:19.714720Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.714726Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.714792Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:693:2248] 2025-05-29T15:22:19.714797Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.714804Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.714826Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:696:2251] 2025-05-29T15:22:19.714830Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.714836Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.714995Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:725:2274], Recipient 
[1:683:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.715021Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:683:2241]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.715025Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.715031Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.715100Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:727:2276], Recipient [1:683:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.715120Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:683:2241]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.715124Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.715130Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.715200Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:729:2278], Recipient [1:683:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.715218Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:683:2241]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.715222Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.715228Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.715289Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:731:2280], Recipient [1:683:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.715308Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:683:2241]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 5 } 2025-05-29T15:22:19.715312Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.715317Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:19.715372Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:733:2282], Recipient [1:683:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.715394Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:630:2214], Recipient [1:683:2241]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 5 SeqNo: 2 } 2025-05-29T15:22:19.715400Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:19.715407Z node 1 :NODE_BROKER DEBUG: 
node_broker.cpp:747: New subscriber [1:630:2214], seqNo: 2, version: 5, server pipe id: [1:733:2282] 2025-05-29T15:22:19.715415Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v5 -> v5 to [1:630:2214] 2025-05-29T15:22:19.715469Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:733:2282], Recipient [1:683:2241]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:19.715475Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:630:2214], seqNo: 2, server pipe id: [1:733:2282] 2025-05-29T15:22:19.715508Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:735:2284], Recipient [1:683:2241]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.715528Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:630:2214], Recipient [1:683:2241]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:19.715533Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:19.715551Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } }
>> TLocalTests::TestRemoveTenantWhileResolving [GOOD]
>> TNodeBrokerTest::BasicFunctionality
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateNodesLogEmptyEpoch [GOOD]
Test command err:
2025-05-29T15:22:18.629807Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.629847Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.629869Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.629898Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.629921Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.629939Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.636352Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.636462Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.636501Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.636533Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.636568Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.636601Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.636676Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 
Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.636701Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.636991Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.637020Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.637040Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.637059Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.637082Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.637104Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.637146Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.641485Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.641569Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.641601Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.642715Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.642786Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.642823Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.642947Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.642971Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.642997Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.643023Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.643077Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.643100Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.643119Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.643143Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.643337Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.643366Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.643437Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 
18446744073709.551615s } 2025-05-29T15:22:18.643518Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.643556Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.644194Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.644216Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.644223Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.644232Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.644241Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.648167Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.648184Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.648195Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.648206Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.649216Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.649521Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.649622Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.649658Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.649704Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.649747Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.650098Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.651001Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.651069Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.651257Z node 2 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.651282Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.651430Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.651615Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.675994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:18.676011Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:18.680154Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:18.680516Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:18.680568Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:18.680739Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:18.681409Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:18.681430Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:18.681475Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:18.681486Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:18.681490Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:18.681501Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:18.681515Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:18.681519Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:18.681522Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:18.681526Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:18.681539Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330 ... 
], Recipient [1:558:2184]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:18.890302Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:626:2214], seqNo: 2, server pipe id: [1:630:2218] 2025-05-29T15:22:18.890318Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:632:2220], Recipient [1:558:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:18.890329Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:626:2214], Recipient [1:558:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 0 SeqNo: 3 } 2025-05-29T15:22:18.890331Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:18.890334Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:626:2214], seqNo: 3, version: 0, server pipe id: [1:632:2220] 2025-05-29T15:22:18.890336Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v0 -> v1 to [1:626:2214] 2025-05-29T15:22:18.890362Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:632:2220], Recipient [1:558:2184]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:18.890365Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:626:2214], seqNo: 3, server pipe id: [1:632:2220] 2025-05-29T15:22:18.890381Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:634:2222], Recipient [1:558:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:18.890391Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:558:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:18.890393Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:18.890396Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:19.119668Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435072, Sender [1:558:2184], Recipient [1:558:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2025-05-29T15:22:19.119690Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:256: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2025-05-29T15:22:19.119715Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:20: TTxUpdateEpoch Execute 2025-05-29T15:22:19.119729Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Dirty] Move to new epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z, approximate epoch start #2.2 2025-05-29T15:22:19.119737Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.119780Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #2.2 2025-05-29T15:22:19.525336Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:558:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2025-05-29T15:22:19.525359Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing 
event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.525365Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:19.525389Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [2:53:2072], Recipient [1:621:2209] 2025-05-29T15:22:19.525391Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.525396Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:19.525417Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:618:2206] 2025-05-29T15:22:19.525419Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.525422Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:19.525426Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:619:2207] 2025-05-29T15:22:19.525429Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.525431Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:19.525436Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:620:2208] 2025-05-29T15:22:19.525438Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.525440Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:19.525444Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:622:2210] 2025-05-29T15:22:19.525447Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.525449Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:19.525452Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:623:2211] 2025-05-29T15:22:19.525455Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.525457Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:19.525461Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:624:2212] 2025-05-29T15:22:19.525463Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.525466Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:19.536211Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:31: TTxUpdateEpoch Complete 2025-05-29T15:22:19.536237Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Committed] Move to new epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z, approximate epoch start #2.2 2025-05-29T15:22:19.536250Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:19.536256Z node 1 
:NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z, approximate epoch start #2.2 nodes=0 expired=0 2025-05-29T15:22:19.536265Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z nodes=0 expired=0 removed=0 2025-05-29T15:22:19.536277Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.536284Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.536289Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.536294Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.536298Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.536303Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.536306Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.536311Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.556952Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:654:2232], Recipient [1:558:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.557005Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:558:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.557010Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.557020Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.557061Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:656:2234], Recipient [1:558:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.557080Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:626:2214], Recipient [1:558:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 1 SeqNo: 4 } 2025-05-29T15:22:19.557084Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:19.557089Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:626:2214], seqNo: 4, version: 1, server pipe id: [1:656:2234] 
2025-05-29T15:22:19.557095Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v1 -> v2 to [1:626:2214] 2025-05-29T15:22:19.557131Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:656:2234], Recipient [1:558:2184]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:19.557135Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:626:2214], seqNo: 4, server pipe id: [1:656:2234] 2025-05-29T15:22:19.557153Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:658:2236], Recipient [1:558:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.557163Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:626:2214], Recipient [1:558:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 0 SeqNo: 5 } 2025-05-29T15:22:19.557168Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:19.557171Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:626:2214], seqNo: 5, version: 0, server pipe id: [1:658:2236] 2025-05-29T15:22:19.557173Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v0 -> v2 to [1:626:2214] >> TPartitionTests::ConflictingSrcIdForTxWithHead [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLease [GOOD] Test command err: 2025-05-29T15:22:19.423974Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.424017Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.424040Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.424069Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.424091Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.424108Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.429969Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.430082Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.430123Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.430157Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.430194Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.430225Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.430282Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 
2025-05-29T15:22:19.430305Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.430527Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.430546Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.430560Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.430570Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.430584Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.430598Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.430627Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.434482Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.434537Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.434568Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.435454Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.435490Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.435515Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.435594Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.435609Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.435626Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.435643Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.435685Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.435702Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.435718Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.435735Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.435850Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.435867Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.435931Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 
2025-05-29T15:22:19.436021Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.436060Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.436651Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.436671Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.436680Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.436686Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.436693Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.439632Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.439952Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.439997Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.440013Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.440032Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.440820Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.440877Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.441114Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.441226Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.441254Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.441314Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.441414Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.441599Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.442900Z node 4 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.443096Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.443674Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.443941Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.445038Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.445357Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.446970Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.447105Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.469367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:19.469388Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:19.472774Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:19.473080Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:19.473121Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:19.473265Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:19.473736Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:19.473862Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:19.473904Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:19.473914Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:19.473918Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:19.473928Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Compl ... 
och version from 3 to 4 2025-05-29T15:22:19.737536Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:19.737574Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:19.737577Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 2 2025-05-29T15:22:19.737581Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.4 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:19.737597Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v4 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=2 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-05-29T15:22:19.737625Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v4 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=2 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:19.737636Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:19.749212Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:19.749242Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-05-29T15:22:19.749251Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.4 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=2 expired=0 2025-05-29T15:22:19.749278Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.4 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z nodes=2 expired=0 removed=0 2025-05-29T15:22:19.749282Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v4 to update nodes log 2025-05-29T15:22:19.749289Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v4 to update nodes log 2025-05-29T15:22:19.749380Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:691:2245], Recipient [1:683:2239]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.749421Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:692:2246], Recipient [1:683:2239]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.749452Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:692:2246] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.749464Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:694:2248] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.749471Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:694:2248], Recipient [1:683:2239]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.749475Z node 7 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:691:2245] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.749482Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:695:2249] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:19.749500Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:695:2249], Recipient [1:683:2239]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.749542Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:695:2249] 2025-05-29T15:22:19.749559Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.749565Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:19.749581Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:691:2245] 2025-05-29T15:22:19.749583Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.749585Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:19.749593Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:694:2248] 2025-05-29T15:22:19.749595Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.749597Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:19.749603Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:692:2246] 2025-05-29T15:22:19.749605Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.749609Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:19.749700Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:723:2272], Recipient [1:683:2239]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.749719Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:683:2239]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.749722Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.749727Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.4 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:19.749776Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:725:2274], Recipient [1:683:2239]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.749786Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:683:2239]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.749789Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, 
processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.749792Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.4 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:19.749835Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:727:2276], Recipient [1:683:2239]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.749846Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:683:2239]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.749848Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.749852Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.4 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:19.749893Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:729:2278], Recipient [1:683:2239]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.749909Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:683:2239]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 3 } 2025-05-29T15:22:19.749911Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.749915Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.4 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:19.749955Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:731:2280], Recipient [1:683:2239]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.749968Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:629:2213], Recipient [1:683:2239]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 3 SeqNo: 2 } 2025-05-29T15:22:19.749972Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:19.749977Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:629:2213], seqNo: 2, version: 3, server pipe id: [1:731:2280] 2025-05-29T15:22:19.749982Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v3 -> v4 to [1:629:2213] 2025-05-29T15:22:19.750023Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:731:2280], Recipient [1:683:2239]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:19.750027Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:629:2213], seqNo: 2, server pipe id: [1:731:2280] 2025-05-29T15:22:19.750053Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:733:2282], Recipient [1:683:2239]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.750066Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:629:2213], Recipient [1:683:2239]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:19.750070Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event 
TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:19.750089Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200026000 Name: "slot-0" } } 2025-05-29T15:22:19.750133Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:735:2284], Recipient [1:683:2239]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.750144Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:629:2213], Recipient [1:683:2239]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-05-29T15:22:19.750147Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:19.750156Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200026000 Name: "slot-1" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveActive [GOOD] Test command err: 2025-05-29T15:22:18.685442Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.685478Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.685499Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.685523Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.685543Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.685571Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.690968Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691062Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691103Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691132Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691164Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691190Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691239Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.691258Z node 1 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.691552Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691589Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691608Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691625Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691656Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691675Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691722Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.695234Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.695281Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.695305Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696174Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696221Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696243Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696316Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696332Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696348Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696365Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696379Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696519Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.696540Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.696562Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.696586Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.696603Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.696665Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.696745Z node 2 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.696780Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.697380Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.697399Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.697406Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.697412Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.697418Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.697424Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.697431Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.700568Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.700751Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.700780Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.700806Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.700844Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.701520Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.701567Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.701812Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.701961Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.702070Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.702144Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.702312Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.702442Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.703157Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.703369Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.703883Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.704297Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.704902Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.705168Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.706411Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.706540Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.727265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:18.727295Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:18.731031Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:18.731523Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:18.731580Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:18.731735Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:18.732309Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:18.732325Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:18.732369Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:18.732378Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Start ... 
d TEvNodesInfo for epoch #2.9 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.756736Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:774:2310], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.756745Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:631:2213], Recipient [1:719:2262]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.756748Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.756751Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.9 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.756786Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:776:2312], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.756795Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:631:2213], Recipient [1:719:2262]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2025-05-29T15:22:19.756798Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.756801Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.9 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.756835Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:778:2314], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.756844Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:631:2213], Recipient [1:719:2262]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.756847Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.756850Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.9 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.756883Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:780:2316], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.756894Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:631:2213], Recipient [1:719:2262]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 5 } 2025-05-29T15:22:19.756897Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.756900Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.9 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.756942Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:782:2318], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.756955Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:631:2213], Recipient [1:719:2262]: 
NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 9 SeqNo: 2 } 2025-05-29T15:22:19.756959Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:19.756963Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:631:2213], seqNo: 2, version: 9, server pipe id: [1:782:2318] 2025-05-29T15:22:19.756967Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v9 -> v9 to [1:631:2213] 2025-05-29T15:22:19.757005Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:782:2318], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:19.757008Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:631:2213], seqNo: 2, server pipe id: [1:782:2318] 2025-05-29T15:22:19.757024Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:784:2320], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.757036Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:631:2213], Recipient [1:719:2262]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 8 SeqNo: 3 } 2025-05-29T15:22:19.757039Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:19.757042Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:631:2213], seqNo: 3, version: 8, server pipe id: [1:784:2320] 2025-05-29T15:22:19.757044Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v8 -> v9 to [1:631:2213] 2025-05-29T15:22:19.757074Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:784:2320], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:19.757077Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:631:2213], seqNo: 3, server pipe id: [1:784:2320] 2025-05-29T15:22:19.757090Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:786:2322], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.757101Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:631:2213], Recipient [1:719:2262]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 7 SeqNo: 4 } 2025-05-29T15:22:19.757104Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:19.757106Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:631:2213], seqNo: 4, version: 7, server pipe id: [1:786:2322] 2025-05-29T15:22:19.757109Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v7 -> v9 to [1:631:2213] 2025-05-29T15:22:19.757135Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:786:2322], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:19.757138Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:631:2213], seqNo: 4, server pipe id: [1:786:2322] 2025-05-29T15:22:19.757154Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:788:2324], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerConnected 
2025-05-29T15:22:19.757163Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:631:2213], Recipient [1:719:2262]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 6 SeqNo: 5 } 2025-05-29T15:22:19.757165Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:19.757168Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:631:2213], seqNo: 5, version: 6, server pipe id: [1:788:2324] 2025-05-29T15:22:19.757170Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v6 -> v9 to [1:631:2213] 2025-05-29T15:22:19.757199Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:788:2324], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:19.757202Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:631:2213], seqNo: 5, server pipe id: [1:788:2324] 2025-05-29T15:22:19.757217Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:790:2326], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.757226Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:631:2213], Recipient [1:719:2262]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 5 SeqNo: 6 } 2025-05-29T15:22:19.757228Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:19.757231Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:631:2213], seqNo: 6, version: 5, server pipe id: [1:790:2326] 2025-05-29T15:22:19.757233Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v5 -> v9 to [1:631:2213] 2025-05-29T15:22:19.757264Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:790:2326], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:19.757267Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:631:2213], seqNo: 6, server pipe id: [1:790:2326] 2025-05-29T15:22:19.757284Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:792:2328], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.757296Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:631:2213], Recipient [1:719:2262]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:19.757299Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:19.757318Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 10800025000 Name: "slot-0" } } 2025-05-29T15:22:19.757357Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:794:2330], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.757368Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:631:2213], 
Recipient [1:719:2262]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-05-29T15:22:19.757371Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:19.757380Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 10800025000 Name: "slot-1" } } 2025-05-29T15:22:19.757416Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:796:2332], Recipient [1:719:2262]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.757427Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:631:2213], Recipient [1:719:2262]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1026 } 2025-05-29T15:22:19.757430Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:19.757434Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseSetLocationInOneRegistration [GOOD] Test command err: 2025-05-29T15:22:18.830600Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.830641Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.830664Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.830688Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.830704Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.830718Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.836101Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.836193Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.836230Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.836260Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.836294Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.836322Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.836377Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 
2025-05-29T15:22:18.836396Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.836603Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.836622Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.836636Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.836647Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.836661Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.836679Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.836708Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.840195Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.840234Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.840255Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.841009Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.841037Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.841061Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.841127Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.841147Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.841166Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.841184Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.841198Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:18.841352Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.841372Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.841387Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.841408Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.841425Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.841480Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.841543Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.841589Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.842046Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.842056Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.842062Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.842071Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.842078Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.842189Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.844991Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.845062Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.845069Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.845083Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.845640Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.845716Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.846029Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.846053Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.846153Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.846206Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.846240Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.846411Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.847838Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.847900Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.847946Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.848050Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.848089Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.848103Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.848205Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.848226Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.848520Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.848654Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.848670Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.849325Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.850490Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.850619Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.851911Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.852129Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s }
2025-05-29T15:22:18.871655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:22:18.871672Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Ta ...
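The TEvListNodes / TEvGetNode records above are the dynamic nameserver answering node-directory queries from peer actors; the Deadline value 18446744073709.551615s is TInstant::Max(), i.e. no deadline. A minimal sketch of how an actor issues such a query, assuming the stock NActors interconnect events — the class name, include paths, and field access here are assumptions based on the ydb source tree, not taken from this run:

#include <ydb/library/actors/core/actor_bootstrapped.h>
#include <ydb/library/actors/core/interconnect.h>

using namespace NActors;

// Hypothetical illustration: ask the well-known nameservice actor for the
// node list. The nameserver logs "Handle NActors::TEvInterconnect::TEvListNodes"
// (dynamic_nameserver.cpp:547 above) and replies with TEvNodesInfo.
class TNodeLister: public TActorBootstrapped<TNodeLister> {
public:
    void Bootstrap() {
        // GetNameserviceActorId() resolves the service-local nameservice actor.
        Send(GetNameserviceActorId(), new TEvInterconnect::TEvListNodes());
        Become(&TNodeLister::StateWork);
    }

    STATEFN(StateWork) {
        switch (ev->GetTypeRewrite()) {
            hFunc(TEvInterconnect::TEvNodesInfo, Handle);
        }
    }

private:
    void Handle(TEvInterconnect::TEvNodesInfo::TPtr& ev) {
        for (const auto& node : ev->Get()->Nodes) {
            // Each entry carries the node id, host, port, and location metadata.
            (void)node;
        }
        PassAway();
    }
};

A point lookup follows the same pattern with TEvInterconnect::TEvGetNode for a single node id, answered with a node-info event — that is the dynamic_nameserver.cpp:576 handler seen in the records above.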
:TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:682:2240] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:22:19.786597Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:682:2240] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:19.786644Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:689:2241], recipient# [1:681:2183], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:19.786658Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] 
Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:19.786673Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:19.786688Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:681:2183], Recipient [1:569:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:19.786693Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:19.786715Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:19.786720Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:19.786756Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:241: [Dirty] Updated location of #1024.v4 host1:1001 to DC=1/M=2/R=3/U=4/ 2025-05-29T15:22:19.786773Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v4 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:19.786849Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:314: [Dirty] Extended lease of #1024.v4 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-05-29T15:22:19.786856Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v4 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=2 expire=Thu, 01 Jan 1970 03:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:19.786874Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 3 to 4 2025-05-29T15:22:19.786880Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=4 2025-05-29T15:22:19.797637Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:19.797668Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:241: [Committed] Updated location of #1024.v4 host1:1001 to DC=1/M=2/R=3/U=4/ 2025-05-29T15:22:19.797678Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:314: [Committed] Extended lease of #1024.v4 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-05-29T15:22:19.797681Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 3 to 4 2025-05-29T15:22:19.797685Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v4 host1:1001 to epoch cache 2025-05-29T15:22:19.797699Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v4 to update nodes log 2025-05-29T15:22:19.797729Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 10800025000 Name: 
"slot-0" } 2025-05-29T15:22:19.797828Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:693:2245], Recipient [1:569:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.797866Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:637:2213], Recipient [1:569:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.797870Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.797879Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.797927Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:695:2247], Recipient [1:569:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.797940Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:637:2213], Recipient [1:569:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:19.797944Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:19.797957Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 10800025000 Name: "slot-0" } } 2025-05-29T15:22:19.797997Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:697:2249], Recipient [1:569:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.798009Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:637:2213], Recipient [1:569:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.798012Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.798017Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.798064Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:699:2251], Recipient [1:569:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.798077Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:637:2213], Recipient [1:569:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:19.798080Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.798087Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.798120Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:701:2253], Recipient [1:569:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.798133Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender 
[1:637:2213], Recipient [1:569:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 3 } 2025-05-29T15:22:19.798136Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:19.798139Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:19.798189Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:703:2255], Recipient [1:569:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:19.798202Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:637:2213], Recipient [1:569:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 3 SeqNo: 2 } 2025-05-29T15:22:19.798205Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:19.798211Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:637:2213], seqNo: 2, version: 3, server pipe id: [1:703:2255] 2025-05-29T15:22:19.798215Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v3 -> v4 to [1:637:2213] >> TPartitionTests::ConflictingCommitsInSeveralBatches >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-false >> TNodeBrokerTest::NodesMigration1000Nodes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [FAIL] Test command err: 2025-05-29T15:22:13.280646Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888330780830858:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:13.280742Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:13.283417Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888330176806768:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:13.283438Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:13.307947Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000aae/r3tmp/tmpPuv2qi/pdisk_1.dat 2025-05-29T15:22:13.317247Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:13.340877Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10955, node 1 2025-05-29T15:22:13.357340Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/000aae/r3tmp/yandexZfKIXp.tmp 2025-05-29T15:22:13.357354Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
/home/runner/.ya/build/build_root/ciyv/000aae/r3tmp/yandexZfKIXp.tmp 2025-05-29T15:22:13.357415Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/000aae/r3tmp/yandexZfKIXp.tmp 2025-05-29T15:22:13.357477Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:13.364072Z INFO: TTestServer started on Port 12729 GrpcPort 10955 TClient is connected to server localhost:12729 2025-05-29T15:22:13.381211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:13.381241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting PQClient connected to localhost:10955 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:22:13.385086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:13.412336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:13.412359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:13.414519Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:22:13.415249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.415423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:22:13.433959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:13.633660Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888330176807137:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.633681Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888330176807112:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.633726Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:13.634817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-05-29T15:22:13.638517Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888330176807141:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-05-29T15:22:13.695809Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888330176807169:2165] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:13.739151Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888330780831927:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:13.739240Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzQ3NjdjNWQtYmUzNmI4ZTUtNDdkMGJlNWYtODBjNWJjNGI=, ActorId: [1:7509888330780831886:2334], ActorState: ExecuteState, TraceId: 01jwea5zss4g7s8g52ncy87d93, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:13.739272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.739729Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888330176807183:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:13.739674Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:13.739856Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=NTBhZGRkZjctZDRiYjZkNmQtYjE1YTA4YmMtZDE0Yzc0OQ==, ActorId: [2:7509888330176807110:2309], ActorState: ExecuteState, TraceId: 01jwea5zr15wen3mg66q0qrs2b, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:13.739984Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:13.760166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.788620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:13.823437Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888330780832319:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:13.823544Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTU4MjM3YjEtOTNjYmIzODAtYmU2ODdhM2ItZmQzMjExYmU=, ActorId: [1:7509888330780832316:2374], ActorState: ExecuteState, TraceId: 01jwea5zxj4bdkykvzr5h21f3w, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C16BFC) NUnitTest::NPrivate::RaiseErr ... PathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:18.031622Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:18.031655Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:18.033260Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:18.035377Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:18.035867Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:18.035888Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-29T15:22:18.036775Z node 9 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2025-05-29T15:22:18.036983Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:18.047414Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:18.262227Z node 9 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7509888350982397290:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:18.262250Z node 9 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7509888350982397301:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:18.262258Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:18.262978Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:18.263986Z node 9 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7509888350982397333:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:18.264016Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:18.266998Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7509888350982397304:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2025-05-29T15:22:18.268252Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:18.275975Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [10:7509888349143848116:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:18.276057Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=10&id=ODc0NDY1MzItMTliMzZjMy0zYjdkODUzNy02Yzk5ZGFkNQ==, ActorId: [10:7509888349143848091:2309], ActorState: ExecuteState, TraceId: 01jwea649020xxm9db47jgta9z, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:18.276187Z node 10 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:18.325878Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:18.341226Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:18.356211Z node 9 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [9:7509888350982397784:3030] txid# 281474976720666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:18.363107Z node 9 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [9:7509888350982397773:2377], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:18.363678Z node 9 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=9&id=ZmE5OGVhODEtYzFiNjg0YTktYmMxNGFkYWUtN2YzNzQyM2E=, ActorId: [9:7509888350982397770:2375], ActorState: ExecuteState, TraceId: 01jwea64bg5wgjn61tke4s25a3, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:22:18.385187Z node 9 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 18 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C16BFC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DCA7A9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x13965684) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x1394D528) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139741A2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139720D7) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x13970D08) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x13969A6B) NPersQueue::TTestServer::TTestServer(bool)+134 (0x139502B6) NTestSuiteTPartitionChooserSuite::CreateServer()+24 (0x13950118) NTestSuiteTPartitionChooserSuite::TTestCaseTPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test::Execute_(NUnitTest::TTestContext&)+32 (0x1395A080) NTestSuiteTPartitionChooserSuite::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13962767) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13DCC65E) NTestSuiteTPartitionChooserSuite::TCurrentTest::Execute()+433 (0x13962121) NUnitTest::TTestFactory::Execute()+803 (0x13DCCDD3) NUnitTest::RunMain(int, char**)+3021 (0x13DDE97D) ??+0 (0x7F4F67757D90) __libc_start_main+128 (0x7F4F67757E40) _start+41 (0x12A48029) >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [FAIL] >> TNodeBrokerTest::SingleDomainModeBannedIds >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] Test command err: 2025-05-29T15:22:12.180666Z :HappyWay INFO: Random seed for debugging is 1748532132180658 2025-05-29T15:22:12.366894Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888325643942284:2077];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:12.366945Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:12.379431Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888327113930064:2158];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000abd/r3tmp/tmp6jtsS8/pdisk_1.dat 2025-05-29T15:22:12.414017Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:12.414060Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:12.414640Z node 1 :PQ_READ_PROXY DEBUG: 
caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:12.440945Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13992, node 1 2025-05-29T15:22:12.458137Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/000abd/r3tmp/yandexEGTFkk.tmp 2025-05-29T15:22:12.458151Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/000abd/r3tmp/yandexEGTFkk.tmp 2025-05-29T15:22:12.458212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/000abd/r3tmp/yandexEGTFkk.tmp 2025-05-29T15:22:12.458276Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:12.464203Z INFO: TTestServer started on Port 16879 GrpcPort 13992 2025-05-29T15:22:12.468859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:12.468887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:12.470510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16879 PQClient connected to localhost:13992 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:12.507161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:12.507193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:12.508574Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:22:12.508891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:12.509343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 
2025-05-29T15:22:12.757723Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888327113930276:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:12.757767Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888327113930265:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:12.757791Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:12.761853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-05-29T15:22:12.765777Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888327113930280:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-05-29T15:22:12.846117Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888327113930308:2125] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:12.889819Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888325643943270:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:12.889914Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODJlMmJmYjItNzM2OTc4NTUtOTU4ZmE1ODUtMTM3ZGE1N2U=, ActorId: [1:7509888325643943220:2332], ActorState: ExecuteState, TraceId: 01jwea5yz98apa2xtfp6c5pdne, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:12.890285Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:12.890446Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888327113930323:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:12.890533Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=YTk0YzRiMDEtY2MyNjY3YWYtYWM2M2Y5OC0zOTExMjdkZg==, ActorId: [2:7509888327113930249:2305], ActorState: ExecuteState, TraceId: 01jwea5ywm20kzwm4f4vtdtx0k, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:12.890397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2025-05-29T15:22:12.890665Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:12.955452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:13.022474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:13992", true, true, 1000); 2025-05-29T15:22:13.054307Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888329938910931:2372], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:13.054423Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTIzZTE3ODMtNWVmZmYwZjgtZDY3YmFjNWYtOTBiNzE2Y2I=, ActorId: [1:7509888329938910928:2370], ActorState: ExecuteState, TraceId: 01jwea5z5ebkpt98c7ce113z87, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C16BFC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DCA7A9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev: ... ::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DCA7A9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x13965684) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x1394D528) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139741A2) NPersQueue::SDKTestSetup::Start(bool, bool)+1450 (0x13B0236A) NPersQueue::SDKTestSetup::SDKTestSetup(TBasicString> const&, bool, TVector> const&, NActors::NLog::EPriority, unsigned int, unsigned long)+675 (0x13B01743) void std::__y1::allocator::construct[abi:fe200000](NYdb::NPersQueue::NTests::TPersQueueYdbSdkTestSetup*, char const*&)+72 (0x13B01428) NKikimr::NPQ::NTestSuiteTFetchRequestTests::TTestCaseCheckAccess::Execute_(NUnitTest::TTestContext&)+79 (0x13AFB37F) NKikimr::NPQ::NTestSuiteTFetchRequestTests::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13AFF7A7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13DCC65E) NKikimr::NPQ::NTestSuiteTFetchRequestTests::TCurrentTest::Execute()+425 (0x13AFF169) NUnitTest::TTestFactory::Execute()+803 (0x13DCCDD3) NUnitTest::RunMain(int, char**)+3021 (0x13DDE97D) ??+0 (0x7F8269FCAD90) __libc_start_main+128 (0x7F8269FCAE40) _start+41 (0x12A48029) 2025-05-29T15:22:16.139848Z node 7 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:16.139888Z node 7 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:16.143959Z node 7 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:16.147097Z node 7 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [7:199:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:22:16.147297Z node 7 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [7:208:2220] 2025-05-29T15:22:16.148129Z node 7 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [7:208:2220] 2025-05-29T15:22:16.149496Z node 7 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b63d9b3d-a0beade3-d1d62152-3653c5e1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' 
owner default Captured kesus quota request event from [7:224:2233] 2025-05-29T15:22:16.150836Z node 7 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5d29bcd8-f6be6929-71daa23f-184b459e_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:224:2233] 2025-05-29T15:22:16.488728Z node 7 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|7f1612ef-49834c36-ba11152f-bbd39203_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:224:2233] 2025-05-29T15:22:16.765210Z node 7 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|122bd192-c046315-4c62a464-2204602e_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:224:2233] 2025-05-29T15:22:17.002179Z node 7 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|87514c7a-437c5834-1711fa0c-648bbb94_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:224:2233] 2025-05-29T15:22:17.238133Z node 7 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|77caad23-dcbc2f81-b6dec2a5-6fbc18a5_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:224:2233] **** Total histogram: ****
Interval=0ms: 1
Interval=1ms: 0
Interval=5ms: 0
Interval=10ms: 0
Interval=20ms: 0
Interval=50ms: 0
Interval=100ms: 0
Interval=500ms: 0
Interval=1000ms: 3
Interval=2500ms: 2
Interval=5000ms: 0
Interval=10000ms: 0
Interval=999999ms: 0
**** **** **** **** 2025-05-29T15:22:17.582308Z node 8 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:17.582330Z node 8 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:17.585150Z node 8 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:17.585314Z node 8 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 2 actor [8:197:2211] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-05-29T15:22:17.585419Z node 8 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [8:205:2217] 2025-05-29T15:22:17.585563Z node 8 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [8:205:2217] 2025-05-29T15:22:17.585698Z node 8 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [8:206:2218] 2025-05-29T15:22:17.585781Z node 8 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [8:206:2218] 2025-05-29T15:22:17.586855Z node 8 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4e6d0180-cce149f8-7c7afa6d-cf9ad07a_0 generated for partition 0 topic 'topic' owner default 2025-05-29T15:22:17.587830Z node 8 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ee4af3f-1b3257a2-fb51c337-5a96c39c_1 generated for partition 0 topic 'topic' owner default 2025-05-29T15:22:17.588465Z node 8 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4d5be01-ebe92bd8-86b824a8-78a70d4c_2 generated for partition 0 topic 'topic' owner default 2025-05-29T15:22:17.589047Z node 8 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|9c1c56a5-de9b9f57-758febda-b73d9dc1_3 generated for partition 0 topic 'topic' owner default 2025-05-29T15:22:17.837442Z node 9 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:17.837464Z node 9 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:17.840252Z node 9 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:17.840393Z node 9 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 3 actor [9:197:2211] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 
6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-05-29T15:22:17.840484Z node 9 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [9:206:2218] 2025-05-29T15:22:17.840988Z node 9 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [9:206:2218] 2025-05-29T15:22:17.841827Z node 9 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|dbef0ef3-f835de40-7b6107b5-8f80d6e3_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:222:2231] 2025-05-29T15:22:17.842844Z node 9 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|751b3f78-5ee2bdeb-36157662-d98dd8c0_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:222:2231] Captured TEvRequest, cmd write size: 3 2025-05-29T15:22:18.180255Z node 9 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8b668862-f5bc326a-e5ddbfc5-abe4f8e3_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:222:2231] 2025-05-29T15:22:18.456140Z node 9 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c67af35f-ca619bef-5cf27ff1-ee09930d_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:222:2231] Captured TEvRequest, cmd write size: 1 Captured TEvRequest, cmd write size: 1 2025-05-29T15:22:18.702667Z node 9 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1faaa967-7a41e5f3-c62876b8-1c63202c_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:222:2231] 2025-05-29T15:22:18.937735Z node 9 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|6de2f412-4bf4d0-b7f1de6d-1f123cf7_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:222:2231] >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-false >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-false >> KqpOlapOptimizer::SpecialSliceToOneLayer [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] Test command err: 2025-05-29T15:22:18.856120Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.856172Z node 3 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.856204Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.856243Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.856276Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.856302Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.863932Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.864044Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.864101Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.864146Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.864206Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.864248Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.864317Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.864344Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.864602Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.864632Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.864654Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.864674Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.864699Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.864719Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.864762Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.868720Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.868779Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.868810Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.869868Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.869906Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.869938Z node 7 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.870027Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.870049Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.870072Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.870103Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.870140Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.870163Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.870186Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.870345Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.870369Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.870392Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.870488Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.870575Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.870629Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.871288Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.871313Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.871324Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.871332Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.871345Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.875324Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.875745Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.875790Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.875865Z node 5 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.876881Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.877006Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.877266Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.877294Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.877504Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.877605Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.877656Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.877711Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.877881Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.878027Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.878101Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.878222Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.878665Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.878680Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.878759Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.880325Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.880518Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.880596Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.880688Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.880945Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.881467Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.883336Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.883971Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.909180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:18.909204Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:18.913981Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:18.914424Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTab ... ide: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:737:2267] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:22:20.396628Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:737:2267] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:20.396668Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:744:2268], recipient# [1:736:2183], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:20.396679Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } 
ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:20.396688Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:20.396700Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:736:2183], Recipient [1:567:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:20.396704Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:20.396721Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:20.396724Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:20.396747Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v6 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:20.396813Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v6 host2:1001 2025-05-29T15:22:20.396823Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 5 to 6 2025-05-29T15:22:20.396828Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=6 2025-05-29T15:22:20.396915Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:746:2270], Recipient [1:567:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:20.396932Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:635:2213], Recipient [1:567:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:20.396935Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:20.396943Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:20.396992Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:748:2272], Recipient [1:567:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:20.397005Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:635:2213], Recipient [1:567:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:20.397008Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:20.397016Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR 2025-05-29T15:22:20.397626Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:31: TTxUpdateEpoch Complete 2025-05-29T15:22:20.397636Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:530: [Committed] Remove node #1024.v4 host1:1001 2025-05-29T15:22:20.397646Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Committed] Move to new epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z, approximate epoch start #4.5 2025-05-29T15:22:20.397656Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:20.397661Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z, approximate epoch start #4.5 nodes=0 expired=0 2025-05-29T15:22:20.397670Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z nodes=0 expired=0 removed=1 2025-05-29T15:22:20.397673Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v5 to update nodes log 2025-05-29T15:22:20.397682Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:20.397687Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:20.397691Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:20.397696Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:20.397700Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:20.397705Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:20.397709Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:20.397725Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:20.408698Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:20.408716Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v6 host2:1001 2025-05-29T15:22:20.408724Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 5 to 6 2025-05-29T15:22:20.408729Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v6 host2:1001 to epoch cache 2025-05-29T15:22:20.408746Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v6 to update nodes log 2025-05-29T15:22:20.408785Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: 
TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18000025000 Name: "slot-0" } 2025-05-29T15:22:20.408876Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:755:2279], Recipient [1:567:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:20.408889Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:635:2213], Recipient [1:567:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:20.408893Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:20.408901Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.6 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:20.408960Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:757:2281], Recipient [1:567:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:20.408970Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:635:2213], Recipient [1:567:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:20.408973Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:20.408977Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.6 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:20.409021Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:759:2283], Recipient [1:567:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:20.409034Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:635:2213], Recipient [1:567:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:20.409038Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:20.409050Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18000025000 Name: "slot-0" } } >> TNodeBrokerTest::NodesV2BackMigration >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TPQTest::TestWritePQBigMessage [GOOD] >> TPQTest::TestWritePQ >> TPQTest::TestCheckACL [GOOD] >> TPQTest::TestAlreadyWrittenWithoutDeduplication >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true >> TNodeBrokerTest::ShiftIdRangeRemoveExpired |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapOptimizer::SpecialSliceToOneLayer [GOOD] Test command err: Trying to start YDB, gRPC: 18147, MsgBus: 3092 2025-05-29T15:21:33.388270Z node 1 
:METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888159114417857:2271];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.388348Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026f3/r3tmp/tmpfWBXwE/pdisk_1.dat 2025-05-29T15:21:33.485517Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.486610Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888159114417611:2079] 1748532093386191 != 1748532093386194 2025-05-29T15:21:33.494710Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.494788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 18147, node 1 2025-05-29T15:21:33.503373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:33.514964Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.514979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.514981Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.515026Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3092 TClient is connected to server localhost:3092 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.664461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
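The startup lines above expose the test server's gRPC port (18147) and root database (/Root). A minimal sketch of connecting to such an instance and mirroring the TClient::Ls("Root") call, assuming the YDB Python SDK (`pip install ydb`) and that the server is still listening; the port and database path come straight from the log, while the SDK calls (Driver, wait, scheme_client.list_directory) are an assumption about the public SDK surface, not something the test itself uses:

import ydb

# Endpoint and database taken from the startup lines above
# ("Trying to start YDB, gRPC: 18147", root database /Root).
driver = ydb.Driver(endpoint="grpc://localhost:18147", database="/Root")
driver.wait(timeout=5)  # raises if the server is not reachable in time

# Rough analogue of the TClient::Ls("Root") request/response in the log:
for child in driver.scheme_client.list_directory("/Root").children:
    print(child.name, child.type)

driver.stop()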
2025-05-29T15:21:33.669450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:33.684102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:33.697876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:33.697954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:33.698006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:33.698028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:33.698054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:33.698077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:33.698095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:33.698112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:33.698141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:33.698158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:33.698177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.698207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:33.706965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:33.706990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:33.707006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:33.707012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:33.707036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:33.707041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:33.707052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:33.707058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:33.707070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:33.707079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:33.707086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:33.707093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:33.707118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:33.707126Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:33.707148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:33.707153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:33.707167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:33.707199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:33.707208Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:33.707215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:33.707221Z node 1 :TX_COLUMNS ... riority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11724304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11724304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11724304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11724304;columns=5; 2025-05-29T15:21:52.806457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=write_queue.cpp:58;event=queue_on_write;size=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11724304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11724304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11724304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11724304;columns=5; 2025-05-29T15:21:53.277016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=write_queue.cpp:58;event=queue_on_write;size=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11724304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11724304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11724304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11724304;columns=5; 2025-05-29T15:21:53.713264Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=write_queue.cpp:58;event=queue_on_write;size=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11724304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11724304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11724304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11724304;columns=5; 2025-05-29T15:21:54.224547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=write_queue.cpp:58;event=queue_on_write;size=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11724304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11724304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11724304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11724304;columns=5; 2025-05-29T15:21:54.690408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=write_queue.cpp:58;event=queue_on_write;size=1; WAIT_COMPACTION: 45 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11725304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11725304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11726304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11726304;columns=5; 2025-05-29T15:21:55.129225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=write_queue.cpp:58;event=queue_on_write;size=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11727304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11727304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11728304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11728304;columns=5; 2025-05-29T15:21:55.551494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=write_queue.cpp:58;event=queue_on_write;size=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11729304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11729304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11730304;columns=5; 
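The repeating FALLBACK_ACTOR_LOGGING records above show the column-shard write path parsing and serializing each ~11.7 MB, 5-column batch before it is queued (the write_queue.cpp "queue_on_write" events). A minimal sketch for totalling those batches from a captured log, assuming only the `event=serialize;size=<bytes>;columns=<n>;` key-value shape visible above:

import re

# Sum batch sizes from "event=serialize;size=<bytes>;columns=<n>;" records
# (format copied from the FALLBACK_ACTOR_LOGGING lines above; this helper
# is illustrative, not part of the test harness).
SERIALIZE_RE = re.compile(r"event=serialize;size=(\d+);columns=(\d+);")

def serialized_totals(log_text: str) -> tuple[int, int]:
    sizes = [int(size) for size, _cols in SERIALIZE_RE.findall(log_text)]
    return len(sizes), sum(sizes)

# Each batch above is ~11.7 MB across 5 columns, so a handful of writes is
# enough to keep the compaction loop (the WAIT_COMPACTION counters) busy.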
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11730304;columns=5; 2025-05-29T15:21:55.970003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=write_queue.cpp:58;event=queue_on_write;size=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11731304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11731304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11732304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11732304;columns=5; 2025-05-29T15:21:56.396049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888159114418289:2314];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=write_queue.cpp:58;event=queue_on_write;size=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=11733304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=11733304;columns=5; WAIT_COMPACTION: 50 WAIT_COMPACTION: 50 WAIT_COMPACTION: 50 WAIT_COMPACTION: 50 WAIT_COMPACTION: 50 WAIT_COMPACTION: 50 WAIT_COMPACTION: 50 WAIT_COMPACTION: 50 WAIT_COMPACTION: 50 WAIT_COMPACTION: 50 2025-05-29T15:22:06.842092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888300848340821:3068], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:06.842125Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:06.842573Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888300848340826:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:06.843986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-05-29T15:22:06.846640Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888300848340828:3072], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-05-29T15:22:06.900829Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888300848340881:3202] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:07.592535Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532127000, txId: 18446744073709551615] shutting down 2025-05-29T15:22:07.657060Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532127639, txId: 281474976715666] shutting down 0/0 1/550000 2/0 2025-05-29T15:22:07.662432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:07.666864Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715668;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715668; WAIT_COMPACTION: 50 WAIT_COMPACTION: 51 WAIT_COMPACTION: 51 WAIT_COMPACTION: 51 WAIT_COMPACTION: 52 WAIT_COMPACTION: 52 WAIT_COMPACTION: 52 WAIT_COMPACTION: 52 WAIT_COMPACTION: 52 WAIT_COMPACTION: 52 WAIT_COMPACTION: 52 WAIT_COMPACTION: 52 WAIT_COMPACTION: 52 WAIT_COMPACTION: 52 2025-05-29T15:22:21.767295Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532141000, txId: 18446744073709551615] shutting down [[[0u];["{\"weight\":0,\"details\":{},\"level\":0,\"selectivity\":{\"default\":{\"raw_bytes\":0,\"count\":0,\"records_count\":0,\"blob_bytes\":0,\"bytes_by_channel\":{},\"blobs\":0},\"slice\":{\"raw_bytes\":0,\"count\":0,\"records_count\":0,\"blob_bytes\":0,\"bytes_by_channel\":{},\"blobs\":0}}}"]];[[1u];["{\"weight\":0,\"details\":{},\"level\":1,\"selectivity\":{\"default\":{\"raw_bytes\":0,\"count\":0,\"records_count\":0,\"blob_bytes\":0,\"bytes_by_channel\":{},\"blobs\":0},\"slice\":{\"raw_bytes\":0,\"count\":0,\"records_count\":0,\"blob_bytes\":0,\"bytes_by_channel\":{},\"blobs\":0}}}"]];[[2u];["{\"weight\":0,\"details\":{\"fraction\":1,\"total_bytes\":3715712,\"bytes_limit\":100000000,\"expected_portion_size\":40000,\"size_limit_guarantee\":100000000},\"level\":2,\"selectivity\":{\"default\":{\"raw_bytes\":129498657,\"count\":123,\"records_count\":109000,\"blob_bytes\":3715712,\"bytes_by_channel\":{\"25\":1060584,\"255\":2338288,\"26\":316840},\"blobs\":246},\"slice\":{\"raw_bytes\":129498657,\"count\":123,\"records_count\":109000,\"blob_bytes\":3715712,\"bytes_by_channel\":{\"25\":1060584,\"255\":2338288,\"26\":316840},\"blobs\":246}}}"]]] 2025-05-29T15:22:21.808876Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532141796, txId: 281474976715672] shutting down 0/0/0 1/0/0 2/109000/109000 2025-05-29T15:22:21.872903Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532141857, txId: 
281474976715674] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] Test command err: 2025-05-29T15:22:18.665005Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.665059Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.665086Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.665112Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.665138Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.665160Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.671398Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.671493Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.671530Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.671559Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.671594Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.671621Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.671682Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.671700Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.671884Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.671906Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.671918Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.671929Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.671943Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.671955Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.671984Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.676040Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.676092Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 
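The `Deadline: 18446744073709.551615s` value carried by every TEvGetNode above is not an arbitrary timeout: it is 2^64 - 1 microseconds rendered as seconds, i.e. the maximum instant, which the nameserver effectively treats as "no deadline". A quick check, assuming only the microsecond encoding implied by the six fractional digits:

# 2^64 - 1 microseconds, printed as seconds with 6 fractional digits,
# reproduces the Deadline value seen in every TEvGetNode above.
max_us = 2**64 - 1
secs, us = divmod(max_us, 10**6)   # exact integer math; float division is lossy
print(f"{secs}.{us:06d}")          # -> 18446744073709.551615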
2025-05-29T15:22:18.676118Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.676835Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.676875Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.676889Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.676959Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.676973Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.676989Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.677008Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.677040Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.677056Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.677071Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.677087Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.677190Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.677207Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.677264Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.677326Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.677358Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.677874Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.677885Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.677891Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.677897Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.677903Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.677990Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.680674Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.680701Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.680760Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.680768Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.681611Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.681717Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.681830Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.681883Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.681957Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.681980Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.682038Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.682126Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.682170Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.682369Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.682707Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.682955Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.703386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:18.703404Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-05-29T15:22:18.707027Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:18.707346Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:18.707388Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:18.707541Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:18.708133Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:18.708157Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:18.708204Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:18.708214Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:18.708217Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:18.708227Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:18.708241Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:18.708245Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:18.708249Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:18.708252Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:18.708263Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330 ... 
s: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:21.767904Z node 9 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [9:216:2205], recipient# [9:208:2178], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:21.767921Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:21.767936Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:21.767946Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [9:208:2178], Recipient [9:172:2178]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:21.767950Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:21.767961Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:21.767965Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:21.767982Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.host1.host1 address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:21.768016Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v2 host1:1001 2025-05-29T15:22:21.768030Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2 2025-05-29T15:22:21.768034Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=2 2025-05-29T15:22:21.778533Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:21.778548Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-05-29T15:22:21.778554Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 
2025-05-29T15:22:21.778557Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v2 host1:1001 to epoch cache 2025-05-29T15:22:21.778570Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-05-29T15:22:21.778594Z node 9 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } 2025-05-29T15:22:21.778654Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [9:220:2209], Recipient [9:172:2178]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:21.778675Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [9:205:2201], Recipient [9:172:2178]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:22:21.778680Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:21.778685Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:22:21.778710Z node 9 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [9:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:21.778724Z node 9 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [9:16:2063], cacheItem# { Subscriber: { Subscriber: [9:209:2204] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:21.778780Z node 9 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [9:222:2210], recipient# [9:221:2178], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:21.778793Z node 9 
:NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:21.778805Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:21.778817Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [9:221:2178], Recipient [9:172:2178]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:21.778822Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:21.778832Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:21.778836Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:21.778855Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v3 host2:1001 to database state=Active resolvehost=host2.host2.host2 address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-05-29T15:22:21.778895Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v3 host2:1001 2025-05-29T15:22:21.778900Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 2 to 3 2025-05-29T15:22:21.778905Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=3 2025-05-29T15:22:21.789468Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:21.789484Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v3 host2:1001 2025-05-29T15:22:21.789490Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 2 to 3 2025-05-29T15:22:21.789494Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v3 host2:1001 to epoch cache 2025-05-29T15:22:21.789507Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v3 to update nodes log 2025-05-29T15:22:21.789534Z node 9 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200025000 Name: "slot-1" } ... 
waiting for cache miss 2025-05-29T15:22:21.789591Z node 9 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:525: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1024 Deadline: 1.107024s } 2025-05-29T15:22:21.789602Z node 9 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:626: New cache miss: nodeId# 1024, deadline# 1.107024s 2025-05-29T15:22:21.789605Z node 9 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:630: Schedule wakeup for new earliest deadline 1.107024s 2025-05-29T15:22:21.789610Z node 9 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:525: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1025 Deadline: 2.107024s } 2025-05-29T15:22:21.789614Z node 9 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:626: New cache miss: nodeId# 1025, deadline# 2.107024s ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) 2025-05-29T15:22:21.860719Z node 9 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:873: HandleWakeup at 1.108024s 2025-05-29T15:22:21.860740Z node 9 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:134: Cache miss failed: nodeId=1024, error=Deadline exceeded 2025-05-29T15:22:21.860748Z node 9 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:888: Schedule next wakeup at 2.107024s 2025-05-29T15:22:21.901325Z node 9 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:873: HandleWakeup at 2.108024s 2025-05-29T15:22:21.901348Z node 9 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:134: Cache miss failed: nodeId=1025, error=Deadline exceeded >> PQCountersLabeled::PartitionFirstClass [GOOD] >> PQCountersLabeled::ImportantFlagSwitching |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true [GOOD] |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> TNodeBrokerTest::ExtendLeaseBumpVersion [GOOD] >> TNodeBrokerTest::EpochCacheUpdate >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] Test command err: 2025-05-29T15:22:22.081017Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [2:246:2103] Bootstrap 2025-05-29T15:22:22.102545Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [2:246:2103] Become StateWork (SchemeCache [2:256:2106]) 2025-05-29T15:22:22.102683Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [1:245:2153] Bootstrap 2025-05-29T15:22:22.104323Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [1:245:2153] Become StateWork (SchemeCache [1:259:2159]) 2025-05-29T15:22:22.104407Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [3:247:2103] Bootstrap 2025-05-29T15:22:22.105989Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [3:247:2103] Become StateWork (SchemeCache [3:261:2106]) 2025-05-29T15:22:22.114362Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:22.115993Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} 
StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:22.116022Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:22:22.116291Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:22.116533Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:22:22.116573Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:22:22.116577Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:22:22.116602Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:22:22.118310Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:22:22.118328Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:22:22.118340Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:22:22.118355Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:22:22.118365Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:22:22.118382Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:22:22.151647Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:22:22.151691Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:22:22.162555Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:22:22.162608Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:22:22.162626Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:22:22.162641Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:22:22.162669Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:22:22.162682Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:22:22.162691Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 
2025-05-29T15:22:22.162701Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:22:22.173457Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:22:22.173498Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:22:22.184455Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:22:22.184525Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:22:22.184747Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:22:22.184756Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:22:22.186817Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:22:22.186838Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:22:22.187265Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-05-29T15:22:22.187388Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 3 DeclarativePDiskManagement: true } 2025-05-29T15:22:22.187413Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-05-29T15:22:22.187682Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ciyv/002510/r3tmp/tmpCeWNOw/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } } } } 2025-05-29T15:22:22.187767Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/ciyv/002510/r3tmp/tmpCeWNOw/pdisk_1.dat 2025-05-29T15:22:22.187775Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/ciyv/002510/r3tmp/tmpCeWNOw/pdisk_1.dat 2025-05-29T15:22:22.187780Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 3:1000 Path# /home/runner/.ya/build/build_root/ciyv/002510/r3tmp/tmpCeWNOw/pdisk_1.dat 2025-05-29T15:22:22.188044Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-05-29T15:22:22.188076Z node 1 :BS_CONTROLLER 
DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-05-29T15:22:22.188091Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-05-29T15:22:22.188164Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-05-29T15:22:22.188182Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-05-29T15:22:22.188658Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-05-29T15:22:22.188731Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-05-29T15:22:22.199735Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-05-29T15:22:22.199791Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 3 Devices# [] 2025-05-29T15:22:22.202022Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-05-29T15:22:22.202200Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2856} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ciyv/002510/r3tmp/tmpCeWNOw/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-05-29T15:22:22.202360Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ciyv/002510/r3tmp/tmpCeWNOw/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/ciyv/002510/r3tmp/tmpCeWNOw/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9558256452278457409 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0} PDiskId# 1000 2025-05-29T15:22:22.202430Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-05-29T15:22:22.204608Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-05-29T15:22:22.204982Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2856} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ciyv/002510/r3tmp/tmpCeWNOw/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-05-29T15:22:22.205065Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ciyv/002510/r3tmp/tmpCeWNOw/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/ciyv/002510/r3tmp/tmpCeWNOw/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7081078819271339189 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0} PDiskId# 1000 2025-05-29T15:22:22.205273Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-05-29T15:22:22.205357Z node 1 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:22:22.205364Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-05-29T15:22:22.205401Z node 2 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:22:22.205407Z node 3 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-05-29T15:22:22.205458Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-05-29T15:22:22.205486Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:22:22.205506Z node 1 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:22:22.205513Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-05-29T15:22:22.205521Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1/users/tenant-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:22:22.205531Z node 2 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:22:22.205539Z node 3 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:22:22.205660Z node 1 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:22:22.205672Z node 1 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:22:22.205676Z node 1 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:22:22.205699Z node 1 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:500:2309] 2025-05-29T15:22:22.205723Z node 3 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-05-29T15:22:22.205731Z node 3 
:TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1/users/tenant-2 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:22:22.205741Z node 3 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:22:22.208161Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:22:22.208184Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:485:2305] 2025-05-29T15:22:22.208420Z node 1 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:500:2309]} 2025-05-29T15:22:22.208450Z node 1 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:22:22.208460Z node 1 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:22:22.208464Z node 1 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:22:22.208655Z node 2 :LOCAL DEBUG: local.cpp:1207: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2025-05-29T15:22:22.208663Z node 2 :LOCAL DEBUG: local.cpp:1066: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2025-05-29T15:22:22.208691Z node 3 :LOCAL DEBUG: local.cpp:1207: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2025-05-29T15:22:22.208697Z node 3 :LOCAL DEBUG: local.cpp:1066: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-2 to schemeshard 72057594046578944 2025-05-29T15:22:22.216164Z node 2 :LOCAL DEBUG: local.cpp:1234: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2025-05-29T15:22:22.216221Z node 2 :LOCAL DEBUG: local.cpp:1172: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:22:22.216349Z node 2 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:22:22.216355Z node 2 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:22:22.216374Z node 2 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:512:2116] 2025-05-29T15:22:22.216439Z node 3 :LOCAL DEBUG: local.cpp:1234: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-2" PathDescription { Self { Name: "/dc-1/users/tenant-2" PathId: 101 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 101 DomainKey { SchemeShard: 72057594046578944 PathId: 101 } } } 2025-05-29T15:22:22.216456Z node 3 :LOCAL DEBUG: local.cpp:1172: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-2 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:22:22.216526Z node 3 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:22:22.216531Z node 3 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:22:22.216540Z node 3 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe 
to hive, pipe:[3:515:2116] 2025-05-29T15:22:22.216597Z node 3 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1/users/tenant-2 2025-05-29T15:22:22.216604Z node 3 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [3:487:2112] 2025-05-29T15:22:22.216658Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1/users/tenant-1 2025-05-29T15:22:22.216664Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:486:2112] 2025-05-29T15:22:22.217466Z node 3 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[3:515:2116]} 2025-05-29T15:22:22.217486Z node 2 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:512:2116]} 2025-05-29T15:22:22.217584Z node 3 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:22:22.217595Z node 3 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:22:22.217600Z node 3 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:22:22.217619Z node 2 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:22:22.217625Z node 2 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:22:22.217628Z node 2 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2025-05-29T15:22:22.051038Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.051235Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.053979Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.054009Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.075999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:22.076015Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:22.078944Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:22.079289Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:22.079342Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:22.079510Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:22.080072Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:22.080116Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:22.080154Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 
2025-05-29T15:22:22.080167Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:22.080172Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:22.080185Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:22.080198Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:22.080203Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:22.080207Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:22.080213Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:22.080227Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1 2025-05-29T15:22:22.080233Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes 2025-05-29T15:22:22.101364Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:22.101390Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.024000Z 2025-05-29T15:22:22.101397Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-05-29T15:22:22.101403Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z nodes=0 expired=0 removed=0 2025-05-29T15:22:22.141961Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:201:2197], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:22.141992Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:201:2197] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } ... 
waiting for nameservers are connected (done) 2025-05-29T15:22:22.142355Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2025-05-29T15:22:22.142360Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.142367Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:22.142391Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:205:2201], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:22.142414Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:203:2199], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:22:22.142418Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:22.142424Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:22:22.142460Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:22.142471Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:16:2063], path# /dc-1, domainOwnerId# 72057594046678944 2025-05-29T15:22:22.146756Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-05-29T15:22:22.146828Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:207:2202] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:22:22.146876Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:16:2063], cacheItem# { Subscriber: { Subscriber: [1:207:2202] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:22.146943Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:214:2203], recipient# [1:206:2176], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:22.146953Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: 
false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:22.146966Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <7205759404667894 ... Cache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:22.363641Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:22.363649Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [2:204:2176], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:22.363652Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:22.363672Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:22.363675Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:22.363696Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.host1.host1 address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:22.363741Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v2 host1:1001 2025-05-29T15:22:22.363748Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2 2025-05-29T15:22:22.363751Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=2 2025-05-29T15:22:22.374421Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:22.374439Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-05-29T15:22:22.374446Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-05-29T15:22:22.374450Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v2 host1:1001 to epoch cache 2025-05-29T15:22:22.374467Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 
2025-05-29T15:22:22.374501Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } 2025-05-29T15:22:22.374568Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [2:216:2205], Recipient [2:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:22.374590Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [2:201:2197], Recipient [2:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:22:22.374594Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:22.374600Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:22:22.374629Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:22.374647Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [2:16:2063], cacheItem# { Subscriber: { Subscriber: [2:205:2200] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:22.374683Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [2:218:2206], recipient# [2:217:2176], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:22.374693Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false 
SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:22.374701Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:22.374709Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [2:217:2176], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:22.374714Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:22.374725Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:22.374728Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:22.374761Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v3 host2:1001 to database state=Active resolvehost=host2.host2.host2 address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-05-29T15:22:22.374800Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v3 host2:1001 2025-05-29T15:22:22.374804Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 2 to 3 2025-05-29T15:22:22.374807Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=3 2025-05-29T15:22:22.385457Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:22.385473Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v3 host2:1001 2025-05-29T15:22:22.385481Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 2 to 3 2025-05-29T15:22:22.385485Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v3 host2:1001 to epoch cache 2025-05-29T15:22:22.385501Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v3 to update nodes log 2025-05-29T15:22:22.385532Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200023000 Name: "slot-1" } ... 
waiting for cache miss 2025-05-29T15:22:22.385595Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:525: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1024 Deadline: 2.107024s } 2025-05-29T15:22:22.385604Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:626: New cache miss: nodeId# 1024, deadline# 2.107024s 2025-05-29T15:22:22.385607Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:630: Schedule wakeup for new earliest deadline 2.107024s 2025-05-29T15:22:22.385613Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:525: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1025 Deadline: 1.107024s } 2025-05-29T15:22:22.385615Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:626: New cache miss: nodeId# 1025, deadline# 1.107024s 2025-05-29T15:22:22.385617Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:630: Schedule wakeup for new earliest deadline 1.107024s ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) 2025-05-29T15:22:22.395772Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:22.395791Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:22.395799Z node 2 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v1 -> v3 to [2:18:2065] ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 2025-05-29T15:22:22.457012Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:873: HandleWakeup at 1.108024s 2025-05-29T15:22:22.457038Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:134: Cache miss failed: nodeId=1025, error=Deadline exceeded 2025-05-29T15:22:22.467177Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:22.467192Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:22.507852Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:873: HandleWakeup at 2.108024s 2025-05-29T15:22:22.507875Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:134: Cache miss failed: nodeId=1024, error=Deadline exceeded |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] Test command err: 2025-05-29T15:22:21.852350Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.852596Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.855125Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.855167Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.878674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:21.878694Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table 
profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:21.881877Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:21.882137Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:21.882176Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:21.882301Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:21.882694Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:21.882725Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:21.882778Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:21.882790Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:21.882793Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:21.882804Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:21.882814Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:21.882818Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:21.882821Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:21.882824Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:21.882833Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1 2025-05-29T15:22:21.882837Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes 2025-05-29T15:22:21.904154Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:21.904192Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.024000Z 2025-05-29T15:22:21.904203Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-05-29T15:22:21.904214Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z nodes=0 expired=0 removed=0 2025-05-29T15:22:21.944984Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:201:2197], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:21.945021Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:201:2197] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } ... 
waiting for nameservers are connected (done) 2025-05-29T15:22:21.945432Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2025-05-29T15:22:21.945438Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:21.945446Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:21.945474Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:205:2201], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:21.945491Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039946, Sender [1:203:2199], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSetConfigRequest { Config { BannedNodeIds { From: 1025 To: 1032 } } } 2025-05-29T15:22:21.945496Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:253: StateWork, processing event TEvNodeBroker::TEvSetConfigRequest 2025-05-29T15:22:21.945517Z node 1 :NODE_BROKER DEBUG: node_broker__update_config.cpp:53: TTxUpdateConfig Execute Config { BannedNodeIds { From: 1025 To: 1032 } } 2025-05-29T15:22:21.945539Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1286: [DB] Update config in database config=BannedNodeIds { From: 1025 To: 1032 } 2025-05-29T15:22:21.956191Z node 1 :NODE_BROKER DEBUG: node_broker__update_config.cpp:85: TTxUpdateConfig Complete 2025-05-29T15:22:21.956220Z node 1 :NODE_BROKER TRACE: node_broker__update_config.cpp:92: TTxUpdateConfig reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvSetConfigResponse { Status { Code: OK } } 2025-05-29T15:22:21.956279Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:209:2205], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:21.956289Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:203:2199], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:21.956293Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:21.956300Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:21.956326Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:211:2207], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:21.956347Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:203:2199], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:22:21.956350Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:21.956356Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: 
"1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:22:21.956396Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:21.956407Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:16:2063], path# /dc-1, domainOwnerId# 72057594046678944 2025-05-29T15:22:21.961166Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-05-29T15:22:21.961235Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }, 
by path# { Subscriber: { Subscriber: [1:213:2208] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:22:21.961278Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:16:2063], cacheItem# { Subscriber: { Subscriber: [1:213:2208] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 720 ... _node.cpp:39: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-05-29T15:22:22.775137Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:22.775142Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: ERROR_TEMP Reason: "No free node IDs" } 2025-05-29T15:22:22.775289Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 268829696, Sender [1:164:2172], Recipient [1:170:2176]: NKikimr::TEvTablet::TEvTabletDead 2025-05-29T15:22:22.775309Z node 1 :NODE_BROKER INFO: node_broker.cpp:126: OnTabletDead: 72057594037936129 2025-05-29T15:22:22.775312Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:212: TNodeBroker::Cleanup 2025-05-29T15:22:22.775476Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:649: Handle NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037936129 ClientId: [1:22:2065] ServerId: [1:201:2197] } 2025-05-29T15:22:22.776750Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:22.777357Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:22.777396Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:22.777678Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:22.777720Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:22.777773Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:22.777828Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:958: [DB] Loaded config: BannedNodeIds { From: 1024 To: 1029 } BannedNodeIds { From: 1031 To: 1032 } 2025-05-29T15:22:22.777835Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:988: [DB] Loaded current epoch: #3.8 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2025-05-29T15:22:22.777840Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1017: [DB] Loaded approximate epoch start: #3.8 2025-05-29T15:22:22.777843Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1034: [DB] Loaded main nodes table: Nodes 2025-05-29T15:22:22.777868Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:291: [Dirty] Added expired node #1024.v0 host1:1001 2025-05-29T15:22:22.777886Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1113: [DB] Loaded node #1024.v0 { NodeId: 1024, State: Expired, Version: 0, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:22.777894Z 
node 1 :NODE_BROKER DEBUG: node_broker.cpp:281: [Dirty] Added node #1030.v0 host3:1001 2025-05-29T15:22:22.777899Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1113: [DB] Loaded node #1030.v0 { NodeId: 1030, State: Active, Version: 0, Host: host3, Port: 1001, ResolveHost: host3.yandex.net, Address: 1.2.3.6, Lease: 2, Expire: Thu, 01 Jan 1970 03:00:00 UTC, Location: DC=1/M=2/R=3/U=6/, AuthorizedByCertificate: 0, SlotIndex: 2, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:22.777907Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:281: [Dirty] Added node #1033.v0 host2:1001 2025-05-29T15:22:22.777911Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1113: [DB] Loaded node #1033.v0 { NodeId: 1033, State: Active, Version: 0, Host: host2, Port: 1001, ResolveHost: host2.yandex.net, Address: 1.2.3.5, Lease: 2, Expire: Thu, 01 Jan 1970 03:00:00 UTC, Location: DC=1/M=2/R=3/U=5/, AuthorizedByCertificate: 0, SlotIndex: 1, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:22.777921Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1024.v8 { NodeId: 1024, State: Expired, Version: 8, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:22.777926Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1239: [DB] Node #1024.v8 is already migrated 2025-05-29T15:22:22.777932Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1030.v6 { NodeId: 1030, State: Active, Version: 6, Host: host3, Port: 1001, ResolveHost: host3.yandex.net, Address: 1.2.3.6, Lease: 2, Expire: Thu, 01 Jan 1970 03:00:00 UTC, Location: DC=1/M=2/R=3/U=6/, AuthorizedByCertificate: 0, SlotIndex: 2, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:22.777935Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1239: [DB] Node #1030.v6 is already migrated 2025-05-29T15:22:22.777941Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1033.v7 { NodeId: 1033, State: Active, Version: 7, Host: host2, Port: 1001, ResolveHost: host2.yandex.net, Address: 1.2.3.5, Lease: 2, Expire: Thu, 01 Jan 1970 03:00:00 UTC, Location: DC=1/M=2/R=3/U=5/, AuthorizedByCertificate: 0, SlotIndex: 1, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:22.777943Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1239: [DB] Node #1033.v7 is already migrated 2025-05-29T15:22:22.777953Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:22.777962Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:22.777965Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:22.777968Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:22.777970Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:22.777984Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T03:00:00.024000Z 2025-05-29T15:22:22.777990Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #3.8 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z, approximate epoch start #3.8 nodes=2 expired=1 2025-05-29T15:22:22.778003Z node 1 :NODE_BROKER DEBUG: 
node_broker.cpp:603: Preparing update nodes log for epoch ##3.8 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z nodes=2 expired=1 removed=0 2025-05-29T15:22:22.778007Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1030.v6 to update nodes log 2025-05-29T15:22:22.778012Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1033.v7 to update nodes log 2025-05-29T15:22:22.778016Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v8 to update nodes log 2025-05-29T15:22:22.778891Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:358:2316], Recipient [1:324:2289]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:22.778927Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:203:2199], Recipient [1:324:2289]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:22:22.778931Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:22.778937Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:22:22.778967Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:22.778984Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:16:2063], cacheItem# { Subscriber: { Subscriber: [1:308:2285] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:22.779033Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:360:2317], recipient# [1:359:2289], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:22.779044Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle 
TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:22.779052Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:22.779061Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:359:2289], Recipient [1:324:2289]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:22.779066Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:22.779073Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:22.779076Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host4:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:22.779083Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:39: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-05-29T15:22:22.779091Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:22.779096Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: ERROR_TEMP Reason: "No free node IDs" } |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> TPQTest::TestPQPartialRead [GOOD] >> TPQTest::TestPQRead |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> TPartitionTests::DataTxCalcPredicateError [GOOD] >> TPartitionTests::DataTxCalcPredicateOrder |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] Test command err: 2025-05-29T15:22:20.839512Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.839565Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.839589Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.839624Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.839649Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.839668Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.845586Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.845697Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.845738Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.845774Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.845815Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.845848Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.845907Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.845929Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.846128Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.846147Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.846159Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.846171Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.846185Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.846198Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.846227Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.849967Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.850015Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.850038Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.850834Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.850865Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.850889Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.850961Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.850976Z node 3 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.850993Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.851011Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.851025Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.851231Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.851252Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.851267Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.851289Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.851307Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.851374Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.851442Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.851477Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.852052Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.852074Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.852081Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.852087Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.852093Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.852099Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.852106Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.855018Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.855083Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.855101Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.855170Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.855177Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.855186Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.855636Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.856020Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.856263Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.856370Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.856448Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.856558Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.856810Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.856865Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.857117Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.857650Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.857839Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.858267Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.858409Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.859467Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.859487Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.859596Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.859705Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.860182Z 
node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.860420Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.860803Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.861048Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.881123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:20.881145Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Ta ... eceived event# 269877761, Sender [1:685:2244], Recipient [1:570:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:21.831101Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039939, Sender [1:638:2214], Recipient [1:570:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1024 } 2025-05-29T15:22:21.831105Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2025-05-29T15:22:21.831123Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:44: TTxExtendLease Execute node #1024 2025-05-29T15:22:21.831152Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:314: [Dirty] Extended lease of #1024.v4 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-05-29T15:22:21.831166Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v4 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=2 expire=Thu, 01 Jan 1970 03:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:21.831216Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 3 to 4 2025-05-29T15:22:21.831219Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=4 2025-05-29T15:22:22.029118Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435072, Sender [1:570:2184], Recipient [1:570:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2025-05-29T15:22:22.029137Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:256: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2025-05-29T15:22:22.029153Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:20: TTxUpdateEpoch Execute 2025-05-29T15:22:22.029161Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Dirty] Move to new epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z, approximate epoch start #3.5 2025-05-29T15:22:22.029166Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:22.029183Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #3.5 2025-05-29T15:22:22.100388Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:570:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 3 } 2025-05-29T15:22:22.100406Z node 1 :NODE_BROKER 
TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.100412Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-05-29T15:22:22.100478Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:630:2206] 2025-05-29T15:22:22.100481Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.100484Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-05-29T15:22:22.100493Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:631:2207] 2025-05-29T15:22:22.100496Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.100498Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-05-29T15:22:22.100501Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:632:2208] 2025-05-29T15:22:22.100503Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.100506Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-05-29T15:22:22.100510Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:635:2211] 2025-05-29T15:22:22.100512Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.100514Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-05-29T15:22:22.100518Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:636:2212] 2025-05-29T15:22:22.100520Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.100522Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-05-29T15:22:22.100527Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [2:53:2072], Recipient [1:633:2209] 2025-05-29T15:22:22.100529Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.100531Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-05-29T15:22:22.100534Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:634:2210] 2025-05-29T15:22:22.100536Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.100538Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR 2025-05-29T15:22:22.111225Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:78: TTxExtendLease Complete 2025-05-29T15:22:22.111268Z node 1 :NODE_BROKER TRACE: node_broker__extend_lease.cpp:82: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1024 Expire: 10800025000 Epoch { Id: 2 Version: 4 Start: 3600025000 End: 7200025000 NextEnd: 10800025000 } } 2025-05-29T15:22:22.111287Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:314: [Committed] Extended lease of #1024.v4 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-05-29T15:22:22.111294Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 3 to 4 2025-05-29T15:22:22.111298Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v4 host1:1001 to epoch cache 2025-05-29T15:22:22.111313Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v4 to update nodes log 2025-05-29T15:22:22.111325Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:31: TTxUpdateEpoch Complete 2025-05-29T15:22:22.111332Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Committed] Move to new epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z, approximate epoch start #3.5 2025-05-29T15:22:22.111343Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:22.111348Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z, approximate epoch start #3.5 nodes=1 expired=0 2025-05-29T15:22:22.111355Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z nodes=1 expired=0 removed=0 2025-05-29T15:22:22.111358Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v4 to update nodes log 2025-05-29T15:22:22.111366Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:22.111370Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:22.111375Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:22.111379Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:22.111383Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:22.111387Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:22.111390Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:22.111394Z node 1 :NODE_BROKER TRACE: 
node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:22.131935Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:708:2255], Recipient [1:570:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:22.131967Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:638:2214], Recipient [1:570:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:22.131972Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.131982Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:22.132022Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:710:2257], Recipient [1:570:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:22.132036Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:638:2214], Recipient [1:570:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:22.132039Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.132042Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:22.132099Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:712:2259], Recipient [1:570:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:22.132115Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:638:2214], Recipient [1:570:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:22.132118Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:22.132145Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 10800025000 Name: "slot-0" } } |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> TSchemeShardSysViewTestReboots::CreateSysViewWithReboots >> TNodeBrokerTest::TestListNodes [GOOD] |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> GroupWriteTest::WriteHardRateDispatcher [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [FAIL] Test command err: 2025-05-29T15:22:15.899978Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888338993337319:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:15.899995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:15.902594Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888339041101316:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:15.902627Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:15.927839Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0008b7/r3tmp/tmpbBN6sL/pdisk_1.dat 2025-05-29T15:22:15.936562Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:15.960034Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30267, node 1 2025-05-29T15:22:15.976179Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/0008b7/r3tmp/yandexYkYrxd.tmp 2025-05-29T15:22:15.976188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/0008b7/r3tmp/yandexYkYrxd.tmp 2025-05-29T15:22:15.976240Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/0008b7/r3tmp/yandexYkYrxd.tmp 2025-05-29T15:22:15.976301Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:15.982298Z INFO: TTestServer started on Port 9491 GrpcPort 30267 TClient is connected to server localhost:9491 PQClient connected to localhost:30267 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:22:16.000557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:16.000588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-29T15:22:16.002198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:16.030064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:16.030084Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:16.031348Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:22:16.031574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:16.031809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:16.076190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:16.239011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888343288305625:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:16.239028Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888343288305620:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:16.239042Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:16.239650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:16.239922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888343288305664:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:16.239959Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:16.243251Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888343288305635:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:22:16.268756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:16.326299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:16.328202Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888343288305899:2870] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:16.340042Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888343288305922:2358], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:16.340145Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmNhNjM5OTItOTllZjc4Ni05ZGMwZDk0LThlOGY1OA==, ActorId: [1:7509888343288305603:2334], ActorState: ExecuteState, TraceId: 01jwea629c8d6mmx2tqdtr6jz5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:16.340533Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:16.350153Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888343336069033:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:16.350220Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=ZjU5ZDAwMTItY2E1YjRhZWYtZmIzMmJlZTEtYTI3OGVhNTg=, ActorId: [2:7509888343336068969:2309], ActorState: ExecuteState, TraceId: 01jwea62cs1m73j10xsc53fdr4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:16.350329Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:16.391345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:16.422084Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888343288306124:2382], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:16.422239Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjNjZTA3My1jMTZkYmFiMy1iNj ... @builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:20.746219Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:20.746250Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:20.747687Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:20.750043Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:20.750061Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:20.750344Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:20.751032Z node 9 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2025-05-29T15:22:20.751230Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:20.763096Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:20.966498Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7509888359348210312:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:20.966515Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7509888359348210334:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:20.966523Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:20.967509Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-05-29T15:22:20.971265Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7509888359348210340:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-05-29T15:22:21.006260Z node 9 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [9:7509888363701755449:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:21.006615Z node 9 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=9&id=YzZhNDlhN2QtYThhYzU2OC04OTJhYjEyNS01NWIyZDQ0MA==, ActorId: [9:7509888363701755408:2334], ActorState: ExecuteState, TraceId: 01jwea66ya4pztrjpymv3cyyn6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:21.006799Z node 9 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:21.007318Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:21.024797Z node 10 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [10:7509888363643177664:2166] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:21.028031Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [10:7509888363643177678:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:21.028095Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=10&id=MzRhNmQ4MTItODU2OGM2ZjEtYmVjOWI1MjYtODIzNjFjMjU=, ActorId: [10:7509888359348210309:2309], ActorState: ExecuteState, TraceId: 01jwea66x685k32t1a8hr7jdsp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:21.028201Z node 10 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:21.065702Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:21.129728Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:21.151398Z node 9 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [9:7509888363701755840:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:21.151478Z node 9 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=9&id=NzM4NmY4Ny1lMTg5ZGY1Ni0yNzViOGM5Yi04OGZmNGIzMA==, ActorId: [9:7509888363701755837:2374], ActorState: ExecuteState, TraceId: 01jwea672n6rpq77q6kh9trjax, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C16BFC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DCA7A9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x13965684) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x1394D528) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139741A2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139720D7) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x13970D08) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x13969A6B) NPersQueue::TTestServer::TTestServer(bool)+134 (0x139502B6) NTestSuiteTPartitionChooserSuite::CreateServer()+24 (0x13950118) NTestSuiteTPartitionChooserSuite::TTestCaseTPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test::Execute_(NUnitTest::TTestContext&)+32 (0x1395CB60) NTestSuiteTPartitionChooserSuite::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13962767) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13DCC65E) NTestSuiteTPartitionChooserSuite::TCurrentTest::Execute()+433 (0x13962121) NUnitTest::TTestFactory::Execute()+803 (0x13DCCDD3) NUnitTest::RunMain(int, char**)+3021 (0x13DDE97D) ??+0 (0x7F7B4C4E6D90) __libc_start_main+128 (0x7F7B4C4E6E40) _start+41 (0x12A48029) |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodes [GOOD] Test command err: 2025-05-29T15:22:18.683985Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.684026Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.684049Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.684079Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.684100Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.684117Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.690455Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.690572Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 
2025-05-29T15:22:18.690611Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.690645Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.690682Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.690714Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.690806Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.690830Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.691040Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691061Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691076Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691090Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691105Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691120Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.691149Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.695195Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.695259Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.695288Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696144Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696180Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696205Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696285Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696300Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696318Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696346Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.696391Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.696409Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.696427Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 
2025-05-29T15:22:18.696556Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.696574Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.696592Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.696650Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.696727Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.696764Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.697279Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.697290Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.697388Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.697406Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.700133Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.700444Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.700469Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.700508Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.701382Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.701615Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.701691Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.701719Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.701776Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.701830Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.701872Z node 8 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.701884Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.725369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:18.725388Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:18.729254Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:18.729616Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:18.729666Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:18.729825Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:18.730774Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:18.730807Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:18.730861Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:18.730874Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:18.730878Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:18.730891Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:18.730908Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:18.730911Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:18.730914Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:18.730918Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:18.730931Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1 2025-05-29T15:22:18.730936Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes 2025-05-29T15:22:18.762912Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:18.762960Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-05-29T15:22:18.762969Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-05-29T15:22:18.762977Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z nodes=0 expired=0 removed=0 2025-05-29T15:22:18.938306Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:610:2205], Reci ... ker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.005264Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #7 2025-05-29T15:22:22.005268Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:618:2212] 2025-05-29T15:22:22.005270Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.005272Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #7 2025-05-29T15:22:22.016090Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:31: TTxUpdateEpoch Complete 2025-05-29T15:22:22.016116Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Committed] Move to new epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z, approximate epoch start #7.8 2025-05-29T15:22:22.016137Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T07:00:00.025000Z 2025-05-29T15:22:22.016147Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z, approximate epoch start #7.8 nodes=0 expired=0 2025-05-29T15:22:22.016157Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z nodes=0 expired=0 removed=1 2025-05-29T15:22:22.016166Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v7 to update nodes log 2025-05-29T15:22:22.016180Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:22.016189Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:22.016197Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:22.016205Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:22.016213Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:22.016220Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:22.016226Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:22.016234Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:22.036906Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:849:2341], Recipient [1:552:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 
2025-05-29T15:22:22.036936Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:620:2214], Recipient [1:552:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:22.036941Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.036952Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:22.036994Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:851:2343], Recipient [1:552:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:22.037003Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:620:2214], Recipient [1:552:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:22.037006Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.037010Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:22.390165Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435072, Sender [1:552:2184], Recipient [1:552:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2025-05-29T15:22:22.390193Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:256: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2025-05-29T15:22:22.390218Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:20: TTxUpdateEpoch Execute 2025-05-29T15:22:22.390231Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Dirty] Move to new epoch #8.9 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z - 1970-01-01T09:00:00.025000Z, approximate epoch start #8.9 2025-05-29T15:22:22.390239Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #8.9 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z - 1970-01-01T09:00:00.025000Z 2025-05-29T15:22:22.390272Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #8.9 2025-05-29T15:22:22.502325Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:552:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 8 } 2025-05-29T15:22:22.502352Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.502359Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #8 2025-05-29T15:22:22.502416Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:612:2207] 2025-05-29T15:22:22.502421Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.502427Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #8 2025-05-29T15:22:22.502442Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:618:2212] 2025-05-29T15:22:22.502446Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.502451Z node 1 
:NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #8 2025-05-29T15:22:22.502492Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:611:2206] 2025-05-29T15:22:22.502497Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.502502Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #8 2025-05-29T15:22:22.502510Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:613:2208] 2025-05-29T15:22:22.502514Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.502519Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #8 2025-05-29T15:22:22.502526Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:614:2209] 2025-05-29T15:22:22.502531Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.502535Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #8 2025-05-29T15:22:22.502543Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:615:2210] 2025-05-29T15:22:22.502547Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.502552Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #8 2025-05-29T15:22:22.502559Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [2:53:2072], Recipient [1:616:2211] 2025-05-29T15:22:22.502563Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:22.502567Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #8 2025-05-29T15:22:22.513597Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:31: TTxUpdateEpoch Complete 2025-05-29T15:22:22.513626Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Committed] Move to new epoch #8.9 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z - 1970-01-01T09:00:00.025000Z, approximate epoch start #8.9 2025-05-29T15:22:22.513646Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:22.513655Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #8.9 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z - 1970-01-01T09:00:00.025000Z, approximate epoch start #8.9 nodes=0 expired=0 2025-05-29T15:22:22.513667Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##8.9 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z - 1970-01-01T09:00:00.025000Z nodes=0 expired=0 removed=1 2025-05-29T15:22:22.513677Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v7 to update nodes log 2025-05-29T15:22:22.513694Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z - 1970-01-01T09:00:00.025000Z 2025-05-29T15:22:22.513704Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.025000Z 
- 1970-01-01T08:00:00.025000Z - 1970-01-01T09:00:00.025000Z 2025-05-29T15:22:22.513712Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z - 1970-01-01T09:00:00.025000Z 2025-05-29T15:22:22.513721Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z - 1970-01-01T09:00:00.025000Z 2025-05-29T15:22:22.513729Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z - 1970-01-01T09:00:00.025000Z 2025-05-29T15:22:22.513737Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z - 1970-01-01T09:00:00.025000Z 2025-05-29T15:22:22.513745Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z - 1970-01-01T09:00:00.025000Z 2025-05-29T15:22:22.513752Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z - 1970-01-01T09:00:00.025000Z 2025-05-29T15:22:22.513761Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z - 1970-01-01T09:00:00.025000Z >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher [GOOD] Test command err: RandomSeed# 16193483058612204602 2025-05-29T15:21:58.043789Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 5 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-05-29T15:21:58.046547Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-05-29T15:21:58.046565Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 going to send TEvBlock {TabletId# 5 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-05-29T15:21:58.047016Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-05-29T15:21:58.055738Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:21:58.056214Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-05-29T15:22:08.842493Z 5 00h01m06.364964s :BS_LOGCUTTER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 16120 2025-05-29T15:22:13.562345Z 4 00h01m07.541411s :BS_LOGCUTTER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) KEEPER: RetryCutLogEvent: 
limit exceeded; FreeUpToLsn# 16043 2025-05-29T15:22:21.159560Z 6 00h01m09.188869s :BS_LOGCUTTER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 30407 2025-05-29T15:22:23.780029Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-05-29T15:22:23.780061Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:22:23.780070Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-05-29T15:22:23.780073Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-05-29T15:22:23.812991Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} 2025-05-29T15:22:23.813023Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK} |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] Test command err: 2025-05-29T15:22:10.250884Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:10.250916Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:10.254880Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:180:2194] 2025-05-29T15:22:10.255293Z node 1 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-05-29T15:22:10.000000Z 2025-05-29T15:22:10.255309Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:180:2194] Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-1" IncludeFrom: true To: "m0000000003cclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-1" IncludeFrom: true To: "m0000000003uclient-1" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\320\341\213\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\000\030\000\"\000(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-2@\000H\000" StorageChannel: INLINE } 2025-05-29T15:22:10.670997Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:10.671025Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:10.674559Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [2:180:2194] 2025-05-29T15:22:10.675648Z node 2 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-05-29T15:22:10.675664Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [2:180:2194] 2025-05-29T15:22:11.141539Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:11.141589Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:11.146799Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-05-29T15:22:11.146863Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:11.146913Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:179:2193] 2025-05-29T15:22:11.147113Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-05-29T15:22:11.147146Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-05-29T15:22:11.147167Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-05-29T15:22:11.147187Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-05-29T15:22:11.147230Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:621: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-05-29T15:22:11.147240Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-05-29T15:22:11.147245Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:22:11.147251Z node 3 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-05-29T15:22:11.000000Z 2025-05-29T15:22:11.147256Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:55: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-05-29T15:22:11.147262Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [3:179:2193] 2025-05-29T15:22:11.147272Z node 3 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-05-29T15:22:11.147280Z node 3 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:22:11.474591Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src3|5dcbe0d-ec491e3c-c08c4ae0-71ea3ac_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src3 2025-05-29T15:22:11.474668Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:35: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 2025-05-29T15:22:11.474791Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src4|15c5cf54-df718efa-93834327-25ba2cdc_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src4 2025-05-29T15:22:11.474813Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:35: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 Create immediate tx with id = 3 and act no: 4 Create immediate tx with id = 6 and act no: 7 2025-05-29T15:22:13.728667Z node 3 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 Create distr tx with id = 8 and act no: 9 Create distr tx with id = 10 and act no: 11 2025-05-29T15:22:14.214644Z node 3 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-05-29T15:22:14.214683Z node 3 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 10 2025-05-29T15:22:14.214694Z node 3 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:14.214701Z node 3 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:14.969685Z node 3 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:16.204114Z node 3 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:16.204168Z node 3 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 17 Wait batch completion 2025-05-29T15:22:16.204194Z node 3 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 10 Wait kv request 2025-05-29T15:22:16.448130Z node 3 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-05-29T15:22:16.448153Z node 3 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-05-29T15:22:16.448167Z node 3 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:16.448176Z node 3 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-05-29T15:22:16.448197Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:138: [PQ: 72057594037927937, Partition: 0, State: StateIdle] 
TPartition::DropOwner. 2025-05-29T15:22:16.448205Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:138: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-05-29T15:22:16.448227Z node 3 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-05-29T15:22:16.448233Z node 3 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:16.448238Z node 3 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-05-29T15:22:16.450661Z node 3 :PERSQUEUE DEBUG: partition.cpp:3322: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-05-29T15:22:16.450704Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:1126: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src4'. Message seqNo: 7. Committed seqNo: (NULL). Writing seqNo: 7. EndOffset: 50. CurOffset: 50. Offset: 50 2025-05-29T15:22:16.450725Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 8 partNo 0 2025-05-29T15:22:16.450767Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 8 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 84 count 1 nextOffset 52 batches 1 2025-05-29T15:22:16.450777Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 9 partNo 0 2025-05-29T15:22:16.450781Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceI ... 000_00000000000000000000_00000_0000000050_00000 2025-05-29T15:22:20.667055Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-05-29T15:22:20.667058Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:22:20.667063Z node 5 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-05-29T15:22:20.000000Z 2025-05-29T15:22:20.667066Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:55: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 
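The "Already written message" entry above shows PersQueue's per-SourceId sequence-number deduplication: seqNo 7 from owner src4 is acknowledged without a second append, while seqNo 8 and 9 go on to be written. A minimal Python sketch of that idea (the state dict and the function name try_write are illustrative, not YDB's implementation):

last_seqno = {"src4": 7}  # state after seqNo 7 was committed for src4

def try_write(source_id, seqno):
    # A write whose seqNo does not exceed the last seqNo recorded for this
    # SourceId is treated as a duplicate and acknowledged without appending.
    prev = last_seqno.get(source_id, 0)
    if seqno <= prev:
        return "already written, seqNo %d" % seqno
    last_seqno[source_id] = seqno
    return "appended seqNo %d" % seqno

print(try_write("src4", 7))  # duplicate, matches the log entry above
print(try_write("src4", 8))  # appended, as in "part blob processing ... seqNo 8"
print(try_write("src4", 9))  # appended, as in "... seqNo 9"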
2025-05-29T15:22:20.667070Z node 5 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:177:2191] 2025-05-29T15:22:20.667076Z node 5 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-05-29T15:22:20.667084Z node 5 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:22:20.667100Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:20.667105Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:821: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-05-29T15:22:20.667109Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-05-29T15:22:20.667160Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:736: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-05-29T15:22:20.667168Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:936: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-05-29T15:22:20.667172Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:960: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request. 
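The read entries above are window bookkeeping: the client names an offset plus count and size caps, and the partition answers with the blobs that cover the window up to endOffset ("read cookie 0 ... offset 0 count 1 size 1024000 endOffset 50" is answered with "added 1 blobs, size 684 count 50"). A rough Python sketch of that selection; the Blob structure and plan_read are assumed toy shapes, not the actual partition code:

from dataclasses import dataclass

@dataclass
class Blob:
    first_offset: int  # offset of the first message stored in the blob
    count: int         # number of messages in the blob
    size: int          # blob size in bytes

def plan_read(blobs, offset, max_size, end_offset):
    # Pick blobs intersecting [offset, end_offset), stopping at the size cap;
    # the caller later trims the result to the exact message range.
    picked, total = [], 0
    for b in blobs:
        if b.first_offset + b.count <= offset:
            continue  # blob lies entirely before the requested window
        if b.first_offset >= end_offset or total >= max_size:
            break
        picked.append(b)
        total += b.size
    return picked, total

# Mirrors the log: a single head blob holding offsets 0..49 in 684 bytes.
print(plan_read([Blob(0, 50, 684)], offset=0, max_size=1024000, end_offset=50))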
Create distr tx with id = 0 and act no: 1 2025-05-29T15:22:21.987764Z node 5 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-05-29T15:22:21.987803Z node 5 :PERSQUEUE DEBUG: partition.cpp:1036: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 1, TxId 3 Wait batch completion 2025-05-29T15:22:23.269009Z node 5 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:23.269044Z node 5 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-05-29T15:22:23.269049Z node 5 :PERSQUEUE DEBUG: partition.cpp:3322: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-05-29T15:22:23.269074Z node 5 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:23.269078Z node 5 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:23.269081Z node 5 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:23.269083Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:23.269086Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-05-29T15:22:23.269088Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-05-29T15:22:23.269090Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-05-29T15:22:23.269092Z node 5 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:23.269095Z node 5 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 2 Got KV request Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 2 2025-05-29T15:22:24.489536Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Got batch complete: 1 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 2 Wait batch completion 2025-05-29T15:22:24.489633Z node 5 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2025-05-29T15:22:24.489652Z node 5 :PERSQUEUE DEBUG: partition.cpp:3219: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 drop done 2025-05-29T15:22:24.489684Z node 5 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:24.489687Z node 5 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:24.489690Z node 5 :PERSQUEUE 
DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000cclient-1, m0000000000cclient-1] 2025-05-29T15:22:24.489693Z node 5 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000uclient-1, m0000000000uclient-1] 2025-05-29T15:22:24.489696Z node 5 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:24.489699Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:24.489701Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-05-29T15:22:24.489703Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-05-29T15:22:24.489705Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-05-29T15:22:24.489707Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-05-29T15:22:24.489709Z node 5 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:24.489712Z node 5 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 2025-05-29T15:22:24.510130Z node 5 :PERSQUEUE DEBUG: event_helpers.cpp:40: tablet 72057594037927937 topic 'Root/PQ/rt3.dc1--account--topic' partition 0 error: cannot finish read request. Consumer client-1 is gone from partition 2025-05-29T15:22:24.510180Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:24.510197Z node 5 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-05-29T15:22:24.510202Z node 5 :PERSQUEUE DEBUG: partition.cpp:3322: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-05-29T15:22:24.510226Z node 5 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:24.510229Z node 5 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:24.510232Z node 5 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:24.510235Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:24.510239Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-05-29T15:22:24.510241Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-05-29T15:22:24.510244Z node 5 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:24.510246Z node 5 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, 
Partition: 0, State: StateIdle] =========================== Got KV request 2025-05-29T15:22:24.510401Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:24.510407Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:821: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 send read request for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 Got KV request Got batch complete: 1 Got KV request Got KV request Got KV request 2025-05-29T15:22:24.510532Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:736: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 1 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset 5 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 5 2025-05-29T15:22:24.510543Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:936: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 1 added 1 blobs, size 0 count 45 last offset 6, current partition end offset: 50 2025-05-29T15:22:24.510546Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:960: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 1. Send blob request. Got KV request Got KV request Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 4 2025-05-29T15:22:24.510600Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 4 |60.0%| [TA] $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-false [GOOD] >> GracefulShutdown::TTxGracefulShutdown >> TSchemeShardSysViewTestReboots::DropSysViewWithReboots |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> KqpOlap::PredicatePushdownCastErrors [GOOD] |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveExpired [GOOD] >> TNodeBrokerTest::NodesMigration1000Nodes [GOOD] >> TPartitionTests::ConflictingCommitsInSeveralBatches [GOOD] >> TPartitionTests::ConflictingCommitFails |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> TNodeBrokerTest::BasicFunctionality [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdownCastErrors [GOOD] Test command err: Trying to start YDB, gRPC: 13183, MsgBus: 63338 2025-05-29T15:21:33.687851Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888159190899824:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.688103Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025ca/r3tmp/tmpthv8tk/pdisk_1.dat 2025-05-29T15:21:33.891554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.891579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.900956Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.901231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13183, node 1 2025-05-29T15:21:33.939017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.939025Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.939027Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.939059Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63338 TClient is connected to server localhost:63338 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.163566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.166565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.183751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.208572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163485867785:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.208613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163485867785:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.208664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163485867785:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.208686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163485867785:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.208706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163485867785:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.208729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163485867785:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.208748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:7509888163485867785:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.208774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163485867785:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.208793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163485867785:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.208813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163485867785:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.208832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163485867785:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.208852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163485867785:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.230347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163485867786:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.230380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163485867786:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.230446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163485867786:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.230477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163485867786:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.230503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163485867786:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.230526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163485867786:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.230543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888163485867786:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.230567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163485867786:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.230585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163485867786:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.230602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163485867786:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.230619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163485867786:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.230637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163485867786:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.234637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163485867787:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.234649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163485867787:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.234682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163485867787:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.234701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163485867787:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.234719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163485867787:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.235053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:750988816 ... lue AS Int32; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-05-29T15:22:24.791594Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888377156065100:8740], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At lambda, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Int32 '==' Optional 2025-05-29T15:22:24.791646Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=YjgxZjYwLWMyYzI0YmJjLTZkMmFiMGZlLWEwZTVlMzZk, ActorId: [2:7509888377156065098:8314], ActorState: ExecuteState, TraceId: 01jwea6amn21rf09h8r20g520r, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Int64; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-05-29T15:22:24.794116Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888377156065112:8824], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At lambda, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Int64 '==' Optional 2025-05-29T15:22:24.794158Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=ZmJlNzRhY2MtYzUwMjQyYmUtM2ZiOTI1MGUtMWZiMWI5Y2M=, ActorId: [2:7509888377156065110:8688], ActorState: ExecuteState, TraceId: 01jwea6amrb1r1306snn0718yf, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS UInt8; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-05-29T15:22:24.796756Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888377156065124:8966], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At lambda, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Uint8 '==' Optional 2025-05-29T15:22:24.796805Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=YTRlNWU5OWQtODQ3NmEzMGUtZWY1ZWUzNjEtYjdhMmIzOTU=, ActorId: [2:7509888377156065122:8775], ActorState: ExecuteState, TraceId: 01jwea6amt4efjbnw4j0x808wt, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS UInt16; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-05-29T15:22:24.799185Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888377156065136:8934], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At lambda, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Uint16 '==' Optional 2025-05-29T15:22:24.799235Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=NWNkNTYwYi00ZmIyOTUwNC0zZTY3OGVmMS04ZDRkY2E4NA==, ActorId: [2:7509888377156065134:8733], ActorState: ExecuteState, TraceId: 01jwea6amx2g3r0tmzye2q3q9q, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS UInt32; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-05-29T15:22:24.801590Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888377156065148:9626], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At lambda, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Uint32 '==' Optional 2025-05-29T15:22:24.801651Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=NjYxNjU1MDctY2I2YjNkMDgtOWVkMjhlLWM5ZTk2ZWM5, ActorId: [2:7509888377156065146:9056], ActorState: ExecuteState, TraceId: 01jwea6amzff018e56jwm0qqpk, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS UInt64; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-05-29T15:22:24.804278Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888377156065160:8419], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At lambda, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Uint64 '==' Optional 2025-05-29T15:22:24.804324Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=Y2NhOTM3YzctMTMwZTRmNzUtNTkyNmRmZjItZDhiOTBjZDQ=, ActorId: [2:7509888377156065158:8339], ActorState: ExecuteState, TraceId: 01jwea6an2ct7qkd3mxvhe079t, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Double; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-05-29T15:22:24.806649Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888377156065172:9164], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At lambda, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Double '==' Optional 2025-05-29T15:22:24.806698Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=YTQ0Y2Y5MzUtZDA1NzM5ODktMzdmNTMyNjMtYWNjNjZkNDM=, ActorId: [2:7509888377156065170:9291], ActorState: ExecuteState, TraceId: 01jwea6an437xm9f2h43tzdz45, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Float; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-05-29T15:22:24.809003Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888377156065184:9406], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At lambda, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Float '==' Optional 2025-05-29T15:22:24.809069Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=NzcxOTdhZmUtYTBiMzdkZmYtMTY5NjcyYTAtZDI2OTRlOGE=, ActorId: [2:7509888377156065182:9285], ActorState: ExecuteState, TraceId: 01jwea6an7a23a3f7knxktnkkh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS String; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-05-29T15:22:24.811216Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888377156065196:9393], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At lambda, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: String '==' Optional 2025-05-29T15:22:24.811260Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=YzJkZjJiZDAtMzk5OWMzMGYtY2JjZDdjZjItMTQzNmY2ZTM=, ActorId: [2:7509888377156065194:9641], ActorState: ExecuteState, TraceId: 01jwea6an9b2jc9hfb4h3y624k, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Utf8; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-05-29T15:22:24.813613Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509888377156065208:9446], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At lambda, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Utf8 '==' Optional 2025-05-29T15:22:24.813655Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=M2EzMDQzZDgtYjhmNWYwYWMtMmM1MmQ2NzQtOWE4NWEwMGI=, ActorId: [2:7509888377156065206:9391], ActorState: ExecuteState, TraceId: 01jwea6anb5tp2w840jhcbmx3b, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Timestamp; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-05-29T15:22:24.839044Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532144800, txId: 18446744073709551615] shutting down Test query: --!syntax_v1 DECLARE $in_value AS Date; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-05-29T15:22:24.888164Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532144877, txId: 18446744073709551615] shutting down Test query: --!syntax_v1 DECLARE $in_value AS Datetime; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2025-05-29T15:22:24.935920Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532144926, txId: 18446744073709551615] shutting down |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> TNodeBrokerTest::NodesMigration2000Nodes [GOOD] >> TNodeBrokerTest::NodesV2BackMigration [GOOD] |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::BasicFunctionality [GOOD] Test command err: 2025-05-29T15:22:21.205349Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [1:117:2151] Bootstrap 2025-05-29T15:22:21.225420Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [1:117:2151] Become StateWork (SchemeCache [1:123:2157]) 2025-05-29T15:22:21.232879Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:21.234054Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:21.234088Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:22:21.234307Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:21.234401Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:22:21.234467Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:22:21.234471Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:22:21.234488Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} 
TTxInitScheme Execute 2025-05-29T15:22:21.236669Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:22:21.236726Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:22:21.236735Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:22:21.236753Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:22:21.236763Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:22:21.236774Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:22:21.258509Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:22:21.258543Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:22:21.269228Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:22:21.269274Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:22:21.269290Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:22:21.269301Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:22:21.269329Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:22:21.269339Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:22:21.269346Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:22:21.269354Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:22:21.280140Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:22:21.280194Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:22:21.291004Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:22:21.291065Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:22:21.291277Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:22:21.291285Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:22:21.292812Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:22:21.292829Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:22:21.293014Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-05-29T15:22:21.293211Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ciyv/00251d/r3tmp/tmpLee2Px/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2025-05-29T15:22:21.293256Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/ciyv/00251d/r3tmp/tmpLee2Px/pdisk_1.dat 2025-05-29T15:22:21.293393Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-05-29T15:22:21.293416Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-05-29T15:22:21.293426Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-05-29T15:22:21.293448Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-05-29T15:22:21.293466Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-05-29T15:22:21.293793Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-05-29T15:22:21.293823Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-05-29T15:22:21.304596Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-05-29T15:22:21.304701Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-05-29T15:22:21.304747Z node 1 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:22:21.304753Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 
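Looking back at the KqpOlap::PredicatePushdownCastErrors dump earlier in this section: every numeric and string parameter type fails type annotation when compared against the Optional Datetime column, while the Timestamp, Date and Datetime parameters compile and execute (the discarded snapshots after those three queries show they ran). A compact Python restatement of that observed outcome; COMPARABLE_WITH_DATETIME simply summarizes the log, it is not YDB's type-checking rule set:

# Parameter types the log shows compiling against Datetime_column.
COMPARABLE_WITH_DATETIME = {"Date", "Datetime", "Timestamp"}

FAILING = ["Int32", "Int64", "UInt8", "UInt16", "UInt32", "UInt64",
           "Double", "Float", "String", "Utf8"]

for t in FAILING + sorted(COMPARABLE_WITH_DATETIME):
    verdict = "OK" if t in COMPARABLE_WITH_DATETIME else "GENERIC_ERROR"
    print("DECLARE $in_value AS %s: %s" % (t, verdict))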
2025-05-29T15:22:21.304768Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:22:21.304780Z node 1 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:22:21.304855Z node 1 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:22:21.304861Z node 1 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:22:21.304864Z node 1 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:22:21.304875Z node 1 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:331:2301] 2025-05-29T15:22:21.305156Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:22:21.305162Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:325:2297] 2025-05-29T15:22:21.305268Z node 1 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:331:2301]} 2025-05-29T15:22:21.305277Z node 1 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:22:21.305283Z node 1 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:22:21.305285Z node 1 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:22:21.321028Z node 1 :LOCAL DEBUG: local.cpp:1207: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2025-05-29T15:22:21.321046Z node 1 :LOCAL DEBUG: local.cpp:1066: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2025-05-29T15:22:21.321075Z node 1 :LOCAL DEBUG: local.cpp:1066: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-2 to schemeshard 72057594046578944 2025-05-29T15:22:21.325796Z node 1 :LOCAL DEBUG: local.cpp:1234: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2025-05-29T15:22:21.325822Z node 1 :LOCAL DEBUG: local.cpp:1238: TDomainLocal(dc-1): Missing task for /dc-1/users/tenant-1 2025-05-29T15:22:21.325862Z node 1 :LOCAL DEBUG: local.cpp:1234: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-2" PathDescription { Self { Name: "/dc-1/users/tenant-2" PathId: 101 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 101 DomainKey { SchemeShard: 72057594046578944 PathId: 101 } } } 2025-05-29T15:22:21.325876Z node 1 :LOCAL DEBUG: local.cpp:1172: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-2 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:22:21.325954Z node 1 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:22:21.325957Z node 1 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:22:21.325969Z node 1 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, 
pipe:[1:414:2358] 2025-05-29T15:22:21.326096Z node 1 :LOCAL DEBU ... 6, Sender [2:621:2213], Recipient [2:694:2251]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:23.548544Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.548548Z node 2 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.13 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z 2025-05-29T15:22:23.548602Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [2:871:2365], Recipient [2:694:2251]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.548610Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [2:621:2213], Recipient [2:694:2251]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:23.548613Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.548617Z node 2 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.13 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z 2025-05-29T15:22:23.748613Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435072, Sender [2:694:2251], Recipient [2:694:2251]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2025-05-29T15:22:23.748646Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:256: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2025-05-29T15:22:23.748667Z node 2 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:20: TTxUpdateEpoch Execute 2025-05-29T15:22:23.748675Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:516: [Dirty] Node #1024.v12 host1:1001 has expired 2025-05-29T15:22:23.748683Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:516: [Dirty] Node #1025.v11 host4:1001 has expired 2025-05-29T15:22:23.748686Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:530: [Dirty] Remove node #1026.v13 host3:1001 2025-05-29T15:22:23.748698Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:530: [Dirty] Remove node #1027.v13 host1:1001 2025-05-29T15:22:23.748705Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:542: [Dirty] Move to new epoch #6.14 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z, approximate epoch start #6.14 2025-05-29T15:22:23.748726Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v14 host1:1001 to database state=Expired resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=4 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:23.748780Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v14 host4:1001 to database state=Expired resolvehost=host4.yandex.net address=1.2.3.7 dc=1 location=DC=1/M=2/R=3/U=7/ lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-05-29T15:22:23.748795Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:830: [DB] Removing node #1026.v14 from database 2025-05-29T15:22:23.748801Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:830: [DB] Removing node #1027.v14 from database 2025-05-29T15:22:23.748807Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #6.14 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 
1970-01-01T07:00:00.024000Z 2025-05-29T15:22:23.748817Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #6.14 2025-05-29T15:22:23.830502Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [2:25:2072], Recipient [2:694:2251]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 6 } 2025-05-29T15:22:23.830525Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.830532Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #6 2025-05-29T15:22:23.830610Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:165:2072], Recipient [2:702:2254] 2025-05-29T15:22:23.830613Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.830617Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #6 2025-05-29T15:22:23.830625Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:109:2072], Recipient [2:706:2258] 2025-05-29T15:22:23.830627Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.830630Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #6 2025-05-29T15:22:23.830634Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [9:221:2072], Recipient [2:708:2260] 2025-05-29T15:22:23.830636Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.830639Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #6 2025-05-29T15:22:23.830643Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:193:2072], Recipient [2:709:2261] 2025-05-29T15:22:23.830645Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.830648Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #6 2025-05-29T15:22:23.830652Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [3:53:2072], Recipient [2:802:2324] 2025-05-29T15:22:23.830654Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.830657Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #6 2025-05-29T15:22:23.830661Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:137:2072], Recipient [2:804:2326] 2025-05-29T15:22:23.830663Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.830665Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #6 2025-05-29T15:22:23.830670Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:81:2072], Recipient [2:803:2325] 2025-05-29T15:22:23.830673Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.830675Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #6 2025-05-29T15:22:23.841554Z 
node 2 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:31: TTxUpdateEpoch Complete 2025-05-29T15:22:23.841577Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:516: [Committed] Node #1024.v12 host1:1001 has expired 2025-05-29T15:22:23.841587Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:516: [Committed] Node #1025.v11 host4:1001 has expired 2025-05-29T15:22:23.841593Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:530: [Committed] Remove node #1026.v13 host3:1001 2025-05-29T15:22:23.841606Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:530: [Committed] Remove node #1027.v13 host1:1001 2025-05-29T15:22:23.841613Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:542: [Committed] Move to new epoch #6.14 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z, approximate epoch start #6.14 2025-05-29T15:22:23.841628Z node 2 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T06:00:00.024000Z 2025-05-29T15:22:23.841634Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #6.14 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z, approximate epoch start #6.14 nodes=0 expired=2 2025-05-29T15:22:23.841657Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##6.14 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z nodes=0 expired=2 removed=2 2025-05-29T15:22:23.841662Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v14 to update nodes log 2025-05-29T15:22:23.841668Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v14 to update nodes log 2025-05-29T15:22:23.841670Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1026.v14 to update nodes log 2025-05-29T15:22:23.841673Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1027.v14 to update nodes log 2025-05-29T15:22:23.841681Z node 2 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.14 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2025-05-29T15:22:23.841687Z node 2 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.14 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2025-05-29T15:22:23.841693Z node 2 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.14 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2025-05-29T15:22:23.841701Z node 2 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.14 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2025-05-29T15:22:23.841709Z node 2 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.14 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2025-05-29T15:22:23.841716Z node 2 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.14 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2025-05-29T15:22:23.841723Z node 2 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.14 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2025-05-29T15:22:23.841731Z node 2 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.14 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2025-05-29T15:22:23.862591Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: 
StateWork, received event# 269877761, Sender [2:894:2376], Recipient [2:694:2251]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.862633Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [2:621:2213], Recipient [2:694:2251]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:23.862640Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.862652Z node 2 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.14 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2025-05-29T15:22:23.862715Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [2:896:2378], Recipient [2:694:2251]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.862723Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [2:621:2213], Recipient [2:694:2251]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:23.862726Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.862732Z node 2 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.14 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z
>> TNodeBrokerTest::EpochCacheUpdate [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveExpired [GOOD]
Test command err:
2025-05-29T15:22:22.386963Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.387027Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.387062Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.387100Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.387134Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.387159Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.394770Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.394900Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.394942Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.394978Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.395013Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.395046Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.395105Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.395130Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.395469Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.395493Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.395507Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.395520Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.395536Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.395553Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.395592Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.399554Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.399619Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.399652Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.400563Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.400601Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.400626Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.400710Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.400725Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.400744Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.400767Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.400815Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.400834Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.400852Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.400869Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.401053Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.401075Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.401141Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.401225Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.401263Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.401963Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.401977Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.401985Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.401995Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.402004Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.402136Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.405734Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.405881Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.405899Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.405952Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.406562Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.406859Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.406897Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.407076Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.407340Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.407377Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.407483Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.407601Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 
18446744073709.551615s } 2025-05-29T15:22:22.408024Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.408195Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.408476Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.408692Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.438837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:22.438863Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:22.444578Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:22.445088Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:22.445152Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:22.445320Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:22.446081Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:22.446107Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:22.446152Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:22.446164Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:22.446168Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:22.446179Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:22.446196Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:22.446199Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:22.446202Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:22.446206Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:22.446219Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330 ... 
erverConnected 2025-05-29T15:22:23.800598Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:23.800601Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.800604Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.11 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:23.800650Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:798:2328], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.800660Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 10 } 2025-05-29T15:22:23.800665Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.800669Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.11 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:23.800703Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:800:2330], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.800712Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:23.800715Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.800718Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.11 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:23.800754Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:802:2332], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.800764Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 9 } 2025-05-29T15:22:23.800766Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.800770Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.11 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:23.800804Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:804:2334], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.800813Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:23.800816Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.800819Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.11 1970-01-01T02:00:00.025000Z 
- 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:23.800853Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:806:2336], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.800863Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 8 } 2025-05-29T15:22:23.800866Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.800869Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.11 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:23.800936Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:808:2338], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.800949Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:626:2214], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 11 SeqNo: 2 } 2025-05-29T15:22:23.800952Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:23.800959Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:626:2214], seqNo: 2, version: 11, server pipe id: [1:808:2338] 2025-05-29T15:22:23.800964Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v11 -> v11 to [1:626:2214] 2025-05-29T15:22:23.801008Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:808:2338], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:23.801012Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:626:2214], seqNo: 2, server pipe id: [1:808:2338] 2025-05-29T15:22:23.801028Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:810:2340], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.801038Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:626:2214], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 10 SeqNo: 3 } 2025-05-29T15:22:23.801040Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:23.801043Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:626:2214], seqNo: 3, version: 10, server pipe id: [1:810:2340] 2025-05-29T15:22:23.801046Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v10 -> v11 to [1:626:2214] 2025-05-29T15:22:23.801077Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:810:2340], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:23.801080Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:626:2214], seqNo: 3, server pipe id: [1:810:2340] 2025-05-29T15:22:23.801096Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:812:2342], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.801105Z node 1 
:NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:626:2214], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 9 SeqNo: 4 } 2025-05-29T15:22:23.801107Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:23.801110Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:626:2214], seqNo: 4, version: 9, server pipe id: [1:812:2342] 2025-05-29T15:22:23.801112Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v9 -> v11 to [1:626:2214] 2025-05-29T15:22:23.801142Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:812:2342], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:23.801145Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:626:2214], seqNo: 4, server pipe id: [1:812:2342] 2025-05-29T15:22:23.801161Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:814:2344], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.801171Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:626:2214], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 8 SeqNo: 5 } 2025-05-29T15:22:23.801173Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:23.801176Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:626:2214], seqNo: 5, version: 8, server pipe id: [1:814:2344] 2025-05-29T15:22:23.801179Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v8 -> v11 to [1:626:2214] 2025-05-29T15:22:23.801212Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:814:2344], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:23.801215Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:626:2214], seqNo: 5, server pipe id: [1:814:2344] 2025-05-29T15:22:23.801233Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:816:2346], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.801247Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:626:2214], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:23.801250Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:23.801271Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 14400025000 Name: "slot-0" } } 2025-05-29T15:22:23.801313Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:818:2348], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.801326Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:626:2214], Recipient [1:744:2282]: 
NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-05-29T15:22:23.801329Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:23.801337Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 14400025000 Name: "slot-1" } } 2025-05-29T15:22:23.801372Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:820:2350], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.801383Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:626:2214], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1026 } 2025-05-29T15:22:23.801386Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:23.801391Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } }
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1000Nodes [GOOD]
Test command err:
2025-05-29T15:22:21.776817Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.776865Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.776889Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.776917Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.776938Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.776955Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.782402Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.782482Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.782515Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.782543Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.782574Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.782601Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.782661Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.782679Z node 1 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.782882Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.782902Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.782916Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.782927Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.782943Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.782956Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.782990Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.786380Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.786413Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.786431Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.787339Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.787368Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.787392Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.787452Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.787466Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.787480Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.787496Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.787510Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:21.787640Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.787659Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.787672Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.787694Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.787716Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.787788Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.787851Z node 2 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.787883Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.788431Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.788449Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.788455Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.788461Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.788466Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.788472Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.788478Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.792021Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.792113Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.792132Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.792156Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.792163Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.792193Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.792211Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.792977Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.793290Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.793463Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.793575Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.793671Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.793808Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.793826Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.793872Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.794040Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.794367Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:21.814578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:21.814594Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:21.818302Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:21.818595Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:21.818633Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:21.818806Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:21.819578Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:21.819601Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:21.819648Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:21.819657Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:21.819660Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:21.819670Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:21.819687Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:21.819691Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:21.819693Z node 1 :NODE_BROKER DEBU ... 
e 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1598.v504 to update nodes log 2025-05-29T15:22:23.793930Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1597.v504 to update nodes log 2025-05-29T15:22:23.793933Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1596.v504 to update nodes log 2025-05-29T15:22:23.793936Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1595.v504 to update nodes log 2025-05-29T15:22:23.793940Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1594.v504 to update nodes log 2025-05-29T15:22:23.793943Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1593.v504 to update nodes log 2025-05-29T15:22:23.793946Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1592.v504 to update nodes log 2025-05-29T15:22:23.793949Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1591.v504 to update nodes log 2025-05-29T15:22:23.793952Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1590.v504 to update nodes log 2025-05-29T15:22:23.793956Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1589.v504 to update nodes log 2025-05-29T15:22:23.793959Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1588.v504 to update nodes log 2025-05-29T15:22:23.793962Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1587.v504 to update nodes log 2025-05-29T15:22:23.793965Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1586.v504 to update nodes log 2025-05-29T15:22:23.793968Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1585.v504 to update nodes log 2025-05-29T15:22:23.793972Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1584.v504 to update nodes log 2025-05-29T15:22:23.793975Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1583.v504 to update nodes log 2025-05-29T15:22:23.793978Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1582.v504 to update nodes log 2025-05-29T15:22:23.793981Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1581.v504 to update nodes log 2025-05-29T15:22:23.793985Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1580.v504 to update nodes log 2025-05-29T15:22:23.793988Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1579.v504 to update nodes log 2025-05-29T15:22:23.793991Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1578.v504 to update nodes log 2025-05-29T15:22:23.793994Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1577.v504 to update nodes log 2025-05-29T15:22:23.793998Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1576.v504 to update nodes log 2025-05-29T15:22:23.794004Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1575.v504 to update nodes log 2025-05-29T15:22:23.794009Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1574.v504 to update nodes log 2025-05-29T15:22:23.794013Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1573.v504 to update nodes log 2025-05-29T15:22:23.794019Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1572.v504 to update nodes log 2025-05-29T15:22:23.794024Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1571.v504 to update nodes log 2025-05-29T15:22:23.794029Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1570.v504 to update nodes log 2025-05-29T15:22:23.794034Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1569.v504 to update nodes log 2025-05-29T15:22:23.794039Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1568.v504 to update 
nodes log 2025-05-29T15:22:23.794043Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1567.v504 to update nodes log 2025-05-29T15:22:23.794048Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1566.v504 to update nodes log 2025-05-29T15:22:23.794053Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1565.v504 to update nodes log 2025-05-29T15:22:23.794057Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1564.v504 to update nodes log 2025-05-29T15:22:23.794063Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1563.v504 to update nodes log 2025-05-29T15:22:23.794068Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1562.v504 to update nodes log 2025-05-29T15:22:23.794073Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1561.v504 to update nodes log 2025-05-29T15:22:23.794078Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1560.v504 to update nodes log 2025-05-29T15:22:23.794083Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1559.v504 to update nodes log 2025-05-29T15:22:23.794089Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1558.v504 to update nodes log 2025-05-29T15:22:23.794095Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1557.v504 to update nodes log 2025-05-29T15:22:23.794101Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1556.v504 to update nodes log 2025-05-29T15:22:23.794106Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1555.v504 to update nodes log 2025-05-29T15:22:23.794112Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1554.v504 to update nodes log 2025-05-29T15:22:23.794117Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1553.v504 to update nodes log 2025-05-29T15:22:23.794123Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1552.v504 to update nodes log 2025-05-29T15:22:23.794129Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1551.v504 to update nodes log 2025-05-29T15:22:23.794134Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1550.v504 to update nodes log 2025-05-29T15:22:23.794140Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1549.v504 to update nodes log 2025-05-29T15:22:23.794145Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1548.v504 to update nodes log 2025-05-29T15:22:23.794149Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1547.v504 to update nodes log 2025-05-29T15:22:23.794154Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1546.v504 to update nodes log 2025-05-29T15:22:23.794160Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1545.v504 to update nodes log 2025-05-29T15:22:23.794165Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1544.v504 to update nodes log 2025-05-29T15:22:23.794170Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1543.v504 to update nodes log 2025-05-29T15:22:23.794175Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1542.v504 to update nodes log 2025-05-29T15:22:23.794180Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1541.v504 to update nodes log 2025-05-29T15:22:23.794187Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1540.v504 to update nodes log 2025-05-29T15:22:23.794193Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1537.v504 to update nodes log 2025-05-29T15:22:23.794198Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1787.v504 to update nodes log 2025-05-29T15:22:23.794333Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2780:3806], Recipient [1:2770:3800]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.794398Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2781:3807] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:23.794417Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2781:3807], Recipient [1:2770:3800]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.794443Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2780:3806] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:23.794513Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:2780:3806] 2025-05-29T15:22:23.794517Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.794524Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:23.794530Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [2:53:2072], Recipient [1:2781:3807] 2025-05-29T15:22:23.794532Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.794536Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:23.797190Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2834:3855], Recipient [1:2770:3800]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.797211Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:2770:3800]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:23.797214Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.797220Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:23.798403Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2836:3857], Recipient [1:2770:3800]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.798420Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:2770:3800]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:23.798423Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.798428Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:23.800361Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2838:3859], Recipient [1:2770:3800]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.800381Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:2770:3800]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:23.800387Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.800392Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:23.801284Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:2840:3861], Recipient [1:2770:3800]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:23.801302Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:2770:3800]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 503 } 2025-05-29T15:22:23.801305Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:23.801309Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigration [GOOD]
Test command err:
2025-05-29T15:22:22.283935Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.283987Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.284014Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.284046Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.284075Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.284097Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.289882Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.290007Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.290050Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.290088Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.290127Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.290162Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.290221Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.290246Z node 1 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.290459Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.290480Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.290493Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.290506Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.290520Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.290534Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.290565Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.294131Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.294194Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.294227Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.295143Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.295182Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.295209Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.295293Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.295310Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.295327Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.295348Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.295365Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.295538Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.295561Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.295578Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.295605Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.295624Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.295692Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.295772Z node 2 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.295809Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.296508Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.296529Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.296537Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.296544Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.296550Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.296557Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.296564Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.299810Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.299833Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.299864Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.299882Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.299924Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.299948Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.299959Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.300787Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.301010Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.301132Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.301267Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.301505Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.301636Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.301888Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.302084Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.302221Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.302267Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.302359Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.302582Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.302652Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.303024Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.332561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:22.332590Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:22.337724Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:22.338187Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:22.338253Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:22.338470Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:22.339363Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:22.339394Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:22.339455Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:22.339473Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Start ... 
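The report's own ellipsis above truncates the middle of this test's output. When it resumes below, the traffic of interest is the node-list subscription protocol: each NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest carries a CachedVersion and a SeqNo, and the broker replies with "Send TEvUpdateNodes vFROM -> vTO", where FROM is the subscriber's cached version and TO is the current epoch version (v11 in this run), so a subscriber only receives the delta it is missing. A minimal offline check of that pairing, written against the literal text format of this report (the regexes and the script are illustrative log-analysis helpers, not YDB code):

import re
import sys

# Both shapes are copied verbatim from this report; they are assumptions
# about the log text only, not about any YDB API.
EVENT = re.compile(
    r"TEvSubscribeNodesRequest \{ CachedVersion: (\d+) SeqNo: (\d+) \}"
    r"|Send TEvUpdateNodes v(\d+) -> v(\d+)"
)

def check(stream):
    pending = None  # CachedVersion of the most recent subscribe request
    for line in stream:
        # finditer preserves event order even when many entries share one
        # physical line, as they do in this run-on report.
        for m in EVENT.finditer(line):
            if m.group(1) is not None:  # a subscribe request
                pending = int(m.group(1))
            elif pending is not None:   # the update answering it
                frm, to = int(m.group(3)), int(m.group(4))
                # The delta must start at the cached version and move forward.
                assert frm == pending and to >= frm, (pending, frm, to)
                pending = None

if __name__ == "__main__":
    check(sys.stdin)

Fed the full report on stdin (python3 check_updates.py < report.txt), it exits silently: the v11 -> v11, v10 -> v11 and v9 -> v11 exchanges below all satisfy the invariant.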
nt# 272039936, Sender [1:632:2214], Recipient [1:864:2364]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:24.072859Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.072866Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.11 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:24.073342Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:905:2395], Recipient [1:864:2364]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.073359Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:632:2214], Recipient [1:864:2364]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:24.073362Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.073368Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.11 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:24.073414Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:907:2397], Recipient [1:864:2364]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.073424Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:632:2214], Recipient [1:864:2364]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:24.073427Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.073430Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.11 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:24.073468Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:909:2399], Recipient [1:864:2364]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.073482Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:632:2214], Recipient [1:864:2364]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 11 } 2025-05-29T15:22:24.073484Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.073490Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.11 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:24.073526Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:911:2401], Recipient [1:864:2364]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.073536Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:632:2214], Recipient [1:864:2364]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:24.073539Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.073542Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.11 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:24.073608Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:913:2403], Recipient [1:864:2364]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.073620Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:632:2214], Recipient [1:864:2364]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 10 } 2025-05-29T15:22:24.073623Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.073626Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.11 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:24.073664Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:915:2405], Recipient [1:864:2364]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.073674Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:632:2214], Recipient [1:864:2364]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:24.073677Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.073680Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.11 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:24.073716Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:917:2407], Recipient [1:864:2364]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.073747Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:632:2214], Recipient [1:864:2364]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 9 } 2025-05-29T15:22:24.073750Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.073753Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.11 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:24.073792Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:919:2409], Recipient [1:864:2364]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.073805Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:632:2214], Recipient [1:864:2364]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 11 SeqNo: 5 } 2025-05-29T15:22:24.073809Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:24.073813Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:632:2214], seqNo: 5, version: 11, server pipe id: [1:919:2409] 2025-05-29T15:22:24.073819Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v11 -> v11 to [1:632:2214] 2025-05-29T15:22:24.073857Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:919:2409], Recipient [1:864:2364]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:24.073860Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:632:2214], seqNo: 5, server pipe id: [1:919:2409] 2025-05-29T15:22:24.073876Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:921:2411], Recipient [1:864:2364]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.073887Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:632:2214], Recipient [1:864:2364]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 10 SeqNo: 6 } 2025-05-29T15:22:24.073889Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:24.073892Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:632:2214], seqNo: 6, version: 10, server pipe id: [1:921:2411] 2025-05-29T15:22:24.073894Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v10 -> v11 to [1:632:2214] 2025-05-29T15:22:24.073927Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:921:2411], Recipient [1:864:2364]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:24.073930Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:632:2214], seqNo: 6, server pipe id: [1:921:2411] 2025-05-29T15:22:24.073946Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:923:2413], Recipient [1:864:2364]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.073956Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:632:2214], Recipient [1:864:2364]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 9 SeqNo: 7 } 2025-05-29T15:22:24.073959Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:24.073961Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:632:2214], seqNo: 7, version: 9, server pipe id: [1:923:2413] 2025-05-29T15:22:24.073964Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v9 -> v11 to [1:632:2214] 2025-05-29T15:22:24.074008Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:923:2413], Recipient [1:864:2364]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:24.074012Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:632:2214], seqNo: 7, server pipe id: [1:923:2413] 2025-05-29T15:22:24.074028Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:925:2415], Recipient [1:864:2364]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.074041Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:632:2214], Recipient [1:864:2364]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:24.074043Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:24.074062Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18000025000 Name: "slot-0" } } 2025-05-29T15:22:24.074110Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:927:2417], Recipient [1:864:2364]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.074127Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:632:2214], Recipient [1:864:2364]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-05-29T15:22:24.074132Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:24.074140Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2025-05-29T15:22:24.074187Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:929:2419], Recipient [1:864:2364]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.074198Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:632:2214], Recipient [1:864:2364]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1026 } 2025-05-29T15:22:24.074201Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:24.074206Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |60.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration2000Nodes [GOOD] Test command err: 2025-05-29T15:22:19.823950Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.823992Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.824014Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.824037Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.824060Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.824076Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.829142Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.829233Z node 3 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.829267Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.829296Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.829324Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.829351Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.829400Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.829418Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.829629Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.829650Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.829668Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.829685Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.829705Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.829726Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.829768Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.833796Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.833840Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.833864Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.834590Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.834620Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.834641Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.834706Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.834720Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.834759Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.834782Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.834816Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.834833Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.834847Z 
node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.834864Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.834998Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.835013Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.835075Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.835138Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.835172Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.835627Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.835643Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.835652Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.835658Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.835664Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.838343Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.838454Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.838464Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.838489Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.838983Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.839257Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.839497Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.839609Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.839667Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 
2025-05-29T15:22:19.839771Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.839921Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.840686Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.840773Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.840877Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.840939Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.862038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:19.862057Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:19.868400Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:19.868920Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:19.868998Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:19.869248Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:19.870060Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:19.870083Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:19.870142Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 
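Before the broker tablet boots, each test warms up the nameserver mesh: every node's NAMESERVICE answers TEvListNodes and resolves its peers with TEvGetNode, and the recurring Deadline of 18446744073709.551615s is 2^64 - 1 microseconds rendered as seconds, i.e. effectively no deadline. A quick way to see who resolved whom in a report like this one (a sketch that assumes only the literal log format above, nothing about YDB itself):

import re
import sys
from collections import defaultdict

# "node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle
#  NActors::TEvInterconnect::TEvGetNode { NodeId: 1 ..." -> (4, 1)
GET = re.compile(
    r"node (\d+) :NAMESERVICE DEBUG: dynamic_nameserver\.cpp:\d+: "
    r"Handle NActors::TEvInterconnect::TEvGetNode \{ NodeId: (\d+)"
)

def resolve_matrix(stream):
    pairs = defaultdict(set)  # resolver node id -> node ids it looked up
    for line in stream:
        for resolver, target in GET.findall(line):
            pairs[int(resolver)].add(int(target))
    return pairs

if __name__ == "__main__":
    for resolver, targets in sorted(resolve_matrix(sys.stdin).items()):
        print(f"node {resolver} resolved {sorted(targets)}")

On the eight-node clusters in this report, each node has looked up most of its seven peers by the time the "... waiting for nameservers are connected" marker appears.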
2025-05-29T15:22:19.870155Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:19.870159Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:19.870170Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:19.870187Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:19.870190Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:19.870194Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:19.870198Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:19.870212Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1 2025-05-29T15:22:19.870217Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes 2025-05-29T15:22:19.901976Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:19.902012Z node 1 :NODE_BROKER TRACE: node_broke ... 2:24.145669Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2067.v1004 to update nodes log 2025-05-29T15:22:24.145675Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2066.v1004 to update nodes log 2025-05-29T15:22:24.145681Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2065.v1004 to update nodes log 2025-05-29T15:22:24.145688Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2064.v1004 to update nodes log 2025-05-29T15:22:24.145694Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2063.v1004 to update nodes log 2025-05-29T15:22:24.145701Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2062.v1004 to update nodes log 2025-05-29T15:22:24.145707Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2061.v1004 to update nodes log 2025-05-29T15:22:24.145714Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2060.v1004 to update nodes log 2025-05-29T15:22:24.145721Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2059.v1004 to update nodes log 2025-05-29T15:22:24.145727Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2058.v1004 to update nodes log 2025-05-29T15:22:24.145733Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2057.v1004 to update nodes log 2025-05-29T15:22:24.145740Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2056.v1004 to update nodes log 2025-05-29T15:22:24.145746Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2055.v1004 to update nodes log 2025-05-29T15:22:24.145753Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2054.v1004 to update nodes log 2025-05-29T15:22:24.145760Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2053.v1004 to update nodes log 2025-05-29T15:22:24.145768Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2052.v1004 to update nodes log 2025-05-29T15:22:24.145775Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2051.v1004 to update nodes log 2025-05-29T15:22:24.145782Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2050.v1004 
to update nodes log 2025-05-29T15:22:24.145790Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2049.v1004 to update nodes log 2025-05-29T15:22:24.145798Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2048.v1004 to update nodes log 2025-05-29T15:22:24.145805Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2047.v1004 to update nodes log 2025-05-29T15:22:24.145813Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2046.v1004 to update nodes log 2025-05-29T15:22:24.145820Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2045.v1004 to update nodes log 2025-05-29T15:22:24.145828Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2044.v1004 to update nodes log 2025-05-29T15:22:24.145836Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2043.v1004 to update nodes log 2025-05-29T15:22:24.145843Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2042.v1004 to update nodes log 2025-05-29T15:22:24.145866Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2041.v1004 to update nodes log 2025-05-29T15:22:24.145874Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2040.v1004 to update nodes log 2025-05-29T15:22:24.145881Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2039.v1004 to update nodes log 2025-05-29T15:22:24.145889Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2038.v1004 to update nodes log 2025-05-29T15:22:24.145896Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2037.v1004 to update nodes log 2025-05-29T15:22:24.145903Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2036.v1004 to update nodes log 2025-05-29T15:22:24.145909Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2035.v1004 to update nodes log 2025-05-29T15:22:24.145915Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2034.v1004 to update nodes log 2025-05-29T15:22:24.145924Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2033.v1004 to update nodes log 2025-05-29T15:22:24.145931Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2032.v1004 to update nodes log 2025-05-29T15:22:24.145938Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2031.v1004 to update nodes log 2025-05-29T15:22:24.145945Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2030.v1004 to update nodes log 2025-05-29T15:22:24.145951Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2029.v1004 to update nodes log 2025-05-29T15:22:24.145958Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2028.v1004 to update nodes log 2025-05-29T15:22:24.145965Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2027.v1004 to update nodes log 2025-05-29T15:22:24.145971Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2026.v1004 to update nodes log 2025-05-29T15:22:24.145978Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #3023.v1004 to update nodes log 2025-05-29T15:22:24.145985Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2523.v1004 to update nodes log 2025-05-29T15:22:24.146269Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:4844:5371], Recipient [1:4833:5365]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.146359Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:4845:5372], Recipient [1:4833:5365]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.146414Z node 5 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:4844:5371] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:24.146438Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:4846:5373] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:24.146450Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:4846:5373], Recipient [1:4833:5365]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.146458Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:4848:5375] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:24.146484Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:4845:5372] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:24.146499Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:4848:5375], Recipient [1:4833:5365]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.146636Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:4844:5371] 2025-05-29T15:22:24.146642Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.146657Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.1004 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:24.146679Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:4846:5373] 2025-05-29T15:22:24.146684Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.146691Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.1004 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:24.152484Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:4848:5375] 2025-05-29T15:22:24.152512Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.152537Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.1004 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:24.158211Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:4845:5372] 2025-05-29T15:22:24.158239Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.158258Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.1004 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:24.170161Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:4903:5425], Recipient [1:4833:5365]: NKikimr::TEvTabletPipe::TEvServerConnected 
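Every TEvNodesInfo answer stamps the epoch as "#id.version start - end - next end"; in this run the three timestamps are always one hour apart and share a constant 25 ms offset, and the version for a given epoch id only grows within one test's output. A small consistency check over the stamps in this report (again a sketch against the literal text format; the one-hour epoch length is a property of these tests, not a general YDB invariant):

import re
import sys
from datetime import datetime, timedelta

# Either a test separator or an epoch stamp such as
# "epoch #3.1004 1970-01-01T02:00:00.025000Z - ... - ...".
TOKEN = re.compile(
    r"(------- \[TM\])"
    r"|epoch #(\d+)\.(\d+) (\S+) - (\S+) - (\S+)"
)

def parse_ts(s):
    return datetime.strptime(s, "%Y-%m-%dT%H:%M:%S.%fZ")

def check(stream):
    last = {}  # epoch id -> highest version seen in the current test block
    for line in stream:
        for m in TOKEN.finditer(line):
            if m.group(1):
                last.clear()  # a new test's output begins; counters restart
                continue
            eid, ver = int(m.group(2)), int(m.group(3))
            start, end, nxt = (parse_ts(m.group(i)) for i in (4, 5, 6))
            # One-hour epochs, with end and next-end laid out back to back.
            assert end - start == nxt - end == timedelta(hours=1)
            # The epoch version never moves backwards within a test block.
            assert ver >= last.get(eid, 0), (eid, ver, last.get(eid))
            last[eid] = ver

if __name__ == "__main__":
    check(sys.stdin)

For the stamps visible here it confirms that #1.1, #3.504, #3.1004 and #4.11 all describe consecutive one-hour windows.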
2025-05-29T15:22:24.170219Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:624:2214], Recipient [1:4833:5365]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:24.170226Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.170239Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.1004 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:24.176280Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:4905:5427], Recipient [1:4833:5365]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.176348Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:624:2214], Recipient [1:4833:5365]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:24.176356Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.176372Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.1004 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:24.185771Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:4907:5429], Recipient [1:4833:5365]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.185835Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:624:2214], Recipient [1:4833:5365]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:24.185842Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.185855Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.1004 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:24.191704Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:4909:5431], Recipient [1:4833:5365]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.191778Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:624:2214], Recipient [1:4833:5365]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 1003 } 2025-05-29T15:22:24.191785Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.191803Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.1004 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::EpochCacheUpdate [GOOD] Test command err: 2025-05-29T15:22:20.194439Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.194487Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.194511Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.194541Z node 5 
:NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.194566Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.194586Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.200755Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.200887Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.200931Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.200965Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.201005Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.201035Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.201095Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.201118Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.201348Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.201367Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.201382Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.201394Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.201409Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.201422Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.201455Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.205576Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.205635Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.205666Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.206482Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.206518Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.206544Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.206620Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.206636Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 
2025-05-29T15:22:20.206655Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.206681Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.206698Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.206886Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.206911Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.206928Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.206954Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.206974Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.207062Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.207144Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.207181Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.207828Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.207850Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.207858Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.207865Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.207871Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.207878Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.207886Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.211253Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.211347Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.211397Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.211412Z node 8 
:NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.211518Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.212311Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.212390Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.212686Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.212840Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.212884Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.212965Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.213115Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.213367Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.214075Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.214135Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.214171Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.214295Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.214323Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.214367Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.215818Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.216163Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.217171Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.217452Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.238782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to 
console configs 2025-05-29T15:22:20.238806Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:20.242963Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:20.243324Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:20.243373Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:20.243543Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:20.244186Z node 1 :NODE_BROKER DEBUG: node_ ... 53Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:314: [Dirty] Extended lease of #1024.v6 host1:1001 up to Thu, 01 Jan 1970 04:00:00 UTC (lease 3) 2025-05-29T15:22:24.256370Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v6 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=3 expire=Thu, 01 Jan 1970 04:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:24.256428Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 5 to 6 2025-05-29T15:22:24.256434Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=6 2025-05-29T15:22:24.267198Z node 9 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:78: TTxExtendLease Complete 2025-05-29T15:22:24.267259Z node 9 :NODE_BROKER TRACE: node_broker__extend_lease.cpp:82: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1024 Expire: 14400024000 Epoch { Id: 3 Version: 6 Start: 7200024000 End: 10800024000 NextEnd: 14400024000 } } 2025-05-29T15:22:24.267276Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:314: [Committed] Extended lease of #1024.v6 host1:1001 up to Thu, 01 Jan 1970 04:00:00 UTC (lease 3) 2025-05-29T15:22:24.267280Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 5 to 6 2025-05-29T15:22:24.267285Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v6 host1:1001 to epoch cache 2025-05-29T15:22:24.267304Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v6 to update nodes log 2025-05-29T15:22:24.267414Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [9:699:2260], Recipient [9:555:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.267444Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [9:623:2213], Recipient [9:555:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:22:24.267449Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:24.267456Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 
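The tail of this report exercises dynamic node registration: TEvRegistrationRequest names the host, port, resolve host and DC/rack location, and the broker looks up the requested Path ("dc-1") through the scheme cache, the TEvNavigateKeySet exchange that follows. To pull just the registration attempts out of a report like this one (a sketch assuming only the literal text shown above; the regex is not a YDB interface):

import re
import sys

# 'TEvRegistrationRequest { Host: "host2" Port: 1001
#  ResolveHost: "host2.yandex.net" ...' -> ("host2", "1001", "host2.yandex.net")
REG = re.compile(
    r'TEvRegistrationRequest \{ Host: "([^"]+)" Port: (\d+) '
    r'ResolveHost: "([^"]+)"'
)

def registrations(stream):
    for line in stream:
        for host, port, rhost in REG.findall(line):
            yield host, int(port), rhost

if __name__ == "__main__":
    for host, port, rhost in registrations(sys.stdin):
        print(f"{host}:{port} resolves via {rhost}")

On the visible excerpt it prints host2:1001 resolves via host2.yandex.net; the earlier registration of host1 (whose lease #1024 is extended above) sits in the elided part of this test's output.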
2025-05-29T15:22:24.267488Z node 9 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [9:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:24.267514Z node 9 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [9:23:2070], path# /dc-1, domainOwnerId# 72057594046678944 2025-05-29T15:22:24.267735Z node 9 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [9:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-05-29T15:22:24.267767Z node 9 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [9:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [9:701:2261] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 
0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:22:24.267794Z node 9 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [9:23:2070], cacheItem# { Subscriber: { Subscriber: [9:701:2261] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:24.267830Z node 9 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [9:708:2262], recipient# [9:700:2183], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:24.267841Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:24.267850Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:24.267860Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [9:700:2183], Recipient [9:555:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:24.267863Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:24.267876Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:24.267878Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:24.267894Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v7 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 04:00:00 UTC servicedsubdomain=72057594046678944:1 
slotindex=1 authorizedbycertificate=false 2025-05-29T15:22:24.267930Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v7 host2:1001 2025-05-29T15:22:24.267935Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 6 to 7 2025-05-29T15:22:24.267938Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=7 2025-05-29T15:22:24.278900Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:24.278924Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v7 host2:1001 2025-05-29T15:22:24.278933Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 6 to 7 2025-05-29T15:22:24.278937Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v7 host2:1001 to epoch cache 2025-05-29T15:22:24.278960Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v7 to update nodes log 2025-05-29T15:22:24.278998Z node 9 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 14400024000 Name: "slot-1" } 2025-05-29T15:22:24.279109Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [9:712:2266], Recipient [9:555:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:24.279132Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [9:623:2213], Recipient [9:555:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:24.279136Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:24.279146Z node 9 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z
|60.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log}
|60.2%| [TA] {RESULT} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|60.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer
|60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest
|60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest
|60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest
|60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest
|60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest
|60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest
|60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest
|60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest
|60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest
|60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest
>> GracefulShutdown::TTxGracefulShutdown [GOOD]
>> TNodeBrokerTest::NodesMigrationSetLocation
>> TNodeBrokerTest::TestListNodesEpochDeltas
>> TNodeBrokerTest::NodesMigrationNodeName
>> TNodeBrokerTest::FixedNodeId
|60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest
|60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest
|60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest
>> TNodeBrokerTest::NodesMigrationExpiredChanged
>> TNodeBrokerTest::NodesMigrationRemoveExpired
>> TNodeBrokerTest::ShiftIdRangeRemoveReusedID
|60.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest
>> TNodeBrokerTest::NodesMigrationReuseExpiredID
>> TNodeBrokerTest::SeveralNodesSubscribersPerPipe
|60.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> GracefulShutdown::TTxGracefulShutdown [GOOD]
Test command err:
2025-05-29T15:22:22.179444Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.179498Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.179521Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.179552Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.179577Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.179604Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.185434Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.185569Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle
NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.185610Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.185643Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.185682Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.185713Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.185771Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.185795Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.186062Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.186085Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.186100Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.186115Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.186132Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.186148Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.186183Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.189799Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.189847Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.189872Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.190705Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.190767Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.190788Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.190859Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.190875Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.190894Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.190914Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.190953Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.190970Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.190985Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:22.191214Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.191241Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.191267Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.191354Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.191435Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.191472Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.192058Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.192071Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.192194Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.192213Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.195421Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.195508Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.195584Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.196076Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.196653Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.196770Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.196804Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.196850Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.196892Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.196962Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.197932Z node 7 
:NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.198086Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:22.220280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:22.220300Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:22.224677Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:22.225040Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:22.225094Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:22.225266Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:22.225905Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:22.225931Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:22.225976Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:22.225987Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:22.225990Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:22.226001Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:22.226019Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:22.226022Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:22.226025Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:22.226029Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:22.226041Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1 2025-05-29T15:22:22.226045Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes 2025-05-29T15:22:22.257774Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:22.257806Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-05-29T15:22:22.257815Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-05-29T15:22:22.257823Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z nodes=0 expired=0 removed=0 2025-05-29T15:22:22.432841Z node 1 :NODE_BROKER 
TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:611:2205], Reci ... 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:25.300686Z node 9 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [9:634:2219], recipient# [9:626:2183], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:25.300696Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:25.300704Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:25.300714Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [9:626:2183], Recipient [9:553:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:25.300718Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:25.300746Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:25.300751Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:25.300787Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:25.300839Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v2 host1:1001 2025-05-29T15:22:25.300845Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2 2025-05-29T15:22:25.300847Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=2 2025-05-29T15:22:25.311815Z node 9 
:NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:25.311838Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-05-29T15:22:25.311847Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-05-29T15:22:25.311851Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v2 host1:1001 to epoch cache 2025-05-29T15:22:25.311876Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-05-29T15:22:25.311914Z node 9 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-0" } 2025-05-29T15:22:25.312011Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [9:638:2223], Recipient [9:553:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:25.312035Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039948, Sender [9:621:2213], Recipient [9:553:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvGracefulShutdownRequest { NodeId: 1024 } 2025-05-29T15:22:25.312040Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvGracefulShutdownRequest 2025-05-29T15:22:25.312044Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1575: Handle TEvNodeBroker::TEvGracefulShutdownRequest: request# NodeId: 1024 2025-05-29T15:22:25.312057Z node 9 :NODE_BROKER DEBUG: node_broker__graceful_shutdown.cpp:26: TTxGracefulShutdown Execute. Graceful Shutdown request from 1024 2025-05-29T15:22:25.312075Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=(NULL) authorizedbycertificate=false 2025-05-29T15:22:25.323057Z node 9 :NODE_BROKER DEBUG: node_broker__graceful_shutdown.cpp:50: TTxGracefulShutdown Complete 2025-05-29T15:22:25.323194Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [9:642:2227], Recipient [9:553:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:25.323252Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [9:621:2213], Recipient [9:553:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:22:25.323259Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:25.323271Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:22:25.323321Z node 9 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [9:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet 
[{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:25.323349Z node 9 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [9:23:2070], cacheItem# { Subscriber: { Subscriber: [9:627:2218] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:25.323394Z node 9 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [9:644:2228], recipient# [9:643:2183], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:25.323408Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:25.323422Z node 9 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:25.323439Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [9:643:2183], Recipient [9:553:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:25.323447Z node 9 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:25.323461Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:25.323464Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:25.323489Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v3 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 
02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:25.323535Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v3 host2:1001 2025-05-29T15:22:25.323540Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 2 to 3 2025-05-29T15:22:25.323543Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=3 2025-05-29T15:22:25.334332Z node 9 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:25.334351Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v3 host2:1001 2025-05-29T15:22:25.334357Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 2 to 3 2025-05-29T15:22:25.334362Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v3 host2:1001 to epoch cache 2025-05-29T15:22:25.334386Z node 9 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v3 to update nodes log 2025-05-29T15:22:25.334422Z node 9 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200024000 Name: "slot-0" }
>> TSlotIndexesPoolTest::Basic [GOOD]
>> TNodeBrokerTest::SubscribeToNodes
>> TEnumerationTest::TestPublish [GOOD]
>> TLocalTests::TestAddTenant
>> TTransferTests::Create
>> TPartitionTests::DataTxCalcPredicateOrder [GOOD]
>> TTransferTests::Create_Disabled
>> TNodeBrokerTest::NodeNameWithDifferentTenants
>> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges
|60.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Basic [GOOD]
>> TNodeBrokerTest::NoEffectBeforeCommit
>> TNodeBrokerTest::ShiftIdRangeRemoveNew
>> TNodeBrokerTest::ExtendLeaseRestartRace
>> TLocalTests::TestAddTenant [GOOD]
>> TTenantPoolTests::TestStateStatic
>> TTransferTests::Create [GOOD]
>> TTransferTests::CreateSequential
>> TTransferTests::Create_Disabled [GOOD]
>> TTransferTests::CreateWithoutCredentials
|60.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat
|60.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat
|60.3%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat
>> TNodeBrokerTest::Test999NodesSubscribers
>> TTenantPoolTests::TestStateStatic [GOOD]
>> TTransferTests::CreateSequential [GOOD]
>> TTransferTests::CreateInParallel
>> TTransferTests::CreateWithoutCredentials [GOOD]
>> TTransferTests::CreateWrongConfig
|60.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer
|60.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer
|60.3%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::DataTxCalcPredicateOrder [GOOD]
Test command err:
2025-05-29T15:22:14.774508Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:14.774537Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info
2025-05-29T15:22:14.778837Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:180:2194] 2025-05-29T15:22:14.779176Z node 1 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-05-29T15:22:14.000000Z 2025-05-29T15:22:14.779190Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:180:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\360\200\214\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\360\200\214\345\3612" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\264\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\001\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-05-29T15:22:15.413426Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.413446Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:15.416952Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:180:2194] 2025-05-29T15:22:15.417211Z node 2 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-05-29T15:22:15.000000Z 2025-05-29T15:22:15.417219Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:180:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\330\210\214\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-05-29T15:22:15.867578Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.867608Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:15.871566Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-05-29T15:22:15.871642Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:15.871696Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:179:2193] 2025-05-29T15:22:15.871901Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-05-29T15:22:15.871930Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-05-29T15:22:15.871952Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-05-29T15:22:15.871973Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-05-29T15:22:15.872018Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:621: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-05-29T15:22:15.872028Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-05-29T15:22:15.872032Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:22:15.872039Z node 3 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-05-29T15:22:15.000000Z 2025-05-29T15:22:15.872044Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:55: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 
2025-05-29T15:22:15.872051Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [3:179:2193] 2025-05-29T15:22:15.872061Z node 3 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-05-29T15:22:15.872069Z node 3 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:22:17.184322Z node 3 :PERSQUEUE DEBUG: partition.cpp:3267: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client session is set to 0 (startOffset 0) session session 2025-05-29T15:22:17.184372Z node 3 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:17.184377Z node 3 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:17.184380Z node 3 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:17.184383Z node 3 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:17.184386Z node 3 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient 2025-05-29T15:22:17.184388Z node 3 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient 2025-05-29T15:22:17.184390Z node 3 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:17.184393Z node 3 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000(\330\210\214\345\3612" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-05-29T15:22:17.204795Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Create distr tx with id = 0 and act no: 1 2025-05-29T15:22:17.204863Z node 3 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 Wait first predicate result 2025-05-29T15:22:18.466693Z node 3 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Create distr tx with id = 2 and act no: 3 2025-05-29T15:22:18.466760Z node 3 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 Wait 
second predicate result 2025-05-29T15:22:19.694477Z node 3 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:19.694498Z node 3 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-05-29T15:22:19.694504Z node 3 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-05-29T15:22:19.694513Z node 3 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:19.694519Z node 3 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 Got batch complete: 1 2025-05-29T15:22:19.694547Z node 3 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 2 2025-05-29T15:22:19.694551Z node 3 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2 2025-05-29T15:22:19.694554Z node 3 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:19.694556Z node 3 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-05-29T15:22:19.694588Z node 3 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:19.694591Z node 3 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:19.694594Z node 3 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:19.694596Z node 3 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-05-29T15:22:19.694598Z node 3 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:19.694600Z node 3 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:19.694603Z node 3 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: ... :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:179:2193] 2025-05-29T15:22:21.123044Z node 4 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-05-29T15:22:21.123049Z node 4 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:22:22.422240Z node 4 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie SourceId|7ec7be72-297c1dec-82c69cd-655a2051_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner SourceId 2025-05-29T15:22:22.422284Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:35: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Wait write response Wait kv request 2025-05-29T15:22:22.422337Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'SourceId' seqNo 4 partNo 0 2025-05-29T15:22:22.422373Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'SourceId' seqNo 4 partNo 0 FormedBlobsCount 0 NewHead: Offset 11 PartNo 0 PackedSize 118 count 1 nextOffset 12 batches 1 2025-05-29T15:22:22.422411Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1623: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 11,1 HeadOffset 1 endOffset 1 curOffset 12 d0000000000_00000000000000000011_00000_0000000001_00000| size 104 WTime 5132 2025-05-29T15:22:22.422425Z node 4 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:22.422428Z node 4 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:22.422432Z node 4 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-05-29T15:22:22.422434Z node 4 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:22.422437Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000pSourceId 2025-05-29T15:22:22.422440Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000011_00000_0000000001_00000| 2025-05-29T15:22:22.422442Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:22.422445Z node 4 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:22.422447Z node 4 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request 2025-05-29T15:22:22.442817Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:22.442849Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-05-29T15:22:22.442877Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 11 is stored on disk Wait second predicate result Create distr tx with id = 0 and act no: 1 2025-05-29T15:22:22.442961Z node 4 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-05-29T15:22:23.703986Z node 4 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 2025-05-29T15:22:23.766317Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:23.766340Z node 5 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:23.769577Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-05-29T15:22:23.769619Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:23.769657Z node 5 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:177:2191] 2025-05-29T15:22:23.769780Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-05-29T15:22:23.769798Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-05-29T15:22:23.769813Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-05-29T15:22:23.769826Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-05-29T15:22:23.769853Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:621: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-05-29T15:22:23.769861Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-05-29T15:22:23.769863Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:22:23.769867Z node 5 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-05-29T15:22:23.000000Z 2025-05-29T15:22:23.769870Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:55: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 
2025-05-29T15:22:23.769875Z node 5 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:177:2191] 2025-05-29T15:22:23.769882Z node 5 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-05-29T15:22:23.769888Z node 5 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 Create distr tx with id = 0 and act no: 1 2025-05-29T15:22:25.090381Z node 5 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-05-29T15:22:26.393151Z node 5 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Create distr tx with id = 2 and act no: 3 2025-05-29T15:22:26.393229Z node 5 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-05-29T15:22:26.393245Z node 5 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-05-29T15:22:26.393253Z node 5 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-05-29T15:22:26.393262Z node 5 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:26.393270Z node 5 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-05-29T15:22:27.654890Z node 5 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 2025-05-29T15:22:27.654948Z node 5 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 2 2025-05-29T15:22:27.654954Z node 5 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2 2025-05-29T15:22:27.654959Z node 5 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:27.654965Z node 5 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-05-29T15:22:27.655004Z node 5 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:27.655007Z node 5 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:27.655010Z node 5 :PERSQUEUE DEBUG: 
partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:27.655013Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-05-29T15:22:27.655015Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:27.655017Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:27.655020Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-05-29T15:22:27.655022Z node 5 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:27.655025Z node 5 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 Wait tx committed for tx 0 2025-05-29T15:22:27.665622Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 Wait tx committed for tx 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenant [GOOD] Test command err: 2025-05-29T15:22:27.751818Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [1:117:2151] Bootstrap 2025-05-29T15:22:27.769469Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [1:117:2151] Become StateWork (SchemeCache [1:123:2157]) 2025-05-29T15:22:27.776733Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:27.778181Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:27.778222Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:22:27.778475Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:27.778589Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:22:27.778671Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:22:27.778676Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:22:27.778697Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:22:27.781038Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:22:27.781110Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:22:27.781122Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:22:27.781145Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:22:27.781158Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:22:27.781172Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:22:27.802875Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:22:27.802926Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:22:27.813425Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:22:27.813451Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:22:27.813460Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:22:27.813467Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:22:27.813480Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:22:27.813486Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:22:27.813490Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:22:27.813496Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:22:27.823981Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:22:27.824007Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:22:27.834477Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:22:27.834514Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:22:27.834645Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:22:27.834649Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:22:27.835824Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:22:27.835836Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:22:27.835999Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: 
INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-05-29T15:22:27.836173Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ciyv/0024e9/r3tmp/tmpqn5D1a/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2025-05-29T15:22:27.836215Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/ciyv/0024e9/r3tmp/tmpqn5D1a/pdisk_1.dat 2025-05-29T15:22:27.836339Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-05-29T15:22:27.836362Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-05-29T15:22:27.836372Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-05-29T15:22:27.836393Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-05-29T15:22:27.836409Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-05-29T15:22:27.836641Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-05-29T15:22:27.836671Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-05-29T15:22:27.847209Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-05-29T15:22:27.847303Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-05-29T15:22:27.847345Z node 1 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:22:27.847350Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-05-29T15:22:27.847368Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:22:27.847380Z node 1 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:22:27.847447Z node 1 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:22:27.847453Z node 1 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:22:27.847456Z node 1 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:22:27.847467Z node 1 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:331:2301] 2025-05-29T15:22:27.847727Z node 1 
:TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:22:27.847731Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:325:2297] 2025-05-29T15:22:27.847812Z node 1 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:331:2301]} 2025-05-29T15:22:27.847819Z node 1 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:22:27.847824Z node 1 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:22:27.847826Z node 1 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:22:27.860611Z node 1 :LOCAL DEBUG: local.cpp:1207: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2025-05-29T15:22:27.860625Z node 1 :LOCAL DEBUG: local.cpp:1066: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2025-05-29T15:22:27.864028Z node 1 :LOCAL DEBUG: local.cpp:1234: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2025-05-29T15:22:27.864057Z node 1 :LOCAL DEBUG: local.cpp:1172: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 5 Memory: 1 Network: 1) 2025-05-29T15:22:27.864128Z node 1 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:22:27.864132Z node 1 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:22:27.864141Z node 1 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:387:2337] 2025-05-29T15:22:27.864413Z node 1 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:387:2337]} 2025-05-29T15:22:27.864452Z node 1 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:22:27.864457Z node 1 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:22:27.864459Z node 1 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:22:27.866087Z node 1 :LOCAL DEBUG: local.cpp:1066: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-2 to schemeshard 72057594046578944 2025-05-29T15:22:27.866148Z node 1 :LOCAL DEBUG: local.cpp:1234: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-2" PathDescription { Self { Name: "/dc-1/users/tenant-2" PathId: 101 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 101 DomainKey { SchemeShard: 72057594046578944 PathId: 101 } } } 2025-05-29T15:22:27.866164Z node 1 :LOCAL DEBUG: local.cpp:1172: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-2 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:22:27.866223Z node 1 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:22:27.866229Z node 1 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:22:27.866239Z node 1 :LOCAL DEBUG: local.cpp:213: 
TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:420:2357] 2025-05-29T15:22:27.866375Z node 1 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:420:2357]} 2025-05-29T15:22:27.866393Z node 1 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:22:27.866399Z node 1 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:22:27.866402Z node 1 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:22:27.866472Z node 1 :LOCAL DEBUG: local.cpp:1066: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-unknown to schemeshard 72057594046578944 2025-05-29T15:22:27.866493Z node 1 :LOCAL DEBUG: local.cpp:1234: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusPathDoesNotExist Path: "/dc-1/users/tenant-unknown" 2025-05-29T15:22:27.866500Z node 1 :LOCAL ERROR: local.cpp:1250: TDomainLocal(dc-1): Receive TEvDescribeSchemeResult with bad status StatusPathDoesNotExist reason is <> while resolving subdomain dc-1 2025-05-29T15:22:27.866515Z node 1 :LOCAL ERROR: local.cpp:1500: Unknown domain dc-3 >> TNodeBrokerTest::RegistrationPipelining >> TNodeBrokerTest::NodesMigrationManyNodesInterrupted |60.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |60.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |60.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestStateStatic [GOOD] Test command err: 2025-05-29T15:22:27.947560Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [1:117:2151] Bootstrap 2025-05-29T15:22:27.981489Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [1:117:2151] Become StateWork (SchemeCache [1:123:2157]) 2025-05-29T15:22:27.989069Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:27.990360Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:27.990399Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:22:27.990650Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:27.990787Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:22:27.990873Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:22:27.990878Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:22:27.990898Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:22:27.993139Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:22:27.993155Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:22:27.993164Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:22:27.993209Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:22:27.993219Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:22:27.993239Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:22:28.015126Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:22:28.015171Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:22:28.025992Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:22:28.026040Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:22:28.026056Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:22:28.026069Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:22:28.026096Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:22:28.026106Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:22:28.026113Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:22:28.026122Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:22:28.036810Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:22:28.036848Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:22:28.047635Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:22:28.047699Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:22:28.047909Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:22:28.047920Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:22:28.049675Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:22:28.049691Z node 
1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:22:28.049887Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-05-29T15:22:28.050090Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ciyv/0024e0/r3tmp/tmpWXgfKW/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2025-05-29T15:22:28.050142Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/ciyv/0024e0/r3tmp/tmpWXgfKW/pdisk_1.dat 2025-05-29T15:22:28.050287Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-05-29T15:22:28.050312Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-05-29T15:22:28.050323Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-05-29T15:22:28.050346Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-05-29T15:22:28.050365Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-05-29T15:22:28.050712Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-05-29T15:22:28.050778Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-05-29T15:22:28.061469Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-05-29T15:22:28.075110Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-05-29T15:22:28.075187Z node 1 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:22:28.075225Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-05-29T15:22:28.075250Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1/users/tenant-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:22:28.075272Z node 1 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:22:28.075440Z node 1 :LOCAL DEBUG: local.cpp:1207: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 
2025-05-29T15:22:28.075449Z node 1 :LOCAL DEBUG: local.cpp:1066: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2025-05-29T15:22:28.080545Z node 1 :LOCAL DEBUG: local.cpp:1234: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2025-05-29T15:22:28.080597Z node 1 :LOCAL DEBUG: local.cpp:1172: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:22:28.080715Z node 1 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:22:28.080722Z node 1 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:22:28.080741Z node 1 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:383:2335] 2025-05-29T15:22:28.080819Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1/users/tenant-1 2025-05-29T15:22:28.080826Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:115:2149] 2025-05-29T15:22:28.081413Z node 1 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:383:2335]} 2025-05-29T15:22:28.081464Z node 1 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:22:28.081473Z node 1 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:22:28.081476Z node 1 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk >> TTransferTests::CreateInParallel [GOOD] >> TTransferTests::CreateWrongConfig [GOOD] >> TTransferTests::CreateWrongBatchSize >> TTransferTests::CreateDropRecreate >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease >> TTransferTests::CreateDropRecreate [GOOD] >> TTransferTests::CreateWrongBatchSize [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsSmall >> TTransferTests::ConsistencyLevel >> TNodeBrokerTest::NodesMigrationNodeName [GOOD] >> TNodeBrokerTest::FixedNodeId [GOOD] >> TNodeBrokerTest::NodesMigrationSetLocation [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsSmall [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsBig >> TPartitionTests::DifferentWriteTxBatchingOptions [GOOD] >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-false >> TNodeBrokerTest::NodesMigrationNewActiveNode |60.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |60.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |60.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationSetLocation [GOOD] Test command err: 2025-05-29T15:22:27.112833Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.112878Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.112901Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.112928Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.112953Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.112971Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.118827Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.118931Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.118970Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.119007Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.119046Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.119077Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.119138Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.119160Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.119356Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.119376Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.119389Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.119400Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.119414Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.119427Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.119455Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.123413Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.123482Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.123519Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.124777Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.124829Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.124872Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 
2025-05-29T15:22:27.124994Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.125020Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.125048Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.125078Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.125102Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.125315Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.125350Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.125377Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.125421Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.125455Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.125587Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.125705Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.125769Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.126579Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.126606Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.126613Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.126620Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.126627Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.126634Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.126642Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.131649Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.131751Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.131875Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.131927Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.132021Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.132071Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.133197Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.133730Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.133957Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.134149Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.134343Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.134521Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.134762Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.135564Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.136057Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.136287Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.136642Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.138041Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.138569Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.163482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:27.163502Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-05-29T15:22:27.168881Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:27.169400Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:27.169472Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:27.169794Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:27.170766Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:27.170794Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:27.170867Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:27.170885Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.170890Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:27.170906Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Compl ... NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:27.413165Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:27.413198Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:27.413236Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:27.413301Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 
2025-05-29T15:22:27.413311Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:988: [DB] Loaded current epoch: #1.2 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.413315Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1017: [DB] Loaded approximate epoch start: #1.1 2025-05-29T15:22:27.413318Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1034: [DB] Loaded main nodes table: Nodes 2025-05-29T15:22:27.413340Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:281: [Dirty] Added node #1024.v0 host1:1001 2025-05-29T15:22:27.413358Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1113: [DB] Loaded node #1024.v0 { NodeId: 1024, State: Active, Version: 0, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:27.413369Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1024.v2 { NodeId: 1024, State: Active, Version: 2, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: , AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:27.413375Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1214: [DB] Migrating changed node #1024.v3 { NodeId: 1024, State: Active, Version: 3, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:27.413379Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 2 to 3 2025-05-29T15:22:27.413386Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:27.413402Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:27.413405Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 1 2025-05-29T15:22:27.413409Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.413422Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v3 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:27.413446Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:27.424689Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:27.424721Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-05-29T15:22:27.424730Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=1 expired=0 2025-05-29T15:22:27.424750Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 
1970-01-01T02:00:00.025000Z nodes=1 expired=0 removed=0 2025-05-29T15:22:27.424753Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v3 to update nodes log 2025-05-29T15:22:27.424819Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:681:2236], Recipient [1:671:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.424828Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:682:2237], Recipient [1:671:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.424922Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:684:2239], Recipient [1:671:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.424934Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:681:2236] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:27.424947Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:682:2237] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:27.424954Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:684:2239] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:27.425012Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [2:53:2072], Recipient [1:681:2236] 2025-05-29T15:22:27.425016Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:27.425020Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:27.425025Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:682:2237] 2025-05-29T15:22:27.425027Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:27.425029Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:27.425033Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:684:2239] 2025-05-29T15:22:27.425035Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:27.425037Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:27.425118Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:712:2262], Recipient [1:671:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.425144Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:671:2230]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:27.425147Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:27.425152Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.425193Z node 1 
:NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:714:2264], Recipient [1:671:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.425203Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:671:2230]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:27.425218Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:27.425222Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.425267Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:716:2266], Recipient [1:671:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.425278Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:671:2230]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:27.425281Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:27.425284Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.425320Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:718:2268], Recipient [1:671:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.425338Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:671:2230]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 2 } 2025-05-29T15:22:27.425341Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:27.425344Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.425381Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:720:2270], Recipient [1:671:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.425395Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:629:2213], Recipient [1:671:2230]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 2 SeqNo: 2 } 2025-05-29T15:22:27.425399Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:27.425404Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:629:2213], seqNo: 2, version: 2, server pipe id: [1:720:2270] 2025-05-29T15:22:27.425408Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:629:2213] 2025-05-29T15:22:27.425447Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:720:2270], Recipient [1:671:2230]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:27.425451Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:629:2213], seqNo: 2, server pipe id: [1:720:2270] 2025-05-29T15:22:27.425468Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: 
StateWork, received event# 269877761, Sender [1:722:2272], Recipient [1:671:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.425480Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:629:2213], Recipient [1:671:2230]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:27.425492Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:27.425510Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId [GOOD] Test command err: 2025-05-29T15:22:27.121746Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.121796Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.121828Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.121867Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.121897Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.121922Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.130019Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.130155Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.130218Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.130267Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.130330Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.130377Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.130465Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.130502Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.130798Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.130836Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.130860Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 
2025-05-29T15:22:27.130882Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.130910Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.130937Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.130985Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.135385Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.135446Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.135480Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.136632Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.136678Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.136715Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.136830Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.136860Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.136887Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.136911Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.136934Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.137142Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137181Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137208Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137239Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137278Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137387Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137499Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137582Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.138248Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.138264Z node 4 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.138276Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.138286Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.138297Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.138428Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.143299Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.143326Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.143528Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.143574Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.143586Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.144687Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.145019Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.145267Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.145368Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.145519Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.145650Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.145804Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.171119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:27.171134Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-05-29T15:22:27.175397Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:27.175773Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:27.175814Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:27.175959Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:27.176582Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:27.176597Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:27.176637Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:27.176645Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.176648Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:27.176657Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:27.176669Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:27.176673Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:27.176675Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:27.176679Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.176689Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1 2025-05-29T15:22:27.176693Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes 2025-05-29T15:22:27.208318Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:27.208354Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-05-29T15:22:27.208366Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-05-29T15:22:27.208377Z n ... 
s: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:27.411618Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: true Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:27.411628Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:656:2184], Recipient [1:554:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:27.411631Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:27.411639Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:27.411642Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:1001 (fixed) tenant: dc-1 2025-05-29T15:22:27.411652Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:324: [Dirty] Fix ID for node #1025.v4 host2:1001 2025-05-29T15:22:27.411660Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v4 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=2 expire=NEVER servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-05-29T15:22:27.411712Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 3 to 4 2025-05-29T15:22:27.411715Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=4 2025-05-29T15:22:27.422313Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:27.422328Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:324: [Committed] Fix ID for node #1025.v4 host2:1001 2025-05-29T15:22:27.422331Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 3 to 4 2025-05-29T15:22:27.422335Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v4 host2:1001 to epoch cache 2025-05-29T15:22:27.422350Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v4 to update nodes log 2025-05-29T15:22:27.422377Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18446744073709551615 Name: "slot-1" } 2025-05-29T15:22:27.422458Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:661:2243], Recipient [1:554:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.422472Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:622:2214], Recipient [1:554:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-05-29T15:22:27.422475Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:27.422485Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 
ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18446744073709551615 Name: "slot-1" } } 2025-05-29T15:22:27.422525Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:663:2245], Recipient [1:554:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.422538Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039939, Sender [1:622:2214], Recipient [1:554:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1025 } 2025-05-29T15:22:27.422541Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2025-05-29T15:22:27.422551Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:44: TTxExtendLease Execute node #1025 2025-05-29T15:22:27.422561Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:78: TTxExtendLease Complete 2025-05-29T15:22:27.422570Z node 1 :NODE_BROKER TRACE: node_broker__extend_lease.cpp:82: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1025 Expire: 18446744073709551615 Epoch { Id: 1 Version: 4 Start: 25000 End: 3600025000 NextEnd: 7200025000 } } 2025-05-29T15:22:27.422615Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:665:2247], Recipient [1:554:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.422624Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:622:2214], Recipient [1:554:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-05-29T15:22:27.422626Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:27.422634Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18446744073709551615 Name: "slot-1" } } 2025-05-29T15:22:27.422666Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:667:2249], Recipient [1:554:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.422680Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:622:2214], Recipient [1:554:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:22:27.422683Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:27.422689Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:22:27.422729Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:27.422767Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:628:2219] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:27.422804Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:669:2250], recipient# [1:668:2184], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:27.422815Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:27.422823Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:27.422831Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:668:2184], Recipient [1:554:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:27.422833Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:27.422838Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:27.422840Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:27.422850Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:27.422858Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 
1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18446744073709551615 Name: "slot-1" } 2025-05-29T15:22:27.422899Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:671:2252], Recipient [1:554:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.422915Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039939, Sender [1:622:2214], Recipient [1:554:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1025 } 2025-05-29T15:22:27.422919Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2025-05-29T15:22:27.422923Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:44: TTxExtendLease Execute node #1025 2025-05-29T15:22:27.422928Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:78: TTxExtendLease Complete 2025-05-29T15:22:27.422939Z node 1 :NODE_BROKER TRACE: node_broker__extend_lease.cpp:82: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1025 Expire: 18446744073709551615 Epoch { Id: 1 Version: 4 Start: 25000 End: 3600025000 NextEnd: 7200025000 } }
>> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD]
>> TTransferTests::ConsistencyLevel [GOOD]
>> TTransferTests::Alter
>> TPartitionTests::FailedTxsDontBlock
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNodeName [GOOD]
Test command err:
2025-05-29T15:22:27.095811Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.095850Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.095871Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.095899Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.095923Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.095939Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.101649Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.101737Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.101773Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.101803Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.101838Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.101866Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.101916Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle
NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.101937Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.102125Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.102142Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.102155Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.102167Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.102182Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.102196Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.102222Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.106303Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.106357Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.106387Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.107464Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.107506Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.107536Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.107628Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.107650Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.107672Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.107697Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.107718Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.107914Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.107944Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.107968Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.107996Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.108023Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.108119Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.108217Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.108273Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.108823Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.108840Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.108846Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.108852Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.108857Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.108863Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.108870Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.111627Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.111658Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.111666Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.111697Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.111734Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.112168Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.112277Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.112302Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.112588Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.112742Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.112827Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 
18446744073709.551615s } 2025-05-29T15:22:27.112922Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.113040Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.113517Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.113762Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.139342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:27.139359Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:27.142861Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:27.143204Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:27.143249Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:27.143442Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:27.144076Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:27.144094Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:27.144140Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:27.144153Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.144159Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:27.144168Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:27.144191Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:27.144195Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:27.144198Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:27.144202Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.144213Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330 ... 
EvUpdateNodes v2 -> v2 to [1:625:2213] 2025-05-29T15:22:27.397771Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:718:2272], Recipient [1:670:2232]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:27.397775Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:625:2213], seqNo: 2, server pipe id: [1:718:2272] 2025-05-29T15:22:27.397795Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:720:2274], Recipient [1:670:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.397804Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:625:2213], Recipient [1:670:2232]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:27.397807Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:27.397821Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1" Address: "" Location { } Expire: 7200025000 Name: "slot-1" } } 2025-05-29T15:22:27.397863Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:722:2276], Recipient [1:670:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.397879Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:625:2213], Recipient [1:670:2232]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:22:27.397881Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:27.397886Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "dc-1" 2025-05-29T15:22:27.397914Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:27.397936Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:631:2218] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:27.397973Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:724:2277], recipient# [1:723:2232], result# { ErrorCount: 0 DatabaseName: 
DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:27.397984Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:27.397992Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:27.398002Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:723:2232], Recipient [1:670:2232]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:27.398005Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:27.398015Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:27.398017Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:27.398033Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:27.398041Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1" Address: "" Location { } Expire: 7200025000 Name: "slot-1" } 2025-05-29T15:22:27.398086Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:726:2279], Recipient [1:670:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:27.398099Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:625:2213], Recipient [1:670:2232]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2" Address: "" Location { } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:22:27.398103Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:27.398107Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2" Address: "" Location { } FixedNodeId: false Path: "dc-1" 2025-05-29T15:22:27.398121Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { 
ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:27.398127Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:631:2218] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:27.398139Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:728:2280], recipient# [1:727:2232], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:27.398147Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:27.398152Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2" Address: "" Location { } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:27.398159Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:727:2232], Recipient [1:670:2232]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:27.398161Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:27.398164Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:27.398166Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:27.398181Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v3 host2:1001 to database state=Active resolvehost=host2 address= dc= location= lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC 
servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:27.398217Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v3 host2:1001 2025-05-29T15:22:27.398220Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 2 to 3 2025-05-29T15:22:27.398223Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=3 2025-05-29T15:22:27.408880Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:27.408897Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v3 host2:1001 2025-05-29T15:22:27.408902Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 2 to 3 2025-05-29T15:22:27.408907Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v3 host2:1001 to epoch cache 2025-05-29T15:22:27.408917Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v3 to update nodes log 2025-05-29T15:22:27.408939Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2" Address: "" Location { } Expire: 7200025000 Name: "slot-0" }
>> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-false [GOOD]
>> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-true
>> TNodeBrokerTest::NodesAlreadyMigrated
>> TTransferTests::Alter [GOOD]
>> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD]
>> TNodeBrokerTest::SeveralNodesSubscribersPerPipe [GOOD]
>> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD]
>> TNodeBrokerTest::NoEffectBeforeCommit [GOOD]
>> TNodeBrokerTest::NodesV2BackMigrationManyNodesInterrupted [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:27.774764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:27.774787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:27.774792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:27.774795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:27.774799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:27.774802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit
10000 2025-05-29T15:22:27.774808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:27.774819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:27.774905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:27.774960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:27.785335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:27.785355Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:27.787303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:27.787386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:27.787410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:27.788506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:27.788647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:27.788723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:27.788762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:27.789193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:27.789230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:27.789458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:27.789466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:27.789482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:27.789488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:27.789492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:27.789515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:27.790679Z node 1 
:HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:27.806159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:27.806214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:27.806263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:27.806313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:27.806324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:27.807161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:27.807181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:27.807227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:27.807235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:27.807239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:27.807243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:27.807691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:27.807700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:27.807705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:27.808072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:27.808084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:27.808088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:22:27.808093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:22:27.808529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:22:27.808922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:22:27.808952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:22:27.809098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:22:27.809118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:22:27.809125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:22:27.809178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:22:27.809183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:22:27.809206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:22:27.809215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:22:27.809579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:22:27.809586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:22:27.809616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... nResult Origin: 72075186233409546 TxId: 101
2025-05-29T15:22:29.127159Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4920: StateWork, processing event TEvColumnShard::TEvNotifyTxCompletionResult
2025-05-29T15:22:29.127168Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6146: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101
2025-05-29T15:22:29.127175Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0
2025-05-29T15:22:29.127201Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101
2025-05-29T15:22:29.127221Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944
FAKE_COORDINATOR: Erasing txId 101
2025-05-29T15:22:29.127797Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-05-29T15:22:29.127813Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944
2025-05-29T15:22:29.127820Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 101:0
2025-05-29T15:22:29.127853Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [6:123:2148], Recipient [6:123:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation
2025-05-29T15:22:29.127861Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation
2025-05-29T15:22:29.127875Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944
2025-05-29T15:22:29.127883Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState
2025-05-29T15:22:29.127898Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944
2025-05-29T15:22:29.127904Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1
2025-05-29T15:22:29.127908Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-05-29T15:22:29.127914Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1
2025-05-29T15:22:29.127918Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-05-29T15:22:29.127924Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true
2025-05-29T15:22:29.127940Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:345:2322] message: TxId: 101
2025-05-29T15:22:29.127945Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-05-29T15:22:29.127950Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0
2025-05-29T15:22:29.127953Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0
2025-05-29T15:22:29.127984Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-05-29T15:22:29.128483Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944
2025-05-29T15:22:29.128511Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [6:345:2322] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 101 at schemeshard: 72057594046678944
2025-05-29T15:22:29.128547Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-05-29T15:22:29.128554Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [6:346:2323]
2025-05-29T15:22:29.128597Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [6:348:2325], Recipient [6:123:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected
2025-05-29T15:22:29.128603Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected
2025-05-29T15:22:29.128607Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944
TestWaitNotification: OK eventTxId 101
TestModificationResults wait txId: 102
2025-05-29T15:22:29.128774Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [6:390:2360], Recipient [6:123:2148]: {TEvModifySchemeTransaction txid# 102 TabletId# 72057594046678944}
2025-05-29T15:22:29.128780Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction
2025-05-29T15:22:29.129480Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTransfer Replication { Name: "Transfer" Config { TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot/Table" } Batching { FlushIntervalMilliSeconds: 86400001 } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:22:29.129534Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_replication.cpp:348: [72057594046678944] TCreateReplication Propose: opId# 102:0, path# /MyRoot/Transfer
2025-05-29T15:22:29.129562Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, at schemeshard: 72057594046678944
2025-05-29T15:22:29.129610Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944
2025-05-29T15:22:29.130174Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Flush interval must be less than or equal to 24 hours" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:22:29.130212Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, operation: CREATE TRANSFER, path: /MyRoot/Transfer
2025-05-29T15:22:29.130219Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944
TestModificationResult got TxId: 102, wait until txId: 102
TestWaitNotification wait txId: 102
2025-05-29T15:22:29.130282Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion
2025-05-29T15:22:29.130289Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102
2025-05-29T15:22:29.130365Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [6:396:2366], Recipient [6:123:2148]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:29.130372Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:29.130377Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944
2025-05-29T15:22:29.130400Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [6:345:2322], Recipient [6:123:2148]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102
2025-05-29T15:22:29.130406Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion
2025-05-29T15:22:29.130417Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944
2025-05-29T15:22:29.130436Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-05-29T15:22:29.130441Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:394:2364]
2025-05-29T15:22:29.130466Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [6:396:2366], Recipient [6:123:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected
2025-05-29T15:22:29.130470Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected
2025-05-29T15:22:29.130475Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944
TestWaitNotification: OK eventTxId 102
2025-05-29T15:22:29.130531Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [6:397:2367], Recipient [6:123:2148]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }
2025-05-29T15:22:29.130536Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme
2025-05-29T15:22:29.130547Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:22:29.130579Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Transfer" took 29us result status StatusPathDoesNotExist
2025-05-29T15:22:29.130613Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Transfer\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Transfer" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> Initializer::Simple [FAIL]
>> TNodeBrokerTest::NodesMigrationExpiredChanged [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::Alter [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:22:27.776409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:22:27.776433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:22:27.776439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:22:27.776444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:22:27.776450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:22:27.776454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:22:27.776463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:22:27.776478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:22:27.776605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:22:27.776677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:22:27.788521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:22:27.788546Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:22:27.790940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:22:27.791017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:22:27.791037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:22:27.792430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:22:27.792582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:22:27.792662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:22:27.792699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:22:27.793068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:22:27.793094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:22:27.793275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:22:27.793282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:22:27.793296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:22:27.793301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:22:27.793305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:22:27.793329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:22:27.794337Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:22:27.807928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:22:27.807981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:22:27.808031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:22:27.808074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:22:27.808081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:22:27.808589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:22:27.808605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:22:27.808640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:22:27.808646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:22:27.808650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:22:27.808653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:22:27.808957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:22:27.808966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:22:27.808969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:22:27.809222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:22:27.809229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:22:27.809232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:22:27.809236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:22:27.809738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:22:27.810100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:22:27.810126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:22:27.810264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:22:27.810288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:22:27.810295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:22:27.810347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:22:27.810353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:22:27.810374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:22:27.810381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:22:27.810786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:22:27.810792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:22:27.810821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... lterReplication TConfigureParts opId# 104:0 HandleReply NKikimrReplication.TEvAlterReplicationResult OperationId { TxId: 104 PartId: 0 } Origin: 72075186233409547 Status: SUCCESS
2025-05-29T15:22:29.414905Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 104:0 3 -> 128
2025-05-29T15:22:29.414914Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944
2025-05-29T15:22:29.414918Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:693: Ack tablet strongly msg opId: 104:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:3
2025-05-29T15:22:29.415141Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944
2025-05-29T15:22:29.415148Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944
2025-05-29T15:22:29.415151Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 104:0
2025-05-29T15:22:29.415166Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [6:123:2148], Recipient [6:123:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation
2025-05-29T15:22:29.415171Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation
2025-05-29T15:22:29.415177Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944
2025-05-29T15:22:29.415183Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_replication.cpp:189: [72057594046678944] TAlterReplication TPropose opId# 104:0 ProgressState
2025-05-29T15:22:29.415187Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944
2025-05-29T15:22:29.415193Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1
2025-05-29T15:22:29.415212Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:22:29.415465Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944
2025-05-29T15:22:29.415472Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816
2025-05-29T15:22:29.415483Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 104 at step: 5000005
FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005
2025-05-29T15:22:29.415525Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269287424, Sender [6:136:2157], Recipient [6:259:2249]
2025-05-29T15:22:29.415530Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4897: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-05-29T15:22:29.415541Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:22:29.415559Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 25769805933 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:22:29.415565Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_replication.cpp:203: [72057594046678944] TAlterReplication TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000005
2025-05-29T15:22:29.415586Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 104:0 128 -> 240
2025-05-29T15:22:29.415606Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944
2025-05-29T15:22:29.415616Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-05-29T15:22:29.415624Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:693: Ack tablet strongly msg opId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104
2025-05-29T15:22:29.415854Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944
2025-05-29T15:22:29.415861Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:384: Ack coordinator stepId#5000005 first txId#104 countTxs#1
2025-05-29T15:22:29.415865Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:354: Ack mediator stepId#5000005
2025-05-29T15:22:29.415868Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 104:0
FAKE_COORDINATOR: Erasing txId 104
2025-05-29T15:22:29.415894Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [6:123:2148], Recipient [6:123:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation
2025-05-29T15:22:29.415897Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation
2025-05-29T15:22:29.415903Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:22:29.415906Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-05-29T15:22:29.415931Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:22:29.415935Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:208:2209], at schemeshard: 72057594046678944, txId: 104, path id: 3
2025-05-29T15:22:29.416004Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944
2025-05-29T15:22:29.416009Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 104:0 ProgressState
2025-05-29T15:22:29.416015Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944
2025-05-29T15:22:29.416019Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1
2025-05-29T15:22:29.416021Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1
2025-05-29T15:22:29.416025Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1
2025-05-29T15:22:29.416028Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1
2025-05-29T15:22:29.416032Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false
2025-05-29T15:22:29.416038Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1
2025-05-29T15:22:29.416042Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:0
2025-05-29T15:22:29.416046Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:0
2025-05-29T15:22:29.416069Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4
2025-05-29T15:22:29.416073Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 104, publications: 1, subscribers: 0
2025-05-29T15:22:29.416076Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 4
2025-05-29T15:22:29.416130Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 274137603, Sender [6:208:2209], Recipient [6:123:2148]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Version: 4 }
2025-05-29T15:22:29.416135Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck
2025-05-29T15:22:29.416143Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104
2025-05-29T15:22:29.416149Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104
2025-05-29T15:22:29.416152Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104
2025-05-29T15:22:29.416155Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4
2025-05-29T15:22:29.416158Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-05-29T15:22:29.416166Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0
2025-05-29T15:22:29.416168Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944
2025-05-29T15:22:29.416655Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944
2025-05-29T15:22:29.416695Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104
2025-05-29T15:22:29.416698Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944
TestModificationResult got TxId: 104, wait until txId: 104
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD]
Test command err:
2025-05-29T15:22:29.112486Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:29.112680Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:29.115468Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:29.115501Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:29.136038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:22:29.136060Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
... waiting for nameservers are connected
2025-05-29T15:22:29.139675Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot
2025-05-29T15:22:29.139997Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored
2025-05-29T15:22:29.140040Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute
2025-05-29T15:22:29.140201Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-05-29T15:22:29.140684Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete
2025-05-29T15:22:29.140720Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute
2025-05-29T15:22:29.140751Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config.
2025-05-29T15:22:29.140760Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z
2025-05-29T15:22:29.140764Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1
2025-05-29T15:22:29.140773Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete
2025-05-29T15:22:29.140782Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute
2025-05-29T15:22:29.140786Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0
2025-05-29T15:22:29.140789Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration
2025-05-29T15:22:29.140792Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z
2025-05-29T15:22:29.140802Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1
2025-05-29T15:22:29.140806Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes
2025-05-29T15:22:29.161950Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete
2025-05-29T15:22:29.161983Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.024000Z
2025-05-29T15:22:29.161993Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z, approximate epoch start #1.1 nodes=0 expired=0
2025-05-29T15:22:29.162003Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z nodes=0 expired=0 removed=0
2025-05-29T15:22:29.202636Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:201:2197], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:29.202674Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:201:2197] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  }
... waiting for nameservers are connected (done)
2025-05-29T15:22:29.203078Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 }
2025-05-29T15:22:29.203086Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes
2025-05-29T15:22:29.203095Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z
2025-05-29T15:22:29.203122Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:205:2201], Recipient [1:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:29.203145Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:203:2199], Recipient [1:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" }
2025-05-29T15:22:29.203148Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest
2025-05-29T15:22:29.203154Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1"
2025-05-29T15:22:29.203202Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-05-29T15:22:29.203213Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:16:2063], path# /dc-1, domainOwnerId# 72057594046678944
2025-05-29T15:22:29.207806Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }
2025-05-29T15:22:29.207861Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:207:2202] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr
2025-05-29T15:22:29.207895Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:16:2063], cacheItem# { Subscriber: { Subscriber: [1:207:2202] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-05-29T15:22:29.207954Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:214:2203], recipient# [1:206:2176], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-05-29T15:22:29.207964Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }
2025-05-29T15:22:29.207977Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <7205759404667894 ... te::TEvResolvedRegistrationRequest
2025-05-29T15:22:29.435846Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute
2025-05-29T15:22:29.435851Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:1001 (not fixed) tenant: dc-1
2025-05-29T15:22:29.435881Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.host1.host1 address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false
2025-05-29T15:22:29.435936Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v2 host1:1001
2025-05-29T15:22:29.435945Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2
2025-05-29T15:22:29.435950Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=2
2025-05-29T15:22:29.446622Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete
2025-05-29T15:22:29.446641Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001
2025-05-29T15:22:29.446648Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2
2025-05-29T15:22:29.446652Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v2 host1:1001 to epoch cache
2025-05-29T15:22:29.446670Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log
2025-05-29T15:22:29.446703Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" }
2025-05-29T15:22:29.446820Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [2:216:2205], Recipient [2:170:2176]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:29.446849Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [2:201:2197], Recipient [2:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" }
2025-05-29T15:22:29.446853Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest
2025-05-29T15:22:29.446859Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1"
2025-05-29T15:22:29.446897Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-05-29T15:22:29.446921Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [2:16:2063], cacheItem# { Subscriber: { Subscriber: [2:205:2200] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-05-29T15:22:29.446960Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [2:218:2206], recipient# [2:217:2176], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-05-29T15:22:29.446970Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }
2025-05-29T15:22:29.446980Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1
2025-05-29T15:22:29.446989Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [2:217:2176], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest
2025-05-29T15:22:29.446992Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest
2025-05-29T15:22:29.447004Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute
2025-05-29T15:22:29.447007Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:1001 (not fixed) tenant: dc-1
2025-05-29T15:22:29.447026Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v3 host2:1001 to database state=Active resolvehost=host2.host2.host2 address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false
2025-05-29T15:22:29.447062Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v3 host2:1001
2025-05-29T15:22:29.447067Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 2 to 3
2025-05-29T15:22:29.447069Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=3
2025-05-29T15:22:29.457711Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete
2025-05-29T15:22:29.457728Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v3 host2:1001
2025-05-29T15:22:29.457734Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 2 to 3
2025-05-29T15:22:29.457738Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v3 host2:1001 to epoch cache
2025-05-29T15:22:29.457755Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v3 to update nodes log
2025-05-29T15:22:29.457783Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200023000 Name: "slot-1" }
... waiting for cache miss
2025-05-29T15:22:29.457832Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:525: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1024 Deadline: 18446744073709.551615s }
2025-05-29T15:22:29.457840Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:626: New cache miss: nodeId# 1024, deadline# 18446744073709.551615s
2025-05-29T15:22:29.457844Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:525: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1025 Deadline: 1.107024s }
2025-05-29T15:22:29.457847Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:626: New cache miss: nodeId# 1025, deadline# 1.107024s
2025-05-29T15:22:29.457849Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:630: Schedule wakeup for new earliest deadline 1.107024s
... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1
... waiting for cache miss (done)
2025-05-29T15:22:29.467991Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [2:170:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue
2025-05-29T15:22:29.468007Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue
2025-05-29T15:22:29.468015Z node 2 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v1 -> v3 to [2:18:2065]
... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0
2025-05-29T15:22:29.529227Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:873: HandleWakeup at 1.108024s
2025-05-29T15:22:29.529259Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:134: Cache miss failed: nodeId=1025, error=Deadline exceeded
... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE
... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE
... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR
2025-05-29T15:22:29.529331Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:724: Handle NKikimrNodeBroker.TUpdateNodes Epoch { Id: 1 Version: 1 Start: 23000 End: 3600023000 NextEnd: 7200023000 } SeqNo: 0
2025-05-29T15:22:29.529345Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039952, Sender [2:18:2065], Recipient [2:170:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest { SeqNo: 0 }
2025-05-29T15:22:29.529352Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:255: StateWork, processing event TEvNodeBroker::TEvSyncNodesRequest
2025-05-29T15:22:29.529391Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:724: Handle NKikimrNodeBroker.TUpdateNodes Epoch { Id: 1 Version: 3 Start: 23000 End: 3600023000 NextEnd: 7200023000 } Updates { Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } } Updates { Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200023000 Name: "slot-1" } } SeqNo: 0
2025-05-29T15:22:29.529410Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:789: Handle NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesResponse { SeqNo: 0 }
2025-05-29T15:22:29.529414Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:128: Cache miss succeed: nodeId=1024
2025-05-29T15:22:29.529422Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:525: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1024 Deadline: 18446744073709.551615s }
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SeveralNodesSubscribersPerPipe [GOOD]
Test command err:
2025-05-29T15:22:27.413644Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.413684Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.413706Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.413732Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.413752Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.413766Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.419082Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.419167Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.419203Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.419232Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.419266Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.419292Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.419352Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.419371Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.419655Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.419683Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.419699Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.419726Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.419748Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.419767Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.419807Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.423025Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.423068Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.423091Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.423780Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.423807Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.423830Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.423894Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.423909Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.423923Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.423937Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.423950Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.424078Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.424097Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.424110Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.424132Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.424148Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.424205Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.424265Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.424295Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.424796Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.424814Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.424820Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.424825Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.424831Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.424836Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.424842Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.427667Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.427689Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.427696Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.428147Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.428159Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.428168Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.428192Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.428396Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.428722Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.428782Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.428948Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.429022Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.429092Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.429681Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.429860Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.450668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:22:27.450690Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
... waiting for nameservers are connected
2025-05-29T15:22:27.453909Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot
2025-05-29T15:22:27.454214Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored
2025-05-29T15:22:27.454261Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute
2025-05-29T15:22:27.454458Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-05-29T15:22:27.455459Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete
2025-05-29T15:22:27.455491Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute
2025-05-29T15:22:27.455555Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config.
2025-05-29T15:22:27.455568Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z
2025-05-29T15:22:27.455572Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1
2025-05-29T15:22:27.455586Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete
2025-05-29T15:22:27.455628Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute
2025-05-29T15:22:27.455634Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0
2025-05-29T15:22:27.455638Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration
2025-05-29T15:22:27.455643Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z
2025-05-29T15:22:27.455658Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330 ... TEvNodeBroker::TEvListNodes
2025-05-29T15:22:27.716255Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z
2025-05-29T15:22:27.716314Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039952, Sender [1:625:2213], Recipient [1:557:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest { SeqNo: 1 }
2025-05-29T15:22:27.716318Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:255: StateWork, processing event TEvNodeBroker::TEvSyncNodesRequest
2025-05-29T15:22:27.716358Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:659:2239], Recipient [1:557:2183]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:27.716370Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:625:2213], Recipient [1:557:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { }
2025-05-29T15:22:27.716374Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes
2025-05-29T15:22:27.716380Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z
2025-05-29T15:22:27.716416Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039952, Sender [1:643:2224], Recipient [1:557:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest { SeqNo: 1 }
2025-05-29T15:22:27.716419Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:255: StateWork, processing event TEvNodeBroker::TEvSyncNodesRequest
2025-05-29T15:22:27.716459Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:661:2241], Recipient [1:557:2183]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:27.716469Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:625:2213], Recipient [1:557:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { }
2025-05-29T15:22:27.716474Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes
2025-05-29T15:22:27.716478Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z
2025-05-29T15:22:27.716513Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:663:2243], Recipient [1:557:2183]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:27.716524Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:625:2213], Recipient [1:557:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { }
2025-05-29T15:22:27.716527Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes
2025-05-29T15:22:27.716530Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z
2025-05-29T15:22:27.716572Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:641:2222], Recipient [1:557:2183]: NKikimr::TEvTabletPipe::TEvServerDisconnected
2025-05-29T15:22:27.716578Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:643:2224], seqNo: 1, server pipe id: [1:641:2222]
2025-05-29T15:22:27.716582Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:625:2213], seqNo: 1, server pipe id: [1:641:2222]
2025-05-29T15:22:27.716599Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:665:2245], Recipient [1:557:2183]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:27.716625Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:625:2213], Recipient [1:557:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host3" Port: 1001 ResolveHost: "host3.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" }
2025-05-29T15:22:27.716628Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest
2025-05-29T15:22:27.716635Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host3" Port: 1001 ResolveHost: "host3.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1"
2025-05-29T15:22:27.716671Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-05-29T15:22:27.716693Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:629:2216] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-05-29T15:22:27.716739Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:667:2246], recipient# [1:666:2183], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-05-29T15:22:27.716752Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }
2025-05-29T15:22:27.716760Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host3" Port: 1001 ResolveHost: "host3.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1
2025-05-29T15:22:27.716770Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:666:2183], Recipient [1:557:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest
2025-05-29T15:22:27.716774Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest
2025-05-29T15:22:27.716791Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute
2025-05-29T15:22:27.716794Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host3:1001 (not fixed) tenant: dc-1
2025-05-29T15:22:27.716819Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1026.v4 host3:1001 to database state=Active resolvehost=host3.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=2 authorizedbycertificate=false
2025-05-29T15:22:27.716900Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1026.v4 host3:1001
2025-05-29T15:22:27.716905Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 3 to 4
2025-05-29T15:22:27.716908Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=4
2025-05-29T15:22:27.727761Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete
2025-05-29T15:22:27.727781Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1026.v4 host3:1001
2025-05-29T15:22:27.727790Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 3 to 4
2025-05-29T15:22:27.727794Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1026.v4 host3:1001 to epoch cache
2025-05-29T15:22:27.727813Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1026.v4 to update nodes log
2025-05-29T15:22:27.727849Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1026 Host: "host3" Port: 1001 ResolveHost: "host3.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-2" }
2025-05-29T15:22:27.738101Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:557:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue
2025-05-29T15:22:27.738125Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue
2025-05-29T15:22:28.150910Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:706:2256], Recipient [1:557:2183]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:28.151002Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:625:2213], Recipient [1:557:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 3 SeqNo: 2 }
2025-05-29T15:22:28.151013Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest
2025-05-29T15:22:28.151023Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:625:2213], seqNo: 2, version: 3, server pipe id: [1:706:2256]
2025-05-29T15:22:28.151038Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v3 -> v4 to [1:625:2213]
2025-05-29T15:22:28.151060Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:707:2257], Recipient [1:557:2183]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:28.151075Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:625:2213], Recipient [1:557:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { }
2025-05-29T15:22:28.151081Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes
2025-05-29T15:22:28.151096Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.4 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD]
Test command err:
2025-05-29T15:22:27.779870Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.779920Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.779947Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.779978Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.780005Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.780029Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.787581Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.787697Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.787742Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.787779Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.787819Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.787856Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.787924Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.787948Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.788198Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.788227Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.788241Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.788255Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.788273Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.788289Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.788321Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.791960Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.792019Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.792044Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.792913Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.792949Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.792987Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.793069Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.793086Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.793104Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.793125Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.793164Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.793182Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.793198Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.793215Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.793369Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.793389Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.793458Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.793554Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.793595Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.794397Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.794415Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.794426Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.794437Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.794450Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.794462Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.798143Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.798322Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.798364Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.799058Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.799249Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.799551Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.799703Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.799746Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.799822Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.799868Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.800186Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.800541Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.801677Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.802059Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.825253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:22:27.825272Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
... waiting for nameservers are connected
2025-05-29T15:22:27.829876Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot
2025-05-29T15:22:27.830236Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored
2025-05-29T15:22:27.830290Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute
2025-05-29T15:22:27.830458Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-05-29T15:22:27.831267Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete
2025-05-29T15:22:27.831287Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute
2025-05-29T15:22:27.831343Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config.
2025-05-29T15:22:27.831356Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z
2025-05-29T15:22:27.831360Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1
2025-05-29T15:22:27.831388Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete
2025-05-29T15:22:27.831403Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute
2025-05-29T15:22:27.831407Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0
2025-05-29T15:22:27.831410Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration
2025-05-29T15:22:27.831414Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z
2025-05-29T15:22:27.831429Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1
2025-05-29T15:22:27.831433Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes
2025-05-29T15:22:27.863382Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete
2025-05-29T15:22:27.863411Z node 1 :NODE_BROKER TRACE: node_broke ... node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:19001 (not fixed) tenant: /dc-1/yet-another-database
2025-05-29T15:22:28.096254Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v2 host1:19001 to database state=Active resolvehost=host1 address= dc= location= lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:3 slotindex=1 authorizedbycertificate=false
2025-05-29T15:22:28.107062Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete
2025-05-29T15:22:28.107119Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } Expire: 7200025000 Name: "slot-1" }
2025-05-29T15:22:28.107253Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:725:2296], Recipient [1:556:2184]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:28.107304Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:682:2270], Recipient [1:556:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" }
2025-05-29T15:22:28.107311Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest
2025-05-29T15:22:28.107319Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database"
2025-05-29T15:22:28.107367Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-05-29T15:22:28.107395Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:688:2275] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 5000001 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-05-29T15:22:28.107446Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:727:2297], recipient# [1:726:2184], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-05-29T15:22:28.107460Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }
2025-05-29T15:22:28.107471Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2
2025-05-29T15:22:28.107484Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:726:2184], Recipient [1:556:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest
2025-05-29T15:22:28.107488Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest
2025-05-29T15:22:28.107501Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute
2025-05-29T15:22:28.107506Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host4:19001 (not fixed) tenant: /dc-1/my-database
2025-05-29T15:22:28.107526Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1027.v5 host4:19001 to database state=Active resolvehost=host4 address= dc= location= lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false
2025-05-29T15:22:28.107569Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1027.v5 host4:19001
2025-05-29T15:22:28.107577Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 4 to 5
2025-05-29T15:22:28.107580Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=5
2025-05-29T15:22:28.118427Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete
2025-05-29T15:22:28.118445Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1027.v5 host4:19001
2025-05-29T15:22:28.118453Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 4 to 5
2025-05-29T15:22:28.118458Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1027.v5 host4:19001 to epoch cache
2025-05-29T15:22:28.118472Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1027.v5 to update nodes log
2025-05-29T15:22:28.118510Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1027 Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { } Expire: 7200025000 Name: "slot-0" }
2025-05-29T15:22:28.118629Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:731:2301], Recipient [1:556:2184]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:28.118649Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:682:2270], Recipient [1:556:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" }
2025-05-29T15:22:28.118655Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest
2025-05-29T15:22:28.118663Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database"
2025-05-29T15:22:28.118707Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-05-29T15:22:28.118732Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:688:2275] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 5000001 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-05-29T15:22:28.118793Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:733:2302], recipient# [1:732:2184], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-05-29T15:22:28.118808Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }
2025-05-29T15:22:28.118820Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2
2025-05-29T15:22:28.118835Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:732:2184], Recipient [1:556:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest
2025-05-29T15:22:28.118839Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest
2025-05-29T15:22:28.118855Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute
2025-05-29T15:22:28.118859Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database
2025-05-29T15:22:28.118879Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v2 host1:19001 to database state=Active resolvehost=host1 address= dc= location= lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=2 authorizedbycertificate=false
2025-05-29T15:22:28.129759Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete
2025-05-29T15:22:28.129804Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } Expire: 7200025000 Name: "slot-2" }
>> TNodeBrokerTest::RegistrationPipelining [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NoEffectBeforeCommit [GOOD]
Test command err:
2025-05-29T15:22:27.865633Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.865676Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.865704Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.865740Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.865774Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.865797Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.872590Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.872681Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.872721Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.872754Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.872789Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.872828Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.872884Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.872904Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.873112Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.873132Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.873145Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.873158Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.873173Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.873186Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.873213Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.877321Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.877365Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.877389Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.878148Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.878178Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.878200Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.878269Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.878284Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.878298Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.878314Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.878328Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.878478Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.878498Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.878513Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.878534Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.878553Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.878611Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.878674Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.878707Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.879555Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.879587Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.879598Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.879609Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.879618Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.879628Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.879640Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.883747Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.883834Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.883910Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.883926Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.883937Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.883949Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.884957Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.885226Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.885278Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.885346Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.885478Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.885637Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.885674Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.886314Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.886457Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.886571Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.886653Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.888179Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.888485Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:27.908622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:22:27.908641Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
... waiting for nameservers are connected
2025-05-29T15:22:27.911596Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot
2025-05-29T15:22:27.911892Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored
2025-05-29T15:22:27.911927Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute
2025-05-29T15:22:27.912067Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-05-29T15:22:27.912608Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete
2025-05-29T15:22:27.912624Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute
2025-05-29T15:22:27.912664Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config.
2025-05-29T15:22:27.912674Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z
2025-05-29T15:22:27.912678Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1
2025-05-29T15:22:27.912687Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Compl ... " Unit: "4" } FixedNodeId: false Path: "dc-1"
2025-05-29T15:22:28.120061Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-05-29T15:22:28.120070Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:23:2070], path# /dc-1, domainOwnerId# 72057594046678944
2025-05-29T15:22:28.124796Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }
2025-05-29T15:22:28.124856Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:639:2222] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr
2025-05-29T15:22:28.124886Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:639:2222] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-05-29T15:22:28.124962Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:646:2223], recipient# [1:638:2183], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-05-29T15:22:28.124980Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545
TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:28.124994Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:28.125007Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:638:2183], Recipient [1:561:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:28.125011Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:28.125029Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:28.125032Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:28.125093Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:28.125130Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v2 host1:1001 2025-05-29T15:22:28.125134Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2 2025-05-29T15:22:28.125138Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=2 ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) 2025-05-29T15:22:28.135450Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:648:2225], Recipient [1:561:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.135493Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:561:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.135498Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.135508Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:28.135553Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:650:2227], Recipient [1:561:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.135571Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:629:2213], Recipient [1:561:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:28.135575Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:28.135587Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR 2025-05-29T15:22:28.136204Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:28.136220Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-05-29T15:22:28.136227Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-05-29T15:22:28.136231Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v2 host1:1001 to epoch cache 2025-05-29T15:22:28.136249Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-05-29T15:22:28.136274Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } 2025-05-29T15:22:28.136362Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:654:2231], Recipient [1:561:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.136373Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:561:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.136376Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.136383Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:28.136433Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:656:2233], Recipient [1:561:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.136444Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:629:2213], Recipient [1:561:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:28.136447Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:28.136459Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } }
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationManyNodesInterrupted [GOOD]
Test command err:
2025-05-29T15:22:20.007533Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.007587Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.007610Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.007639Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 
18446744073709.551615s } 2025-05-29T15:22:20.007657Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.007672Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.013174Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.013266Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.013303Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.013333Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.013368Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.013396Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.013468Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.013489Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.013718Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.013740Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.013754Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.013769Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.013788Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.013806Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.013834Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.017424Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.017469Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.017494Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.018281Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.018310Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.018332Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.018395Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.018411Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.018425Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.018446Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.018483Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.018497Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.018511Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.018526Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.018637Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.018655Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.018714Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.018807Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.018846Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.019331Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.019342Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.019348Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.019354Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.019362Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.019524Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.022500Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.022666Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.023341Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.023374Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.023510Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.023546Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.023582Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.023785Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.023803Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.023894Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.024054Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.024169Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.024186Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.024245Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.024615Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.024995Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.025887Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.026046Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.027054Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.027321Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.047641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:20.047662Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-05-29T15:22:20.050790Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:20.051147Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:20.051189Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:20.051341Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:20.051963Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:20.051980Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:20.052023Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:20.052033Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:20.052036Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:20.052045Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Compl ... pp:659: Add node #2447.v1425 to update nodes log 2025-05-29T15:22:28.075259Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2448.v1426 to update nodes log 2025-05-29T15:22:28.075264Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2449.v1427 to update nodes log 2025-05-29T15:22:28.075268Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2450.v1428 to update nodes log 2025-05-29T15:22:28.075271Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2451.v1429 to update nodes log 2025-05-29T15:22:28.075274Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2452.v1430 to update nodes log 2025-05-29T15:22:28.075278Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2453.v1431 to update nodes log 2025-05-29T15:22:28.075281Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2454.v1432 to update nodes log 2025-05-29T15:22:28.075288Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2455.v1433 to update nodes log 2025-05-29T15:22:28.075294Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2456.v1434 to update nodes log 2025-05-29T15:22:28.075300Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2457.v1435 to update nodes log 2025-05-29T15:22:28.075306Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2458.v1436 to update nodes log 2025-05-29T15:22:28.075312Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2459.v1437 to update nodes log 2025-05-29T15:22:28.075319Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2460.v1438 to update nodes log 2025-05-29T15:22:28.075326Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2461.v1439 to update nodes log 2025-05-29T15:22:28.075331Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2462.v1440 to update nodes log 2025-05-29T15:22:28.075335Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2463.v1441 to update nodes log 2025-05-29T15:22:28.075338Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2464.v1442 to update nodes log 2025-05-29T15:22:28.075343Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2465.v1443 to update nodes log 2025-05-29T15:22:28.075346Z node 1 
:NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2466.v1444 to update nodes log 2025-05-29T15:22:28.075349Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2467.v1445 to update nodes log 2025-05-29T15:22:28.075353Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2468.v1446 to update nodes log 2025-05-29T15:22:28.075356Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2469.v1447 to update nodes log 2025-05-29T15:22:28.075359Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2470.v1448 to update nodes log 2025-05-29T15:22:28.075363Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2471.v1449 to update nodes log 2025-05-29T15:22:28.075366Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2472.v1450 to update nodes log 2025-05-29T15:22:28.075370Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2473.v1451 to update nodes log 2025-05-29T15:22:28.075373Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2474.v1452 to update nodes log 2025-05-29T15:22:28.075376Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2475.v1453 to update nodes log 2025-05-29T15:22:28.075380Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2476.v1454 to update nodes log 2025-05-29T15:22:28.075384Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2477.v1455 to update nodes log 2025-05-29T15:22:28.075387Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2478.v1456 to update nodes log 2025-05-29T15:22:28.075390Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2479.v1457 to update nodes log 2025-05-29T15:22:28.075394Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2480.v1458 to update nodes log 2025-05-29T15:22:28.075397Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2481.v1459 to update nodes log 2025-05-29T15:22:28.075401Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2482.v1460 to update nodes log 2025-05-29T15:22:28.075404Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2483.v1461 to update nodes log 2025-05-29T15:22:28.075408Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2484.v1462 to update nodes log 2025-05-29T15:22:28.075412Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2485.v1463 to update nodes log 2025-05-29T15:22:28.075415Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2486.v1464 to update nodes log 2025-05-29T15:22:28.075419Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2487.v1465 to update nodes log 2025-05-29T15:22:28.075424Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2488.v1466 to update nodes log 2025-05-29T15:22:28.075428Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2489.v1467 to update nodes log 2025-05-29T15:22:28.075433Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2490.v1468 to update nodes log 2025-05-29T15:22:28.075437Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2491.v1469 to update nodes log 2025-05-29T15:22:28.075441Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2492.v1470 to update nodes log 2025-05-29T15:22:28.075445Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2493.v1471 to update nodes log 2025-05-29T15:22:28.075449Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2494.v1472 to update nodes log 2025-05-29T15:22:28.075452Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2495.v1473 to update nodes log 2025-05-29T15:22:28.075456Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add 
node #2496.v1474 to update nodes log 2025-05-29T15:22:28.075459Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2497.v1475 to update nodes log 2025-05-29T15:22:28.075463Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2498.v1476 to update nodes log 2025-05-29T15:22:28.075466Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2499.v1477 to update nodes log 2025-05-29T15:22:28.075470Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2500.v1478 to update nodes log 2025-05-29T15:22:28.075474Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2501.v1479 to update nodes log 2025-05-29T15:22:28.075478Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2502.v1480 to update nodes log 2025-05-29T15:22:28.075481Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2503.v1481 to update nodes log 2025-05-29T15:22:28.075484Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2504.v1482 to update nodes log 2025-05-29T15:22:28.075488Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2505.v1483 to update nodes log 2025-05-29T15:22:28.075491Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2506.v1484 to update nodes log 2025-05-29T15:22:28.075494Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2507.v1485 to update nodes log 2025-05-29T15:22:28.075498Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2508.v1486 to update nodes log 2025-05-29T15:22:28.075501Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2509.v1487 to update nodes log 2025-05-29T15:22:28.075505Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2510.v1488 to update nodes log 2025-05-29T15:22:28.075508Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2511.v1489 to update nodes log 2025-05-29T15:22:28.075511Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2512.v1490 to update nodes log 2025-05-29T15:22:28.075514Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2513.v1491 to update nodes log 2025-05-29T15:22:28.075518Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2514.v1492 to update nodes log 2025-05-29T15:22:28.075522Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2515.v1493 to update nodes log 2025-05-29T15:22:28.075525Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2516.v1494 to update nodes log 2025-05-29T15:22:28.075529Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2517.v1495 to update nodes log 2025-05-29T15:22:28.075532Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2518.v1496 to update nodes log 2025-05-29T15:22:28.075537Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2519.v1497 to update nodes log 2025-05-29T15:22:28.075542Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2520.v1498 to update nodes log 2025-05-29T15:22:28.075546Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2521.v1499 to update nodes log 2025-05-29T15:22:28.075549Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2522.v1500 to update nodes log 2025-05-29T15:22:28.075553Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2523.v1501 to update nodes log 2025-05-29T15:22:28.076086Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:7055:7042], Recipient [1:6988:6982]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.076117Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient 
[1:6988:6982]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.076120Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.076129Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.1502 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:28.078554Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:7057:7044], Recipient [1:6988:6982]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.078595Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:6988:6982]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.078598Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.078604Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.1502 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:28.082914Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:7059:7046], Recipient [1:6988:6982]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.082946Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:6988:6982]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.082950Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.082956Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.1502 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:28.084849Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:7061:7048], Recipient [1:6988:6982]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.084876Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:6988:6982]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 1502 } 2025-05-29T15:22:28.084879Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.084884Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.1502 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z
>> TNodeBrokerTest::NodeNameReuseRestart
>> TNodeBrokerTest::NodesMigrationRemoveExpired [GOOD]
>> KqpYql::UuidPrimaryKeyDisabled
>> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpiredChanged [GOOD]
Test command err:
2025-05-29T15:22:27.137084Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137131Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137154Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s 
} 2025-05-29T15:22:27.137183Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137240Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137260Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.144104Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.144209Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.144268Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.144302Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.144341Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.144373Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.144434Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.144459Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.144829Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.144851Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.144865Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.144878Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.144896Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.144910Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.144939Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.149525Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.149606Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.149645Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.150646Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.150691Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.150725Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.150855Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.150879Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.150905Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.150931Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.150953Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.151185Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.151217Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.151242Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.151284Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.151319Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.151441Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.151552Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.151609Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.152636Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.152669Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.152679Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.152687Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.152694Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.152704Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.152715Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.157295Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.157394Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.157455Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 
18446744073709.551615s } 2025-05-29T15:22:27.157488Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.157573Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.158453Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.158522Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.158577Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.159486Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.159560Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.159648Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.159832Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.159982Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.160435Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.160844Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.162066Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.162383Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.192736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:27.192763Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-05-29T15:22:27.197679Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:27.198092Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:27.198167Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:27.198361Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:27.199123Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:27.199146Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:27.199197Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:27.199210Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.199215Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:27.199228Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:27.199245Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:27.199251Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:27.199255Z node 1 :NODE_BROKER DEBU ... 5-29T15:22:28.632496Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1214: [DB] Migrating changed node #1024.v4 { NodeId: 1024, State: Expired, Version: 4, Host: host2, Port: 1001, ResolveHost: host2.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 04:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:28.632506Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:28.632524Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:28.632527Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 1, NewVersionUpdateNodes left 0 2025-05-29T15:22:28.632533Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v4 host2:1001 to database state=Expired resolvehost=host2.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 04:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:28.632554Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:28.632557Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #5.6 2025-05-29T15:22:28.643910Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:28.643953Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:28.643963Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z, approximate 
epoch start #5.6 nodes=0 expired=1 2025-05-29T15:22:28.643985Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z nodes=0 expired=1 removed=0 2025-05-29T15:22:28.643992Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v4 to update nodes log 2025-05-29T15:22:28.644075Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:761:2296], Recipient [1:752:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.644126Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:762:2297], Recipient [1:752:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.644138Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:761:2296] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.644189Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:762:2297] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.644201Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:764:2299] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.644213Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:765:2300] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.644225Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:766:2301] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.644236Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:764:2299], Recipient [1:752:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.644245Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:765:2300], Recipient [1:752:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.644287Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:766:2301], Recipient [1:752:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.644334Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:761:2296] 2025-05-29T15:22:28.644338Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.644345Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:28.644352Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:762:2297] 2025-05-29T15:22:28.644354Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.644358Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 
1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:28.644378Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:764:2299] 2025-05-29T15:22:28.644380Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.644384Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:28.644390Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:765:2300] 2025-05-29T15:22:28.644394Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.644397Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:28.644417Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:766:2301] 2025-05-29T15:22:28.644419Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.644422Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:28.644529Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:794:2324], Recipient [1:752:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.644547Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:752:2290]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.644549Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.644553Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:28.644595Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:796:2326], Recipient [1:752:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.644620Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:752:2290]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.644623Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.644626Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:28.644669Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:798:2328], Recipient [1:752:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.644691Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:752:2290]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.644693Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.644696Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:28.644732Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:800:2330], Recipient [1:752:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.644746Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:628:2214], Recipient [1:752:2290]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2025-05-29T15:22:28.644748Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.644751Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:28.644786Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:802:2332], Recipient [1:752:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.644799Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:628:2214], Recipient [1:752:2290]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 6 SeqNo: 2 } 2025-05-29T15:22:28.644802Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:28.644806Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:628:2214], seqNo: 2, version: 6, server pipe id: [1:802:2332] 2025-05-29T15:22:28.644810Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v6 -> v6 to [1:628:2214] 2025-05-29T15:22:28.644855Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:802:2332], Recipient [1:752:2290]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:28.644859Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:628:2214], seqNo: 2, server pipe id: [1:802:2332] 2025-05-29T15:22:28.644876Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:804:2334], Recipient [1:752:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.644889Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:628:2214], Recipient [1:752:2290]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:28.644892Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:28.644903Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } }
>> TNodeBrokerTest::NodesMigrationReuseExpiredID [GOOD]
>> TNodeBrokerTest::NodesMigrationRemovedChanged
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipelining [GOOD]
Test command err:
2025-05-29T15:22:28.271691Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.271728Z node 3 :NAMESERVICE DEBUG:
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.271747Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.271765Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.271795Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.271811Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.276901Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.276986Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.277014Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.277040Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.277069Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.277092Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.277133Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.277149Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.277315Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.277329Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.277341Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.277352Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.277364Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.277375Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.277400Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.281368Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.281420Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.281451Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.282579Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.282620Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.282646Z node 7 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.282758Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.282784Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.282806Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.282839Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.282888Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.282913Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.282938Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.283104Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.283132Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.283156Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.283250Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.283346Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.283406Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.284037Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.284065Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.284074Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.284084Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.284093Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.288257Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.288646Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.288702Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.289485Z node 3 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.289836Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.289915Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.290207Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.290380Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.290537Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.290607Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.290688Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.291101Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.291556Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.291602Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.291827Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.316787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:28.316803Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:28.319534Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:28.319833Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:28.319879Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:28.320008Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:28.320432Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:28.320446Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:28.320479Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 
2025-05-29T15:22:28.320488Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:28.320491Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:28.320500Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:28.320533Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:28.320537Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:28.320540Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:28.320544Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:28.320553Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1 2025-05-29T15:22:28.320557Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes 2025-05-29T15:22:28.351864Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:28.351893Z node 1 :NODE_BROKER TRACE: node_broke ... nter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:22:28.548010Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:28.548019Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:22:28.548045Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:28.548058Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:635:2224] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:28.548084Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:650:2231], recipient# [1:649:2183], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false 
SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:28.548097Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:28.548110Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:22:28.548122Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:649:2183], Recipient [1:555:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:28.548126Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:28.548133Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:28.548137Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-05-29T15:22:28.548148Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:241: [Dirty] Updated location of #1025.v4 host2:1001 to DC=1/M=2/R=3/U=4/ 2025-05-29T15:22:28.548157Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v4 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-05-29T15:22:28.548203Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 3 to 4 2025-05-29T15:22:28.548208Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=4 ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... 
waiting for commit (done) 2025-05-29T15:22:28.558523Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:652:2233], Recipient [1:555:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.558560Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:623:2213], Recipient [1:555:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.558566Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.558578Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:28.558640Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:654:2235], Recipient [1:555:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.558665Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:623:2213], Recipient [1:555:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:28.558671Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:28.558686Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR 2025-05-29T15:22:28.559280Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:28.559296Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-05-29T15:22:28.559306Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-05-29T15:22:28.559311Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v2 host1:1001 to epoch cache 2025-05-29T15:22:28.559329Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-05-29T15:22:28.559360Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-0" } 2025-05-29T15:22:28.559373Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:28.559378Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v3 host2:1001 2025-05-29T15:22:28.559383Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 2 to 3 2025-05-29T15:22:28.559387Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v3 host2:1001 to epoch cache 2025-05-29T15:22:28.559394Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v3 to update nodes log 2025-05-29T15:22:28.559417Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { } Expire: 7200024000 Name: "slot-1" } 2025-05-29T15:22:28.559425Z node 1 :NODE_BROKER DEBUG: 
node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:28.559438Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:241: [Committed] Updated location of #1025.v4 host2:1001 to DC=1/M=2/R=3/U=4/ 2025-05-29T15:22:28.559441Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 3 to 4 2025-05-29T15:22:28.559445Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v4 host2:1001 to epoch cache 2025-05-29T15:22:28.559452Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v4 to update nodes log 2025-05-29T15:22:28.559470Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-1" } 2025-05-29T15:22:28.559577Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:658:2239], Recipient [1:555:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.559601Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:623:2213], Recipient [1:555:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.559607Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.559615Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.4 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:28.559685Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:660:2241], Recipient [1:555:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.559705Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:623:2213], Recipient [1:555:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:28.559710Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:28.559728Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-0" } } 2025-05-29T15:22:28.559789Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:662:2243], Recipient [1:555:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.559808Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:623:2213], Recipient [1:555:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-05-29T15:22:28.559813Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:28.559828Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-1" } }
>> KqpYql::UpdateBadType
>> TNodeBrokerTest::ShiftIdRangeRemoveNew [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemoveExpired [GOOD]
Test command err:
2025-05-29T15:22:27.238883Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.238922Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.238943Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.238970Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.238992Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.239009Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.244571Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.244660Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.244695Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.244725Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.244758Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.244788Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.244840Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.244861Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.245062Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.245082Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.245095Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.245107Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.245122Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.245136Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.245164Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.249494Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.249552Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:27.249578Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.250310Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.250338Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.250360Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.250422Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.250436Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.250451Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.250467Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.250501Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.250517Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.250531Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.250545Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.250726Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.250765Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.250830Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.250896Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.250930Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.251414Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.251424Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.251431Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.251437Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.251444Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.254346Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.254472Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.254490Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.254520Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.254530Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.255592Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.255669Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.255682Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.255731Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.255799Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.255863Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.255886Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.256050Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.257019Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.257079Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.257235Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.257245Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.258498Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.258929Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.260315Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.260436Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.283394Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:27.283410Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:27.288490Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:27.289044Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:27.289133Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:27.289364Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:27.290197Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:27.290223Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:27.290288Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:27.290304Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:27.290309Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:27.290323Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Compl ... 025-05-29T15:22:28.620211Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1024.v4 { NodeId: 1024, State: Expired, Version: 4, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:28.620217Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:300: [Dirty] Added removed node #1024.v5 2025-05-29T15:22:28.620220Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1230: [DB] Migrating removed node #1024.v5 2025-05-29T15:22:28.620230Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:28.620254Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:28.620257Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 1, NewVersionUpdateNodes left 0 2025-05-29T15:22:28.620264Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:830: [DB] Removing node #1024.v5 from database 2025-05-29T15:22:28.620276Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:28.620279Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #4.5 2025-05-29T15:22:28.631386Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:28.631413Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T04:00:00.024000Z 2025-05-29T15:22:28.631420Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z, approximate epoch start #4.5 nodes=0 expired=0 2025-05-29T15:22:28.631427Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log 
for epoch ##4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z nodes=0 expired=0 removed=1 2025-05-29T15:22:28.631431Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v5 to update nodes log 2025-05-29T15:22:28.631503Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:742:2275], Recipient [1:732:2269]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.631559Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:743:2276], Recipient [1:732:2269]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.631573Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:743:2276] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.631583Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:742:2275] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.631591Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:744:2277] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.631597Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:744:2277], Recipient [1:732:2269]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.631601Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:747:2280] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.631609Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:746:2279] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.631628Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:746:2279], Recipient [1:732:2269]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.631643Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:747:2280], Recipient [1:732:2269]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.631684Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:747:2280] 2025-05-29T15:22:28.631687Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.631694Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-05-29T15:22:28.631722Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:742:2275] 2025-05-29T15:22:28.631725Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.631729Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-05-29T15:22:28.631738Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:743:2276] 2025-05-29T15:22:28.631741Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.631744Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-05-29T15:22:28.631761Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:744:2277] 2025-05-29T15:22:28.631763Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.631767Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-05-29T15:22:28.631774Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:746:2279] 2025-05-29T15:22:28.631776Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.631780Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-05-29T15:22:28.631877Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:775:2303], Recipient [1:732:2269]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.631894Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:732:2269]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.631896Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.631900Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-05-29T15:22:28.631936Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:777:2305], Recipient [1:732:2269]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.631946Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:732:2269]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.631949Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.631953Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-05-29T15:22:28.631989Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:779:2307], Recipient [1:732:2269]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.631998Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:732:2269]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.632002Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.632005Z node 1 :NODE_BROKER TRACE: 
node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-05-29T15:22:28.632037Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:781:2309], Recipient [1:732:2269]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.632049Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:732:2269]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 5 } 2025-05-29T15:22:28.632052Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.632055Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-05-29T15:22:28.632097Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:783:2311], Recipient [1:732:2269]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.632110Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:629:2213], Recipient [1:732:2269]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 5 SeqNo: 2 } 2025-05-29T15:22:28.632113Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:28.632117Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:629:2213], seqNo: 2, version: 5, server pipe id: [1:783:2311] 2025-05-29T15:22:28.632121Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v5 -> v5 to [1:629:2213] 2025-05-29T15:22:28.632155Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:783:2311], Recipient [1:732:2269]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:28.632159Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:629:2213], seqNo: 2, server pipe id: [1:783:2311] 2025-05-29T15:22:28.632186Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:785:2313], Recipient [1:732:2269]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.632198Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:629:2213], Recipient [1:732:2269]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:28.632201Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:28.632212Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } }
|60.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD]
Test command err:
2025-05-29T15:22:27.114580Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.114622Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.114645Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.114674Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.114699Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.114715Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.120232Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.120328Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.120365Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.120398Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.120434Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.120465Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.120518Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.120537Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.121627Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.121686Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.121745Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.121782Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.121821Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.121862Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.121938Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.125398Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.125615Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.125658Z node 1 :NAMESERVICE DEBUG:
dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.126905Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.126954Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.126984Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.127030Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.127059Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.127088Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.127209Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.127239Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.127272Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.127302Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.127587Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.127641Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.127685Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.127770Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.127933Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.129063Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.129110Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.129231Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.129260Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.129272Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.129284Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.129296Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 
2025-05-29T15:22:27.129311Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.134579Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.134638Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.134883Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.134904Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.134986Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.136834Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137070Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137096Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137262Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137374Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137443Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137590Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.137743Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.138459Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.139022Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.172053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:27.172076Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-05-29T15:22:27.176682Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:27.177202Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:27.177264Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:27.177490Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:27.178336Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:27.178361Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:27.178425Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:27.178441Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.178446Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:27.178461Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:27.178496Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:27.178502Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:27.178507Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:27.178513Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.178532Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330 ... 
event# 269877761, Sender [1:801:2331], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.457199Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:801:2331] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.457235Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 4 } 2025-05-29T15:22:28.457242Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.457247Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #4 2025-05-29T15:22:28.467766Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:805:2332], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.467796Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:806:2333], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.467831Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:805:2332] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.467845Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:806:2333] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.467852Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:807:2334], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.467873Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:807:2334] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.467915Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:805:2332] 2025-05-29T15:22:28.467919Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.467924Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #4 2025-05-29T15:22:28.467934Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:806:2333] 2025-05-29T15:22:28.467936Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.467938Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #4 2025-05-29T15:22:28.467947Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:807:2334] 2025-05-29T15:22:28.467949Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.467951Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #4 2025-05-29T15:22:28.690755Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435072, Sender 
[1:744:2282], Recipient [1:744:2282]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2025-05-29T15:22:28.690775Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:256: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2025-05-29T15:22:28.690791Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:20: TTxUpdateEpoch Execute 2025-05-29T15:22:28.690801Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Dirty] Move to new epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z, approximate epoch start #4.8 2025-05-29T15:22:28.690806Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:28.690828Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #4.8 2025-05-29T15:22:28.854183Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:31: TTxUpdateEpoch Complete 2025-05-29T15:22:28.854202Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Committed] Move to new epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z, approximate epoch start #4.8 2025-05-29T15:22:28.854217Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:28.854223Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z, approximate epoch start #4.8 nodes=4 expired=0 2025-05-29T15:22:28.854249Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z nodes=4 expired=0 removed=0 2025-05-29T15:22:28.854254Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v4 to update nodes log 2025-05-29T15:22:28.854259Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v5 to update nodes log 2025-05-29T15:22:28.854263Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1026.v6 to update nodes log 2025-05-29T15:22:28.854266Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1027.v7 to update nodes log 2025-05-29T15:22:28.854275Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:28.854280Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:28.854284Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:28.854289Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:28.854294Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:28.854299Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:28.854304Z node 1 :NODE_BROKER TRACE: 
node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:28.854309Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:28.874991Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:830:2345], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.875022Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:625:2213], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.875027Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.875037Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:28.875095Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:832:2347], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.875107Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:625:2213], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.875112Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.875116Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:28.875154Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:834:2349], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.875163Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:625:2213], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.875165Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.875169Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:28.875207Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:836:2351], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.875220Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:625:2213], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 8 } 2025-05-29T15:22:28.875223Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.875226Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:28.875264Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:838:2353], Recipient [1:744:2282]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.875271Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:625:2213], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.875274Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.875277Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:28.875316Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:840:2355], Recipient [1:744:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.875324Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:625:2213], Recipient [1:744:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2025-05-29T15:22:28.875326Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.875329Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z
>> TNodeBrokerTest::ShiftIdRangeRemoveReusedID [GOOD]
>> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseExpiredID [GOOD]
Test command err: 2025-05-29T15:22:27.264301Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.264343Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.264365Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.264392Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.264416Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.264433Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.270497Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.270588Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.270633Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.270677Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.270719Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.270779Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.270840Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle
NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.270862Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.271114Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.271153Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.271174Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.271194Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.271212Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.271230Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.271265Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.275119Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.275171Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.275197Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.276108Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.276147Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.276180Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.276277Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.276303Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.276326Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.276351Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.276374Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.276584Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.276614Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.276638Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.276673Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.276698Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.276794Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.276883Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.276936Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.277639Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.277664Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.277674Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.277684Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.277693Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.277703Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.277714Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.282647Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.282924Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.282943Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.282958Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.283028Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.283062Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.283921Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.284157Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.284947Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.285129Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.285292Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 
18446744073709.551615s } 2025-05-29T15:22:27.285434Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.285700Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.287352Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.287447Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.287525Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.287764Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.287819Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.287923Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.290235Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.290562Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.317475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:27.317496Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:27.323498Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:27.323870Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:27.323919Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:27.324082Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:27.324890Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:27.324910Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:27.324960Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:27.324973Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Start ... 
ertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:28.698578Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1024.v4 { NodeId: 1024, State: Expired, Version: 4, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:28.698586Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1214: [DB] Migrating changed node #1024.v9 { NodeId: 1024, State: Active, Version: 9, Host: host2, Port: 1001, ResolveHost: host2.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 07:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:28.698593Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 8 to 9 2025-05-29T15:22:28.698604Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:28.698637Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:28.698643Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 1 2025-05-29T15:22:28.698649Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #6.9 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:22:28.698663Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #6.8 2025-05-29T15:22:28.698674Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v9 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 07:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:28.698701Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:28.710472Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:28.710506Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T06:00:00.025000Z 2025-05-29T15:22:28.710513Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #6.9 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z, approximate epoch start #6.8 nodes=1 expired=0 2025-05-29T15:22:28.710539Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##6.9 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z nodes=1 expired=0 removed=0 2025-05-29T15:22:28.710543Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v9 to update nodes log 2025-05-29T15:22:28.710685Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:754:2285], Recipient [1:747:2281]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.710712Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:758:2289] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.710724Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, 
received event# 269877761, Sender [1:757:2288], Recipient [1:747:2281]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.710732Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:759:2290] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.710769Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:757:2288] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.710777Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:754:2285] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:28.710789Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:758:2289], Recipient [1:747:2281]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.710800Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:759:2290], Recipient [1:747:2281]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.710849Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:758:2289] 2025-05-29T15:22:28.710853Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.710860Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.9 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:22:28.710902Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:759:2290] 2025-05-29T15:22:28.710904Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.710908Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.9 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:22:28.710914Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:754:2285] 2025-05-29T15:22:28.710917Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.710920Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.9 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:22:28.710928Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:757:2288] 2025-05-29T15:22:28.710930Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.710934Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.9 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:22:28.711067Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:788:2314], Recipient [1:747:2281]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.711088Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, 
Sender [1:631:2213], Recipient [1:747:2281]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.711091Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.711095Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.9 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:22:28.711141Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:790:2316], Recipient [1:747:2281]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.711155Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:631:2213], Recipient [1:747:2281]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.711158Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.711161Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.9 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:22:28.711201Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:792:2318], Recipient [1:747:2281]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.711211Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:631:2213], Recipient [1:747:2281]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:28.711214Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.711218Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.9 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:22:28.711256Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:794:2320], Recipient [1:747:2281]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.711272Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:631:2213], Recipient [1:747:2281]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 8 } 2025-05-29T15:22:28.711275Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:28.711279Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.9 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-05-29T15:22:28.711318Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:796:2322], Recipient [1:747:2281]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.711334Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:631:2213], Recipient [1:747:2281]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 8 SeqNo: 2 } 2025-05-29T15:22:28.711338Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:28.711342Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:631:2213], seqNo: 2, version: 8, server pipe id: [1:796:2322] 2025-05-29T15:22:28.711347Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send 
TEvUpdateNodes v8 -> v9 to [1:631:2213] 2025-05-29T15:22:28.711388Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:796:2322], Recipient [1:747:2281]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:28.711393Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:631:2213], seqNo: 2, server pipe id: [1:796:2322] 2025-05-29T15:22:28.711413Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:798:2324], Recipient [1:747:2281]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:28.711428Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:631:2213], Recipient [1:747:2281]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:28.711432Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:28.711452Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 25200025000 Name: "slot-0" } }
|60.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots
>> TNodeBrokerTest::NodesMigrationNewActiveNode [GOOD]
>> KqpYql::UuidPrimaryKeyDisabled [GOOD]
>> GenericFederatedQuery::IcebergHiveBasicSelectAll
>> KqpYql::TableUseBeforeCreate
|60.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveNew [GOOD]
Test command err: 2025-05-29T15:22:27.876877Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.876915Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.876936Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.876960Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.876983Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.876998Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.882203Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.882285Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.882320Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.882351Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.882384Z node 6 :NAMESERVICE DEBUG:
dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.882412Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.882470Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.882489Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.882683Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.882702Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.882716Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.882727Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.882758Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.882780Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.882810Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.886708Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.886780Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.886806Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.887543Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.887569Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.887591Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.887652Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.887667Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.887681Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.887697Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.887711Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.887885Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.887905Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.887920Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.887943Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 
18446744073709.551615s } 2025-05-29T15:22:27.887962Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.888022Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.888088Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.888121Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.888656Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.888672Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.888679Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.888685Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.888690Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.888696Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.888703Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.891454Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.891496Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.891528Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.891906Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.891930Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.891941Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.892307Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.892534Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.892560Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.892636Z node 1 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.892725Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.892804Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.892872Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.892959Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.893270Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.912801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:27.912817Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:27.916272Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:27.916562Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:27.916595Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:27.916726Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:27.917349Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:27.917365Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:27.917406Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:27.917416Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.917420Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:27.917430Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:27.917444Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:27.917447Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:27.917450Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:27.917454Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.917463Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330 ... 
d TEvNodesInfo for epoch #2.8 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:29.029596Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:774:2314], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.029611Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:718:2266]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:29.029616Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.029622Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.8 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:29.029679Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:776:2316], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.029696Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:718:2266]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 5 } 2025-05-29T15:22:29.029700Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.029706Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.8 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:29.029767Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:778:2318], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.029780Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:718:2266]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:29.029785Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.029791Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.8 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:29.029871Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:780:2320], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.029893Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:626:2214], Recipient [1:718:2266]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 4 } 2025-05-29T15:22:29.029898Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.029904Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.8 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:29.029969Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:782:2322], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.029987Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:626:2214], Recipient [1:718:2266]: 
NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 8 SeqNo: 2 } 2025-05-29T15:22:29.029993Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:29.030000Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:626:2214], seqNo: 2, version: 8, server pipe id: [1:782:2322] 2025-05-29T15:22:29.030007Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v8 -> v8 to [1:626:2214] 2025-05-29T15:22:29.030065Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:782:2322], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:29.030071Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:626:2214], seqNo: 2, server pipe id: [1:782:2322] 2025-05-29T15:22:29.030114Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:784:2324], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.030129Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:626:2214], Recipient [1:718:2266]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 7 SeqNo: 3 } 2025-05-29T15:22:29.030133Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:29.030137Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:626:2214], seqNo: 3, version: 7, server pipe id: [1:784:2324] 2025-05-29T15:22:29.030143Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v7 -> v8 to [1:626:2214] 2025-05-29T15:22:29.030196Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:784:2324], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:29.030202Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:626:2214], seqNo: 3, server pipe id: [1:784:2324] 2025-05-29T15:22:29.030225Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:786:2326], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.030241Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:626:2214], Recipient [1:718:2266]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 6 SeqNo: 4 } 2025-05-29T15:22:29.030244Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:29.030248Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:626:2214], seqNo: 4, version: 6, server pipe id: [1:786:2326] 2025-05-29T15:22:29.030253Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v6 -> v8 to [1:626:2214] 2025-05-29T15:22:29.030302Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:786:2326], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:29.030307Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:626:2214], seqNo: 4, server pipe id: [1:786:2326] 2025-05-29T15:22:29.030332Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:788:2328], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerConnected 
2025-05-29T15:22:29.030349Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:626:2214], Recipient [1:718:2266]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 5 SeqNo: 5 } 2025-05-29T15:22:29.030353Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:29.030357Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:626:2214], seqNo: 5, version: 5, server pipe id: [1:788:2328] 2025-05-29T15:22:29.030361Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v5 -> v8 to [1:626:2214] 2025-05-29T15:22:29.030407Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:788:2328], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:29.030412Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:626:2214], seqNo: 5, server pipe id: [1:788:2328] 2025-05-29T15:22:29.030438Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:790:2330], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.030453Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:626:2214], Recipient [1:718:2266]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 4 SeqNo: 6 } 2025-05-29T15:22:29.030457Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:29.030462Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:626:2214], seqNo: 6, version: 4, server pipe id: [1:790:2330] 2025-05-29T15:22:29.030466Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v4 -> v8 to [1:626:2214] 2025-05-29T15:22:29.030532Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:790:2330], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:29.030538Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:626:2214], seqNo: 6, server pipe id: [1:790:2330] 2025-05-29T15:22:29.030565Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:792:2332], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.030584Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:626:2214], Recipient [1:718:2266]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:29.030589Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:29.030624Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 10800025000 Name: "slot-0" } } 2025-05-29T15:22:29.030685Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:794:2334], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.030701Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:626:2214], 
Recipient [1:718:2266]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-05-29T15:22:29.030707Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:29.030721Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 10800025000 Name: "slot-1" } } 2025-05-29T15:22:29.030800Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:796:2336], Recipient [1:718:2266]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.030819Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:626:2214], Recipient [1:718:2266]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1026 } 2025-05-29T15:22:29.030824Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:29.030832Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } |60.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |60.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease [GOOD] Test command err: 2025-05-29T15:22:28.593813Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.593867Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.593899Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.593936Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.593966Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.593988Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.599315Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.599403Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.599437Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.599465Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.599497Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.599522Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.599575Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.599593Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.599821Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.599847Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.599866Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.599884Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.599906Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.599926Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.599963Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.603354Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.603393Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.603415Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.604143Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.604170Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.604191Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.604252Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.604267Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.604280Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.604297Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.604310Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.604443Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.604461Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.604475Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.604490Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.604512Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 
18446744073709.551615s } 2025-05-29T15:22:28.604572Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.604634Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.604667Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.605218Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.605235Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.605241Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.605247Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.605252Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.605258Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.605264Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.608316Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.608372Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.608385Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.608417Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.608424Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.608431Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.608488Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.609153Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.609487Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.609711Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.609786Z node 1 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.609950Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.610132Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.610657Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.610936Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.611293Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.611472Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.611584Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.611676Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.632750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:28.632766Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:28.636386Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:28.636754Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:28.636806Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:28.636998Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:28.637740Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:28.637763Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:28.637818Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:28.637831Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:28.637837Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:28.637850Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Compl ... 
Certificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:29.020103Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1024.v2 { NodeId: 1024, State: Active, Version: 2, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:29.020108Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1214: [DB] Migrating changed node #1024.v7 { NodeId: 1024, State: Active, Version: 7, Host: host2, Port: 1001, ResolveHost: host2.yandex.net, Address: 1.2.3.4, Lease: 2, Expire: Thu, 01 Jan 1970 05:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:29.020112Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 6 to 7 2025-05-29T15:22:29.020120Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:29.020137Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:29.020140Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 1 2025-05-29T15:22:29.020144Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.020154Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #4.6 2025-05-29T15:22:29.020160Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v7 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=2 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:29.020178Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:29.031576Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:29.031612Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:29.031623Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z, approximate epoch start #4.6 nodes=1 expired=0 2025-05-29T15:22:29.031646Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z nodes=1 expired=0 removed=0 2025-05-29T15:22:29.031652Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v7 to update nodes log 2025-05-29T15:22:29.031763Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:707:2261], Recipient [1:696:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.031825Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:709:2263], Recipient [1:696:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.031860Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected 
{ TabletId: 72057594037936129 Status: OK ServerId: [1:707:2261] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:29.031876Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:709:2263] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:29.031886Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:710:2264], Recipient [1:696:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.031893Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:710:2264] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:29.031903Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:711:2265] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:29.031925Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:711:2265], Recipient [1:696:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.032002Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:710:2264] 2025-05-29T15:22:29.032008Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.032017Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.032046Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:711:2265] 2025-05-29T15:22:29.032050Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.032057Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.032065Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:707:2261] 2025-05-29T15:22:29.032068Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.032075Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.032110Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:709:2263] 2025-05-29T15:22:29.032115Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.032121Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.032295Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:739:2288], Recipient [1:696:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.032317Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, 
Sender [1:630:2214], Recipient [1:696:2255]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:29.032322Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.032328Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.032405Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:741:2290], Recipient [1:696:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.032423Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:696:2255]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:29.032427Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.032433Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.032497Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:743:2292], Recipient [1:696:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.032508Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:696:2255]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:29.032513Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.032518Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.032582Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:745:2294], Recipient [1:696:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.032603Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:696:2255]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2025-05-29T15:22:29.032608Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.032614Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.032680Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:747:2296], Recipient [1:696:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.032696Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:630:2214], Recipient [1:696:2255]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 6 SeqNo: 2 } 2025-05-29T15:22:29.032703Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:29.032709Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:630:2214], seqNo: 2, version: 6, server pipe id: [1:747:2296] 2025-05-29T15:22:29.032715Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send 
TEvUpdateNodes v6 -> v7 to [1:630:2214] 2025-05-29T15:22:29.032774Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:747:2296], Recipient [1:696:2255]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:29.032780Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:630:2214], seqNo: 2, server pipe id: [1:747:2296] 2025-05-29T15:22:29.032813Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:749:2298], Recipient [1:696:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.032828Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:630:2214], Recipient [1:696:2255]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:29.032833Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:29.032858Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18000026000 Name: "slot-0" } } >> TNodeBrokerTest::SubscribeToNodes [GOOD] >> TestDataErasure::SimpleDataErasureTestForTopic |60.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_table_writer/unittest |60.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveReusedID [GOOD] Test command err: 2025-05-29T15:22:27.259559Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.259624Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.259659Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.259703Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.259738Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.259766Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.268140Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.268285Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.268342Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.268392Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.268446Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.268493Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.268572Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.268606Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.268988Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.269022Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.269047Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.269069Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.269093Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.269118Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.269164Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.274034Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.274106Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.274144Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.275431Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.275480Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.275521Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.275655Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.275680Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.275706Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.275735Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.275756Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.275979Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.276011Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.276033Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.276065Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.276096Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 
18446744073709.551615s } 2025-05-29T15:22:27.276198Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.276316Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.276378Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.277235Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.277269Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.277281Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.277292Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.277302Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.277314Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.277327Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.282220Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.282269Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.282487Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.282522Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.282540Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.282554Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.283837Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.284283Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.284331Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.284610Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.284864Z node 1 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.285035Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.285149Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.286692Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.286844Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.287050Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.287289Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.287306Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.287550Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.288798Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.289571Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.292939Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.293131Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.324191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:27.324209Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:27.327721Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:27.328148Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:27.328200Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:27.328360Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:27.328934Z node 1 :NODE_BROKER DEBUG: node_ ... 
.15 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.117978Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:851:2361], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.117989Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:633:2213], Recipient [1:796:2314]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:29.117992Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.117995Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.15 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.118031Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:853:2363], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.118042Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:633:2213], Recipient [1:796:2314]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 12 } 2025-05-29T15:22:29.118044Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.118048Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.15 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.118087Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:855:2365], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.118108Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:633:2213], Recipient [1:796:2314]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:29.118111Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.118114Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.15 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.118159Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:857:2367], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.118172Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:633:2213], Recipient [1:796:2314]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 11 } 2025-05-29T15:22:29.118175Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.118178Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.15 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.118218Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:859:2369], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.118233Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:633:2213], Recipient [1:796:2314]: 
NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 15 SeqNo: 2 } 2025-05-29T15:22:29.118237Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:29.118242Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:633:2213], seqNo: 2, version: 15, server pipe id: [1:859:2369] 2025-05-29T15:22:29.118247Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v15 -> v15 to [1:633:2213] 2025-05-29T15:22:29.118287Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:859:2369], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:29.118292Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:633:2213], seqNo: 2, server pipe id: [1:859:2369] 2025-05-29T15:22:29.118307Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:861:2371], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.118318Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:633:2213], Recipient [1:796:2314]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 14 SeqNo: 3 } 2025-05-29T15:22:29.118321Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:29.118323Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:633:2213], seqNo: 3, version: 14, server pipe id: [1:861:2371] 2025-05-29T15:22:29.118326Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v14 -> v15 to [1:633:2213] 2025-05-29T15:22:29.118356Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:861:2371], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:29.118359Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:633:2213], seqNo: 3, server pipe id: [1:861:2371] 2025-05-29T15:22:29.118376Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:863:2373], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.118386Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:633:2213], Recipient [1:796:2314]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 13 SeqNo: 4 } 2025-05-29T15:22:29.118388Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:29.118391Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:633:2213], seqNo: 4, version: 13, server pipe id: [1:863:2373] 2025-05-29T15:22:29.118393Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v13 -> v15 to [1:633:2213] 2025-05-29T15:22:29.118426Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:863:2373], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:29.118438Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:633:2213], seqNo: 4, server pipe id: [1:863:2373] 2025-05-29T15:22:29.118453Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:865:2375], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerConnected 
2025-05-29T15:22:29.118464Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:633:2213], Recipient [1:796:2314]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 12 SeqNo: 5 } 2025-05-29T15:22:29.118466Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:29.118469Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:633:2213], seqNo: 5, version: 12, server pipe id: [1:865:2375] 2025-05-29T15:22:29.118472Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v12 -> v15 to [1:633:2213] 2025-05-29T15:22:29.118504Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:865:2375], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:29.118507Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:633:2213], seqNo: 5, server pipe id: [1:865:2375] 2025-05-29T15:22:29.118522Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:867:2377], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.118532Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:633:2213], Recipient [1:796:2314]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 11 SeqNo: 6 } 2025-05-29T15:22:29.118534Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:29.118537Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:633:2213], seqNo: 6, version: 11, server pipe id: [1:867:2377] 2025-05-29T15:22:29.118539Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v11 -> v15 to [1:633:2213] 2025-05-29T15:22:29.118575Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:867:2377], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:29.118578Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:633:2213], seqNo: 6, server pipe id: [1:867:2377] 2025-05-29T15:22:29.118595Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:869:2379], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.118609Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:633:2213], Recipient [1:796:2314]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:29.118612Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:29.118634Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18000025000 Name: "slot-0" } } 2025-05-29T15:22:29.118680Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:871:2381], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.118691Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:633:2213], 
Recipient [1:796:2314]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-05-29T15:22:29.118695Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:29.118704Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18000025000 Name: "slot-1" } } 2025-05-29T15:22:29.118769Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:873:2383], Recipient [1:796:2314]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.118783Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:633:2213], Recipient [1:796:2314]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1026 } 2025-05-29T15:22:29.118787Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:29.118795Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } >> TestDataErasure::DataErasureWithSplit >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] >> TestDataErasure::DataErasureWithCopyTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNewActiveNode [GOOD] Test command err: 2025-05-29T15:22:29.138955Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.138996Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.139018Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.139047Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.139070Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.139087Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.144607Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.144707Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.144745Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.144777Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.144818Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.144859Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.144916Z node 8 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.144938Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.145152Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.145172Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.145187Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.145200Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.145215Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.145227Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.145255Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.148761Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.148816Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.148843Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.149608Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.149637Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.149659Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.149728Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.149742Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.149761Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.149782Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.149819Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.149834Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.149849Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.150001Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.150017Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.150033Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.150095Z node 8 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.150161Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.150194Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.150648Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.150667Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.150672Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.150679Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.150685Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.153597Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.153841Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.153896Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.153922Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.154429Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.154639Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.154981Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.155034Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.155079Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.155115Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.155211Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.156804Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.156992Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.177184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:29.177201Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:29.180644Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:29.180927Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:29.180962Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:29.181110Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:29.181737Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:29.181755Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:29.181795Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:29.181805Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:29.181808Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:29.181817Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:29.181831Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:29.181834Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:29.181837Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:29.181841Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:29.181851Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1 2025-05-29T15:22:29.181855Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes 2025-05-29T15:22:29.213213Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:29.213240Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-05-29T15:22:29.213248Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-05-29T15:22:29.213256Z n ... 
node 1 :NODE_BROKER DEBUG: node_broker.cpp:988: [DB] Loaded current epoch: #1.2 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:29.464460Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1017: [DB] Loaded approximate epoch start: #1.1 2025-05-29T15:22:29.464464Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1034: [DB] Loaded main nodes table: Nodes 2025-05-29T15:22:29.464484Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:281: [Dirty] Added node #1024.v0 host1:1001 2025-05-29T15:22:29.464543Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1113: [DB] Loaded node #1024.v0 { NodeId: 1024, State: Active, Version: 0, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:29.464553Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1253: [DB] Migrating new active node #1024.v3 { NodeId: 1024, State: Active, Version: 3, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:29.464556Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 2 to 3 2025-05-29T15:22:29.464568Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:29.464582Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:29.464587Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 1 2025-05-29T15:22:29.464593Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:29.464611Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v3 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:29.464632Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:29.486294Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:29.486337Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-05-29T15:22:29.486349Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=1 expired=0 2025-05-29T15:22:29.486378Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z nodes=1 expired=0 removed=0 2025-05-29T15:22:29.486384Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v3 to update nodes log 2025-05-29T15:22:29.486491Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:670:2240], Recipient [1:661:2233]: NKikimr::TEvTabletPipe::TEvServerConnected 
2025-05-29T15:22:29.486554Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:671:2241], Recipient [1:661:2233]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.486589Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:670:2240] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:29.486605Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:674:2244] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:29.486614Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:672:2242], Recipient [1:661:2233]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.486620Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:671:2241] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:29.486629Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:672:2242] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:29.486644Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:674:2244], Recipient [1:661:2233]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.486701Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:670:2240] 2025-05-29T15:22:29.486705Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.486709Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:29.486715Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:671:2241] 2025-05-29T15:22:29.486717Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.486719Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:29.486723Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:672:2242] 2025-05-29T15:22:29.486725Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.486727Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:29.486732Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:674:2244] 2025-05-29T15:22:29.486734Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.486755Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-05-29T15:22:29.486862Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:702:2266], Recipient [1:661:2233]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.486882Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received 
event# 272039936, Sender [1:622:2214], Recipient [1:661:2233]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:29.486885Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.486891Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:29.486954Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:704:2268], Recipient [1:661:2233]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.486971Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:622:2214], Recipient [1:661:2233]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:29.486975Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.486981Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:29.487030Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:706:2270], Recipient [1:661:2233]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.487040Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:622:2214], Recipient [1:661:2233]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:29.487044Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.487047Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:29.487085Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:708:2272], Recipient [1:661:2233]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.487102Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:622:2214], Recipient [1:661:2233]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 2 } 2025-05-29T15:22:29.487105Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.487108Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:29.487143Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:710:2274], Recipient [1:661:2233]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.487155Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:622:2214], Recipient [1:661:2233]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 2 SeqNo: 2 } 2025-05-29T15:22:29.487159Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:29.487163Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:622:2214], seqNo: 2, version: 2, server pipe id: [1:710:2274] 2025-05-29T15:22:29.487168Z node 1 :NODE_BROKER TRACE: 
node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:622:2214] 2025-05-29T15:22:29.487206Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:710:2274], Recipient [1:661:2233]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:29.487210Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:622:2214], seqNo: 2, server pipe id: [1:710:2274] 2025-05-29T15:22:29.487242Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:712:2276], Recipient [1:661:2233]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.487256Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:622:2214], Recipient [1:661:2233]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:29.487260Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:29.487279Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } }
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyDisabled [GOOD]
Test command err:
Trying to start YDB, gRPC: 10970, MsgBus: 18025 2025-05-29T15:22:30.278215Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888403279274831:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:30.278238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0023f5/r3tmp/tmpMbOtUd/pdisk_1.dat 2025-05-29T15:22:30.333229Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:30.333303Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888403279274806:2079] 1748532150278024 != 1748532150278027 TServer::EnableGrpc on GrpcPort 10970, node 1 2025-05-29T15:22:30.349015Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:30.349034Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:30.349037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:30.349080Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18025 2025-05-29T15:22:30.379895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:30.379924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:30.381139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18025 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:30.410647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:30.593909Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888403279275468:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:30.593949Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:30.642094Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888403279275488:2294] txid# 281474976715658, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 } 2025-05-29T15:22:30.646040Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888403279275496:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:30.646063Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:30.649985Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888403279275503:2302] txid# 281474976715659, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 } 2025-05-29T15:22:30.652413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888403279275511:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:30.652441Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:30.656736Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888403279275518:2310] txid# 281474976715660, issues: { message: "Uuid as primary key is forbiden by configuration: val" severity: 1 } 2025-05-29T15:22:30.658963Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888403279275526:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:30.658986Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:30.662260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:22:30.727586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888403279275614:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:30.727608Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
>> TestDataErasure::DataErasureRun3CyclesForAllSupportedObjects
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SubscribeToNodes [GOOD]
Test command err:
2025-05-29T15:22:27.694352Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.694393Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.694416Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.694444Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.694464Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.694484Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.700626Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.700715Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.700752Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.700780Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.700812Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.700840Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.700889Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.700909Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.701100Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.701119Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.701130Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.701141Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.701155Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.701167Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.701192Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.704392Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.704434Z node 8 :NAMESERVICE DEBUG:
dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.704454Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.705169Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.705196Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.705217Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.705280Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.705293Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.705306Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.705325Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.705357Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.705373Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.705386Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.705400Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.705501Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.705517Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.705594Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.705659Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.705691Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.706120Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.706128Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.706134Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.706139Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.706145Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.706288Z node 2 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.709586Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.709615Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.709676Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.709685Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.709723Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.709735Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.710395Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.710640Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.710773Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.710847Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.711056Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.711097Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.711107Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.711132Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.711221Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.711473Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.711501Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.711719Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.712066Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.712339Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.712535Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.712862Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.713714Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.713982Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.715365Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.715507Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.734108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:27.734127Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:27.736988Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:27.737268Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTab ... ablet::TEvFollowerSyncComplete 2025-05-29T15:22:29.522517Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:29.522580Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:29.522648Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:29.522724Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 
2025-05-29T15:22:29.522755Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:988: [DB] Loaded current epoch: #4.10 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.522762Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1017: [DB] Loaded approximate epoch start: #4.10 2025-05-29T15:22:29.522766Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1034: [DB] Loaded main nodes table: Nodes 2025-05-29T15:22:29.522799Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:281: [Dirty] Added node #1024.v0 host1:1001 2025-05-29T15:22:29.522825Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1113: [DB] Loaded node #1024.v0 { NodeId: 1024, State: Active, Version: 0, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 3, Expire: Thu, 01 Jan 1970 04:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:29.522833Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1078: [DB] Removing node with wrong ID 1025 not in range (1023, 1024] 2025-05-29T15:22:29.522839Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:300: [Dirty] Added removed node #1025.v11 2025-05-29T15:22:29.522855Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1024.v9 { NodeId: 1024, State: Active, Version: 9, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 3, Expire: Thu, 01 Jan 1970 04:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:29.522861Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1239: [DB] Node #1024.v9 is already migrated 2025-05-29T15:22:29.522873Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1025.v10 { NodeId: 1025, State: Expired, Version: 10, Host: host2, Port: 1001, ResolveHost: host2.yandex.net, Address: 1.2.3.4, Lease: 2, Expire: Thu, 01 Jan 1970 03:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 1, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:29.522878Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1219: [DB] Migrating removed node #1025.v11 2025-05-29T15:22:29.522887Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1026.v10 { NodeId: 1026, State: Removed, Version: 10, Host: , Port: 0, ResolveHost: , Address: , Lease: 0, Expire: Thu, 01 Jan 1970 00:00:00 UTC, Location: , AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 0:0 } 2025-05-29T15:22:29.522893Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:300: [Dirty] Added removed node #1026.v10 2025-05-29T15:22:29.522898Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1234: [DB] Removed node #1026.v10 is already migrated 2025-05-29T15:22:29.522903Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 10 to 11 2025-05-29T15:22:29.522916Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:29.522948Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:29.522954Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 1 2025-05-29T15:22:29.522961Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #4.11 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.522980Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:830: [DB] 
Removing node #1025.v11 from database 2025-05-29T15:22:29.522992Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:29.534857Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:29.534894Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:29.534904Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #4.11 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z, approximate epoch start #4.10 nodes=1 expired=0 2025-05-29T15:22:29.534926Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##4.11 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z nodes=1 expired=0 removed=2 2025-05-29T15:22:29.534932Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v9 to update nodes log 2025-05-29T15:22:29.534940Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1026.v10 to update nodes log 2025-05-29T15:22:29.534944Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v11 to update nodes log 2025-05-29T15:22:29.535057Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:808:2321], Recipient [1:799:2315]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.535132Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:809:2322], Recipient [1:799:2315]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.535151Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:808:2321] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:29.535167Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:809:2322] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:29.535183Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:811:2324] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:29.535192Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:812:2325] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:29.535202Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:811:2324], Recipient [1:799:2315]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.535228Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:812:2325], Recipient [1:799:2315]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.535275Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:808:2321] 2025-05-29T15:22:29.535279Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.535283Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-05-29T15:22:29.535289Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received 
event# 272039936, Sender [6:165:2072], Recipient [1:809:2322] 2025-05-29T15:22:29.535291Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.535293Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-05-29T15:22:29.535299Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:811:2324] 2025-05-29T15:22:29.535301Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.535303Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-05-29T15:22:29.535310Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:812:2325] 2025-05-29T15:22:29.535313Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.535315Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-05-29T15:22:29.535413Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:841:2349], Recipient [1:799:2315]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.535433Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:635:2213], Recipient [1:799:2315]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 10 SeqNo: 1 } 2025-05-29T15:22:29.535437Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:29.535442Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:635:2213], seqNo: 1, version: 10, server pipe id: [1:841:2349] 2025-05-29T15:22:29.535446Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v10 -> v11 to [1:635:2213] 2025-05-29T15:22:29.535457Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:842:2350], Recipient [1:799:2315]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.535465Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:635:2213], Recipient [1:799:2315]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:29.535467Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.535472Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.11 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:29.535531Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:846:2354], Recipient [1:799:2315]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.535543Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:843:2351], Recipient [1:799:2315]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 0 SeqNo: 0 } 2025-05-29T15:22:29.535546Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:29.535549Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:843:2351], seqNo: 0, version: 0, server pipe id: [1:846:2354] 
2025-05-29T15:22:29.535552Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v0 -> v11 to [1:843:2351] 2025-05-29T15:22:29.535561Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:847:2355], Recipient [1:799:2315]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.535569Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:742:2289], Recipient [1:799:2315]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:29.535571Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:29.535574Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.11 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD]
Test command err:
2025-05-29T15:22:27.771043Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.771078Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.771098Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.771121Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.771139Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.771155Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.776094Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.776182Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.776217Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.776246Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.776280Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.776308Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.776360Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.776380Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.776650Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.776684Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.776705Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle
NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.776725Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.776749Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.776773Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.776818Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.779921Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.779960Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.779979Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.780680Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.780706Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.780725Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.780783Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.780796Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.780811Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.780822Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.780835Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.781005Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.781026Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.781041Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.781060Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.781082Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.781140Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.781201Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.781233Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.781662Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 
2025-05-29T15:22:27.781682Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.781689Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.781695Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.781700Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.784383Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.784402Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.784431Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.784438Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.784462Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.785092Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.785314Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.785419Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.785482Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.785512Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.785577Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.785782Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.785960Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.786395Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.786543Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.786772Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.787121Z node 6 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.806215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:27.806231Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:27.809508Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:27.809825Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:27.809863Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:27.809998Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:27.810540Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:27.810558Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:27.810595Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:27.810605Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.810608Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:27.810618Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:27.810631Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:27.810635Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:27.810637Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:27.810641Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:27.810650Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330 ... 
Self { Name: "my-database" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:788:2325] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:22:29.601451Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:788:2325] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 5000001 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:29.601494Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:795:2326], recipient# [1:787:2184], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:29.601508Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:29.601520Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host5" Port: 19001 ResolveHost: "host5" 
Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2025-05-29T15:22:29.601533Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:787:2184], Recipient [1:558:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:29.601538Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:29.601572Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:29.601577Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host5:19001 (not fixed) tenant: /dc-1/my-database 2025-05-29T15:22:29.601593Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v13 host5:19001 to database state=Active resolvehost=host5 address= dc= location= lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:29.601634Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v13 host5:19001 2025-05-29T15:22:29.601641Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 12 to 13 2025-05-29T15:22:29.601645Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=13 2025-05-29T15:22:29.612374Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:29.612393Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v13 host5:19001 2025-05-29T15:22:29.612401Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 12 to 13 2025-05-29T15:22:29.612409Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v13 host5:19001 to epoch cache 2025-05-29T15:22:29.612423Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v13 to update nodes log 2025-05-29T15:22:29.612455Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { } Expire: 18000025000 Name: "slot-0" } 2025-05-29T15:22:29.612560Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:799:2330], Recipient [1:558:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:29.612591Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:658:2244], Recipient [1:558:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host6" Port: 19001 ResolveHost: "host6" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" } 2025-05-29T15:22:29.612597Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:29.612605Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host6" Port: 19001 ResolveHost: "host6" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" 2025-05-29T15:22:29.612642Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 
ResultSet [{ Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:29.612665Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:788:2325] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 5000001 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:29.612701Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:801:2331], recipient# [1:800:2184], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:29.612718Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:29.612731Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host6" Port: 19001 ResolveHost: "host6" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2025-05-29T15:22:29.612744Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:800:2184], Recipient [1:558:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:29.612749Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:29.612759Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:29.612763Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host6:19001 (not fixed) tenant: /dc-1/my-database 2025-05-29T15:22:29.612776Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1025.v14 host6:19001 to database state=Active resolvehost=host6 address= dc= location= lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:2 
slotindex=1 authorizedbycertificate=false 2025-05-29T15:22:29.612819Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v14 host6:19001 2025-05-29T15:22:29.612825Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 13 to 14 2025-05-29T15:22:29.612829Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=14 2025-05-29T15:22:29.623361Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:29.623376Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v14 host6:19001 2025-05-29T15:22:29.623383Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 13 to 14 2025-05-29T15:22:29.623387Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v14 host6:19001 to epoch cache 2025-05-29T15:22:29.623398Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v14 to update nodes log 2025-05-29T15:22:29.623426Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host6" Port: 19001 ResolveHost: "host6" Address: "" Location { } Expire: 18000025000 Name: "slot-1" }
>> TSchemeShardSysViewTestReboots::CreateSysViewWithReboots [GOOD]
>> TestDataErasure::DataErasureManualLaunch
>> TPartitionTests::ConflictingCommitFails [GOOD]
>> TestDataErasure::SimpleDataErasureTestForTables
>> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD]
>> TNodeBrokerTest::NodeNameReuseRestart [GOOD]
>> TestDataErasure::DataErasureRun3CyclesForTables
>> TPartitionTests::ConflictingCommitProccesAfterRollback
>> TestDataErasure::DataErasureWithMerge
>> GenericFederatedQuery::IcebergHadoopBasicSelectAll
>> TestDataErasure::DataErasureRun3CyclesForTopics
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> TSchemeShardSysViewTestReboots::CreateSysViewWithReboots [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:22:24.092030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:24.092048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:22:24.092053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:24.092056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:24.092059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:24.092062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:24.092068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:24.092077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:24.092145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:24.092194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:24.101717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:22:24.101734Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:24.101796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:22:24.103711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:24.103733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:24.103751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:24.105500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:24.105573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:24.105644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:24.105805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:24.106238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:24.106263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:24.106421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-05-29T15:22:24.106428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:24.106449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:24.106454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:24.106458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:24.106470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:22:24.107394Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:22:24.119370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:24.119422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:24.119468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:24.119497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:24.119504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:24.120118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:24.120140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:24.120189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:24.120202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-05-29T15:22:24.120206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:24.120210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:24.120560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:24.120569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:24.120573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:24.120829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:24.120837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:24.120841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:24.120845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:24.121267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:24.121584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:24.121610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:24.121744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:24.121762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:24.121767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:24.121809Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
450Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:22:31.637465Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-05-29T15:22:31.638532Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1003:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1003 msg type: 269090816 2025-05-29T15:22:31.638571Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2025-05-29T15:22:31.638912Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:31.638939Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 133143988331 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:31.638949Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:45: [72057594046678944] TCreateSysView::TPropose, opId: 1003:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000004 2025-05-29T15:22:31.638978Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:0 128 -> 240 2025-05-29T15:22:31.639010Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:22:31.639021Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:22:31.639209Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:22:31.639512Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:31.639523Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:22:31.639555Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:22:31.639579Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:31.639585Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [31:206:2207], at schemeshard: 72057594046678944, txId: 
1003, path id: 3 2025-05-29T15:22:31.639592Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [31:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 4 FAKE_COORDINATOR: Erasing txId 1003 2025-05-29T15:22:31.639663Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.639671Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-05-29T15:22:31.639685Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:22:31.639690Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:22:31.639696Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:22:31.639699Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:22:31.639704Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2025-05-29T15:22:31.639710Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:22:31.639715Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:22:31.639720Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:22:31.639732Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:22:31.639740Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:22:31.639744Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 4 2025-05-29T15:22:31.639748Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 2 2025-05-29T15:22:31.639853Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:22:31.639866Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:22:31.639871Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:22:31.639876Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 
72057594046678944, LocalPathId: 3], version: 4 2025-05-29T15:22:31.639882Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:22:31.639978Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:22:31.639989Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:22:31.639994Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:22:31.639998Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2025-05-29T15:22:31.640002Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:22:31.640011Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:22:31.640779Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:22:31.640858Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:22:31.640908Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:22:31.640917Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:22:31.640997Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:22:31.641015Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:22:31.641021Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [31:351:2341] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:22:31.641097Z node 31 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:31.641131Z node 31 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 45us result status StatusSuccess 2025-05-29T15:22:31.641207Z 
node 31 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD]
Test command err:
2025-05-29T15:22:27.965519Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.965571Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.965597Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.965628Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.965651Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.965667Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.971457Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.971546Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.971596Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.971629Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.971661Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.971687Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.971742Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.971762Z node 1 :NAMESERVICE DEBUG:
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.971951Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.971969Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.971981Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.971994Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.972018Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.972033Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.972062Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.976046Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.976099Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.976128Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.977224Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.977263Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.977295Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.977388Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.977410Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.977432Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.977459Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.977506Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.977531Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.977567Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.977589Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:27.977727Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.977755Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.977865Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.977952Z node 2 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.978006Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.978678Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.978695Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.978704Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.978713Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.978723Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.982001Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.982018Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.982026Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.982056Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.982112Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.982647Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.983089Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.983184Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.983245Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.983271Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.983364Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.984490Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.984512Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:27.984568Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.068048Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.068082Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.068307Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.068689Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.069001Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.070576Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.070925Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.100726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:28.100746Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:28.104709Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:28.105111Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:28.105167Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:28.105371Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:28.106045Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:28.106320Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:28.106381Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:28.106397Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:28.106402Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:28.106417Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Compl ... 
st 2025-05-29T15:22:30.062498Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:44: TTxExtendLease Execute node #1024 2025-05-29T15:22:30.062503Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:314: [Dirty] Extended lease of #1024.v4 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-05-29T15:22:30.062509Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1024.v4 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=2 expire=Thu, 01 Jan 1970 03:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-05-29T15:22:30.062551Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 3 to 4 2025-05-29T15:22:30.062556Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=4 2025-05-29T15:22:30.062599Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:809:2310], Recipient [1:755:2268]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:30.062628Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:810:2311], Recipient [1:755:2268]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:30.062652Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:811:2312], Recipient [1:755:2268]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:30.062667Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:812:2313], Recipient [1:755:2268]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:30.108317Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:755:2268]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 3 } 2025-05-29T15:22:30.108338Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:30.108345Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-05-29T15:22:30.108356Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:764:2274] 2025-05-29T15:22:30.108360Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:30.108367Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-05-29T15:22:30.108374Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:767:2277] 2025-05-29T15:22:30.108378Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:30.108383Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-05-29T15:22:30.108390Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:809:2310] 2025-05-29T15:22:30.108394Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:30.108398Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-05-29T15:22:30.108406Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender 
[2:53:2072], Recipient [1:810:2311] 2025-05-29T15:22:30.108410Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:30.108414Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-05-29T15:22:30.108424Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:811:2312] 2025-05-29T15:22:30.108427Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:30.108431Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-05-29T15:22:30.108439Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:812:2313] 2025-05-29T15:22:30.108443Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:30.108447Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-05-29T15:22:30.108454Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:765:2275] 2025-05-29T15:22:30.108458Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:30.108462Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-05-29T15:22:30.119250Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:78: TTxExtendLease Complete 2025-05-29T15:22:30.119293Z node 1 :NODE_BROKER TRACE: node_broker__extend_lease.cpp:82: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1024 Expire: 10800025000 Epoch { Id: 2 Version: 4 Start: 3600025000 End: 7200025000 NextEnd: 10800025000 } } 2025-05-29T15:22:30.119311Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:314: [Committed] Extended lease of #1024.v4 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-05-29T15:22:30.119316Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 3 to 4 2025-05-29T15:22:30.119320Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v4 host1:1001 to epoch cache 2025-05-29T15:22:30.119334Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v4 to update nodes log ... 
waiting for epoch update 2025-05-29T15:22:30.119424Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:834:2331], Recipient [1:755:2268]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:30.119442Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:755:2268]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:30.119447Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:30.119455Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:30.348399Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435072, Sender [1:755:2268], Recipient [1:755:2268]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2025-05-29T15:22:30.348422Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:256: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2025-05-29T15:22:30.348446Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:20: TTxUpdateEpoch Execute 2025-05-29T15:22:30.348456Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Dirty] Move to new epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z, approximate epoch start #3.5 2025-05-29T15:22:30.348462Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:30.348489Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #3.5 2025-05-29T15:22:30.519952Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:31: TTxUpdateEpoch Complete 2025-05-29T15:22:30.519995Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Committed] Move to new epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z, approximate epoch start #3.5 2025-05-29T15:22:30.520026Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T03:00:00.025000Z 2025-05-29T15:22:30.520033Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z, approximate epoch start #3.5 nodes=1 expired=0 2025-05-29T15:22:30.520072Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z nodes=1 expired=0 removed=0 2025-05-29T15:22:30.520078Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v4 to update nodes log 2025-05-29T15:22:30.520093Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:30.520101Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:30.520108Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:30.520114Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send 
TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:30.520119Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:30.520124Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:30.520129Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:30.520134Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:30.541108Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:857:2342], Recipient [1:755:2268]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:30.541159Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:755:2268]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:30.541166Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:30.541181Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:30.541253Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:859:2344], Recipient [1:755:2268]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:30.541277Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:629:2213], Recipient [1:755:2268]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:30.541282Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:30.541288Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z >> TestDataErasure::SimpleDataErasureTestForAllSupportedObjects >> GenericFederatedQuery::IcebergHadoopTokenSelectAll ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] Test command err: 2025-05-29T15:22:30.235304Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.235342Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.235363Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.235386Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.235405Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 
2025-05-29T15:22:30.235421Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.241867Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.241982Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.242025Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.242059Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.242099Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.242129Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.242182Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.242204Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.242430Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.242465Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.242487Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.242501Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.242519Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.242536Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.242571Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.246956Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.247031Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.247069Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.247929Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.247969Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.247997Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.248085Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.248104Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.248123Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.248151Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 
2025-05-29T15:22:30.248194Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.248213Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.248229Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.248369Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.248393Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.248413Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.248515Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.248631Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.248693Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.249495Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.249526Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.249537Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.249560Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.249576Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.254118Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.254601Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.254761Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.254852Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.254881Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.256141Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.256480Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 
18446744073709.551615s } 2025-05-29T15:22:30.256540Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.256656Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.256678Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.256891Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.257599Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.257622Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.257895Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.257945Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.257967Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.258208Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.262270Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.262471Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.262704Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.263081Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.288564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:30.288583Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-05-29T15:22:30.292833Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:30.293229Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:30.293284Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:30.293442Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:30.294039Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:30.294058Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:30.294104Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:30.294118Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:30.294122Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:30.294133Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Compl ... owPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:30.566533Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:30.566538Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2025-05-29T15:22:30.566545Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:690:2183], Recipient [1:561:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:30.566548Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:30.566552Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:30.566554Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:19001 (not fixed) tenant: /dc-1/my-database 2025-05-29T15:22:30.566559Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:30.566566Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: 
OK } Node { NodeId: 1025 Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { } Expire: 7200024000 Name: "slot-1" } 2025-05-29T15:22:30.566603Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:693:2264], Recipient [1:561:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:30.566616Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:661:2243], Recipient [1:561:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" } 2025-05-29T15:22:30.566619Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:30.566624Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" 2025-05-29T15:22:30.566638Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:30.566644Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:667:2248] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 5000001 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:30.566658Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:695:2265], recipient# [1:694:2183], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:30.566666Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } 
ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:30.566673Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2025-05-29T15:22:30.566679Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:694:2183], Recipient [1:561:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:30.566682Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:30.566685Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:30.566687Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host2:19001 (not fixed) tenant: /dc-1/my-database 2025-05-29T15:22:30.566691Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:30.566706Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { } Expire: 7200024000 Name: "slot-1" } 2025-05-29T15:22:30.566780Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:697:2267], Recipient [1:561:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:30.566795Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:661:2243], Recipient [1:561:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" } 2025-05-29T15:22:30.566798Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:22:30.566803Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" 2025-05-29T15:22:30.566819Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:22:30.566826Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:667:2248] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 5000001 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { 
Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:22:30.566843Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:699:2268], recipient# [1:698:2183], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:22:30.566851Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:22:30.566856Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2025-05-29T15:22:30.566864Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:698:2183], Recipient [1:561:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:30.566866Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:22:30.566870Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:22:30.566872Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2025-05-29T15:22:30.566878Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:22:30.566884Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } Expire: 7200024000 Name: "slot-0" } >> GenericFederatedQuery::YdbFilterPushdown >> GenericFederatedQuery::ClickHouseManagedSelectAll |60.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/ut/ydb-core-client-ut |60.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |60.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |60.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut |60.6%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |60.6%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut |60.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut >> TNodeBrokerTest::NodesMigrationManyNodesInterrupted [GOOD] >> 
GenericFederatedQuery::IcebergHiveTokenSelectAll |60.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |60.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |60.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |60.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |60.6%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut >> TNodeBrokerTest::NodesAlreadyMigrated [GOOD] >> TPQTabletTests::Huge_ProposeTransacton [GOOD] |60.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |60.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |60.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |60.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |60.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut >> TPartitionTests::FailedTxsDontBlock [GOOD] >> TPQTabletTests::Limit_On_The_Number_Of_Transactons >> PQCountersLabeled::ImportantFlagSwitching [GOOD] >> PQCountersLabeled::NewConsumersCountersAppear |60.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple [FAIL] Test command err: 2025-05-29T15:21:43.398953Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:21:43.398986Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:21:43.398997Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000bf2/r3tmp/tmpXuyFJ5/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29511, node 1 TClient is connected to server localhost:30300 2025-05-29T15:21:43.536948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:21:43.559778Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:43.560907Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:43.560925Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:43.560930Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:43.561083Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:43.561233Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532102892848 != 1748532102892852 2025-05-29T15:21:43.603427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:43.603469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:43.614267Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:53.723868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:677:2568], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:53.723897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:687:2573], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:53.723909Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:53.724934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-05-29T15:21:53.825582Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:691:2576], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-05-29T15:21:53.904359Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:760:2614] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:21:53.954856Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:770:2623], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:21:53.956216Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzhjNTZkZDItZjBlN2IwYjktNmI5MDg0NWUtODU1ODM1ODI=, ActorId: [1:673:2565], ActorState: ExecuteState, TraceId: 01jwea5c9v7c8je5b54qn8yjd6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=0 2025-05-29T15:21:54.034097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-05-29T15:21:54.341645Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:925:2730], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:21:54.342176Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjQzNDVlNjctMjI5NjAyMmEtZmZjN2RlMDItOGQ3MjgyNmQ=, ActorId: [1:886:2698], ActorState: ExecuteState, TraceId: 01jwea5cwq508ngkn72vftb3gg, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea5cw897e00ms4jj17mr3e 2025-05-29T15:21:54.343378Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=MjQzNDVlNjctMjI5NjAyMmEtZmZjN2RlMDItOGQ3MjgyNmQ=" tx_control { tx_id: "01jwea5cw897e00ms4jj17mr3e" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1 } } } } } ; 2025-05-29T15:21:54.343571Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:21:59.076867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:21:59.076904Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded assertion failed at ydb/services/metadata/initializer/ut/ut_init.cpp:139, virtual void NKikimr::NTestSuiteInitializer::TTestCaseSimple::Execute_(NUnitTest::TTestContext &): (emulator->IsInitialized()) TBackTrace::Capture()+28 (0x137C123C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x139745C9) NKikimr::NTestSuiteInitializer::TTestCaseSimple::Execute_(NUnitTest::TTestContext&)+2524 (0x136B341C) NKikimr::NTestSuiteInitializer::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136B8087) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1397647E) NKikimr::NTestSuiteInitializer::TCurrentTest::Execute()+429 (0x136B78ED) NUnitTest::TTestFactory::Execute()+803 (0x13976BF3) NUnitTest::RunMain(int, char**)+3021 (0x1398879D) ??+0 (0x7F8080FF3D90) __libc_start_main+128 (0x7F8080FF3E40) _start+41 (0x12810029) |60.6%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationManyNodesInterrupted [GOOD] Test command err: 2025-05-29T15:22:28.310481Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.310525Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.310548Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.310577Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.310598Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.310615Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.316594Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.316698Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.316737Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.316770Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.316809Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.316840Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.316896Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.316917Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.317136Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.317155Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.317168Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.317179Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.317194Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.317208Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.317236Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.320923Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.320974Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.321010Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.321906Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.321939Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.321969Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.322042Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.322058Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.322075Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.322102Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.322143Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.322160Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.322176Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.322303Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.322335Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.322352Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.322419Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.322491Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.322527Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.323442Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.323461Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.323470Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.323480Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.323492Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.327824Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.327844Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.327939Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.327953Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.327965Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.328807Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.328981Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.329118Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.329192Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.329242Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.329424Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.329466Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.329629Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.329720Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 
18446744073709.551615s } 2025-05-29T15:22:28.329857Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.330072Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.330135Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.330420Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.330570Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.331090Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.331265Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.352861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:28.352879Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:28.356951Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:28.357477Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:28.357560Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:28.357811Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:28.358843Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:28.358883Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:28.358948Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:28.358965Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:28.358970Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:28.358987Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Compl ... 
ate nodes log 2025-05-29T15:22:31.097942Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2369.v755 to update nodes log 2025-05-29T15:22:31.097949Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2368.v755 to update nodes log 2025-05-29T15:22:31.097956Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2367.v755 to update nodes log 2025-05-29T15:22:31.097963Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2366.v755 to update nodes log 2025-05-29T15:22:31.097970Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2365.v755 to update nodes log 2025-05-29T15:22:31.097978Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2364.v755 to update nodes log 2025-05-29T15:22:31.097985Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2363.v755 to update nodes log 2025-05-29T15:22:31.097992Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2362.v755 to update nodes log 2025-05-29T15:22:31.097999Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2361.v755 to update nodes log 2025-05-29T15:22:31.098006Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2360.v755 to update nodes log 2025-05-29T15:22:31.098012Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2359.v755 to update nodes log 2025-05-29T15:22:31.098019Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2358.v755 to update nodes log 2025-05-29T15:22:31.098028Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2357.v755 to update nodes log 2025-05-29T15:22:31.098036Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2356.v755 to update nodes log 2025-05-29T15:22:31.098043Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2355.v755 to update nodes log 2025-05-29T15:22:31.098050Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2354.v755 to update nodes log 2025-05-29T15:22:31.098072Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2353.v755 to update nodes log 2025-05-29T15:22:31.098079Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2352.v755 to update nodes log 2025-05-29T15:22:31.098087Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2351.v755 to update nodes log 2025-05-29T15:22:31.098095Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2350.v755 to update nodes log 2025-05-29T15:22:31.098103Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2349.v755 to update nodes log 2025-05-29T15:22:31.098110Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2348.v755 to update nodes log 2025-05-29T15:22:31.098119Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2347.v755 to update nodes log 2025-05-29T15:22:31.098127Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2346.v755 to update nodes log 2025-05-29T15:22:31.098134Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2345.v755 to update nodes log 2025-05-29T15:22:31.098142Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2344.v755 to update nodes log 2025-05-29T15:22:31.098148Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2343.v755 to update nodes log 2025-05-29T15:22:31.098156Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2342.v755 to update nodes log 2025-05-29T15:22:31.098163Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2341.v755 to update nodes log 2025-05-29T15:22:31.098172Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2340.v755 to update nodes log 2025-05-29T15:22:31.098179Z node 1 :NODE_BROKER DEBUG: 
node_broker.cpp:659: Add node #2339.v755 to update nodes log 2025-05-29T15:22:31.098186Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2338.v755 to update nodes log 2025-05-29T15:22:31.098194Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2337.v755 to update nodes log 2025-05-29T15:22:31.098201Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2336.v755 to update nodes log 2025-05-29T15:22:31.098209Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2335.v755 to update nodes log 2025-05-29T15:22:31.098216Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2334.v755 to update nodes log 2025-05-29T15:22:31.098224Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2333.v755 to update nodes log 2025-05-29T15:22:31.098231Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2332.v755 to update nodes log 2025-05-29T15:22:31.098238Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2331.v755 to update nodes log 2025-05-29T15:22:31.098246Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2330.v755 to update nodes log 2025-05-29T15:22:31.098253Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2329.v755 to update nodes log 2025-05-29T15:22:31.098261Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2328.v755 to update nodes log 2025-05-29T15:22:31.098269Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2327.v755 to update nodes log 2025-05-29T15:22:31.098277Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2326.v755 to update nodes log 2025-05-29T15:22:31.098284Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2325.v755 to update nodes log 2025-05-29T15:22:31.098292Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2324.v755 to update nodes log 2025-05-29T15:22:31.098299Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2323.v755 to update nodes log 2025-05-29T15:22:31.098308Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2322.v755 to update nodes log 2025-05-29T15:22:31.098315Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2321.v755 to update nodes log 2025-05-29T15:22:31.098323Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2320.v755 to update nodes log 2025-05-29T15:22:31.098329Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2319.v755 to update nodes log 2025-05-29T15:22:31.098337Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2318.v755 to update nodes log 2025-05-29T15:22:31.098344Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2317.v755 to update nodes log 2025-05-29T15:22:31.098351Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2316.v755 to update nodes log 2025-05-29T15:22:31.098359Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2315.v755 to update nodes log 2025-05-29T15:22:31.098366Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2314.v755 to update nodes log 2025-05-29T15:22:31.098374Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2313.v755 to update nodes log 2025-05-29T15:22:31.098382Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2312.v755 to update nodes log 2025-05-29T15:22:31.098389Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2311.v755 to update nodes log 2025-05-29T15:22:31.098397Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2310.v755 to update nodes log 2025-05-29T15:22:31.098406Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2043.v755 to update nodes log 
2025-05-29T15:22:31.098414Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2044.v755 to update nodes log 2025-05-29T15:22:31.098422Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2045.v755 to update nodes log 2025-05-29T15:22:31.098430Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2046.v755 to update nodes log 2025-05-29T15:22:31.098438Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2047.v755 to update nodes log 2025-05-29T15:22:31.098446Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2048.v755 to update nodes log 2025-05-29T15:22:31.098455Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2049.v755 to update nodes log 2025-05-29T15:22:31.098464Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2050.v755 to update nodes log 2025-05-29T15:22:31.098471Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2051.v755 to update nodes log 2025-05-29T15:22:31.098479Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2052.v755 to update nodes log 2025-05-29T15:22:31.098487Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2053.v755 to update nodes log 2025-05-29T15:22:31.098496Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2054.v755 to update nodes log 2025-05-29T15:22:31.098502Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2055.v755 to update nodes log 2025-05-29T15:22:31.098509Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2056.v755 to update nodes log 2025-05-29T15:22:31.098518Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2309.v755 to update nodes log 2025-05-29T15:22:31.098526Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #2308.v755 to update nodes log 2025-05-29T15:22:31.098534Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1792.v755 to update nodes log 2025-05-29T15:22:31.099381Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:3915:4671], Recipient [1:3851:4614]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:31.099439Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:3851:4614]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:31.099447Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:31.099468Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.755 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:31.104413Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:3917:4673], Recipient [1:3851:4614]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:31.104470Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:3851:4614]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:31.104478Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:31.104493Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.755 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:31.118483Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:3919:4675], Recipient [1:3851:4614]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:31.118538Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:3851:4614]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:31.118546Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:31.118560Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.755 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:31.122047Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:3921:4677], Recipient [1:3851:4614]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:31.122121Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:3851:4614]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 753 } 2025-05-29T15:22:31.122129Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:31.122144Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.755 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesAlreadyMigrated [GOOD] Test command err: 2025-05-29T15:22:29.559692Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.559760Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.559792Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.559836Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.559870Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.559900Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.568166Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.568320Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.568377Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.568426Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.568483Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.568530Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.568625Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.568662Z node 1 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.568979Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.569008Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.569029Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.569046Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.569067Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.569086Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.569123Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.574089Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.574163Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.574199Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.575203Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.575260Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.575301Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.575420Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.575443Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.575470Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.575497Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.575557Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.575584Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.575609Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.575634Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:29.575844Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.575877Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.575983Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.576092Z node 2 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.576148Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.576789Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.576803Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.576813Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.576824Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.576833Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.581363Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.581587Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.582976Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.583038Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.583610Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.583720Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.583809Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.583881Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.583926Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.584658Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.584891Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.585265Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.585398Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.585714Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.586145Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:29.613616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:29.613635Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:29.617671Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:29.618022Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:29.618072Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:29.618236Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:29.618907Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:29.618928Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:29.618975Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:29.618986Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:29.618990Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:29.619000Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:29.619014Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:29.619018Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:29.619021Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:29.619025Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:29.619037Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1 2025-05-29T15:22:29.619040Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes 2025-05-29T15:22:29.650764Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:29.650805Z node 1 :NODE_BROKER TRACE: node_broke ... 
ient [1:980:2427]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:31.484107Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:31.484123Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18000025000 Name: "slot-0" } } 2025-05-29T15:22:31.484268Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 268829696, Sender [1:974:2426], Recipient [1:980:2427]: NKikimr::TEvTablet::TEvTabletDead 2025-05-29T15:22:31.484298Z node 1 :NODE_BROKER INFO: node_broker.cpp:126: OnTabletDead: 72057594037936129 2025-05-29T15:22:31.484303Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:212: TNodeBroker::Cleanup 2025-05-29T15:22:31.484621Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:649: Handle NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037936129 ClientId: [2:966:2072] ServerId: [1:992:2434] } 2025-05-29T15:22:31.484642Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:649: Handle NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037936129 ClientId: [3:967:2072] ServerId: [1:994:2436] } 2025-05-29T15:22:31.484675Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:649: Handle NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037936129 ClientId: [4:968:2072] ServerId: [1:995:2437] } 2025-05-29T15:22:31.484689Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:649: Handle NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037936129 ClientId: [5:969:2072] ServerId: [1:997:2439] } 2025-05-29T15:22:31.484703Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:649: Handle NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037936129 ClientId: [8:965:2072] ServerId: [1:991:2433] } 2025-05-29T15:22:31.487607Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:31.488584Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:31.488664Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:31.489098Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:31.489161Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 2025-05-29T15:22:31.489272Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:31.489333Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 
2025-05-29T15:22:31.489346Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:988: [DB] Loaded current epoch: #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:31.489353Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1017: [DB] Loaded approximate epoch start: #4.6 2025-05-29T15:22:31.489358Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1034: [DB] Loaded main nodes table: Nodes 2025-05-29T15:22:31.489387Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:281: [Dirty] Added node #1024.v0 host2:1001 2025-05-29T15:22:31.489435Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1113: [DB] Loaded node #1024.v0 { NodeId: 1024, State: Active, Version: 0, Host: host2, Port: 1001, ResolveHost: host2.yandex.net, Address: 1.2.3.5, Lease: 1, Expire: Thu, 01 Jan 1970 05:00:00 UTC, Location: DC=1/M=2/R=3/U=5/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:31.489457Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1024.v7 { NodeId: 1024, State: Active, Version: 7, Host: host2, Port: 1001, ResolveHost: host2.yandex.net, Address: 1.2.3.5, Lease: 1, Expire: Thu, 01 Jan 1970 05:00:00 UTC, Location: DC=1/M=2/R=3/U=5/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-05-29T15:22:31.489463Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1239: [DB] Node #1024.v7 is already migrated 2025-05-29T15:22:31.489480Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:31.489510Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:31.489517Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:31.489522Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:31.489527Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:31.489571Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T04:00:00.025000Z 2025-05-29T15:22:31.489580Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z, approximate epoch start #4.6 nodes=1 expired=0 2025-05-29T15:22:31.489603Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z nodes=1 expired=0 removed=0 2025-05-29T15:22:31.489609Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v7 to update nodes log 2025-05-29T15:22:31.489693Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1075:2488], Recipient [1:1069:2484]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:31.489759Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:1075:2488] Leader: 1 Dead: 0 Generation: 7 VersionInfo:  } 2025-05-29T15:22:31.489837Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:1075:2488] 2025-05-29T15:22:31.489849Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 
2025-05-29T15:22:31.489854Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR 2025-05-29T15:22:31.490461Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1102:2510], Recipient [1:1069:2484]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:31.490539Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:624:2214], Recipient [1:1069:2484]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:31.490545Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:31.490555Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:31.491301Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1106:2514], Recipient [1:1069:2484]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:31.491334Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:624:2214], Recipient [1:1069:2484]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:31.491340Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:31.491349Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:31.491422Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1108:2516], Recipient [1:1069:2484]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:31.491439Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:624:2214], Recipient [1:1069:2484]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:31.491443Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:31.491448Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:31.491503Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1110:2518], Recipient [1:1069:2484]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:31.491524Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:624:2214], Recipient [1:1069:2484]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 7 } 2025-05-29T15:22:31.491529Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:31.491534Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-05-29T15:22:31.491586Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1112:2520], Recipient [1:1069:2484]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:31.491605Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 
272039950, Sender [1:624:2214], Recipient [1:1069:2484]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 7 SeqNo: 6 } 2025-05-29T15:22:31.491611Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:31.491620Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:624:2214], seqNo: 6, version: 7, server pipe id: [1:1112:2520] 2025-05-29T15:22:31.491628Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v7 -> v7 to [1:624:2214] 2025-05-29T15:22:31.491688Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:1112:2520], Recipient [1:1069:2484]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:31.491694Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:624:2214], seqNo: 6, server pipe id: [1:1112:2520] 2025-05-29T15:22:31.491725Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:1114:2522], Recipient [1:1069:2484]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:31.491739Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:624:2214], Recipient [1:1069:2484]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:31.491743Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:31.491773Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18000025000 Name: "slot-0" } } >> TSchemeShardSysViewTestReboots::DropSysViewWithReboots [GOOD] >> TPartitionTests::EndWriteTimestamp_DataKeysBody |60.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TPQTabletTests::Limit_On_The_Number_Of_Transactons [GOOD] |60.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |60.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> TSchemeShardSysViewTestReboots::DropSysViewWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:22:25.155178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:25.155202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:25.155208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:25.155213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:25.155219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:25.155224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:25.155232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:25.155247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:25.155383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:25.155457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:25.169970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:22:25.169996Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:25.170099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:22:25.172909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:25.172940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:25.172973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:25.175594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:25.175672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:25.175778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:25.175940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-05-29T15:22:25.176499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:25.176540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:25.176793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:25.176806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:25.176844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:25.176852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:25.176858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:25.176878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:22:25.178124Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:22:25.195309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:25.195381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:25.195441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:25.195478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:25.195486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:25.196296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:25.196323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , 
status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:25.196386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:25.196401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:25.196405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:25.196409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:25.196760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:25.196769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:25.196773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:25.197054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:25.197060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:25.197065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:25.197070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:25.197498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:25.197780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:25.197812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:25.197962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:25.197978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 
TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:25.197982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:25.198027Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 3.167915Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:33.167923Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:33.167950Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:22:33.167960Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:22:33.167984Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:33.167989Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [32:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-05-29T15:22:33.167995Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [32:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2025-05-29T15:22:33.167999Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [32:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2025-05-29T15:22:33.168041Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:22:33.168046Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:22:33.168055Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:22:33.168061Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:22:33.168064Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:22:33.168066Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:22:33.168069Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-05-29T15:22:33.168073Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:22:33.168076Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 
2025-05-29T15:22:33.168078Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:22:33.168087Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:22:33.168090Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1004, publications: 3, subscribers: 0 2025-05-29T15:22:33.168093Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-05-29T15:22:33.168095Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2025-05-29T15:22:33.168098Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-29T15:22:33.168221Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:22:33.168231Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:22:33.168234Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:22:33.168237Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:22:33.168240Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:22:33.168428Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:22:33.168436Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:22:33.168439Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:22:33.168444Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:22:33.168447Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:22:33.168546Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:22:33.168556Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:22:33.168559Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:22:33.168562Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:22:33.168564Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:22:33.168573Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-05-29T15:22:33.168615Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:22:33.168620Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:22:33.168626Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:22:33.168813Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:22:33.169068Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:22:33.169082Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:22:33.169268Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:22:33.169304Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:22:33.169309Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:22:33.169354Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:22:33.169367Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 
2025-05-29T15:22:33.169372Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [32:382:2372] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:22:33.169421Z node 32 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:33.169440Z node 32 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 27us result status StatusPathDoesNotExist 2025-05-29T15:22:33.169467Z node 32 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.sys\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/.sys/new_sys_view" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TPartitionTests::EndWriteTimestamp_DataKeysBody [GOOD] >> TPartitionTests::EndWriteTimestamp_FromMeta >> TNodeBrokerTest::NodesMigrationRemovedChanged [GOOD] >> TestDataErasure::SimpleDataErasureTestForTopic [GOOD] >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] >> TestDataErasure::DataErasureManualLaunch [GOOD] >> TestDataErasure::DataErasureManualLaunch3Cycles >> TPartitionTests::EndWriteTimestamp_FromMeta [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemovedChanged [GOOD] Test command err: 2025-05-29T15:22:30.446761Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.446819Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.446852Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.446893Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.446925Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.446950Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.454577Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.454716Z node 3 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.454793Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.454839Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.454889Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.454930Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.455018Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.455052Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.455365Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.455401Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.455426Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.455449Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.455478Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.455501Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.455547Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.460104Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.460155Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.460184Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.461310Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.461351Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.461383Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.461484Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.461508Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.461533Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.461576Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.461601Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:30.461807Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.461838Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.461862Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.461896Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.461924Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.462027Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.462131Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.462189Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.462993Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.463019Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.463030Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.463040Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.463049Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.463060Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.463070Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.467903Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.467952Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.467978Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.468035Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.468051Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.468083Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.469187Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 
18446744073709.551615s } 2025-05-29T15:22:30.469747Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.469950Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.469966Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.470176Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.470402Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.470481Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.471808Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.471857Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.472147Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.472159Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:30.503781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:30.503802Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:30.508814Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:30.509358Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:30.509449Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:30.509734Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:30.511203Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:30.511227Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:30.511282Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 
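The epoch lines that follow ("Starting the first epoch: #1.1 ...") carry three timestamps per epoch: its start, its end, and a bound one epoch further out. A toy model of how such a triple could be derived from the current time and a fixed epoch length (the struct and field names are assumptions, not the node broker's real types):

    #include <chrono>
    #include <cstdint>
    #include <cstdio>

    using namespace std::chrono;

    // Assumed shape: epoch #Id.Version covers [Start, End), with one extra
    // epoch of grace before expiry, as the three printed timestamps suggest.
    struct TEpoch {
        uint64_t Id = 1;
        uint64_t Version = 1;
        milliseconds Start{}, End{}, Expire{};
    };

    TEpoch FirstEpoch(milliseconds now, milliseconds len) {
        TEpoch e;
        e.Start = now;
        e.End = now + len;
        e.Expire = now + 2 * len;
        return e;
    }

    int main() {
        // 25 ms after the test clock's zero, one-hour epochs -- as in the log.
        TEpoch e = FirstEpoch(milliseconds(25), hours(1));
        std::printf("epoch #%llu.%llu: %lld ms - %lld ms - %lld ms\n",
                    (unsigned long long)e.Id, (unsigned long long)e.Version,
                    (long long)e.Start.count(), (long long)e.End.count(),
                    (long long)e.Expire.count());
    }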
2025-05-29T15:22:30.511295Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:30.511298Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:30.511311Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:30.511341Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:30.511346Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:30.511349Z node 1 :NODE_BROKER DEBU ... de_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:32.267207Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:32.267562Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:32.267625Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:32.267694Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:32.267757Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:32.267768Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:988: [DB] Loaded current epoch: #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:32.267774Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1012: [DB] Approximate epoch start is changed: #7.9 2025-05-29T15:22:32.267778Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1034: [DB] Loaded main nodes table: Nodes 2025-05-29T15:22:32.267807Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1190: [DB] Loaded nodeV2 #1024.v5 { NodeId: 1024, State: Removed, Version: 5, Host: , Port: 0, ResolveHost: , Address: , Lease: 0, Expire: Thu, 01 Jan 1970 00:00:00 UTC, Location: , AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 0:0 } 2025-05-29T15:22:32.267811Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:300: [Dirty] Added removed node #1024.v5 2025-05-29T15:22:32.267815Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1234: [DB] Removed node #1024.v5 is already migrated 2025-05-29T15:22:32.267825Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:32.267847Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:32.267853Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:32.267856Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:32.267859Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #7.9 2025-05-29T15:22:32.279494Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:32.279535Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T07:00:00.025000Z 2025-05-29T15:22:32.279549Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 
1970-01-01T08:00:00.025000Z, approximate epoch start #7.9 nodes=0 expired=0 2025-05-29T15:22:32.279561Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z nodes=0 expired=0 removed=1 2025-05-29T15:22:32.279568Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v5 to update nodes log 2025-05-29T15:22:32.279664Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:777:2300], Recipient [1:769:2294]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:32.279747Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:778:2301], Recipient [1:769:2294]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:32.279765Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:778:2301] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:32.279778Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:780:2303], Recipient [1:769:2294]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:32.279783Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:777:2300] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:32.279791Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:780:2303] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:32.279797Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:657: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:781:2304] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-05-29T15:22:32.279814Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:781:2304], Recipient [1:769:2294]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:32.279892Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:777:2300] 2025-05-29T15:22:32.279898Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:32.279906Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:32.279926Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:778:2301] 2025-05-29T15:22:32.279929Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:32.279933Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:32.279941Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:780:2303] 2025-05-29T15:22:32.279944Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:32.279947Z node 1 :NODE_BROKER 
TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:32.279955Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:781:2304] 2025-05-29T15:22:32.279957Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:32.279961Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:32.280100Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:809:2327], Recipient [1:769:2294]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:32.280124Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:627:2213], Recipient [1:769:2294]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:32.280127Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:32.280131Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:32.280176Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:811:2329], Recipient [1:769:2294]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:32.280185Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:627:2213], Recipient [1:769:2294]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:32.280188Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:32.280191Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:32.280242Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:813:2331], Recipient [1:769:2294]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:32.280251Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:627:2213], Recipient [1:769:2294]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:32.280254Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:32.280257Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:32.280303Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:815:2333], Recipient [1:769:2294]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:32.280324Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:627:2213], Recipient [1:769:2294]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 9 } 2025-05-29T15:22:32.280327Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:32.280330Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send 
TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-05-29T15:22:32.280369Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:817:2335], Recipient [1:769:2294]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:32.280383Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039950, Sender [1:627:2213], Recipient [1:769:2294]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 9 SeqNo: 2 } 2025-05-29T15:22:32.280387Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:254: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-05-29T15:22:32.280392Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2213], seqNo: 2, version: 9, server pipe id: [1:817:2335] 2025-05-29T15:22:32.280397Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v9 -> v9 to [1:627:2213] 2025-05-29T15:22:32.280441Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877764, Sender [1:817:2335], Recipient [1:769:2294]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:32.280445Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2213], seqNo: 2, server pipe id: [1:817:2335] 2025-05-29T15:22:32.280462Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:819:2337], Recipient [1:769:2294]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:32.280476Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039937, Sender [1:627:2213], Recipient [1:769:2294]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-05-29T15:22:32.280479Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:247: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-05-29T15:22:32.280492Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1478: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } |60.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TPartitionTests::EndWriteTimestamp_HeadKeys ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::SimpleDataErasureTestForTopic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:31.287472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:31.287494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:31.287498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:31.287502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:31.287511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:31.287513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:31.287520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:31.287533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:31.287658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:31.287730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:31.296599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:31.296619Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:31.299582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:31.299723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:31.299790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:31.301856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:31.302021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:31.302149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:31.302197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:31.302692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:31.302754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:31.303009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:31.303018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:31.303039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:31.303046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:31.303052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:31.303086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.304476Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:31.324589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:31.324658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.324718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:31.324759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:31.324770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.325612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:31.325639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:31.325686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.325695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:31.325701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:31.325707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:31.326091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.326101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:31.326106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:31.326430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.326439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.326445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:31.326452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:31.327114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:31.327504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:31.327554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:31.327687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:31.327705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:31.327711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:31.327749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:31.327753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:31.327781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:31.327789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:31.328132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:31.328139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:31.328166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... dRequestToBSC: Generation# 1 2025-05-29T15:22:33.351281Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:296:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-05-29T15:22:33.351288Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:33.351293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:33.351315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:33.351328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:648: TTxCompleteDataErasureBSC: Progress data shred in BSC 50% 2025-05-29T15:22:33.351343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-05-29T15:22:33.351356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:348: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-05-29T15:22:33.412392Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:459:2410]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:33.412424Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:33.412456Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:459:2410], Recipient [1:459:2410]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:33.412461Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:33.412518Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [1:642:2557], Recipient [1:459:2410]: NKikimrSchemeOp.TDescribePath PathId: 2 SchemeshardId: 72075186233409546 2025-05-29T15:22:33.412523Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:22:33.412549Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-05-29T15:22:33.412605Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72075186233409546 describe pathId 2 took 43us result status StatusSuccess 2025-05-29T15:22:33.412745Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Database1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72075186233409546 PathType: 
EPathTypePersQueueGroup CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409550 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 10 } YdbDatabasePath: "/MyRoot/Database1" } Partitions { PartitionId: 0 TabletId: 72075186233409549 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409550 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2025-05-29T15:22:33.463723Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:887:2761]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:33.463749Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:33.463765Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:887:2761], Recipient [1:887:2761]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:33.463770Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:33.463825Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [1:1068:2905], Recipient [1:887:2761]: NKikimrSchemeOp.TDescribePath PathId: 2 SchemeshardId: 72075186233409551 2025-05-29T15:22:33.463830Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:22:33.463854Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72075186233409551, at schemeshard: 72075186233409551 2025-05-29T15:22:33.463908Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72075186233409551 describe pathId 2 took 41us result status StatusSuccess 2025-05-29T15:22:33.464052Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Database2/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72075186233409551 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version 
{ GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409555 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 10 } YdbDatabasePath: "/MyRoot/Database2" } Partitions { PartitionId: 0 TabletId: 72075186233409554 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409555 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409552 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409553 SchemeShard: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72075186233409551, at schemeshard: 72075186233409551 2025-05-29T15:22:33.787864Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:33.787901Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:33.787921Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:296:2279], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:33.787926Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:33.808248Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:33.808277Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5077: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:33.808287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:354: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-05-29T15:22:33.808371Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:296:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-05-29T15:22:33.808379Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:33.808383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:33.808406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:33.808410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:644: TTxCompleteDataErasureBSC: Data shred in BSC is completed 
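The shred exchange above is a polling loop: the schemeshard queries the BSC, receives Progress10k (10000 = 100%), reports the percentage, and re-schedules the request until Completed arrives. A compact sketch of that loop, with the field names taken from the log and the control flow inferred, not copied, from the source:

    #include <cstdint>
    #include <cstdio>

    struct TShredResponse {
        uint64_t CurrentGeneration = 0;
        bool Completed = false;
        uint32_t Progress10k = 0; // 10000 == 100%
    };

    // Returns true if another request to the BSC should be scheduled.
    bool HandleShredResponse(const TShredResponse& r) {
        if (r.Completed) {
            std::printf("Data shred in BSC is completed\n");
            return false;
        }
        std::printf("Progress data shred in BSC %u%%\n", r.Progress10k / 100);
        return true; // caller does ScheduleRequestToBSC: Interval# 1.000000s
    }

    int main() {
        HandleShredResponse({1, false, 5000});  // 50%, keep polling
        HandleShredResponse({1, true, 10000});  // done
    }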
2025-05-29T15:22:33.808423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:170: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 0.982000s, Timestamp# 1970-01-01T00:00:05.066000Z 2025-05-29T15:22:33.808428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:378: [RootDataErasureManager] Complete: Generation# 1, duration# 2 s 2025-05-29T15:22:33.809429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-05-29T15:22:33.809645Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:1351:3156], Recipient [1:296:2279]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:33.809656Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:33.809662Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:22:33.809696Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125519, Sender [1:277:2267], Recipient [1:296:2279]: NKikimrScheme.TEvDataErasureInfoRequest 2025-05-29T15:22:33.809702Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5074: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-05-29T15:22:33.809707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7753: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 |60.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |60.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |60.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] Test command err: 2025-05-29T15:22:15.476259Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.476283Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:15.479681Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-05-29T15:22:15.479733Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:15.479794Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:179:2193] 2025-05-29T15:22:15.480004Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-05-29T15:22:15.480029Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-05-29T15:22:15.480049Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-05-29T15:22:15.480068Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: 
[Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-05-29T15:22:15.480108Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:621: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-05-29T15:22:15.480122Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-05-29T15:22:15.480127Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:22:15.480132Z node 1 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-05-29T15:22:15.000000Z 2025-05-29T15:22:15.480137Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-05-29T15:22:15.480144Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:179:2193] 2025-05-29T15:22:15.480152Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-05-29T15:22:15.480159Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:22:15.815024Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src1|10c8b6fb-7a026aca-cd9c82d6-53c09593_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-05-29T15:22:15.815059Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:35: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk.
Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create distr tx with id = 4 and act no: 5 Create distr tx with id = 6 and act no: 7 2025-05-29T15:22:16.779320Z node 1 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-05-29T15:22:16.779348Z node 1 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-05-29T15:22:16.779355Z node 1 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-05-29T15:22:16.779365Z node 1 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 6 2025-05-29T15:22:18.039446Z node 1 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:18.039491Z node 1 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:18.039501Z node 1 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:18.039507Z node 1 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Wait batch of 1 completion Wait batch completion Expect no KV request 2025-05-29T15:22:18.039555Z node 1 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-05-29T15:22:18.039563Z node 1 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-05-29T15:22:18.039584Z node 1 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:18.039593Z node 1 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 Got batch complete: 1 Wait for tx 3 predicate failure 2025-05-29T15:22:18.273302Z node 1 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 2 2025-05-29T15:22:18.273325Z node 1 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2 2025-05-29T15:22:18.273335Z node 1 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:18.273346Z node 1 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 Got batch complete: 2 Wait for tx 4 predicate failure Wait batch of 3 completion Wait batch completion Expect no KV request Wait batch completion Wait for no tx committed 2025-05-29T15:22:18.517379Z node 1 :PERSQUEUE
DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:18.517394Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:18.517398Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:18.517401Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-05-29T15:22:18.517403Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:18.517405Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:18.517408Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-05-29T15:22:18.517410Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:18.517413Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait kv request Wait for commits Wait tx committed for tx 0 2025-05-29T15:22:18.751074Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 Wait tx committed for tx 2 2025-05-29T15:22:18.919056Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:18.919077Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:18.921354Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-05-29T15:22:18.921389Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:18.921422Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:179:2193] 2025-05-29T15:22:18.921540Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-05-29T15:22:18.921570Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-05-29T15:22:18.921585Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-05-29T15:22:18.921597Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-05-29T15:22:18.921623Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:621: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-05-29T15:22:18.921629Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 
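Each partition bootstrap in this test runs the same fixed chain of initializer steps and only reports "init complete" after the last one. A stub reproducing the step order visible in these logs (the real steps exchange KV requests between transitions):

    #include <cstdio>
    #include <string>
    #include <vector>

    int main() {
        const std::vector<std::string> steps = {
            "TInitConfigStep",     "TInitInternalFieldsStep",
            "TInitDiskStatusStep", "TInitMetaStep",
            "TInitInfoRangeStep",  "TInitDataRangeStep",
            "TInitDataStep",       "TInitEndWriteTimestampStep",
        };
        for (const std::string& s : steps)
            std::printf("Start initializing step %s\n", s.c_str());
        std::printf("Initializing completed.\n");
    }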
2025-05-29T15:22:18.921632Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:22:18.921636Z node 2 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-05-29T15:22:18.000000Z 2025-05-29T15:22:18.921639Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:55: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-05-29T15:22:18.921643Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:179:2193] 2025-05-29T15:22:18.921649Z node 2 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-05-29T15:22:18.921654Z node 2 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:22:19.256767Z ... t kv request 2025-05-29T15:22:30.773912Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:30.773932Z node 4 :PERSQUEUE DEBUG: partition.cpp:2383: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Partition 0 Consumer 'client-1' Bad request (gap) Offset 3 Begin 0 Got batch complete: 1 Wait batch completion Wait kv request 2025-05-29T15:22:30.773979Z node 4 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:30.773985Z node 4 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:30.773990Z node 4 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:30.773994Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:30.773998Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-05-29T15:22:30.774003Z node 4 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:30.774007Z node 4 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Create distr tx with id = 8 and act no: 9 2025-05-29T15:22:30.774049Z node 4 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-05-29T15:22:30.804498Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:31.859256Z node 4 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 3 Wait kv request 
2025-05-29T15:22:31.859327Z node 4 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-05-29T15:22:31.859334Z node 4 :PERSQUEUE DEBUG: partition.cpp:3322: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-05-29T15:22:31.859347Z node 4 :PERSQUEUE DEBUG: partition.cpp:3322: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=incorrect offset range (gap) 2025-05-29T15:22:31.859378Z node 4 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:31.859383Z node 4 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:31.859391Z node 4 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:31.859395Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:31.859398Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-05-29T15:22:31.859402Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-05-29T15:22:31.859405Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-05-29T15:22:31.859409Z node 4 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:31.859413Z node 4 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait immediate tx complete 10 2025-05-29T15:22:31.879777Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 10 Wait immediate tx complete 11 Got propose result: Origin: 72057594037927937 Status: ABORTED TxId: 11 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } 2025-05-29T15:22:32.156117Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:32.156142Z node 5 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:32.159933Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-05-29T15:22:32.160013Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:32.160071Z node 5 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:177:2191] 2025-05-29T15:22:32.160279Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-05-29T15:22:32.160312Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: 
[Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-05-29T15:22:32.160335Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-05-29T15:22:32.160484Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-05-29T15:22:32.160537Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:621: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-05-29T15:22:32.160547Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-05-29T15:22:32.160552Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:22:32.160558Z node 5 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-05-29T15:22:32.000000Z 2025-05-29T15:22:32.160564Z node 5 :PERSQUEUE DEBUG: partition_init.cpp:55: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-05-29T15:22:32.160571Z node 5 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:177:2191] 2025-05-29T15:22:32.160581Z node 5 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-05-29T15:22:32.160590Z node 5 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:22:32.160610Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:32.160617Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:821: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-05-29T15:22:32.160625Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-05-29T15:22:32.160693Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:736: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-05-29T15:22:32.160714Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:936: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-05-29T15:22:32.160721Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:960: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request. Created Tx with id 0 as act# 0 Created Tx with id 1 as act# 1 2025-05-29T15:22:33.489855Z node 5 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-05-29T15:22:33.489894Z node 5 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 1 Got batch complete: 1 Wait batch completion Got batch complete: 1 Wait batch completion Wait kv request 2025-05-29T15:22:33.724340Z node 5 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 1 2025-05-29T15:22:33.724367Z node 5 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 1 2025-05-29T15:22:33.724412Z node 5 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:33.724418Z node 5 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:33.724423Z node 5 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:33.724427Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:33.724431Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-05-29T15:22:33.724435Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-05-29T15:22:33.724438Z node 5 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-05-29T15:22:33.724442Z node 5 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 
72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:33.724447Z node 5 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait tx committed for tx 1 2025-05-29T15:22:33.744854Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait for no tx committed |60.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] |60.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TestDataErasure::SimpleDataErasureTestForTables [GOOD] >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK |60.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] Test command err: 2025-05-29T15:22:16.000383Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:16.000410Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:16.003763Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-05-29T15:22:16.003808Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:16.003861Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:179:2193] 2025-05-29T15:22:16.004020Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-05-29T15:22:16.004044Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-05-29T15:22:16.004066Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-05-29T15:22:16.004083Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-05-29T15:22:16.004111Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:621: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-05-29T15:22:16.004121Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-05-29T15:22:16.004124Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:22:16.004134Z node 1 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-05-29T15:22:16.000000Z 2025-05-29T15:22:16.004137Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-05-29T15:22:16.004143Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:179:2193] 2025-05-29T15:22:16.004149Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-05-29T15:22:16.004177Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:22:16.340118Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src1|8e674d76-d36e7fe6-97524198-143cbfb3_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-05-29T15:22:16.340168Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:35: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 Create immediate tx with id = 3 and act no: 4 Create immediate tx with id = 6 and act no: 7 2025-05-29T15:22:17.315746Z node 1 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-05-29T15:22:18.555751Z node 1 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Wait batch completion 2025-05-29T15:22:18.555796Z node 1 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:18.555802Z node 1 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:18.555824Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 1 partNo 0 2025-05-29T15:22:18.555854Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 20 PartNo 0 PackedSize 84 count 1 nextOffset 21 batches 1 2025-05-29T15:22:18.555862Z node 1 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-05-29T15:22:18.555867Z node 1 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:18.555871Z node 1 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 20 PartNo 0 PackedSize 84 count 1 nextOffset 21 batches 1 2025-05-29T15:22:18.557473Z node 1 :PERSQUEUE DEBUG: partition.cpp:3322: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule 
TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-05-29T15:22:18.557493Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 3 partNo 0 2025-05-29T15:22:18.557563Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1295: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob sourceId 'src1' seqNo 3 partNo 0 result is x0000000000_00000000000000000020_00000_0000000001_00000 size 70 2025-05-29T15:22:18.557572Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1049: [PQ: 72057594037927937, Partition: 0, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 old key x0000000000_00000000000000000020_00000_0000000001_00000 new key d0000000000_00000000000000000020_00000_0000000001_00000 size 70 WTime 10139 2025-05-29T15:22:18.557580Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 3 partNo 0 FormedBlobsCount 1 NewHead: Offset 50 PartNo 0 PackedSize 84 count 1 nextOffset 51 batches 1 2025-05-29T15:22:18.557586Z node 1 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-05-29T15:22:18.557590Z node 1 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:18.557593Z node 1 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 84 count 1 nextOffset 51 batches 1 2025-05-29T15:22:18.557597Z node 1 :PERSQUEUE DEBUG: partition.cpp:3322: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-05-29T15:22:18.557625Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1623: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 50,1 HeadOffset 1 endOffset 1 curOffset 51 d0000000000_00000000000000000050_00000_0000000001_00000| size 70 WTime 10139 2025-05-29T15:22:18.557640Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:18.557643Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:18.557646Z node 1 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-05-29T15:22:18.557649Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:18.557651Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000020_00000_0000000001_00000 2025-05-29T15:22:18.557654Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-05-29T15:22:18.557656Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: 
StateIdle] d0000000000_00000000000000000050_00000_0000000001_00000| 2025-05-29T15:22:18.557658Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:18.557660Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:18.557662Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-05-29T15:22:18.557665Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:18.557667Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 5 Got KV request Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 3 2025-05-29T15:22:18.578001Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 34 WriteNewSizeFromSupportivePartitions# 2 2025-05-29T15:22:18.578019Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:18.578028Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 1 is already written 2025-05-29T15:22:18.578034Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:18.578038Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 1 is already written Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 3 Wait immediate tx complete 6 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 6 Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 10 and act no: 11 Create distr tx with id = 12 and act no: 13 2025-05-29T15:22:19.744249Z node 1 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-05-29T15:22:19.744275Z node 1 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partitio ... ode 2 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 72 is stored on disk 2025-05-29T15:22:32.118803Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:32.118808Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 73 is stored on disk 2025-05-29T15:22:32.118813Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-05-29T15:22:32.118818Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 74 is stored on disk 2025-05-29T15:22:32.118823Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:32.118829Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 75 is stored on disk 2025-05-29T15:22:32.118834Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:32.118843Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 76 is stored on disk 2025-05-29T15:22:32.118851Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:32.118856Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 77 is stored on disk 2025-05-29T15:22:32.118861Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:32.118867Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 78 is stored on disk 2025-05-29T15:22:32.118871Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-05-29T15:22:32.118877Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 79 is stored on disk 2025-05-29T15:22:32.118917Z node 2 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:32.118921Z node 2 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:32.118927Z node 2 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000020_00000_0000000005_00000|, d0000000000_00000000000000000020_00000_0000000005_00000|] 2025-05-29T15:22:32.118932Z node 2 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:32.118936Z node 2 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:32.118939Z node 2 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:32.118944Z node 2 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 10 and act no: 11 Create distr tx with id = 12 and act no: 13 2025-05-29T15:22:32.119012Z node 2 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-05-29T15:22:32.119032Z node 2 :PERSQUEUE DEBUG: partition.cpp:1127: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 12 2025-05-29T15:22:33.019620Z node 2 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:33.019673Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait batch completion 2025-05-29T15:22:33.019717Z node 2 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:33.019727Z node 2 :PERSQUEUE DEBUG: partition.cpp:1363: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-05-29T15:22:33.019740Z node 2 :PERSQUEUE DEBUG: partition.cpp:3322: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=MinSeqNo violation failure on src2 Got batch complete: 3 2025-05-29T15:22:33.233486Z node 2 :PERSQUEUE DEBUG: partition.cpp:1173: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 12 2025-05-29T15:22:33.233519Z node 2 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 12 2025-05-29T15:22:33.233533Z node 2 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:33.233540Z node 2 :PERSQUEUE 
DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 70 PartNo 0 PackedSize 299 count 10 nextOffset 80 batches 1, NewHead=Offset 80 PartNo 0 PackedSize 0 count 0 nextOffset 80 batches 0 2025-05-29T15:22:33.233592Z node 2 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:33.233595Z node 2 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:33.233598Z node 2 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:33.233601Z node 2 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrcId2 2025-05-29T15:22:33.233603Z node 2 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:33.233605Z node 2 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:33.233607Z node 2 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-05-29T15:22:33.233610Z node 2 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:33.233613Z node 2 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 10 2025-05-29T15:22:33.264143Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 Got propose result: Origin: 72057594037927937 Status: ABORTED TxId: 10 Errors { Kind: BAD_REQUEST Reason: "MinSeqNo violation failure on src2" } Wait tx committed for tx 12 2025-05-29T15:22:33.355786Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:33.355808Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:33.358796Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [3:180:2194] 2025-05-29T15:22:33.359011Z node 3 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-05-29T15:22:33.000000Z 2025-05-29T15:22:33.359017Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [3:180:2194] 2025-05-29T15:22:33.801139Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:33.801167Z node 4 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:33.804735Z node 4 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [4:180:2194] 2025-05-29T15:22:33.805044Z node 4 :PERSQUEUE INFO: partition_init.cpp:774: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:22:33.805053Z node 4 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [4:180:2194] 2025-05-29T15:22:34.494174Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:34.494207Z node 5 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:34.497772Z node 5 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [5:178:2192] >>>> ADD BLOB 0 writeTimestamp=2025-05-29T15:22:34.488094Z >>>> ADD BLOB 1 writeTimestamp=2025-05-29T15:22:34.488103Z >>>> ADD BLOB 2 writeTimestamp=2025-05-29T15:22:34.488108Z >>>> ADD BLOB 3 writeTimestamp=2025-05-29T15:22:34.488115Z >>>> ADD BLOB 4 writeTimestamp=2025-05-29T15:22:34.488121Z >>>> ADD BLOB 5 writeTimestamp=2025-05-29T15:22:34.488127Z >>>> ADD BLOB 6 writeTimestamp=2025-05-29T15:22:34.488132Z >>>> ADD BLOB 7 writeTimestamp=2025-05-29T15:22:34.488137Z >>>> ADD BLOB 8 writeTimestamp=2025-05-29T15:22:34.488143Z >>>> ADD BLOB 9 writeTimestamp=2025-05-29T15:22:34.488149Z 2025-05-29T15:22:34.498214Z node 5 :PERSQUEUE INFO: partition_init.cpp:785: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-05-29T15:22:34.000000Z 2025-05-29T15:22:34.498226Z node 5 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [5:178:2192] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdateBadType Test command err: Trying to start YDB, gRPC: 23353, MsgBus: 5558 2025-05-29T15:22:30.574722Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888402434890589:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:30.575152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0023f4/r3tmp/tmpBctPsK/pdisk_1.dat 2025-05-29T15:22:30.647980Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23353, node 1 2025-05-29T15:22:30.662839Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:30.662855Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:30.662857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:30.662901Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:30.676980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:30.677012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:30.678188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5558 TClient is connected to server localhost:5558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:30.726991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:30.733892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:30.751335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:30.775588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:30.786427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:30.995313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888402434892192:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:30.995339Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:31.047121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:31.058102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:31.073402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:31.098855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:31.117588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:31.137374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:31.148555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:31.171755Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888406729860140:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:31.171779Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:31.171852Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888406729860145:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:31.172832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:31.179023Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888406729860147:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:31.260805Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888406729860198:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:31.331086Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888406729860214:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:31.331204Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Yzc1NDY4YWItNTAwOGMyYWItY2U4MDZlN2UtNDRmMmI2NjE=, ActorId: [1:7509888402434892174:2401], ActorState: ExecuteState, TraceId: 01jwea6gw31g7vsaw50j17h3ew, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:31.331808Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15E4ED75 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15E45D76 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x15FE8506 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x284B1732 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x284B1032 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x284D254C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x284D254C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x284D254C 8. /-S/util/thread/pool.h:71: Process @ 0x284D254C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15E566F9 10. /-S/util/thread/factory.h:15: Execute @ 0x15E550E9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15E550E9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15E5055C 13. ??:0: ?? @ 0x7F9AE718CAC2 14. ??:0: ?? @ 0x7F9AE721E84F >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectRow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Limit_On_The_Number_Of_Transactons [GOOD] Test command err: 2025-05-29T15:22:14.501163Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:22:14.502134Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:22:14.502186Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037927937] doesn't have tx info 2025-05-29T15:22:14.502195Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:22:14.502198Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-05-29T15:22:14.502202Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:22:14.502212Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:14.502219Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:14.511410Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:207:2212], now have 1 active actors on pipe 2025-05-29T15:22:14.511450Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:22:14.513612Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" 
Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-05-29T15:22:14.514394Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-05-29T15:22:14.514415Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:14.514543Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-05-29T15:22:14.514570Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:22:14.514626Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:14.514679Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:215:2218] 2025-05-29T15:22:14.514872Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [topic:0:Initializer] Initializing completed. 2025-05-29T15:22:14.514882Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:215:2218] 2025-05-29T15:22:14.514892Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:22:14.515046Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:22:14.515061Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-05-29T15:22:14.515065Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-05-29T15:22:14.515068Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2025-05-29T15:22:14.515071Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2025-05-29T15:22:14.515098Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:14.515102Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:14.515106Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:14.515110Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:14.515113Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-05-29T15:22:14.515117Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-05-29T15:22:14.515120Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cconsumer 2025-05-29T15:22:14.515124Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uconsumer 2025-05-29T15:22:14.515129Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:14.515133Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:22:14.515157Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:14.515162Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:14.515193Z node 1 :PERSQUEUE DEBUG: read.h:262: CacheProxy. 
Passthrough write request to KV 2025-05-29T15:22:14.515876Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:14.515958Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:222:2223], now have 1 active actors on pipe 2025-05-29T15:22:14.516065Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:225:2225], now have 1 active actors on pipe 2025-05-29T15:22:14.516270Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3222: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 178 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-05-29T15:22:14.516282Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3383: [PQ: 72057594037927937] distributed transaction 2025-05-29T15:22:14.516303Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3697: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-05-29T15:22:14.516310Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-05-29T15:22:14.516315Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-05-29T15:22:14.516321Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3922: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-05-29T15:22:14.516326Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-05-29T15:22:14.516334Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3818: [PQ: 72057594037927937] write key for TxId 67890 2025-05-29T15:22:14.516358Z node 1 :PERSQUEUE DEBUG: transaction.cpp:374: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 178 RawX2: 4294969488 } Partitions { } 2025-05-29T15:22:14.516373Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3635: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-05-29T15:22:14.517103Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1231: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-05-29T15:22:14.517116Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:14.517119Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-05-29T15:22:14.517122Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-05-29T15:22:14.517164Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3222: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 178 RawX2: 4294969488 } TxId: 67891 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-05-29T15:22:14.517172Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3383: [PQ: 72057594037927937] distributed transaction 2025-05-29T15:22:14.517178Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3697: [PQ: 72057594037927937] Propose TxId 67891, ... 
node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68858, State PREPARING 2025-05-29T15:22:33.495392Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68858, NewState PREPARED 2025-05-29T15:22:33.495395Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495399Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68859, State PREPARING 2025-05-29T15:22:33.495403Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68859, NewState PREPARED 2025-05-29T15:22:33.495406Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495410Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68860, State PREPARING 2025-05-29T15:22:33.495414Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68860, NewState PREPARED 2025-05-29T15:22:33.495417Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495421Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68861, State PREPARING 2025-05-29T15:22:33.495424Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68861, NewState PREPARED 2025-05-29T15:22:33.495428Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495432Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68862, State PREPARING 2025-05-29T15:22:33.495437Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68862, NewState PREPARED 2025-05-29T15:22:33.495441Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495444Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68863, State PREPARING 2025-05-29T15:22:33.495448Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68863, NewState PREPARED 2025-05-29T15:22:33.495451Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495455Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68864, State PREPARING 2025-05-29T15:22:33.495459Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68864, NewState PREPARED 2025-05-29T15:22:33.495463Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495466Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68865, State PREPARING 2025-05-29T15:22:33.495470Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68865, NewState PREPARED 2025-05-29T15:22:33.495474Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495478Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68866, State PREPARING 2025-05-29T15:22:33.495481Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68866, NewState PREPARED 2025-05-29T15:22:33.495485Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495489Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68867, State PREPARING 2025-05-29T15:22:33.495492Z node 6 
:PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68867, NewState PREPARED 2025-05-29T15:22:33.495496Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495500Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68868, State PREPARING 2025-05-29T15:22:33.495503Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68868, NewState PREPARED 2025-05-29T15:22:33.495507Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495510Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68869, State PREPARING 2025-05-29T15:22:33.495514Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68869, NewState PREPARED 2025-05-29T15:22:33.495518Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495522Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68870, State PREPARING 2025-05-29T15:22:33.495525Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68870, NewState PREPARED 2025-05-29T15:22:33.495529Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495533Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68871, State PREPARING 2025-05-29T15:22:33.495536Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68871, NewState PREPARED 2025-05-29T15:22:33.495540Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495544Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68872, State PREPARING 2025-05-29T15:22:33.495547Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68872, NewState PREPARED 2025-05-29T15:22:33.495551Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495556Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68873, State PREPARING 2025-05-29T15:22:33.495560Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68873, NewState PREPARED 2025-05-29T15:22:33.495564Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495567Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68874, State PREPARING 2025-05-29T15:22:33.495571Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68874, NewState PREPARED 2025-05-29T15:22:33.495575Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495578Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68875, State PREPARING 2025-05-29T15:22:33.495582Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68875, NewState PREPARED 2025-05-29T15:22:33.495586Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495590Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68876, State PREPARING 2025-05-29T15:22:33.495593Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68876, NewState PREPARED 2025-05-29T15:22:33.495597Z node 6 
:PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495601Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68877, State PREPARING 2025-05-29T15:22:33.495604Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68877, NewState PREPARED 2025-05-29T15:22:33.495608Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495612Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68878, State PREPARING 2025-05-29T15:22:33.495615Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68878, NewState PREPARED 2025-05-29T15:22:33.495619Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495622Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68879, State PREPARING 2025-05-29T15:22:33.495626Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68879, NewState PREPARED 2025-05-29T15:22:33.495629Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495633Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68880, State PREPARING 2025-05-29T15:22:33.495637Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68880, NewState PREPARED 2025-05-29T15:22:33.495640Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495643Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68881, State PREPARING 2025-05-29T15:22:33.495647Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68881, NewState PREPARED 2025-05-29T15:22:33.495650Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495654Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68882, State PREPARING 2025-05-29T15:22:33.495657Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68882, NewState PREPARED 2025-05-29T15:22:33.495661Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495664Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68883, State PREPARING 2025-05-29T15:22:33.495668Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68883, NewState PREPARED 2025-05-29T15:22:33.495673Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495676Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68884, State PREPARING 2025-05-29T15:22:33.495679Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68884, NewState PREPARED 2025-05-29T15:22:33.495683Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495686Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68885, State PREPARING 2025-05-29T15:22:33.495689Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68885, NewState PREPARED 2025-05-29T15:22:33.495694Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495697Z node 6 
:PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68886, State PREPARING 2025-05-29T15:22:33.495700Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68886, NewState PREPARED 2025-05-29T15:22:33.495703Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495706Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68887, State PREPARING 2025-05-29T15:22:33.495710Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68887, NewState PREPARED 2025-05-29T15:22:33.495714Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495719Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68888, State PREPARING 2025-05-29T15:22:33.495723Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68888, NewState PREPARED 2025-05-29T15:22:33.495727Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-05-29T15:22:33.495730Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72057594037927937] TxId 68889, State PREPARING 2025-05-29T15:22:33.495733Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72057594037927937] TxId 68889, NewState PREPARED >> TMiniKQLProgramBuilderTest::TestSelectRow [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterName [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] |60.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::SimpleDataErasureTestForTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:32.025597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:32.025625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:32.025630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:32.025636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:32.025648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:32.025652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:32.025662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:32.025675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:32.025811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:32.025900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:32.040618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:32.040650Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:32.051612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:32.051793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:32.051866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:32.058675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:32.058994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:32.059142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, 
at schemeshard: 72057594046678944 2025-05-29T15:22:32.059217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:32.059780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:32.059840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:32.060139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:32.060152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:32.060178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:32.060194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:32.060200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:32.060240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.062903Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:32.079336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:32.079405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.079468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:32.079513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:32.079524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.080138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.080165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: 
//MyRoot 2025-05-29T15:22:32.080203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.080214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:32.080219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:32.080225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:32.080726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.080740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:32.080747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:32.081111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.081122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.081129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.081145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:32.081862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:32.082236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:32.082293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:32.082474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.082494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:32.082500Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.082544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:32.082549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.082576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:32.082585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:32.082970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:32.082976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:32.083012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-05-29T15:22:33.932790Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877760, Sender [1:1965:3632], Recipient [1:296:2279]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:1966:3633] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:22:33.932796Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4972: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:22:33.932801Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5703: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-05-29T15:22:33.932834Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:296:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-05-29T15:22:33.932839Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:33.932843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:33.932851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:33.932857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:648: TTxCompleteDataErasureBSC: Progress data shred in BSC 0% 2025-05-29T15:22:33.932867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-05-29T15:22:33.932874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:348: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-05-29T15:22:34.385693Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:34.385726Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:34.385742Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:459:2410]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:34.385745Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:34.385754Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:836:2719]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:34.385757Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:34.385767Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:296:2279], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:34.385771Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:34.385785Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:459:2410], Recipient [1:459:2410]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:34.385788Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:34.385798Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:836:2719], Recipient [1:836:2719]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:34.385802Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:34.429456Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:34.429489Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5077: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:34.429497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:354: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-05-29T15:22:34.429603Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:296:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-05-29T15:22:34.429610Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 
2025-05-29T15:22:34.429615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:34.429638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:34.429653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:648: TTxCompleteDataErasureBSC: Progress data shred in BSC 50% 2025-05-29T15:22:34.429672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-05-29T15:22:34.429684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:348: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-05-29T15:22:34.813539Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:836:2719]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:34.813596Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:34.813616Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:34.813621Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:34.813632Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:459:2410]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:34.813637Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:34.813648Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:296:2279], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:34.813653Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:34.813670Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:459:2410], Recipient [1:459:2410]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:34.813674Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:34.813686Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:836:2719], Recipient [1:836:2719]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:34.813691Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:34.857138Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:296:2279]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:34.857174Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5077: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:34.857183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:354: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-05-29T15:22:34.857312Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:296:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-05-29T15:22:34.857322Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:34.857328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:34.857355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:34.857360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:644: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-05-29T15:22:34.857374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:170: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 0.934000s, Timestamp# 1970-01-01T00:00:05.114000Z 2025-05-29T15:22:34.857381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:378: [RootDataErasureManager] Complete: Generation# 1, duration# 2 s 2025-05-29T15:22:34.858371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-05-29T15:22:34.858548Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:1985:3652], Recipient [1:296:2279]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:34.858560Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:34.858566Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:22:34.858599Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125519, Sender [1:277:2267], Recipient [1:296:2279]: NKikimrScheme.TEvDataErasureInfoRequest 2025-05-29T15:22:34.858604Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5074: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-05-29T15:22:34.858610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7753: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 |60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> TestDataErasure::SimpleDataErasureTestForAllSupportedObjects [GOOD] |60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> 
TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK [FAIL] >> TopicAutoscaling::Simple_AutoscaleAwareSDK |60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableUseBeforeCreate Test command err: Trying to start YDB, gRPC: 12030, MsgBus: 13032 2025-05-29T15:22:31.008355Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888408101354819:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:31.008943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0023ef/r3tmp/tmpCReEHj/pdisk_1.dat 2025-05-29T15:22:31.098290Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12030, node 1 2025-05-29T15:22:31.120796Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:31.120810Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:31.120812Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:31.120861Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13032 2025-05-29T15:22:31.158105Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:31.158129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:31.159207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13032 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:31.194329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:31.196901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:31.201335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:31.273716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:31.300251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:31.313983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:31.434144Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888408101356277:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:31.434169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:31.508827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:31.518345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:31.575736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:31.635499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:31.649569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:31.663283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:22:31.676016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:22:31.692269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888408101356933:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:31.692302Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:31.692381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888408101356938:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:31.693354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:22:31.703431Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888408101356940:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:22:31.792331Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888408101356991:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:31.901078Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888408101357000:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:31.901215Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWZiYmYxMGItOThhN2JjMmMtN2U1Y2EyN2EtNmJkNmRlNDY=, ActorId: [1:7509888408101356259:2401], ActorState: ExecuteState, TraceId: 01jwea6hcb2ym4f4hzmzxkcs4s, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:22:31.902386Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15E4ED75 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15E45D76 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x15FE8506 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x284B1732 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x284B1032 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x284D254C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x284D254C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x284D254C 8. /-S/util/thread/pool.h:71: Process @ 0x284D254C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15E566F9 10. /-S/util/thread/factory.h:15: Execute @ 0x15E550E9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15E550E9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15E5055C 13. ??:0: ?? @ 0x7F0DB7FECAC2 14. ??:0: ?? @ 0x7F0DB807E84F |60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::SimpleDataErasureTestForAllSupportedObjects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:32.544425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:32.544460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:32.544465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:32.544470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:32.544481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:32.544484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:32.544493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:32.544507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:32.544618Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:32.544690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:32.556793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:32.556816Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:32.559348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:32.559465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:32.559529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:32.561522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:32.561768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:32.561903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.561953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:32.562433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:32.562484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:32.562734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:32.562760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:32.562784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:32.562792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:32.562798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:32.562833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.564088Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:32.587667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-05-29T15:22:32.587752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.587817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:32.587884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:32.587897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.591165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.591214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:32.591268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.591280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:32.591288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:32.591294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:32.591963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.591977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:32.591983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:32.592416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.592427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.592433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.592440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:32.593160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: 
true } CoordinatorID: 72057594046316545 2025-05-29T15:22:32.599558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:32.599640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:32.599826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.599862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:32.599870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.599940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:32.599949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.599984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:32.599997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:32.600691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:32.600701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:32.600740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
dRequestToBSC: Generation# 1 2025-05-29T15:22:35.218841Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:296:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-05-29T15:22:35.218847Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:35.218852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:35.218875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:35.218897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:648: TTxCompleteDataErasureBSC: Progress data shred in BSC 50% 2025-05-29T15:22:35.218918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-05-29T15:22:35.218930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:348: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-05-29T15:22:35.249832Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:459:2410]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:35.249868Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:35.249896Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:459:2410], Recipient [1:459:2410]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:35.249902Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:35.249950Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [1:804:2689], Recipient [1:459:2410]: NKikimrSchemeOp.TDescribePath PathId: 3 SchemeshardId: 72075186233409546 2025-05-29T15:22:35.249956Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:22:35.249992Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-05-29T15:22:35.250076Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72075186233409546 describe pathId 3 took 63us result status StatusSuccess 2025-05-29T15:22:35.250264Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Database1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409552 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 10 } YdbDatabasePath: "/MyRoot/Database1" } Partitions { PartitionId: 0 TabletId: 72075186233409551 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409552 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2025-05-29T15:22:35.312047Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:967:2823]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:35.312078Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:35.312124Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:967:2823], Recipient [1:967:2823]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:35.312130Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:35.312182Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [1:1388:3171], Recipient [1:967:2823]: NKikimrSchemeOp.TDescribePath PathId: 3 SchemeshardId: 72075186233409553 2025-05-29T15:22:35.312188Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:22:35.312220Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72075186233409553, at schemeshard: 72075186233409553 2025-05-29T15:22:35.312309Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72075186233409553 describe pathId 3 took 71us result status StatusSuccess 2025-05-29T15:22:35.312465Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Database2/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72075186233409553 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 108 CreateStep: 350 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409559 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { 
PartitionConfig { LifetimeSeconds: 10 } YdbDatabasePath: "/MyRoot/Database2" } Partitions { PartitionId: 0 TabletId: 72075186233409558 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409559 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 SchemeShard: 72075186233409553 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72075186233409553, at schemeshard: 72075186233409553 2025-05-29T15:22:35.610159Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:35.610198Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:35.610217Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:296:2279], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:35.610222Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:35.664106Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:35.664153Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5077: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:35.664164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:354: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-05-29T15:22:35.664257Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:296:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-05-29T15:22:35.664265Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:35.664271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:35.664300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:35.664307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:644: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-05-29T15:22:35.664322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:170: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 0.923000s, Timestamp# 1970-01-01T00:00:05.125000Z 2025-05-29T15:22:35.664331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:378: [RootDataErasureManager] Complete: Generation# 1, duration# 2 s
2025-05-29T15:22:35.665414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false
2025-05-29T15:22:35.665615Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:2431:4033], Recipient [1:296:2279]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:35.665626Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:35.665631Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944
2025-05-29T15:22:35.665663Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125519, Sender [1:277:2267], Recipient [1:296:2279]: NKikimrScheme.TEvDataErasureInfoRequest
2025-05-29T15:22:35.665671Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5074: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest
2025-05-29T15:22:35.665675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7753: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944
>> TableCreator::CreateTables
|60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest
|60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest
>> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK [FAIL]
>> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK
|60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest
>> TSchemeShardMoveTest::MoveMigratedTable
>> TSchemeShardMoveTest::ResetCachedPath
>> TopicAutoscaling::Simple_AutoscaleAwareSDK [FAIL]
>> TopicAutoscaling::Simple_PQv1
>> TSchemeShardMoveTest::Replace
>> TSchemeShardMoveTest::Chain
>> TestDataErasure::DataErasureWithSplit [GOOD]
>> TSchemeShardMoveTest::MoveMigratedTable [GOOD]
>> TSchemeShardMoveTest::MoveOldTableWithIndex
>> TSchemeShardMoveTest::Reject
|60.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing
|60.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing
|60.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/test-results/unittest/{meta.json ... results_accumulator.log}
>> TableCreator::CreateTables [GOOD]
>> TSchemeShardMoveTest::TwoTables
|60.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing
>> TSchemeShardMoveTest::Boot
>> TSchemeShardMoveTest::MoveTableForBackup
>> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb
>> TSchemeShardMoveTest::MoveIndex
>> TSchemeShardMoveTest::MoveIndexSameDst
>> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK [FAIL]
>> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1
>> TSchemeShardMoveTest::Boot [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload [GOOD]
>> TSchemeShardMoveTest::AsyncIndexWithSyncInFly
>> TMiniKQLEngineFlatTest::TestUpdateRowNoShards [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowManyShards [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload [GOOD]
>> TMiniKQLEngineFlatTest::TestTopSortPushdownPk
>> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue [GOOD]
>> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportListTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportIntegralYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportOptionalYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportListYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD]
>> TSchemeShardMoveTest::ResetCachedPath [GOOD]
>> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD]
>> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDataTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDictTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportBoolYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDoubleYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDecimalYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDictYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD]
Test command err:
2025-05-29T15:22:36.320477Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888426992106331:2062];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:22:36.320500Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001c16/r3tmp/tmpqVXLVK/pdisk_1.dat
2025-05-29T15:22:36.397052Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were
not loaded 2025-05-29T15:22:36.397403Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888426992106311:2079] 1748532156320315 != 1748532156320318 TClient is connected to server localhost:14450 TServer::EnableGrpc on GrpcPort 22348, node 1 2025-05-29T15:22:36.423174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:36.423204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:36.424295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:36.430990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:36.431002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:36.431005Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:36.431046Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:36.462034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:36.465023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-05-29T15:22:36.504240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-05-29T15:22:36.504527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureWithSplit [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:66:2058] recipient: [1:59:2100]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:66:2058] recipient: [1:59:2100]
Leader for TabletID 72057594046678944 is [1:70:2104] sender: [1:74:2058] recipient: [1:59:2100]
2025-05-29T15:22:31.282797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:22:31.282819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:22:31.282823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:22:31.282827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:22:31.282837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:22:31.282839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:22:31.282846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:22:31.282858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:22:31.282939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:22:31.283022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:22:31.293377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:22:31.293399Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:22:31.293484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:22:31.293514Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:31.293584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:31.294455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:31.294493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:31.294582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:31.294617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:31.294768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:31.294809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:31.295005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:31.295011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:31.295019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:31.295024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:31.295028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:31.295056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.295398Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:70:2104] sender: [1:149:2058] recipient: [1:16:2063] 2025-05-29T15:22:31.313296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:31.313388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.313460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:31.313514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2025-05-29T15:22:31.313526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.313776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:31.313797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:31.313857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.313865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:31.313871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:31.313876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:31.313940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.313946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:31.313950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:31.313992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.313998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.314003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:31.314010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:31.314585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:31.314666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:31.314731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send 
Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:31.314943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:31.314963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 76 RawX2: 4294969404 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:31.314970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:31.315033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:31.315041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:31.315075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:31.315096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:31.315204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:31.315210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:31.315276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:31 ... 
-29T15:22:36.668869Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269553162, Sender [1:1001:2862], Recipient [1:280:2238]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409550 TableLocalId: 2 Generation: 2 Round: 2 TableStats { DataSize: 5019511 RowCount: 49 IndexSize: 2213 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 2213 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 549 Memory: 92963 Storage: 5022813 } ShardState: 2 UserTablePartOwners: 72075186233409550 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-05-29T15:22:36.668887Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4914: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-05-29T15:22:36.668901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] state 'Ready' dataSize 5019511 rowCount 49 cpuUsage 0.0549 2025-05-29T15:22:36.668912Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:570: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] raw table stats: DataSize: 5019511 RowCount: 49 IndexSize: 2213 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 2213 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-05-29T15:22:36.668918Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:610: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-05-29T15:22:36.668938Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269553162, Sender [1:1004:2864], Recipient [1:280:2238]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 2 Generation: 2 Round: 2 TableStats { DataSize: 5121950 RowCount: 50 IndexSize: 2258 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 438 Memory: 92987 Storage: 5125318 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-05-29T15:22:36.668944Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4914: StateWork, processing 
event TEvDataShard::TEvPeriodicTableStats 2025-05-29T15:22:36.668947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] state 'Ready' dataSize 5121950 rowCount 50 cpuUsage 0.0438 2025-05-29T15:22:36.668955Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:570: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] raw table stats: DataSize: 5121950 RowCount: 50 IndexSize: 2258 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-05-29T15:22:36.679151Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:36.679176Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:36.679192Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:280:2238], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:36.679195Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:36.701295Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:36.701320Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5077: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:36.701326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:354: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-05-29T15:22:36.701352Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-05-29T15:22:36.701356Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5053: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-05-29T15:22:36.701360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72075186233409546, queue size# 2 2025-05-29T15:22:36.701379Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:601: Will execute TTxStoreStats, queue# 2 2025-05-29T15:22:36.701383Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:610: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-05-29T15:22:36.701415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 2 shard idx 72075186233409546:5 data size 5019511 row count 49 2025-05-29T15:22:36.701434Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72075186233409546:5 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 49, DataSize 5019511 2025-05-29T15:22:36.701439Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:62: BuildStatsForCollector: datashardId 72075186233409550, followerId 0 2025-05-29T15:22:36.701456Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:235: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:5 with partCount# 1, rowCount# 49, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72075186233409546 2025-05-29T15:22:36.701480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:485: Do not want to split tablet 72075186233409550 2025-05-29T15:22:36.701490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 2 shard idx 72075186233409546:6 data size 5121950 row count 50 2025-05-29T15:22:36.701495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409551 maps to shardIdx: 72075186233409546:6 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 50, DataSize 5121950 2025-05-29T15:22:36.701497Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:62: BuildStatsForCollector: datashardId 72075186233409551, followerId 0 2025-05-29T15:22:36.701501Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:235: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:6 with partCount# 1, rowCount# 50, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72075186233409546 2025-05-29T15:22:36.701505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:485: Do not want to split tablet 72075186233409551 2025-05-29T15:22:36.701519Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2025-05-29T15:22:36.701587Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:186:2178], Recipient [1:184:2176]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-05-29T15:22:36.701592Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:36.701594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:36.701601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:36.701604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:644: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-05-29T15:22:36.701610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:170: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 19.899500s, Timestamp# 1970-01-01T00:01:20.100500Z 2025-05-29T15:22:36.701627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:378: [RootDataErasureManager] Complete: Generation# 1, duration# 30 s
2025-05-29T15:22:36.701762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false
2025-05-29T15:22:36.702177Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:1172:3007], Recipient [1:184:2176]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:36.702181Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-05-29T15:22:36.702185Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944
2025-05-29T15:22:36.702207Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125519, Sender [1:170:2169], Recipient [1:184:2176]: NKikimrScheme.TEvDataErasureInfoRequest
2025-05-29T15:22:36.702211Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5074: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest
2025-05-29T15:22:36.702214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7753: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944
>> TMiniKQLEngineFlatTest::TestTopSortPushdownPk [GOOD]
>> TMiniKQLEngineFlatTest::TestTopSortPushdown [GOOD]
>> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey [GOOD]
>> TMiniKQLProgramBuilderTest::TestAcquireLocks [GOOD]
>> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD]
>> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportStructTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportVoidYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportStringYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportUuidYdb
>> TestDataErasure::DataErasureWithCopyTable [GOOD]
>> TMiniKQLProtoTestYdb::TestExportUuidYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportTupleYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportStructYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD]
>> TopicAutoscaling::Simple_PQv1 [FAIL]
>> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK
>> TSchemeShardMoveTest::TwoTables [GOOD]
>> TSchemeShardMoveTest::MoveIndexSameDst [GOOD]
>> TSchemeShardMoveTest::MoveIntoBuildingIndex
>> TSchemeShardMoveTest::MoveTableForBackup [GOOD]
>> TSchemeShardMoveTest::MoveTableWithSequence
>> TPQTest::TestWriteSplit [GOOD]
>> TPQTest::TestWriteTimeStampEstimate
>> TSchemeShardMoveTest::MoveIndex [GOOD]
>> TSchemeShardMoveTest::MoveIndexDoesNonExisted
|60.9%| [TA] $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ResetCachedPath [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:22:36.698881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:22:36.698923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:22:36.698930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:22:36.698935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:22:36.698955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:22:36.698960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:22:36.698971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:22:36.698988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:22:36.699115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:22:36.699213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:22:36.714061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:22:36.714089Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:22:36.717029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:22:36.717167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:22:36.717215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:22:36.720357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:22:36.720552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:22:36.720673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard:
72057594046678944 2025-05-29T15:22:36.720756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:36.721289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:36.721344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:36.721662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:36.721676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:36.721702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:36.721713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:36.721719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:36.721761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:36.724130Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:36.747065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:36.747154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:36.747224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:36.747284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:36.747295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:36.748169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:36.748204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-05-29T15:22:36.748298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:36.748311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:36.748317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:36.748324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:36.748837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:36.748854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:36.748859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:36.749288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:36.749302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:36.749308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:36.749314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:36.749849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:36.750321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:36.750373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:36.750577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:36.750606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:36.750614Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:36.750704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:36.750713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:36.750768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:36.750784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:36.751236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:36.751248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:36.751299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... .cpp:418: TAlterTable TPropose operationId# 105:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:37.151233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-05-29T15:22:37.151276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:37.155122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2025-05-29T15:22:37.155176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 105 at step: 5000004 2025-05-29T15:22:37.155293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.155325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:37.155334Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_table.cpp:359: TAlterTable TPropose operationId# 105:0 HandleReply TEvOperationPlan, operationId: 105:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-05-29T15:22:37.155437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 105:0 128 -> 129 2025-05-29T15:22:37.155473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-05-29T15:22:37.157057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.157071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:22:37.157138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.157144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-05-29T15:22:37.157234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.157243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 105 2025-05-29T15:22:37.157461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:22:37.157476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:22:37.157481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:22:37.157486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 8 2025-05-29T15:22:37.157493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:22:37.157510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-05-29T15:22:37.157945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6285: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 
72075186233409549 CpuTimeUsec: 301 } } CommitVersion { Step: 5000004 TxId: 105 } 2025-05-29T15:22:37.157961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-05-29T15:22:37.157983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 301 } } CommitVersion { Step: 5000004 TxId: 105 } 2025-05-29T15:22:37.157997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 301 } } CommitVersion { Step: 5000004 TxId: 105 } 2025-05-29T15:22:37.158330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 677 RawX2: 4294969910 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-05-29T15:22:37.158342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-05-29T15:22:37.158360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Source { RawX1: 677 RawX2: 4294969910 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-05-29T15:22:37.158365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:22:37.158373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 677 RawX2: 4294969910 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-05-29T15:22:37.158385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 105:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.158388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.158393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 105:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-05-29T15:22:37.158399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 105:0 129 -> 240 2025-05-29T15:22:37.158543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:22:37.158901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.158933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.158984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.158991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 105:0 ProgressState 2025-05-29T15:22:37.159005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#105:0 progress is 1/1 2025-05-29T15:22:37.159009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-05-29T15:22:37.159014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#105:0 progress is 1/1 2025-05-29T15:22:37.159017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-05-29T15:22:37.159022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-05-29T15:22:37.159035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2316] message: TxId: 105 2025-05-29T15:22:37.159042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-05-29T15:22:37.159047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 105:0 2025-05-29T15:22:37.159051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 105:0 2025-05-29T15:22:37.159078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:22:37.159791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-29T15:22:37.159804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:842:2760] TestWaitNotification: OK eventTxId 105
|60.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD]
|60.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD]
>> TSchemeShardMoveTest::Chain [GOOD]
>> TSchemeShardMoveTest::Index
>> TestDataErasure::DataErasureWithMerge [GOOD]
>> TestDataErasure::DataErasureRun3CyclesForTables [GOOD]
>> TestDataErasure::DataErasureRun3CyclesForTopics [GOOD]
>> TSchemeShardMoveTest::Replace [GOOD]
>> TSchemeShardMoveTest::Reject [GOOD]
>> TSchemeShardMoveTest::OneTable
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureWithCopyTable [GOOD]
Test command err: Leader for
TabletID 72057594046678944 is [0:0:0] sender: [1:66:2058] recipient: [1:59:2100] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:66:2058] recipient: [1:59:2100] Leader for TabletID 72057594046678944 is [1:70:2104] sender: [1:74:2058] recipient: [1:59:2100] 2025-05-29T15:22:31.310172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:31.310195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:31.310201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:31.310206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:31.310217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:31.310221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:31.310232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:31.310245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:31.310346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:31.310462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:31.327691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:31.327712Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:31.327853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:31.327900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:31.327959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:31.330533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:31.330630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:31.330794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:31.330851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], 
at schemeshard: 72057594046678944 2025-05-29T15:22:31.331067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:31.331110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:31.331364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:31.331374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:31.331384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:31.331392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:31.331398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:31.331433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.331962Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:70:2104] sender: [1:149:2058] recipient: [1:16:2063] 2025-05-29T15:22:31.355660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:31.355755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.355826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:31.355883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:31.355896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.356122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:31.356146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:31.356207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.356218Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:31.356225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:31.356230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:31.356305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.356311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:31.356316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:31.356360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.356365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.356370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:31.356377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:31.357009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:31.357142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:31.357223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:31.357435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:31.357463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 76 RawX2: 4294969404 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:31.357471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:31.357566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change 
state for txid 1:0 128 -> 240 2025-05-29T15:22:31.357575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:31.357614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:31.357639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:31.357774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:31.357781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:31.357845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:31 ... d(TabletID)=72075186233409552 maps to shardIdx: 72075186233409546:7 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], pathId map=SimpleCopy, is column=0, is olap=0, RowCount 50, DataSize 5121950 2025-05-29T15:22:37.186264Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:62: BuildStatsForCollector: datashardId 72075186233409552, followerId 0 2025-05-29T15:22:37.186272Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:235: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:7 with partCount# 1, rowCount# 50, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:50.000000Z at schemeshard 72075186233409546 2025-05-29T15:22:37.186278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:485: Do not want to split tablet 72075186233409552 2025-05-29T15:22:37.186298Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2025-05-29T15:22:37.199023Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-05-29T15:22:37.199052Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5053: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-05-29T15:22:37.199058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72075186233409546, queue size# 0 2025-05-29T15:22:37.221471Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.221501Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.221522Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:184:2176], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.221527Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.234905Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.234935Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.234955Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:280:2238], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.234960Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.269376Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.269402Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.269423Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:184:2176], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.269427Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.279705Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.279730Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.279752Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:280:2238], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.279757Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.312007Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.312027Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.312046Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:184:2176], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.312051Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.322249Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:280:2238]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.322269Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.322288Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:280:2238], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.322293Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.353188Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.353213Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.353229Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:184:2176], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.353234Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.364478Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.364503Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.364524Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:280:2238], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.364529Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.397439Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.397462Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.397486Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:184:2176], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.397490Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.407696Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:37.407720Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5077: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:37.407729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:354: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-05-29T15:22:37.407814Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:186:2178], Recipient [1:184:2176]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-05-29T15:22:37.407821Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:37.407825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:37.407861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:37.407867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:644: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-05-29T15:22:37.407879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:170: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 14.999500s, Timestamp# 1970-01-01T00:01:25.000500Z 2025-05-29T15:22:37.407890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:378: [RootDataErasureManager] Complete: Generation# 1, duration# 35 s 2025-05-29T15:22:37.408095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-05-29T15:22:37.408796Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:1733:3439], Recipient [1:184:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:37.408807Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:37.408813Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:22:37.408847Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125519, Sender [1:170:2169], Recipient [1:184:2176]: NKikimrScheme.TEvDataErasureInfoRequest 2025-05-29T15:22:37.408852Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5074: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-05-29T15:22:37.408856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7753: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944
|60.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:36.618528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s,
InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:36.618555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:36.618561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:36.618566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:36.618581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:36.618585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:36.618594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:36.618606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:36.618688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:36.618766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:36.631156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:36.631177Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:36.633528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:36.633647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:36.633681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:36.635978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:36.636146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:36.636242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:36.636306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:36.636804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:36.636854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:36.637106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-05-29T15:22:36.637116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:36.637136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:36.637143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:36.637150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:36.637183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:36.638373Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:36.653361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:36.653427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:36.653475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:36.653508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:36.653516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:36.654152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:36.654176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:36.654221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:36.654229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:36.654234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:36.654239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:36.654559Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:36.654572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:36.654577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:36.655016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:36.655030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:36.655036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:36.655042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:36.655594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:36.656062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:36.656099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:36.656237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:36.656256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:36.656263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:36.656311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:36.656316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:36.656339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:36.656347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:36.656797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:36.656808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:36.656840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 025-05-29T15:22:37.262924Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:22:37.262931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 324 RawX2: 8589936899 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-05-29T15:22:37.262944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.262948Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 102:2, at schemeshard: 72057594046678944 2025-05-29T15:22:37.262952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 102:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:22:37.262959Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:2 129 -> 240 2025-05-29T15:22:37.263478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-05-29T15:22:37.263492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-05-29T15:22:37.263511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-05-29T15:22:37.263516Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:22:37.263523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 102 
Step: 0 Generation: 2 2025-05-29T15:22:37.263534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.263538Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.263542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-29T15:22:37.263547Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:0 129 -> 240 2025-05-29T15:22:37.269393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-05-29T15:22:37.269583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.275136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-05-29T15:22:37.275205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-05-29T15:22:37.275214Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:482: TMoveTable TDone, operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:37.275223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:492: TMoveTable TDone, operationId: 102:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-05-29T15:22:37.275238Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:2 progress is 2/3 2025-05-29T15:22:37.275244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-05-29T15:22:37.275249Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:2 progress is 2/3 2025-05-29T15:22:37.275253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-05-29T15:22:37.275258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/3, is published: true 2025-05-29T15:22:37.275373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.275414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.275420Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:482: TMoveTable TDone, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:37.275425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_move_table.cpp:492: TMoveTable TDone, operationId: 102:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:22:37.275437Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 3/3 2025-05-29T15:22:37.275441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-05-29T15:22:37.275446Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 3/3 2025-05-29T15:22:37.275449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-05-29T15:22:37.275454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/3, is published: true 2025-05-29T15:22:37.275473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:378:2345] message: TxId: 102 2025-05-29T15:22:37.275480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-05-29T15:22:37.275486Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:22:37.275491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:22:37.275515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:22:37.275521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:22:37.275526Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:1 2025-05-29T15:22:37.275530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:1 2025-05-29T15:22:37.275535Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-29T15:22:37.275539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:22:37.275543Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:2 2025-05-29T15:22:37.275547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:2 2025-05-29T15:22:37.275554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-05-29T15:22:37.275558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:22:37.275626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:22:37.275632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:22:37.275644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:22:37.275652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:22:37.275658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:22:37.275663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:22:37.275669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:22:37.283654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:22:37.283678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:477:2437] 2025-05-29T15:22:37.283785Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 |60.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::TwoTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:37.317460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:37.317486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:37.317492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:37.317498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:37.317517Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:37.317522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:37.317534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:37.317565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:37.317681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:37.317766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:37.329218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:37.329242Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:37.332120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:37.332256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:37.332303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:37.333962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:37.334121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:37.334239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.334300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:37.334800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.334851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:37.335146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.335156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.335177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:37.335183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:37.335188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:37.335220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.336261Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:37.356216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:37.356295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.356354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:37.356400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:37.356411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.356989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.357019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:37.357066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.357075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:37.357082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:37.357087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:37.357487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.357498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:37.357504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:37.357859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:22:37.357880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.357886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.357895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:37.358605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:37.359013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:37.359054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:37.359236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.359262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:37.359270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.359344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:37.359351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.359386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:37.359400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:37.359800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.359810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-05-29T15:22:37.359858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... :22:37.589302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:22:37.589309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:22:37.589315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:22:37.589949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:22:37.589962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:505:2464] 2025-05-29T15:22:37.589978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-05-29T15:22:37.590105Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:37.590151Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 55us result status StatusPathDoesNotExist 2025-05-29T15:22:37.590201Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:22:37.590258Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:37.590294Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove1" took 37us result status StatusSuccess 2025-05-29T15:22:37.590387Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/TableMove1" PathDescription { Self { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:37.590473Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:37.590486Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table2" took 15us result status StatusPathDoesNotExist 2025-05-29T15:22:37.590502Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:22:37.590546Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:37.590561Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove2" took 17us result status StatusSuccess 2025-05-29T15:22:37.590615Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove2" PathDescription { Self { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:37.590676Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:37.590696Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 20us result status StatusSuccess 2025-05-29T15:22:37.590790Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPQTest::TestWriteTimeStampEstimate [GOOD] >> TPQTest::TestWriteTimeLag >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureRun3CyclesForTopics [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:32.516341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:32.516367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:32.516373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:32.516379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:32.516390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:32.516394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:32.516403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, 
Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:32.516417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:32.516511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:32.516567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:32.530243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:32.530264Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:32.532967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:32.533090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:32.533152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:32.534681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:32.535298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:32.535475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.535555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:32.536377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:32.536437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:32.536727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:32.536740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:32.536762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:32.536773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:32.536780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:32.536817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.538370Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: 
[1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:32.559586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:32.559666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.559730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:32.559775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:32.559787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.563089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.563127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:32.563180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.563194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:32.563203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:32.563225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:32.563775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.563789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:32.563795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:32.564665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.564678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.564686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.564693Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:32.565424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:32.566199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:32.566245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:32.566384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.566406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:32.566412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.566476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:32.566484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.566513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:32.566525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:32.567286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:32.567296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:32.567356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
dRequestToBSC: Generation# 3 2025-05-29T15:22:37.171128Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:295:2278]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-05-29T15:22:37.171135Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:37.171139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:37.171155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:37.171167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:648: TTxCompleteDataErasureBSC: Progress data shred in BSC 50% 2025-05-29T15:22:37.171182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-05-29T15:22:37.171190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:348: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-05-29T15:22:37.255179Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:461:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.255205Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.255238Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:461:2412], Recipient [1:461:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.255243Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.255298Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [1:641:2556], Recipient [1:461:2412]: NKikimrSchemeOp.TDescribePath PathId: 2 SchemeshardId: 72075186233409546 2025-05-29T15:22:37.255303Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:22:37.255329Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-05-29T15:22:37.255392Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72075186233409546 describe pathId 2 took 46us result status StatusSuccess 2025-05-29T15:22:37.255534Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Database1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409550 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 10 } YdbDatabasePath: "/MyRoot/Database1" } Partitions { PartitionId: 0 TabletId: 72075186233409549 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409550 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2025-05-29T15:22:37.340796Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:884:2758]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.340823Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.340851Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:884:2758], Recipient [1:884:2758]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.340857Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.340910Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [1:1064:2901], Recipient [1:884:2758]: NKikimrSchemeOp.TDescribePath PathId: 2 SchemeshardId: 72075186233409551 2025-05-29T15:22:37.340915Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:22:37.340943Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72075186233409551, at schemeshard: 72075186233409551 2025-05-29T15:22:37.341003Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72075186233409551 describe pathId 2 took 46us result status StatusSuccess 2025-05-29T15:22:37.341137Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Database2/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72075186233409551 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409555 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { 
PartitionConfig { LifetimeSeconds: 10 } YdbDatabasePath: "/MyRoot/Database2" } Partitions { PartitionId: 0 TabletId: 72075186233409554 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409555 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409552 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409553 SchemeShard: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72075186233409551, at schemeshard: 72075186233409551 2025-05-29T15:22:37.710887Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:295:2278]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.710922Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.710940Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:295:2278], Recipient [1:295:2278]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.710945Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.722903Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:295:2278]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:37.722935Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5077: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:37.722944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:354: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-05-29T15:22:37.723082Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:295:2278]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-05-29T15:22:37.723090Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:37.723096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:37.723124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:37.723130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:644: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-05-29T15:22:37.723141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:170: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 0.978000s, Timestamp# 1970-01-01T00:00:11.068000Z 2025-05-29T15:22:37.723149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:378: [RootDataErasureManager] Complete: Generation# 3, duration# 2 s 2025-05-29T15:22:37.735201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-05-29T15:22:37.735412Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:1529:3317], Recipient [1:295:2278]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:37.735426Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:37.735432Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:22:37.735474Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125519, Sender [1:277:2267], Recipient [1:295:2278]: NKikimrScheme.TEvDataErasureInfoRequest 2025-05-29T15:22:37.735482Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5074: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-05-29T15:22:37.735487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7753: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureRun3CyclesForTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:32.147912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:32.147949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:32.147956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:32.147961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:32.147973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:32.147978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:32.147988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:32.148003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-05-29T15:22:32.148131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:32.148212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:32.163794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:32.163820Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:32.166890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:32.167033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:32.167103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:32.169211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:32.169401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:32.169538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.169605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:32.170235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:32.170295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:32.170573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:32.170586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:32.170607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:32.170620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:32.170626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:32.170663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.172224Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:32.196330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:32.196414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.196488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:32.196535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:32.196548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.197519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.197566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:32.197624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.197636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:32.197643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:32.197649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:32.198202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.198215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:32.198222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:32.198628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.198640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.198650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.198659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:32.199405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:32.199896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:32.199949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:32.200145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.200172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:32.200180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.200242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:32.200249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.200285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:32.200298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:32.200787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:32.200797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:32.200843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 594046678944, LocalPathId: 2] in# 64 ms, next wakeup# 593.936000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-05-29T15:22:36.880584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_data_erasure_manager.cpp:327: [RootDataErasureManager] Data erasure in tenants is completed. 
Send request to BS controller 2025-05-29T15:22:36.880951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:608: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-05-29T15:22:36.880964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:354: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-05-29T15:22:36.881014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:296:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2025-05-29T15:22:36.881022Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:36.881026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:36.881034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:36.881040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:648: TTxCompleteDataErasureBSC: Progress data shred in BSC 0% 2025-05-29T15:22:36.881050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-05-29T15:22:36.881058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:348: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-05-29T15:22:37.351059Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.351099Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.351116Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:459:2410]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.351120Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.351132Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:836:2719]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.351136Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.351147Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:459:2410], Recipient [1:459:2410]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.351152Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.351170Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:836:2719], Recipient [1:836:2719]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.351174Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.351183Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:296:2279], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.351187Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.383406Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:37.383439Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5077: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:37.383448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:354: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-05-29T15:22:37.383534Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:296:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-05-29T15:22:37.383542Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:37.383547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:37.383571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:37.383586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:648: TTxCompleteDataErasureBSC: Progress data shred in BSC 50% 2025-05-29T15:22:37.383604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-05-29T15:22:37.383622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:348: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-05-29T15:22:37.817112Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:836:2719]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.817148Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.817169Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.817173Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.817183Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:459:2410]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.817187Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.817197Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:296:2279], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.817202Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.817254Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:459:2410], Recipient [1:459:2410]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.817259Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.817270Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:836:2719], Recipient [1:836:2719]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.817275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.848038Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:37.848076Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5077: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:37.848087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:354: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-05-29T15:22:37.848213Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:296:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-05-29T15:22:37.848221Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:37.848226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:37.848256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:37.848263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:644: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-05-29T15:22:37.848274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:170: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 0.935000s, Timestamp# 1970-01-01T00:00:11.113000Z 2025-05-29T15:22:37.848281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:378: [RootDataErasureManager] Complete: Generation# 3, duration# 2 s 2025-05-29T15:22:37.851330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, 
NeedScheduleRequestToBSC# false 2025-05-29T15:22:37.851558Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:3565:4920], Recipient [1:296:2279]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:37.851568Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:37.851573Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:22:37.851597Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125519, Sender [1:277:2267], Recipient [1:296:2279]: NKikimrScheme.TEvDataErasureInfoRequest 2025-05-29T15:22:37.851603Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5074: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-05-29T15:22:37.851608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7753: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> TSchemeShardMoveTest::Index [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 [FAIL] >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Replace [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:37.024352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:37.024378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:37.024384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:37.024390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:37.024407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:37.024412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:37.024419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:37.024430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:37.024518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:37.024587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:37.035094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:37.035117Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:37.037520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:37.037648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:37.037695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:37.039289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:37.040402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:37.040541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.040629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:37.041338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.041395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:37.041662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.041670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.041692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:37.041698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:37.041703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:37.041749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.043186Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:37.058427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:37.058496Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.058550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:37.058587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:37.058595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.060115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.060145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:37.060203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.060212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:37.060217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:37.060222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:37.060759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.060772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:37.060778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:37.061185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.061199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.061205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.061212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:37.061860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-05-29T15:22:37.062310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:37.062352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:37.062534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.062560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:37.062569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.062634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:37.062641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.062670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:37.062684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:37.063994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.064005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:37.064046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6518: Transaction 105 reset current state at schemeshard 72057594046678944 because pipe to tablet 72075186233409547 disconnected 2025-05-29T15:22:37.909481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6518: Transaction 105 reset current state at schemeshard 72057594046678944 because pipe to tablet 72075186233409548 disconnected 2025-05-29T15:22:37.909515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-29T15:22:37.909523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:1428:3188] 2025-05-29T15:22:37.909564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 5 paths, skipped 0, left 2 candidates, at schemeshard: 72057594046678944 2025-05-29T15:22:37.909583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:22:37.909588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2025-05-29T15:22:37.909600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 1 2025-05-29T15:22:37.909606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 15], at schemeshard: 72057594046678944 2025-05-29T15:22:37.909611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 2 2025-05-29T15:22:37.909617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 14], at schemeshard: 72057594046678944 2025-05-29T15:22:37.909623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-05-29T15:22:37.909628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2025-05-29T15:22:37.909634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 1 2025-05-29T15:22:37.909638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 12], at schemeshard: 72057594046678944 2025-05-29T15:22:37.909645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:22:37.910444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:22:37.910460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 
tabletId 72075186233409546 2025-05-29T15:22:37.910476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:22:37.910479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:22:37.910487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:22:37.910491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409548 2025-05-29T15:22:37.910508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 5 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2025-05-29T15:22:37.910613Z node 1 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:22:37.910624Z node 1 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-05-29T15:22:37.910630Z node 1 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2025-05-29T15:22:37.910703Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Src" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:37.910757Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Src" took 46us result status StatusPathDoesNotExist 2025-05-29T15:22:37.910788Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Src\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Src" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:22:37.910843Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:37.910888Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Dst" took 45us result status 
StatusSuccess 2025-05-29T15:22:37.911016Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dst" PathDescription { Self { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Async" LocalPathId: 23 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 25 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 22 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:37.911142Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:37.911166Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 25us result status StatusSuccess 2025-05-29T15:22:37.911231Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 28 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 28 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 26 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureWithMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:66:2058] recipient: [1:59:2100] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:66:2058] recipient: [1:59:2100] Leader for TabletID 72057594046678944 is [1:70:2104] sender: [1:74:2058] recipient: [1:59:2100] 2025-05-29T15:22:32.335846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:32.335870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:32.335876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:32.335881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:32.335893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:32.335898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:32.335909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:32.335922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:32.336029Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:32.336152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:32.352379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:32.352397Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:32.352474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:32.352501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:32.352544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:32.353317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:32.353348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:32.353426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.353456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:32.353581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:32.353605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:32.353767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:32.353774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:32.353782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:32.353786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:32.353790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:32.353812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.354136Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:70:2104] sender: [1:149:2058] recipient: [1:16:2063] 2025-05-29T15:22:32.367626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , 
at schemeshard: 72057594046678944 2025-05-29T15:22:32.367696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.367745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:32.367786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:32.367793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.367981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.367999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:32.368059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.368065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:32.368069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:32.368072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:32.368138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.368142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:32.368146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:32.368181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.368187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.368191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.368195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:32.368665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:32.368728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:32.368780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:32.368919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.368934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 76 RawX2: 4294969404 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:32.368939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.368984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:32.368990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.369014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:32.369033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:32.369113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:32.369118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:32.369150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:32 ... 
asureSelfResponseTime 2025-05-29T15:22:37.687038Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.702948Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.702986Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.703010Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:280:2238], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.703016Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.742974Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.743010Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.743032Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:184:2176], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.743036Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.755127Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.755163Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.755185Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:280:2238], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.755191Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.789045Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.789071Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.789442Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:184:2176], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.789454Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.802612Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269553162, 
Sender [1:1203:3016], Recipient [1:280:2238]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 2 Generation: 2 Round: 2 TableStats { DataSize: 10141461 RowCount: 99 IndexSize: 4463 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 4463 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 859 Memory: 94187 Storage: 10148063 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-05-29T15:22:37.802639Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4914: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-05-29T15:22:37.802658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] state 'Ready' dataSize 10141461 rowCount 99 cpuUsage 0.0859 2025-05-29T15:22:37.802681Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:570: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] raw table stats: DataSize: 10141461 RowCount: 99 IndexSize: 4463 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 4463 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-05-29T15:22:37.802690Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:610: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-05-29T15:22:37.824329Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.824368Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:37.824394Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:280:2238], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.824399Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:37.834673Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:184:2176]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:37.834712Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5077: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:37.834722Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:354: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-05-29T15:22:37.834765Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:280:2238]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-05-29T15:22:37.834772Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5053: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-05-29T15:22:37.834777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72075186233409546, queue size# 1 2025-05-29T15:22:37.834805Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:601: Will execute TTxStoreStats, queue# 1 2025-05-29T15:22:37.834812Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:610: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-05-29T15:22:37.834862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 2 shard idx 72075186233409546:6 data size 10141461 row count 99 2025-05-29T15:22:37.834903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409551 maps to shardIdx: 72075186233409546:6 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 99, DataSize 10141461 2025-05-29T15:22:37.834910Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:62: BuildStatsForCollector: datashardId 72075186233409551, followerId 0 2025-05-29T15:22:37.834937Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:235: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:6 with partCount# 1, rowCount# 99, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72075186233409546 2025-05-29T15:22:37.834969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:485: Do not want to split tablet 72075186233409551 2025-05-29T15:22:37.835001Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2025-05-29T15:22:37.835129Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:186:2178], Recipient [1:184:2176]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-05-29T15:22:37.835135Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:37.835139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:37.835153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:37.835158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:644: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-05-29T15:22:37.835169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:170: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 19.899500s, Timestamp# 1970-01-01T00:01:20.100500Z 2025-05-29T15:22:37.835174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:378: [RootDataErasureManager] Complete: Generation# 1, duration# 30 s 2025-05-29T15:22:37.835373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-05-29T15:22:37.836292Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:1374:3166], Recipient [1:184:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:37.836309Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:37.836315Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:22:37.836339Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125519, Sender [1:170:2169], Recipient [1:184:2176]: NKikimrScheme.TEvDataErasureInfoRequest 2025-05-29T15:22:37.836345Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5074: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-05-29T15:22:37.836350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7753: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:37.275415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:37.275438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:37.275444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:37.275448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:37.275461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:37.275465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:37.275473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:37.275485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:22:37.275590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:37.275656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:37.295787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:37.295815Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:37.301759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:37.301907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:37.301944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:37.310208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:37.310508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:37.310664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.310803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:37.312399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.312616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:37.313069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.313092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.313130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:37.313151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:37.313162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:37.313223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.317730Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:37.346832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:37.346943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.347020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:37.347091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:37.347110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.348098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.348140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:37.348223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.348238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:37.348246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:37.348254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:37.349155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.349177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:37.349185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:37.349943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.349960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.349974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.349985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:37.350872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:37.351447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:37.351505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:37.351715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.351746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:37.351754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.351833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:37.351842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.351879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:37.351895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:37.352493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.352508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:37.352568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
324 RawX2: 8589936899 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-05-29T15:22:37.826267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.826271Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 103:2, at schemeshard: 72057594046678944 2025-05-29T15:22:37.826275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 103:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:22:37.826281Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 103:2 129 -> 240 2025-05-29T15:22:37.826470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-05-29T15:22:37.826477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409547, partId: 0 2025-05-29T15:22:37.826491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-05-29T15:22:37.826495Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:22:37.826502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-05-29T15:22:37.826510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.826513Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.826517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-29T15:22:37.826521Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 103:0 129 -> 240 2025-05-29T15:22:37.827116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-05-29T15:22:37.827917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:0, at schemeshard: 
72057594046678944 2025-05-29T15:22:37.828172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-05-29T15:22:37.828266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2025-05-29T15:22:37.828274Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:482: TMoveTable TDone, operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:37.828281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:492: TMoveTable TDone, operationId: 103:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-05-29T15:22:37.828294Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:2 progress is 2/3 2025-05-29T15:22:37.828299Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-05-29T15:22:37.828303Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:2 progress is 2/3 2025-05-29T15:22:37.828306Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-05-29T15:22:37.828310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 2/3, is published: true 2025-05-29T15:22:37.828350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.828412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.828417Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:482: TMoveTable TDone, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:37.828421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:492: TMoveTable TDone, operationId: 103:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:22:37.828429Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 3/3 2025-05-29T15:22:37.828432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-05-29T15:22:37.828437Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 3/3 2025-05-29T15:22:37.828439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-05-29T15:22:37.828443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/3, is published: true 2025-05-29T15:22:37.828447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-05-29T15:22:37.828453Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: 
Operation and all the parts is done, operation id: 103:0 2025-05-29T15:22:37.828462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:22:37.828487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:22:37.828491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:22:37.828495Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:1 2025-05-29T15:22:37.828498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:1 2025-05-29T15:22:37.828503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-29T15:22:37.828506Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:22:37.828509Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:2 2025-05-29T15:22:37.828512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:2 2025-05-29T15:22:37.828519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-05-29T15:22:37.828522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:22:37.828591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:22:37.828597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:22:37.828608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:22:37.828613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:22:37.828618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:22:37.828622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:22:37.828628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:22:37.829794Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: 
TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:22:37.829999Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:392: actor# [2:270:2260] Handle TEvGetProxyServicesRequest TestWaitNotification wait txId: 103 2025-05-29T15:22:37.876141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:22:37.876168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:22:37.876288Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:22:37.876326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:22:37.876333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:675:2558] TestWaitNotification: OK eventTxId 103
>> TSchemeShardMoveTest::OneTable [GOOD]
>> TPQTest::TestWriteTimeLag [GOOD]
>> TPQTestInternal::RestoreKeys [GOOD]
>> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK [FAIL]
>> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1
>> TPQTest::TestAlreadyWrittenWithoutDeduplication [GOOD]
>> TPQTest::TestComactifiedWithRetention
>> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowPayload [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowManyShards [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowNoShards [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:37.367450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:37.367475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:37.367481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:37.367487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:37.367503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:37.367508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:37.367534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:37.367552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:37.367674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:37.367753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:37.384029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:37.384049Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:37.386998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:37.389492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:37.389588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:37.391699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:37.391872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:37.391993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.392043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:37.392437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.392478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:37.392704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.392715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.392732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:37.392741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:37.392745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:37.392773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.393969Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:37.416693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:37.416777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.416842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:37.416892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:37.416906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.424715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.424763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:37.424863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.424876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:37.424882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:37.424889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:37.427126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.427150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:37.427159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:37.427737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.427763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:22:37.427774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.427784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:37.428522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:37.429088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:37.429141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:37.429347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.429384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:37.429392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.429474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:37.429484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.429523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:37.429538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:37.430049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.430059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:37.430112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
e ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:38.184496Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 27us result status StatusSuccess 2025-05-29T15:22:38.184559Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:38.184612Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:22:38.184643Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Sync" took 31us result status StatusSuccess 2025-05-29T15:22:38.184783Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Sync" PathDescription { Self { Name: "Sync" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 
TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:38.184867Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/Table/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:22:38.184890Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Async" took 24us result status StatusSuccess 2025-05-29T15:22:38.184988Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Async" PathDescription { Self { Name: "Async" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 5 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 5 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:37.353342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:37.353370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:37.353377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:37.353383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:37.353400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:37.353405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:37.353414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:37.353430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:37.353587Z
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:37.353672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:37.367694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:37.367714Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:37.369827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:37.369938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:37.369975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:37.371578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:37.371790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:37.371925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.371998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:37.372548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.372611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:37.372883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.372891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.372910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:37.372922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:37.372929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:37.372973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.374966Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:37.398179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , 
at schemeshard: 72057594046678944 2025-05-29T15:22:37.398267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.398333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:37.398386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:37.398399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.399195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.399228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:37.399294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.399306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:37.399313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:37.399320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:37.399801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.399816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:37.399822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:37.400221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.400233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.400242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.400250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:37.400999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:37.401690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:37.401739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:37.401937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.401967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:37.401977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.402056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:37.402064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.402100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:37.402114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:37.402570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.402580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:37.402627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
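[annotation] The bootstrap chunk above walks one suboperation through the schemeshard state machine: 2 -> 3 at TCreateParts ("no shards to create, do next state"), 3 -> 128 at NSubDomainState::TConfigureParts, and 128 -> 240 once the fake coordinator plans the step and TPropose handles TEvOperationPlan. A minimal sketch of that progression, with the numeric values taken from the log and the state names inferred from the adjacent ProgressState lines (YDB's actual enum may name them differently):

```cpp
// Sketch of the suboperation state progression visible in this log
// (2 -> 3 -> 128 -> 240). Numeric values come from the "Change state for
// txid" lines; the names are assumptions read off the surrounding messages.
#include <cstdio>

enum ETxState {
    CreateParts    = 2,   // TCreateParts ProgressState, no shards to create
    ConfigureParts = 3,   // NSubDomainState::TConfigureParts
    Propose        = 128, // NSubDomainState::TPropose, waits for coordinator plan
    Done           = 240  // TDone, operation part complete
};

int main() {
    const ETxState path[] = {CreateParts, ConfigureParts, Propose, Done};
    for (ETxState s : path)
        std::printf("txid 1:0 -> state %d\n", static_cast<int>(s));
}
```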
710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:22:38.159078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-05-29T15:22:38.159081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-05-29T15:22:38.159085Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 2025-05-29T15:22:38.159098Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:38.159113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 127 RawX2: 8589936743 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:38.159118Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-05-29T15:22:38.159121Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976710760:0 128 -> 240 2025-05-29T15:22:38.159368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-05-29T15:22:38.159375Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-05-29T15:22:38.159384Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710760:0 progress is 1/1 2025-05-29T15:22:38.159387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-05-29T15:22:38.159390Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710760:0 progress is 1/1 2025-05-29T15:22:38.159392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-05-29T15:22:38.159395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-05-29T15:22:38.159402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:136:2157] message: TxId: 281474976710760 2025-05-29T15:22:38.159406Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-05-29T15:22:38.159412Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710760:0 2025-05-29T15:22:38.159414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 
281474976710760:0 2025-05-29T15:22:38.159423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-05-29T15:22:38.159637Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-05-29T15:22:38.159645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710760 2025-05-29T15:22:38.159652Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2338: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-05-29T15:22:38.159660Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2341: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:451:2411], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:22:38.159847Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2025-05-29T15:22:38.159856Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:451:2411], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:22:38.159861Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:25: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-05-29T15:22:38.160035Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: 
Execute: 102 Done 2025-05-29T15:22:38.160044Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:451:2411], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:22:38.160049Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-05-29T15:22:38.160064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:22:38.160068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:631:2579] TestWaitNotification: OK eventTxId 102 2025-05-29T15:22:38.160158Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:38.160198Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 47us result status StatusSuccess 2025-05-29T15:22:38.160294Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "SomeIndex" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: 
"value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:37.282524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:37.282555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:37.282563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:37.282570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:37.282589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:37.282595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:37.282605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:37.282623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:37.282777Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:37.282879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:37.299411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:37.299436Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:37.302456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:37.302588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:37.302636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:37.304177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:37.304337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:37.304478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.304561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:37.305085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.305141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:37.305448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.305459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.305483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:37.305495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:37.305502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:37.305561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.307051Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:37.331040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2025-05-29T15:22:37.331121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.331187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:37.331246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:37.331258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.331896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.331924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:37.331985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.331995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:37.332002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:37.332010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:37.332418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.332430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:37.332436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:37.332795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.332805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.332815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.332824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:37.333638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:37.335436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:37.335487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:37.335680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.335712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:37.335722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.335818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:37.335827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.335864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:37.335878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:37.336348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.336359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:37.336403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
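[annotation] In the DoPropose message above, MinStep: 0 and MaxStep: 18446744073709551615 bound the coordinator's plan window; that MaxStep is simply the maximum unsigned 64-bit value, so the window is effectively unbounded. A quick self-contained check of the constant:

```cpp
// Verifies that the MaxStep reported in the DoPropose log line is UINT64_MAX,
// i.e. the transaction places no upper bound on the plan step.
#include <cstdint>
#include <cstdio>

int main() {
    static_assert(UINT64_MAX == 18446744073709551615ULL,
                  "MaxStep from the log is UINT64_MAX");
    std::printf("MaxStep = %llu\n",
                static_cast<unsigned long long>(UINT64_MAX));
}
```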
e id#102:1 progress is 2/2 2025-05-29T15:22:38.197395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-05-29T15:22:38.197402Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:1 progress is 2/2 2025-05-29T15:22:38.197406Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-05-29T15:22:38.197412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2025-05-29T15:22:38.197441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:375:2343] message: TxId: 102 2025-05-29T15:22:38.197451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-05-29T15:22:38.197460Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:22:38.197465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:22:38.197514Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:22:38.197520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:22:38.197526Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:1 2025-05-29T15:22:38.197529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:1 2025-05-29T15:22:38.197556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:22:38.197562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:22:38.197682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:22:38.197691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:22:38.197705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:22:38.197715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:22:38.197721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:22:38.198644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: 
got EvNotifyTxCompletionResult 2025-05-29T15:22:38.198663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:473:2428] 2025-05-29T15:22:38.198771Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-05-29T15:22:38.199643Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:38.199699Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/myseq" took 68us result status StatusPathDoesNotExist 2025-05-29T15:22:38.199742Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/myseq\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/myseq" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:22:38.199805Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:38.199825Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 21us result status StatusPathDoesNotExist 2025-05-29T15:22:38.199846Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:22:38.199893Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/TableMove" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:38.199933Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove" took 42us result status StatusSuccess 2025-05-29T15:22:38.200056Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove" PathDescription { Self { Name: "TableMove" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "TableMove" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 DefaultFromSequence: "myseq" NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false Sequences { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:38.200126Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:22:38.200146Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/myseq" took 23us result status StatusSuccess 2025-05-29T15:22:38.200189Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/TableMove/myseq" PathDescription { Self { Name: "myseq" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Index [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:37.072651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:37.072676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:37.072681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:37.072685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:37.072699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:37.072703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:37.072710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:37.072722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-05-29T15:22:37.072826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:37.072902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:37.084515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:37.084537Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:37.086762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:37.086867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:37.086900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:37.088158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:37.088268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:37.088357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.088406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:37.088779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.088817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:37.089036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.089044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.089064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:37.089070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:37.089076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:37.089105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.091913Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:37.115199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:37.115268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.115319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:37.115358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:37.115368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.115916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.115943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:37.115987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.115996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:37.116001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:37.116005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:37.116362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.116374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:37.116378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:37.116681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.116691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.116697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.116704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:37.117281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:37.117625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:37.117661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:37.117817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.117838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:37.117844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.117900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:37.117906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.117929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:37.117939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:37.118287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.118295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:37.118330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
eTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:38.202730Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 20us result status StatusSuccess 2025-05-29T15:22:38.202803Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:38.202857Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:22:38.202884Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Sync" took 28us result status StatusSuccess 2025-05-29T15:22:38.203036Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Sync" PathDescription { Self { Name: "Sync" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 
2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:38.203112Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/TableMove/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:22:38.203137Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Async" took 26us result status StatusSuccess 2025-05-29T15:22:38.203229Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Async" PathDescription { Self { Name: "Async" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 8 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 
100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TMiniKQLEngineFlatTest::TestPureProgram [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExists [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo [GOOD]
>> TMiniKQLEngineFlatTest::TestMapsPushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes [GOOD]
>> TMiniKQLEngineFlatTest::TestSomePushDown [GOOD]
>> TMiniKQLEngineFlatTest::TestTakePushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::OneTable [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:37.151909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:37.151933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:22:37.151939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:37.151944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:37.151958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:37.151962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:37.151971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:37.151985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:37.152091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:37.152169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:37.179095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:37.179122Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:37.187251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:37.187422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:37.187468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:37.189095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:37.189250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:37.189366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.189432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:37.189901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.189951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:37.190204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.190216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:37.190237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2025-05-29T15:22:37.190245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:37.190252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:37.190289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.191602Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:37.220218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:37.220305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.220367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:37.220418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:37.220432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.221146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.221184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:37.221247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.221260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:37.221265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:37.221270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:37.221806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.221820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts 
operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:37.221827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:37.222222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.222234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:37.222240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.222247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:37.222955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:37.223359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:37.223393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:37.223567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:37.223592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:37.223602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.223665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:37.223673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:37.223699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:37.223710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:37.224096Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:37.224104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:37.224138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:22:38.500675Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:38.500682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:207:2208], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-05-29T15:22:38.500688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:207:2208], at schemeshard: 72057594046678944, txId: 108, path id: 4 2025-05-29T15:22:38.500803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-05-29T15:22:38.500810Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-05-29T15:22:38.500826Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944 2025-05-29T15:22:38.500832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:22:38.500838Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 108:0 129 -> 240 2025-05-29T15:22:38.500960Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2025-05-29T15:22:38.500973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2025-05-29T15:22:38.500980Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-05-29T15:22:38.500986Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 23 2025-05-29T15:22:38.500994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:22:38.501105Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 
72057594046678944, cookie: 108 2025-05-29T15:22:38.501118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2025-05-29T15:22:38.501122Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-05-29T15:22:38.501127Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:22:38.501131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:22:38.501142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-05-29T15:22:38.507206Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-05-29T15:22:38.507237Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 108:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:38.507334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:22:38.507362Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#108:0 progress is 1/1 2025-05-29T15:22:38.507367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-05-29T15:22:38.507372Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#108:0 progress is 1/1 2025-05-29T15:22:38.507375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-05-29T15:22:38.507380Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-05-29T15:22:38.507402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:336:2314] message: TxId: 108 2025-05-29T15:22:38.507409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-05-29T15:22:38.507413Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 108:0 2025-05-29T15:22:38.507421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 108:0 2025-05-29T15:22:38.507445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:22:38.515054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-05-29T15:22:38.515184Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-05-29T15:22:38.519091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-05-29T15:22:38.519133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:831:2787] TestWaitNotification: OK eventTxId 108 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-05-29T15:22:38.519314Z node 2 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:22:38.519328Z node 2 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409547 2025-05-29T15:22:38.537644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 8589936886 } TabletId: 72075186233409546 State: 4 2025-05-29T15:22:38.537688Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:22:38.547335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:22:38.547500Z node 2 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:22:38.547569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:38.547661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409546 2025-05-29T15:22:38.548291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:22:38.548301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:22:38.548318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:38.549110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:22:38.549126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:22:38.549186Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 2025-05-29T15:22:38.549332Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:38.549377Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 56us result status StatusSuccess 2025-05-29T15:22:38.549463Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 21 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTestInternal::RestoreKeys [GOOD]
Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:109:2057] recipient: [1:102:2135] 2025-05-29T15:22:10.590486Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:10.590514Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927938 is [1:154:2174] sender: [1:155:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:180:2057] recipient: [1:14:2061] 2025-05-29T15:22:10.594590Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:10.597657Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 }
PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:22:10.597876Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2198] 2025-05-29T15:22:10.598620Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:186:2198] 2025-05-29T15:22:10.599153Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2199] 2025-05-29T15:22:10.599715Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:187:2199] 2025-05-29T15:22:10.611179Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|71e530d2-7fe23d60-1e693378-6c98df69_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:10.642241Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e8549e44-ad1b63d3-845a3765-d68c0e8b_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:10.662105Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|518616c5-1b6bb835-dd8847af-2559d7cd_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:10.677406Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ab5d481e-d9b6d92b-b13b38f8-2a2954a1_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:10.678490Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|97f8c3de-5f2fc416-9263183b-6b95c746_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:10.679191Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|7ed24464-68d0e950-fe701d2b-4033c84a_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:104:2057] recipient: [2:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:104:2057] recipient: [2:102:2135] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:109:2057] recipient: [2:102:2135] 2025-05-29T15:22:10.860446Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:10.860473Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:150:2057] recipient: [2:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:150:2057] recipient: [2:148:2170] Leader for TabletID 72057594037927938 is [2:154:2174] sender: [2:155:2057] recipient: [2:148:2170] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:178:2057] recipient: [2:14:2061] 2025-05-29T15:22:10.863919Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:10.864079Z node 2 
:PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 2 actor [2:176:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-05-29T15:22:10.864187Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2196] 2025-05-29T15:22:10.864608Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:184:2196] 2025-05-29T15:22:10.864894Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:185:2197] 2025-05-29T15:22:10.865184Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:185:2197] 2025-05-29T15:22:10.871954Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a37b1b9d-69972cca-4ff759d-1af87c41_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:10.897159Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|921b9065-2ee4d62f-65be6287-a5987cbb_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:10.919980Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|bc71e303-dcde5894-a5ca9d6e-9a85d38_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:10.934751Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|310ce6fc-1cfbf1c6-397e5cda-3e98ed3b_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:10.935917Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|bf20c1e3-3b218adb-b7cb024c-178270ab_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [2:108:2139]) on event NKikimr::TEvPersQueue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:294:2057] recipient: [2:100:2134] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:297:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:298:2057] recipient: [2:296:2293] Leader for TabletID 72057594037927937 is [2:299:2294] sender: [2:300:2057] recipient: [2:296:2293] 2025-05-29T15:22:10.942073Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:10.942092Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:10.942225Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:348:2335] 2025-05-29T15:22:10.942595Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:349:2336] 2025-05-29T15:22:10.943760Z node 2 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:22:10.943771Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:349:2336] 2025-05-29T15:22:10.943896Z node 2 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:22:10.943902Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:348:2335] !Reboot 72057594037927937 (actor [2:108:2139]) rebooted! !Reboot 72057594037927937 (actor [2:108:2139]) tablet resolver refreshed! 
new actor is[2:299:2294] Leader for TabletID 72057594037927937 is [2:299:2294] sender: [2:398:2057] recipient: [2:14:2061] 2025-05-29T15:22:12.160441Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|2c4e618c-f8d7e142-ffbc181f-4c47ff08_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:104:2057] recipient: [3:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:104:2057] recipient: [3:102:2135] Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:109:2057] recipient: [3:102:2135] 2025-05-29T15:22:12.310762Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:12.310787Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:150:2057] recipient: [3:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:150:2057] recipient: [3:148:2170] Leader for TabletID 72057594037927938 is [3:154:2174] sender: [3:155:2057] recipient: [3:148:2170] Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:180:2057] recipient: [3:14:2061] 2025-05-29T15:22:12.316429Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:12.316650Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 3 actor [3:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-05-29T15:22:12.316771Z node 3 :PERSQUEUE INFO: par ... 
ation: 63 Important: false } Consumers { Name: "another1" Generation: 65 Important: true } Consumers { Name: "important" Generation: 64 Important: true } Consumers { Name: "another" Generation: 66 Important: false } 2025-05-29T15:22:38.517024Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 66 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 63 ReadRuleGenerations: 63 ReadRuleGenerations: 65 ReadRuleGenerations: 64 ReadRuleGenerations: 66 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 63 Important: false } Consumers { Name: "aaa" Generation: 63 Important: false } Consumers { Name: "another1" Generation: 65 Important: true } Consumers { Name: "important" Generation: 64 Important: true } Consumers { Name: "another" Generation: 66 Important: false } 2025-05-29T15:22:38.517040Z node 62 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:38.517079Z node 62 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user another reinit with generation 66 done 2025-05-29T15:22:38.517113Z node 62 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:38.517117Z node 62 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:38.517121Z node 62 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:38.517126Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:38.517129Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-05-29T15:22:38.517133Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-05-29T15:22:38.517136Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000caaa 2025-05-29T15:22:38.517140Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uaaa 2025-05-29T15:22:38.517146Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000canother1 2025-05-29T15:22:38.517149Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uanother1 2025-05-29T15:22:38.517153Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000canother 2025-05-29T15:22:38.517156Z node 62 :PERSQUEUE DEBUG: 
partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uanother 2025-05-29T15:22:38.517160Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cimportant 2025-05-29T15:22:38.517163Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uimportant 2025-05-29T15:22:38.517167Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-05-29T15:22:38.517171Z node 62 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:38.517175Z node 62 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:22:38.517191Z node 62 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user another reinit with generation 66 done 2025-05-29T15:22:38.517217Z node 62 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:38.517220Z node 62 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:38.517224Z node 62 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-05-29T15:22:38.517228Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-05-29T15:22:38.517231Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-05-29T15:22:38.517235Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-05-29T15:22:38.517238Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001caaa 2025-05-29T15:22:38.517243Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uaaa 2025-05-29T15:22:38.517246Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001canother1 2025-05-29T15:22:38.517250Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uanother1 2025-05-29T15:22:38.517254Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001canother 2025-05-29T15:22:38.517257Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uanother 2025-05-29T15:22:38.517261Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cimportant 2025-05-29T15:22:38.517264Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uimportant 2025-05-29T15:22:38.517268Z node 62 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] _config_1 2025-05-29T15:22:38.517272Z node 62 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:38.517277Z node 62 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 
72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-05-29T15:22:38.517288Z node 62 :PERSQUEUE DEBUG: read.h:262: CacheProxy. Passthrough write request to KV 2025-05-29T15:22:38.517328Z node 62 :PERSQUEUE DEBUG: read.h:262: CacheProxy. Passthrough write request to KV 2025-05-29T15:22:38.518134Z node 62 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:38.518175Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2025-05-29T15:22:38.518268Z node 62 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:38.518283Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2025-05-29T15:22:38.518313Z node 62 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 66 actor [62:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 66 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 63 ReadRuleGenerations: 63 ReadRuleGenerations: 65 ReadRuleGenerations: 64 ReadRuleGenerations: 66 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 63 Important: false } Consumers { Name: "aaa" Generation: 63 Important: false } Consumers { Name: "another1" Generation: 65 Important: true } Consumers { Name: "important" Generation: 64 Important: true } Consumers { Name: "another" Generation: 66 Important: false } 2025-05-29T15:22:38.518406Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [62:618:2564], now have 1 active actors on pipe 2025-05-29T15:22:38.518495Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [62:621:2566], now have 1 active actors on pipe 2025-05-29T15:22:38.518504Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'topic' requestId: 2025-05-29T15:22:38.518508Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-05-29T15:22:38.518524Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-05-29T15:22:38.518568Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [62:623:2568], now have 1 active actors on pipe 2025-05-29T15:22:38.518577Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'topic' requestId: 2025-05-29T15:22:38.518580Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-05-29T15:22:38.518586Z node 62 :PERSQUEUE DEBUG: 
2025-05-29T15:22:38.518623Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [62:625:2570], now have 1 active actors on pipe
2025-05-29T15:22:38.518632Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'topic' requestId:
2025-05-29T15:22:38.518634Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0
2025-05-29T15:22:38.518643Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0
2025-05-29T15:22:38.518676Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [62:627:2572], now have 1 active actors on pipe
2025-05-29T15:22:38.518684Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'topic' requestId:
2025-05-29T15:22:38.518686Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0
2025-05-29T15:22:38.518696Z node 62 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0
>> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeNullNull [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive
>> TMiniKQLEngineFlatHostTest::ShardId [GOOD]
>> TMiniKQLEngineFlatHostTest::Basic [GOOD]
>> TMiniKQLEngineFlatTest::TestAbort [GOOD]
>> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 [GOOD]
>> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 [GOOD]
>> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 [GOOD]
>> TMiniKQLEngineFlatTest::TestBug998 [GOOD]
>> TMiniKQLEngineFlatTest::TestAcquireLocks [GOOD]
>> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers
>> TMiniKQLEngineFlatTest::TestEmptyProgram [GOOD]
>> TMiniKQLEngineFlatTest::TestEraseRow [GOOD]
>> TMiniKQLEngineFlatTest::TestEraseRowNullKey [GOOD]
>> TMiniKQLEngineFlatTest::TestEraseRowManyShards [GOOD]
>> TMiniKQLEngineFlatTest::TestCASBoth2Success [GOOD]
>> TMiniKQLEngineFlatTest::TestEraseRowNoShards [GOOD]
>> TMiniKQLEngineFlatTest::TestDiagnostics [GOOD]
>> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestLengthPushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestInternalResult [GOOD]
>> TMiniKQLEngineFlatTest::TestIndependentSelects
>> TTxAllocatorClientTest::InitiatingRequest
>> TTxAllocatorClientTest::ZeroRange
>> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeNoShards [GOOD]
>> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD]
>> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda [GOOD]
>> TMiniKQLEngineFlatTest::NoOrderedMapPushdown [GOOD]
>> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable [GOOD]
>> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD]
>> TMiniKQLEngineFlatTest::TestIndependentSelects [GOOD]
>> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD]
>> TTxAllocatorClientTest::Boot
|61.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest
>> TPQTest::TestPartitionTotalQuota [GOOD]
>> TPQTest::TestPartitionPerConsumerQuota
>> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK [FAIL]
>> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK
>> TestDataErasure::DataErasureRun3CyclesForAllSupportedObjects [GOOD]
>> TestDataErasure::DataErasureManualLaunch3Cycles [GOOD]
>> TTxAllocatorClientTest::Boot [GOOD]
>> TTxAllocatorClientTest::InitiatingRequest [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD]
Test command err: PrepareShardPrograms (491): too many shard readsets (2 > 1), src tables: [200:301:0], dst tables: [200:301:0]
Type { Kind: Struct }
|61.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest
>> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD]
|61.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest
>> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD]
>> TPQTest::TestAlreadyWritten [GOOD]
>> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously
|61.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD]
Test command err: SetProgram (370): ydb/core/engine/mkql_engine_flat.cpp:183: ExtractResultType(): requirement !label.StartsWith(TxInternalResultPrefix) failed. Label can't be used in SetResult as it's reserved for internal purposes: __cantuse
PrepareShardPrograms (491): too many shard readsets (1 > 0), src tables: [200:301:0], dst tables: [200:302:0]
Type { Kind: Struct }
|61.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest
>> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureRun3CyclesForAllSupportedObjects [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:22:31.664392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:22:31.664413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:22:31.664417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:22:31.664421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:22:31.664431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:22:31.664434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:22:31.664440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:22:31.664452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:22:31.664537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:22:31.664589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:22:31.675626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:22:31.675647Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:22:31.678650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:22:31.678800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:22:31.678855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:22:31.680638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:31.680805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:31.680942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:31.680993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:31.681451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:31.681503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:31.681815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:31.681826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:31.681849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:31.681857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:31.681863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:31.681901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.683228Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:31.696377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:31.696451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.696510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:31.696543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:31.696550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.697460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:31.697488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:31.697535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.697556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:31.697560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:31.697564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:31.697979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.697988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:31.697991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:31.698298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.698307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:31.698311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:31.698317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:31.698758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:31.699094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:31.699136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:31.699280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:31.699298Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:31.699303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:31.699341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:31.699346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:31.699371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:31.699378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:31.699742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:31.699748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:31.699783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
dRequestToBSC: Generation# 3 2025-05-29T15:22:38.537275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:295:2278]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-05-29T15:22:38.537280Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:38.537284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:38.537313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:38.537324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:648: TTxCompleteDataErasureBSC: Progress data shred in BSC 50% 2025-05-29T15:22:38.537338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-05-29T15:22:38.537346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:348: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-05-29T15:22:38.638923Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:461:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:38.638957Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:38.638985Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:461:2412], Recipient [1:461:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:38.638990Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:38.639038Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [1:804:2688], Recipient [1:461:2412]: NKikimrSchemeOp.TDescribePath PathId: 3 SchemeshardId: 72075186233409546 2025-05-29T15:22:38.639043Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:22:38.639074Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-05-29T15:22:38.639145Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72075186233409546 describe pathId 3 took 54us result status StatusSuccess 2025-05-29T15:22:38.639292Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Database1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409552 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 10 } YdbDatabasePath: "/MyRoot/Database1" } Partitions { PartitionId: 0 TabletId: 72075186233409551 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409552 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2025-05-29T15:22:38.777831Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:966:2821]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:38.777869Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:38.777893Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:966:2821], Recipient [1:966:2821]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:38.777898Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:38.777953Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [1:1380:3165], Recipient [1:966:2821]: NKikimrSchemeOp.TDescribePath PathId: 3 SchemeshardId: 72075186233409553 2025-05-29T15:22:38.777959Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:22:38.777991Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72075186233409553, at schemeshard: 72075186233409553 2025-05-29T15:22:38.778059Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72075186233409553 describe pathId 3 took 52us result status StatusSuccess 2025-05-29T15:22:38.778212Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Database2/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72075186233409553 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 108 CreateStep: 350 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409559 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { 
PartitionConfig { LifetimeSeconds: 10 } YdbDatabasePath: "/MyRoot/Database2" } Partitions { PartitionId: 0 TabletId: 72075186233409558 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409559 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 SchemeShard: 72075186233409553 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72075186233409553, at schemeshard: 72075186233409553 2025-05-29T15:22:39.173538Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:295:2278]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:39.173588Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:39.173606Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:295:2278], Recipient [1:295:2278]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:39.173612Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:39.215032Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:295:2278]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:39.215074Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5077: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:39.215084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:354: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-05-29T15:22:39.215198Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [1:298:2280], Recipient [1:295:2278]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-05-29T15:22:39.215210Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:39.215214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:39.215247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:39.215254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:644: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-05-29T15:22:39.215266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:170: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 0.922000s, Timestamp# 1970-01-01T00:00:11.124000Z 2025-05-29T15:22:39.215273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:378: [RootDataErasureManager] Complete: Generation# 3, duration# 2 s 2025-05-29T15:22:39.220083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-05-29T15:22:39.220312Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:4098:5374], Recipient [1:295:2278]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:39.220323Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:39.220328Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:22:39.220361Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125519, Sender [1:277:2267], Recipient [1:295:2278]: NKikimrScheme.TEvDataErasureInfoRequest 2025-05-29T15:22:39.220368Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5074: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-05-29T15:22:39.220373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7753: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 |61.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] Test command err: 2025-05-29T15:22:39.203862Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1925: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-05-29T15:22:39.203992Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:911: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-05-29T15:22:39.204126Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:225: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-05-29T15:22:39.204595Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.204713Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-05-29T15:22:39.207338Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.207402Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.207430Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1396: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-05-29T15:22:39.207466Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.207485Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.207512Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-05-29T15:22:39.207546Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1009: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-05-29T15:22:39.207702Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:71:2105] requested range size#5000 2025-05-29T15:22:39.207801Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.207809Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.207822Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-05-29T15:22:39.207828Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:71:2105] TEvAllocateResult from# 0 to# 5000 >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously [GOOD] >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureManualLaunch3Cycles [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:31.984200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 
600.000000s, MaxRate# 1 2025-05-29T15:22:31.984230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:31.984237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:31.984245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:31.984263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:31.984268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:31.984277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:31.984291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:31.984406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:31.984480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:31.997949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:31.997973Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:32.000526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:32.000657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:32.000717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:32.002839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:32.003034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:32.003183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.003239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:32.003760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:32.003802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:32.004024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:32.004039Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:32.004057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:32.004062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:32.004067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:32.004095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.006090Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:32.029710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:32.029790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.029851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:32.029895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:32.029906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.031264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.031297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:32.031347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.031359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:32.031366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:32.031372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:32.031999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.032014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:32.032021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:32.032641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.032654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:32.032662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.032670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:32.033451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:32.033944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:32.033994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:32.034172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:32.034203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:32.034211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.034270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:32.034278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:32.034317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:32.034330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:32.034793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:32.034803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:32.034846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... cpp:5077: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:38.583383Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:354: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-05-29T15:22:38.583493Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [2:296:2278], Recipient [2:293:2276]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-05-29T15:22:38.583501Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:38.583505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:38.583529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:38.583545Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:648: TTxCompleteDataErasureBSC: Progress data shred in BSC 50% 2025-05-29T15:22:38.583559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-05-29T15:22:38.583571Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:348: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-05-29T15:22:38.948475Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:460:2411]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:38.948506Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:38.948525Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [2:460:2411], Recipient [2:460:2411]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:38.948531Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:38.948592Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [2:803:2688], Recipient [2:460:2411]: NKikimrSchemeOp.TDescribePath PathId: 3 SchemeshardId: 72075186233409546 2025-05-29T15:22:38.948599Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event 
TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:22:38.948630Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-05-29T15:22:38.948692Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72075186233409546 describe pathId 3 took 47us result status StatusSuccess 2025-05-29T15:22:38.948838Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Database1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409552 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 10 } YdbDatabasePath: "/MyRoot/Database1" } Partitions { PartitionId: 0 TabletId: 72075186233409551 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409552 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2025-05-29T15:22:39.020908Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:958:2814]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:39.020934Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:39.020953Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [2:958:2814], Recipient [2:958:2814]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:39.020957Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:39.021005Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [2:1371:3155], Recipient [2:958:2814]: NKikimrSchemeOp.TDescribePath PathId: 3 SchemeshardId: 72075186233409553 2025-05-29T15:22:39.021011Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:22:39.021038Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72075186233409553, at 
schemeshard: 72075186233409553 2025-05-29T15:22:39.021095Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72075186233409553 describe pathId 3 took 44us result status StatusSuccess 2025-05-29T15:22:39.021211Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Database2/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72075186233409553 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 108 CreateStep: 400 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409559 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 10 } YdbDatabasePath: "/MyRoot/Database2" } Partitions { PartitionId: 0 TabletId: 72075186233409558 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409559 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 SchemeShard: 72075186233409553 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72075186233409553, at schemeshard: 72075186233409553 2025-05-29T15:22:39.092735Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:293:2276]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:39.092772Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:22:39.092791Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [2:293:2276], Recipient [2:293:2276]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:39.092795Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:22:39.170844Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:293:2276]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:39.170884Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5077: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-05-29T15:22:39.170892Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:354: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-05-29T15:22:39.170996Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268637738, Sender [2:296:2278], Recipient [2:293:2276]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 
Completed: true Progress10k: 10000 2025-05-29T15:22:39.171003Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5076: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-05-29T15:22:39.171007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7798: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-05-29T15:22:39.171033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:632: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-05-29T15:22:39.171038Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:644: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-05-29T15:22:39.171044Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:378: [RootDataErasureManager] Complete: Generation# 3, duration# 2 s 2025-05-29T15:22:39.172038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_data_erasure_manager.cpp:656: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-05-29T15:22:39.172227Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [2:4046:5321], Recipient [2:293:2276]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:39.172238Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:39.172244Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:22:39.172264Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125519, Sender [2:3208:4653], Recipient [2:293:2276]: NKikimrScheme.TEvDataErasureInfoRequest 2025-05-29T15:22:39.172271Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5074: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-05-29T15:22:39.172276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7753: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 |61.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 [FAIL] >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] Test command err: 2025-05-29T15:22:39.233082Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1925: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-05-29T15:22:39.233216Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:911: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-05-29T15:22:39.233312Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:225: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-05-29T15:22:39.233741Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.233830Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-05-29T15:22:39.235574Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.235614Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.235632Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1396: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-05-29T15:22:39.235655Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.235667Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.235686Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-05-29T15:22:39.235708Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1009: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 |61.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |61.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |61.1%| [TA] {RESULT} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |61.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |61.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK [FAIL] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad |61.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.1%| [TA] $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ObjectStorageListingTest::ListingNoFilter |61.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |61.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.1%| [TA] {RESULT} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/{meta.json ... results_accumulator.log} >> TTxAllocatorClientTest::AllocateOverTheEdge >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [FAIL] >> WithSDK::DescribeConsumer |61.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest |61.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest |61.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest |61.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK >> CommitOffset::Commit_WithoutSession_TopPast |61.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/{meta.json ... results_accumulator.log} |61.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge >> TopicAutoscaling::ControlPlane_BackCompatibility >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK >> TopicAutoscaling::PartitionSplit_PQv1 |61.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |61.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |61.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [FAIL] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad_AfterAlter >> ObjectStorageListingTest::FilterListing [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:109:2057] recipient: [1:102:2135] 2025-05-29T15:22:13.437065Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:22:13.438151Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:22:13.438222Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037927937] doesn't have tx info 2025-05-29T15:22:13.438231Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:22:13.438236Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-05-29T15:22:13.438243Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:22:13.438251Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:13.438261Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927938 is [1:154:2174] sender: [1:155:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:180:2057] recipient: [1:14:2061] 2025-05-29T15:22:13.441984Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:179:2193], now have 1 active actors on pipe 2025-05-29T15:22:13.442008Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:22:13.444910Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-05-29T15:22:13.445623Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-05-29T15:22:13.445651Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:13.445795Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] 
Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-05-29T15:22:13.445832Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:22:13.445904Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:13.445967Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2198] 2025-05-29T15:22:13.446654Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2025-05-29T15:22:13.446662Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:186:2198] 2025-05-29T15:22:13.446673Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:22:13.447282Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:22:13.447301Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-05-29T15:22:13.447307Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-05-29T15:22:13.447314Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2025-05-29T15:22:13.447318Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2025-05-29T15:22:13.447348Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:13.447353Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:13.447357Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:13.447362Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:13.447366Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-05-29T15:22:13.447370Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-05-29T15:22:13.447374Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser1 2025-05-29T15:22:13.447377Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser1 2025-05-29T15:22:13.447384Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:13.447388Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:22:13.447412Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:13.447418Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:13.447452Z node 1 :PERSQUEUE DEBUG: read.h:262: CacheProxy. 
Passthrough write request to KV 2025-05-29T15:22:13.448097Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:13.448197Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:193:2203], now have 1 active actors on pipe 2025-05-29T15:22:13.449717Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:196:2205], now have 1 active actors on pipe 2025-05-29T15:22:13.449743Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-05-29T15:22:13.449750Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-05-29T15:22:13.449871Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2165: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid2' SeqNo: 1 partNo : 0 messageNo: 0 size: 511957 2025-05-29T15:22:13.449929Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2165: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid2' SeqNo: 1 partNo : 1 messageNo: 0 size: 511957 2025-05-29T15:22:13.449985Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2165: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid2' SeqNo: 1 partNo : 2 messageNo: 0 size: 511957 2025-05-29T15:22:13.450043Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2165: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid2' SeqNo: 1 partNo : 3 messageNo: 0 size: 511957 2025-05-29T15:22:13.450059Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2165: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid2' SeqNo: 1 partNo : 4 messageNo: 0 size: 49324 2025-05-29T15:22:13.450066Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid2' SeqNo: 1 partNo : 4 messageNo: 0 size 49324 offset: 0 2025-05-29T15:22:13.450087Z node 1 :PERSQUEUE DEBUG: event_helpers.cpp:40: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2025-05-29T15:22:13.450110Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1424: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-05-29T15:22:13.450115Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:401: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: ... 
s 0 cbcount 1 2025-05-29T15:22:40.565296Z node 26 :PERSQUEUE DEBUG: partition_read.cpp:513: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-05-29T15:22:40.565466Z node 26 :PERSQUEUE DEBUG: partition_read.cpp:513: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-05-29T15:22:40.565666Z node 26 :PERSQUEUE DEBUG: partition_read.cpp:513: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-05-29T15:22:40.565833Z node 26 :PERSQUEUE DEBUG: partition_read.cpp:513: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-05-29T15:22:40.565991Z node 26 :PERSQUEUE DEBUG: partition_read.cpp:513: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-05-29T15:22:40.566155Z node 26 :PERSQUEUE DEBUG: partition_read.cpp:513: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-05-29T15:22:40.566332Z node 26 :PERSQUEUE DEBUG: partition_read.cpp:513: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-05-29T15:22:40.566513Z node 26 :PERSQUEUE DEBUG: partition_read.cpp:513: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-05-29T15:22:40.566680Z node 26 :PERSQUEUE DEBUG: partition_read.cpp:513: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-05-29T15:22:40.566886Z node 26 :PERSQUEUE DEBUG: partition_read.cpp:513: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-05-29T15:22:40.567071Z node 26 :PERSQUEUE DEBUG: partition_read.cpp:513: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-05-29T15:22:40.567261Z node 26 :PERSQUEUE DEBUG: partition_read.cpp:513: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-05-29T15:22:40.567768Z node 26 :PERSQUEUE DEBUG: partition_read.cpp:513: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-05-29T15:22:40.567964Z node 26 :PERSQUEUE DEBUG: partition_read.cpp:513: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-05-29T15:22:40.568058Z node 26 :PERSQUEUE DEBUG: partition_read.cpp:513: FormAnswer processing batch offset 3 totakecount 1 count 1 size 172682 from pos 0 cbcount 1 2025-05-29T15:22:40.568256Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 123 2025-05-29T15:22:40.584551Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [26:446:2418], now have 1 active actors on pipe 2025-05-29T15:22:40.584599Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'topic' requestId: 2025-05-29T15:22:40.584608Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-05-29T15:22:40.584625Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 15 partNo : 0 messageNo: 1 size 102400 offset: 14 2025-05-29T15:22:40.584648Z node 26 :PERSQUEUE DEBUG: event_helpers.cpp:40: tablet 72057594037927937 topic 'topic' partition 0 error: new GetOwnership request needed for owner 
2025-05-29T15:22:40.584681Z node 26 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:40.584686Z node 26 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:40.584691Z node 26 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000002_00000_0000000001_00014, d0000000000_00000000000000000002_00000_0000000001_00014] 2025-05-29T15:22:40.584696Z node 26 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000003_00000_0000000001_00014, d0000000000_00000000000000000003_00000_0000000001_00014] 2025-05-29T15:22:40.584701Z node 26 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:40.584722Z node 26 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:40.584726Z node 26 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:40.584730Z node 26 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:22:40.584745Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:1424: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 45, Error new GetOwnership request needed for owner 2025-05-29T15:22:40.584750Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:401: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-05-29T15:22:40.584760Z node 26 :PERSQUEUE DEBUG: read.h:262: CacheProxy. Passthrough write request to KV 2025-05-29T15:22:40.584768Z node 26 :PERSQUEUE DEBUG: read.h:338: CacheProxy. Delete blobs from d0000000000_00000000000000000002_00000_0000000001_00014(+) to d0000000000_00000000000000000002_00000_0000000001_00014(+) 2025-05-29T15:22:40.584772Z node 26 :PERSQUEUE DEBUG: read.h:338: CacheProxy. Delete blobs from d0000000000_00000000000000000003_00000_0000000001_00014(+) to d0000000000_00000000000000000003_00000_0000000001_00014(+) 2025-05-29T15:22:40.587776Z node 26 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:40.591590Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [26:458:2429], now have 1 active actors on pipe 2025-05-29T15:22:40.591623Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'topic' requestId: 2025-05-29T15:22:40.591634Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-05-29T15:22:40.591666Z node 26 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a6978e2d-53848525-dbb2b802-c196335e_14 generated for partition 0 topic 'topic' owner default 2025-05-29T15:22:40.591696Z node 26 :PERSQUEUE DEBUG: partition_write.cpp:35: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-05-29T15:22:40.591719Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-05-29T15:22:40.591788Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [26:460:2431], now have 1 active actors on pipe 2025-05-29T15:22:40.591803Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'topic' requestId: 2025-05-29T15:22:40.591807Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-05-29T15:22:40.591820Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 15 partNo : 0 messageNo: 0 size 102400 offset: 14 2025-05-29T15:22:40.591840Z node 26 :PERSQUEUE DEBUG: partition_write.cpp:1704: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Send write quota request. Topic: "topic". Partition: 0. Amount: 102409. Cookie: 15 2025-05-29T15:22:40.895621Z node 26 :PERSQUEUE DEBUG: partition.cpp:3630: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Got quota. Topic: "topic". Partition: 0: Cookie: 15 2025-05-29T15:22:40.895699Z node 26 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob processing sourceId 'sourceid1' seqNo 15 partNo 0 2025-05-29T15:22:40.895757Z node 26 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob complete sourceId 'sourceid1' seqNo 15 partNo 0 FormedBlobsCount 0 NewHead: Offset 14 PartNo 0 PackedSize 102472 count 1 nextOffset 15 batches 1 2025-05-29T15:22:40.895951Z node 26 :PERSQUEUE DEBUG: partition_write.cpp:1623: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'topic' partition 0 compactOffset 14,1 HeadOffset 14 endOffset 14 curOffset 15 d0000000000_00000000000000000014_00000_0000000001_00000| size 102462 WTime 2102 2025-05-29T15:22:40.895996Z node 26 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:40.896002Z node 26 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:40.896007Z node 26 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-05-29T15:22:40.896012Z node 26 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:40.896017Z node 26 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psourceid1 2025-05-29T15:22:40.896020Z node 26 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000014_00000_0000000001_00000| 2025-05-29T15:22:40.896024Z node 26 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:40.896029Z node 26 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:40.896034Z node 26 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:22:40.896138Z node 26 :PERSQUEUE DEBUG: 
read.h:262: CacheProxy. Passthrough write request to KV 2025-05-29T15:22:40.896155Z node 26 :PERSQUEUE DEBUG: read.h:300: CacheProxy. Passthrough blob. Partition 0 offset 14 partNo 0 count 1 size 102462 2025-05-29T15:22:40.904088Z node 26 :PERSQUEUE DEBUG: cache_eviction.h:315: Caching head blob in L1. Partition 0 offset 14 count 1 size 102462 actorID [26:135:2160] 2025-05-29T15:22:40.904139Z node 26 :PERSQUEUE DEBUG: pq_l2_cache.cpp:120: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 14 partno 0 count 1 parts 0 size 102462 2025-05-29T15:22:40.904186Z node 26 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 102409 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:40.904203Z node 26 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:40.904218Z node 26 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid1', Topic: 'topic', Partition: 0, SeqNo: 15, partNo: 0, Offset: 14 is stored on disk 2025-05-29T15:22:40.904302Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-05-29T15:22:40.904445Z node 26 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [26:472:2440], now have 1 active actors on pipe |61.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] Test command err: 2025-05-29T15:22:41.017914Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1925: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-05-29T15:22:41.018033Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:911: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-05-29T15:22:41.018151Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:225: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-05-29T15:22:41.018616Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:41.018716Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-05-29T15:22:41.021467Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:41.021522Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:41.021563Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1396: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-05-29T15:22:41.021599Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:41.021614Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:41.021639Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-05-29T15:22:41.021670Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1009: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-05-29T15:22:41.021813Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:71:2105] requested range size#5000 2025-05-29T15:22:41.021935Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:41.021945Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:41.021961Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-05-29T15:22:41.021966Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:71:2105] TEvAllocateResult from# 0 to# 5000 2025-05-29T15:22:41.021998Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-05-29T15:22:41.022021Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-05-29T15:22:41.022037Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-05-29T15:22:41.022053Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. 
Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-05-29T15:22:41.022070Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:71:2105] requested range size#5000 2025-05-29T15:22:41.022134Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:41.022153Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:41.022164Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 5000 Reserved to# 10000 2025-05-29T15:22:41.022168Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:71:2105] TEvAllocateResult from# 5000 to# 10000 2025-05-29T15:22:41.022185Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-05-29T15:22:41.022210Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-05-29T15:22:41.022248Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 2500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-05-29T15:22:41.022299Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-05-29T15:22:41.022312Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:71:2105] requested range size#5000 2025-05-29T15:22:41.022345Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:41.022353Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:41.022362Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 10000 Reserved to# 15000 2025-05-29T15:22:41.022366Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:71:2105] TEvAllocateResult from# 10000 to# 15000 2025-05-29T15:22:41.022384Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. 
Requested: 3000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 |61.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut >> CommitOffset::PartitionSplit_OffsetCommit |61.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |61.2%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut >> Balancing::Balancing_OneTopic_TopicApi >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK >> TReplicationTests::Create >> TFlatTest::SelectRangeItemsLimit >> WithSDK::DescribeConsumer [FAIL] >> TLocksTest::BrokenSameKeyLock >> ObjectStorageListingTest::ListingNoFilter [FAIL] >> TFlatTest::SplitInvalidPath >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK [FAIL] >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK >> TReplicationTests::CreateSequential >> CommitOffset::Commit_WithoutSession_TopPast [FAIL] >> CommitOffset::Commit_WithWrongSession_ToParent >> TopicAutoscaling::PartitionSplit_AutosplitByLoad_AfterAlter [FAIL] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK [FAIL] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 >> TReplicationTests::Create [GOOD] >> TReplicationTests::ConsistencyLevel >> TopicAutoscaling::ControlPlane_BackCompatibility [FAIL] >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention >> TopicAutoscaling::PartitionSplit_PQv1 [FAIL] >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK >> TLocksTest::CK_GoodLock >> TLocksTest::Range_BrokenLockMax >> TReplicationTests::ConsistencyLevel [GOOD] >> TReplicationTests::Alter >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK [FAIL] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK >> TFlatTest::SelectRangeItemsLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs4 >> CommitOffset::PartitionSplit_OffsetCommit [FAIL] >> CommitOffset::DistributedTxCommit >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel >> TFlatTest::CopyTableAndReturnPartAfterCompaction >> TLocksFatTest::RangeSetRemove >> TFlatTest::SplitInvalidPath [GOOD] >> TFlatTest::SplitThenMerge >> TFlatTest::Mix_DML_DDL >> TReplicationTests::Alter [GOOD] >> TReplicationTests::CannotAddReplicationConfig >> TFlatTest::SplitEmptyAndWrite >> Balancing::Balancing_OneTopic_TopicApi [FAIL] >> Balancing::Balancing_OneTopic_PQv1 >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] >> TReplicationTests::CannotAddReplicationConfig [GOOD] >> TReplicationTests::CannotSetAsyncReplicaAttribute >> TFlatTest::SelectBigRangePerf >> CommitOffset::Commit_WithWrongSession_ToParent [FAIL] >> CommitOffset::Commit_WithoutSession_ParentNotFinished >> TFlatTest::Ls >> TPartitionTests::UserActCount [GOOD] >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention [FAIL] >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK [FAIL] >> TopicAutoscaling::ControlPlane_CreateAlterDescribe >> TReplicationTests::CreateInParallel [GOOD] >> TReplicationTests::CreateDropRecreate >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 [FAIL] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK |61.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |61.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |61.3%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut >> TPartitionTests::TooManyImmediateTxs >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK [FAIL] >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable >> TFlatTest::SplitThenMerge [GOOD] >> TFlatTest::Mix_DML_DDL [GOOD] >> TFlatTest::OutOfDiskSpace [GOOD] >> TFlatTest::SplitEmptyAndWrite [GOOD] >> TFlatTest::SplitBoundaryRead >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::CreateWithoutCredentials >> TFlatTest::CopyTableAndReturnPartAfterCompaction [GOOD] >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK [FAIL] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable >> TFlatTest::SelectBigRangePerf [GOOD] >> TFlatTest::SelectRangeBothLimit >> CommitOffset::DistributedTxCommit [FAIL] >> CommitOffset::DistributedTxCommit_ChildFirst >> TFlatTest::Ls [GOOD] >> TFlatTest::LsPathId >> TPQTest::TestReadSessions [GOOD] >> TPQTest::TestReadSubscription >> TReplicationTests::CreateWithoutCredentials [GOOD] >> TReplicationTests::Describe >> Balancing::Balancing_OneTopic_PQv1 [FAIL] >> Balancing::Balancing_ManyTopics_TopicApi ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] Test command err: 2025-05-29T15:22:42.420078Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888452284708616:2266];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:42.420154Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002881/r3tmp/tmpmxIyed/pdisk_1.dat 2025-05-29T15:22:42.497523Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:42.501801Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888452284708388:2079] 1748532162418236 != 1748532162418239 2025-05-29T15:22:42.521486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:42.521515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:42.522492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63224 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:42.559288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:42.561933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:42.587420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:22:42.589408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002881/r3tmp/tmpgdHcrR/pdisk_1.dat 2025-05-29T15:22:43.047214Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:43.049928Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:43.050268Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888456706083597:2079] 1748532163031805 != 1748532163031808 TClient is connected to server localhost:29854 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:22:43.137075Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:43.137106Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:43.137520Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:43.137946Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:43.143204Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.152417Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> TFlatTest::SplitBoundaryRead [GOOD] >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable >> CommitOffset::Commit_WithoutSession_ParentNotFinished [FAIL] >> CommitOffset::Commit_WithoutSession_ToPastParentPartition >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad [FAIL] >> TopicAutoscaling::ControlPlane_CDC >> TopicAutoscaling::ControlPlane_CreateAlterDescribe [FAIL] >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK [FAIL] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 >> TFlatTest::LsPathId [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SplitThenMerge [GOOD] Test command err: 2025-05-29T15:22:42.666227Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888451975612028:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:42.666254Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002878/r3tmp/tmpapdT4T/pdisk_1.dat 2025-05-29T15:22:42.724665Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:42.727120Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888451975612009:2079] 1748532162666014 != 1748532162666017 TClient is connected to server localhost:19437 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:42.799274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:42.799299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:42.800136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:42.802060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... Error 128: Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp:825 2025-05-29T15:22:42.806782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:42.812411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:810: TSplitMerge Propose failed StatusNameConflict Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp:825, tableStr: /dc-1/Dir1, tableId: , opId: 281474976715659:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir1" SourceTabletId: 100500 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 42 } } } } 2025-05-29T15:22:42.812916Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888451975612612:2297] txid# 281474976715659, issues: { message: "Check failed: path: \'/dc-1/Dir1\', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp:825" severity: 1 } 2025-05-29T15:22:43.199636Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888456269471049:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:43.200557Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002878/r3tmp/tmpzFHb5I/pdisk_1.dat 2025-05-29T15:22:43.218906Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:43.219124Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888456269470884:2079] 1748532163198800 != 1748532163198803 TClient is connected to server localhost:1870 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-29T15:22:43.306950Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:43.306989Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:43.307299Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:43.308483Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:43.309428Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-29T15:22:43.319498Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:22:43.401409Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-29T15:22:43.403204Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-29T15:22:43.409406Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-29T15:22:43.410609Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532163427 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... 
(TRUNCATED) 2025-05-29T15:22:43.423147Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:22:43.423707Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715676 released its data 2025-05-29T15:22:43.423759Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-29T15:22:43.423980Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715676 released its data 2025-05-29T15:22:43.424221Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:22:43.424292Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715676 at 72075186224037888 restored its data 2025-05-29T15:22:43.424423Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715676 released its data 2025-05-29T15:22:43.424445Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-29T15:22:43.424499Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715676 at 72075186224037889 restored its data 2025-05-29T15:22:43.424613Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715676 released its data 2025-05-29T15:22:43.424781Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:22:43.424841Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715676 at 72075186224037888 restored its data 2025-05-29T15:22:43.424961Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715676 released its data 2025-05-29T15:22:43.424979Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-29T15:22:43.425028Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715676 at 72075186224037889 restored its data 2025-05-29T15:22:43.425144Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715676 released its data 2025-05-29T15:22:43.425297Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:22:43.425355Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715676 at 72075186224037888 restored its data 2025-05-29 ... 
blet: 72057594046644480 2025-05-29T15:22:43.569204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7509888456269471538 RawX2: 4503608217307387 } Origin: 72075186224037889 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-05-29T15:22:43.569211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715693:0, shardIdx: 72057594046644480:2, datashard: 72075186224037889, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-05-29T15:22:43.569213Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-05-29T15:22:43.569219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-05-29T15:22:43.569222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037894, at schemeshard: 72057594046644480 2025-05-29T15:22:43.569226Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976715693:0 129 -> 240 2025-05-29T15:22:43.569276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-05-29T15:22:43.569299Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-05-29T15:22:43.569315Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037889 state PreOffline 2025-05-29T15:22:43.569318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-05-29T15:22:43.569320Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-05-29T15:22:43.569321Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:22:43.569337Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037894 state PreOffline 2025-05-29T15:22:43.569338Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-05-29T15:22:43.569402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-29T15:22:43.569431Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715693:0 progress is 1/1 2025-05-29T15:22:43.569437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 
2025-05-29T15:22:43.569439Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715693:0 progress is 1/1 2025-05-29T15:22:43.569440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-05-29T15:22:43.569443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715693, ready parts: 1/1, is published: true 2025-05-29T15:22:43.569445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-05-29T15:22:43.569449Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715693:0 2025-05-29T15:22:43.569450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715693:0 2025-05-29T15:22:43.569483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-29T15:22:43.569626Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037894 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-29T15:22:43.569639Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037894 Initiating switch from PreOffline to Offline state 2025-05-29T15:22:43.569840Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-29T15:22:43.569846Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-05-29T15:22:43.570010Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3306: 72075186224037894 Reporting state Offline to schemeshard 72057594046644480 2025-05-29T15:22:43.570148Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3306: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-05-29T15:22:43.570272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888456269472147 RawX2: 4503608217307478 } TabletId: 72075186224037894 State: 4 2025-05-29T15:22:43.570284Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:22:43.570341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:22:43.570472Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037894 state Offline 2025-05-29T15:22:43.570531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888456269471538 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-05-29T15:22:43.570536Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged 
DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:22:43.570584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:22:43.570621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-05-29T15:22:43.570670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-29T15:22:43.570804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:7 2025-05-29T15:22:43.570807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-05-29T15:22:43.570899Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037894 reason = ReasonStop 2025-05-29T15:22:43.570918Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037894, clientId# [2:7509888456269472258:2785], serverId# [2:7509888456269472259:2786], sessionId# [0:0:0] 2025-05-29T15:22:43.570923Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-05-29T15:22:43.571128Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-05-29T15:22:43.571231Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037894 2025-05-29T15:22:43.571255Z node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037894 2025-05-29T15:22:43.571906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-29T15:22:43.571953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-29T15:22:43.571983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:22:43.571985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-29T15:22:43.571993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-29T15:22:43.572251Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:2 2025-05-29T15:22:43.572256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-29T15:22:43.572263Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 
paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-29T15:22:43.572483Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-05-29T15:22:43.572489Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7509888456269471647:2391], serverId# [2:7509888456269471648:2392], sessionId# [0:0:0] 2025-05-29T15:22:43.572547Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-29T15:22:43.572626Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037889 2025-05-29T15:22:43.572638Z node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037889 waiting... 2025-05-29T15:22:43.574840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715693, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::OutOfDiskSpace [GOOD] Test command err: 2025-05-29T15:22:43.257671Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888458718813265:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:43.257699Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00286b/r3tmp/tmpyZJ8vp/pdisk_1.dat 2025-05-29T15:22:43.306840Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888458718813238:2079] 1748532163257516 != 1748532163257519 2025-05-29T15:22:43.310845Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:12692 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:22:43.386516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:43.386545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:43.387341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:43.387693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:43.390360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:43.391642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:43.472450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 proxy error code: Unknown error:
: Error: Resolve failed for table: /dc-1/Table, error: column 'value' not exist, code: 200400 2025-05-29T15:22:43.478439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:43.491733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.504498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 proxy error code: Unknown error:
:5:24: Error: At function: AsList
:5:32: Error: At function: SetResult
:4:27: Error: At function: SelectRow
:4:27: Error: Mismatch of key columns count for table [/dc-1/Table], expected: 2, but got 1., code: 2028 >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> TReplicationTests::CopyReplicatedTable >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK [FAIL] >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 >> TFlatTest::SelectRangeBothLimit [GOOD] |61.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |61.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |61.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan >> CommitOffset::DistributedTxCommit_ChildFirst [FAIL] >> CommitOffset::DistributedTxCommit_CheckSessionResetAfterCommit >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit [FAIL] >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 >> TReplicationTests::CopyReplicatedTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SplitBoundaryRead [GOOD] >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] Test command err: 2025-05-29T15:22:43.497676Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888459361423998:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:43.497758Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002868/r3tmp/tmpHlUEnk/pdisk_1.dat 2025-05-29T15:22:43.566662Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:43.567135Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888459361423835:2079] 1748532163496434 != 1748532163496437 TClient is connected to server localhost:14904 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:43.597242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.599924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:43.614555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.620792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:22:43.642726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:43.642776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:43.643909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:43.699617Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-29T15:22:43.702054Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-29T15:22:43.713227Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-29T15:22:43.714964Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1748532163728 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... (TRUNCATED) waiting... 
2025-05-29T15:22:43.764955Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.22, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-29T15:22:43.765008Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-29T15:22:43.765024Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-29T15:22:43.765050Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.23, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-29T15:22:43.765688Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 3} end=Done, 4 blobs 2r (max 2), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (1907 1533 0)b }, ecr=1.000 2025-05-29T15:22:43.767276Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.33, eph 3} end=Done, 4 blobs 8r (max 8), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1748532163728 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... (TRUNCATED) waiting... 
TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-05-29T15:22:43.817299Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-05-29T15:22:43.817318Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-05-29T15:22:43.822038Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-05-29T15:22:43.822055Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-05-29T15:22:43.822058Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-05-29T15:22:43.984673Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888457725641943:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:43.984721Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002868/r3tmp/tmpouUUKF/pdisk_1.dat 2025-05-29T15:22:44.007554Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:44.009566Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888457725641915:2079] 1748532163984502 != 1748532163984505 TClient is connected to server localhost:3140 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:44.091253Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:44.091281Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2025-05-29T15:22:44.091678Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:44.092456Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:44.142869Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:44.144481Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:22:44.165602Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Don ... kie: 281474976715678 TabletId: 72075186224037890 2025-05-29T15:22:44.236645Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:37: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037890 2025-05-29T15:22:44.236758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-29T15:22:44.236893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037891 2025-05-29T15:22:44.236902Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:37: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037891 2025-05-29T15:22:44.236907Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976715678:0 3 -> 131 2025-05-29T15:22:44.236957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-29T15:22:44.236968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-29T15:22:44.236971Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:328: TSplitMerge TTransferData operationId# 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:22:44.236975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:347: TSplitMerge TTransferData operationId# 281474976715678:0 Starting split on src datashard 72075186224037888 splitOpId# 281474976715678:0 at tablet 72057594046644480 2025-05-29T15:22:44.237016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553154 2025-05-29T15:22:44.237035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: 
TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037888 2025-05-29T15:22:44.238628Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-29T15:22:44.238682Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-29T15:22:44.238711Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.27, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-29T15:22:44.238727Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.28, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-29T15:22:44.238765Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.29, eph -9223372036854775808} end=Done, 0 blobs 0r (max 0), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-29T15:22:44.241271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-05-29T15:22:44.241285Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:206: TSplitMerge TTransferData operationId# 281474976715678:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-05-29T15:22:44.241377Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976715678:0 131 -> 132 2025-05-29T15:22:44.241406Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-05-29T15:22:44.241505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-29T15:22:44.241537Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-29T15:22:44.241555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715678, path id: [OwnerId: 72057594046644480, LocalPathId: 3] 2025-05-29T15:22:44.241619Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-29T15:22:44.241622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:7509888462020609732:2236], at schemeshard: 72057594046644480, txId: 281474976715678, path id: 3 2025-05-29T15:22:44.241629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-29T15:22:44.241633Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:431: TSplitMerge TNotifySrc, operationId: 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:22:44.241639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:462: Notify src datashard 72075186224037888 on partitioning changed splitOp# 281474976715678 at tablet 72057594046644480 
2025-05-29T15:22:44.242191Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2025-05-29T15:22:44.242204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2025-05-29T15:22:44.242206Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715678 2025-05-29T15:22:44.242210Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715678, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2025-05-29T15:22:44.242215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-05-29T15:22:44.242233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 0/1, is published: true 2025-05-29T15:22:44.242278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553158 2025-05-29T15:22:44.242320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715678 2025-05-29T15:22:44.246963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-05-29T15:22:44.246978Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 281474976715678:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-05-29T15:22:44.246996Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715678:0 progress is 1/1 2025-05-29T15:22:44.247000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-05-29T15:22:44.247004Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715678:0 progress is 1/1 2025-05-29T15:22:44.247005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-05-29T15:22:44.247009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 1/1, is published: true 2025-05-29T15:22:44.247018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7509888462020610152:2359] message: TxId: 281474976715678 2025-05-29T15:22:44.247026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: 
TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-05-29T15:22:44.247030Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715678:0 2025-05-29T15:22:44.247032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715678:0 2025-05-29T15:22:44.247073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-05-29T15:22:44.247188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-29T15:22:44.247191Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:260: Unable to activate 281474976715678:0 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532164197 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... (TRUNCATED) >> TPartitionTests::TooManyImmediateTxs [GOOD] >> TLocksTest::BrokenSameKeyLock [GOOD] >> TLocksTest::BrokenSameShardLock >> Balancing::Balancing_ManyTopics_TopicApi [FAIL] >> Balancing::Balancing_ManyTopics_PQv1 >> TPartitionTests::WriteSubDomainOutOfSpace ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::LsPathId [GOOD] Test command err: 2025-05-29T15:22:43.664244Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888457036445308:2198];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:43.665112Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00285f/r3tmp/tmp7riTsT/pdisk_1.dat 2025-05-29T15:22:43.766769Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888457036445149:2079] 1748532163663496 != 1748532163663499 2025-05-29T15:22:43.768778Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:17601 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:43.821650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:43.821682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:43.822652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:43.825720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.832351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: // TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532163875 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 
Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePo... (TRUNCATED) TClient::Ls request: /dc-11 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" TClient::Ls request: /dc-2 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" waiting... 2025-05-29T15:22:43.912714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532163875 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532163959 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depr... (TRUNCATED) TClient::Ls request: /dc-1/Berkanavt TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532163959 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10000 Shard... 
(TRUNCATED) 2025-05-29T15:22:43.920465Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888457036445781:2322] txid# 281474976715659, issues: { message: "Check failed: path: \'/dc-1/Berkanavt\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155" severity: 1 } Error 1: Check failed: path: '/dc-1/Berkanavt', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155 TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532163875 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532163959 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depr... (TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" waiting... 2025-05-29T15:22:43.927389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532163875 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532163959 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "arcadia" Path... 
(TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "arcadia" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1748532163973 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsI... (TRUNCATED) test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00285f/r3tmp/tmpxKQqIL/pdisk_1.dat 2025-05-29T15:22:44.219565Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:44.241814Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:44.242129Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888464093769803:2079] 1748532164206343 != 1748532164206346 TClient is connected to server localhost:21639 TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:44.316026Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:44.316053Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:44.316337Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:44.317728Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:44.318973Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... >> TReplicationTests::CreateReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials >> CommitOffset::Commit_WithoutSession_ToPastParentPartition [FAIL] >> CommitOffset::Commit_WithSession_ParentNotFinished_SameSession >> TopicAutoscaling::ControlPlane_CDC [FAIL] >> TopicAutoscaling::ControlPlane_CDC_Disable >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning [FAIL] >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning >> TLocksTest::CK_GoodLock [GOOD] >> TLocksTest::CK_BrokenLock >> TLocksFatTest::RangeSetRemove [GOOD] >> TLocksTest::Range_BrokenLockMax [GOOD] >> TLocksFatTest::ShardLocks >> TLocksTest::Range_CorrectDot >> TPartitionTests::WriteSubDomainOutOfSpace [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:42.562560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:42.562594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:42.562601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:42.562607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:42.562620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:42.562625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:42.562637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:42.562655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:42.562810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:42.562907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 
2025-05-29T15:22:42.577589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:42.577621Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:42.581034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:42.581183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:42.581222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:42.583213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:42.583482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:42.583617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:42.583694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:42.584381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:42.584447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:42.584776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:42.584790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:42.584814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:42.584824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:42.584831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:42.584872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:42.586568Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:42.612119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:42.612226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:42.612301Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:42.612351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:42.612362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:42.613385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:42.613415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:42.613474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:42.613485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:42.613493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:42.613501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:42.614027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:42.614037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:42.614044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:42.614447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:42.614457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:42.614463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:42.614471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:42.615228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:42.615683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:42.615726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:42.615928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:42.615954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:42.615961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:42.616029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:42.616037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:42.616085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:42.616098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:42.616537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:42.616547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:42.616596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
ersion { Step: 5000003 TxId: 102 } 2025-05-29T15:22:45.006297Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 4 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 385 } } CommitVersion { Step: 5000003 TxId: 102 } 2025-05-29T15:22:45.006495Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 408 RawX2: 34359740744 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-05-29T15:22:45.006504Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-05-29T15:22:45.006520Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 408 RawX2: 34359740744 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-05-29T15:22:45.006528Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:22:45.006537Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 408 RawX2: 34359740744 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-05-29T15:22:45.006552Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:45.006556Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1014: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-05-29T15:22:45.007206Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:22:45.007325Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:22:45.018262Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 34359740660 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-05-29T15:22:45.018290Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-29T15:22:45.018315Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 34359740660 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 
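The suboperation traces above name the schemeshard progression explicitly — TCreateParts, NSubDomainState::TConfigureParts, TPropose, TDone — and the "Change state for txid" lines give its codes: 2 -> 3, 3 -> 128, 128 -> 240. A compact model of that progression, using the codes and names printed in the trace (the enum itself is illustrative, not YDB's):

    // State codes as printed by "Change state for txid N:0 A -> B" above.
    enum class EOpState {
        CreateParts    = 2,    // TCreateParts: allocate shards (or skip if there are none)
        ConfigureParts = 3,    // TConfigureParts: push configuration to the shards
        Propose        = 128,  // TPropose: plan the step via the coordinator
        Done           = 240,  // TDone: operation complete, notify waiters
    };

    EOpState Next(EOpState s) {
        switch (s) {
            case EOpState::CreateParts:    return EOpState::ConfigureParts; // 2 -> 3
            case EOpState::ConfigureParts: return EOpState::Propose;        // 3 -> 128
            case EOpState::Propose:        return EOpState::Done;           // 128 -> 240
            case EOpState::Done:           return EOpState::Done;           // terminal
        }
        return s;
    }

Real operations have more states than this: the CopyTable trace above also shows 129 -> 240 (ProposedWaitParts) and a 240 -> 240 barrier step; the sketch only captures the subdomain-alter path quoted earlier.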
2025-05-29T15:22:45.018323Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:22:45.018330Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 306 RawX2: 34359740660 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-05-29T15:22:45.018342Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:45.018345Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:22:45.018349Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:22:45.018353Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-29T15:22:45.018359Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:0 129 -> 240 2025-05-29T15:22:45.018752Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:22:45.018867Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:22:45.018876Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_copy_table.cpp:306: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-05-29T15:22:45.018883Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1059: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-05-29T15:22:45.018887Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1103: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-05-29T15:22:45.018896Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_copy_table.cpp:289: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-05-29T15:22:45.018900Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:0 240 -> 240 2025-05-29T15:22:45.019332Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:22:45.019342Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-29T15:22:45.019359Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:22:45.019363Z node 
8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:22:45.019366Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:22:45.019368Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:22:45.019373Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-29T15:22:45.019391Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:334:2312] message: TxId: 102 2025-05-29T15:22:45.019399Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:22:45.019407Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:22:45.019412Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:22:45.019448Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:22:45.019454Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:22:45.019844Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:22:45.019856Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:435:2395] TestWaitNotification: OK eventTxId 102 2025-05-29T15:22:45.019958Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:45.020009Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/CopyTable" took 57us result status StatusSuccess 2025-05-29T15:22:45.020095Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeBothLimit [GOOD] Test command err: 2025-05-29T15:22:43.534684Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888457299206854:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:43.534894Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002863/r3tmp/tmpeJiUKn/pdisk_1.dat 2025-05-29T15:22:43.602773Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:43.606411Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888457299206694:2079] 1748532163531300 != 1748532163531303 TClient is connected to server localhost:16403 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: 2025-05-29T15:22:43.671276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:43.671309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:43.671742Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:43.679495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.683285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.696201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.704475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:22:43.722650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... insert finished 2008 usec 1847 usec 1976 usec 2029 usec 1791 usec 1815 usec 2496 usec 6234 usec 9168 usec 2482 usec test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002863/r3tmp/tmpsDN88p/pdisk_1.dat 2025-05-29T15:22:44.214996Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888462297246213:2200];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:44.215809Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:44.247316Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888462297246043:2079] 1748532164214337 != 1748532164214340 2025-05-29T15:22:44.253271Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:21348 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
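The "insert finished 2008 usec 1847 usec ..." figures above are per-request wall-clock timings. One way such numbers can be produced — a hypothetical helper, not the test's actual code:

    #include <chrono>
    #include <cstdio>

    // Run f once and return its wall-clock duration in microseconds.
    template <typename F>
    long long MeasureUsec(F&& f) {
        const auto t0 = std::chrono::steady_clock::now();
        f();
        const auto t1 = std::chrono::steady_clock::now();
        return std::chrono::duration_cast<std::chrono::microseconds>(t1 - t0).count();
    }

    // Usage (RunInsert is a stand-in for one test insert):
    //   printf("%lld usec\n", MeasureUsec([&] { RunInsert(); }));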
2025-05-29T15:22:44.325257Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:44.325295Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:44.325591Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:44.326143Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:44.329282Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:44.339093Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 [FAIL] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK >> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 [FAIL] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK >> TPartitionTests::TestTxBatchInFederation >> TLocksFatTest::RangeSetBreak ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] Test command err: 2025-05-29T15:22:43.168593Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888457162263937:2265];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:43.168612Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00286d/r3tmp/tmpioNilO/pdisk_1.dat 2025-05-29T15:22:43.237195Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:43.238885Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888457162263711:2079] 1748532163166173 != 1748532163166176 TClient is connected to server localhost:17911 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:43.302633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:43.302668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:43.303561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:43.304449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:22:43.320651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:43.400695Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-29T15:22:43.400763Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-29T15:22:43.409142Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-29T15:22:43.410069Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532163427 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... (TRUNCATED) Copy TableOld to Table 2025-05-29T15:22:43.441334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 100000 InMemStepsToSnapshot: 2 InMemForceStepsToSnapshot: 3 InMemForceSizeToSnapshot: 1000000 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 200000 ReadAheadLoThreshold: 100000 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 10000 CountToCompact: 2 ForceCountToCompact: 2 ForceSizeToCompact: 20000 CompactionBrokerQueue: 1 KeepInCache: true } } ColumnFamilies { Id: 0 ColumnCache: ColumnCacheNone Storage: ColumnStorageTest_1_2_1k } } CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976715676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:22:43.441436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:383: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-05-29T15:22:43.441576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-29T15:22:43.441590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-05-29T15:22:43.441593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount 
reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-29T15:22:43.441600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-29T15:22:43.441612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-29T15:22:43.441652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-05-29T15:22:43.441677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:22:43.441903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-29T15:22:43.441912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-05-29T15:22:43.442165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976715676, response: Status: StatusAccepted TxId: 281474976715676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-05-29T15:22:43.442191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-05-29T15:22:43.442239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-29T15:22:43.442242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-05-29T15:22:43.442273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-05-29T15:22:43.442285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-29T15:22:43.442287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7509888457162264239:2240], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 2 2025-05-29T15:22:43.442290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7509888457162264239:2240], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 4 2025-05-29T15:22:43.442296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715676:0, at schemeshard: 72057594046644480 2025-05-29T15:22:43.442302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 281474976715676:0 ProgressState, operation type: TxCopyTable, at tablet# 
72057594046644480 2025-05-29T15:22:43.442380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:357: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-29T15:22:43.442398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:357: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-29T15:22:43.442986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-29T15:22:43.443002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72 ... :0 progress is 1/1 2025-05-29T15:22:44.566655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-05-29T15:22:44.566657Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715784, ready parts: 1/1, is published: true 2025-05-29T15:22:44.566665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7509888462187221096:2697] message: TxId: 281474976715784 2025-05-29T15:22:44.566667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-05-29T15:22:44.566670Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715784:0 2025-05-29T15:22:44.566672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715784:0 2025-05-29T15:22:44.566688Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7509888462187221118:2700], serverId# [2:7509888462187221122:3430], sessionId# [0:0:0] 2025-05-29T15:22:44.566691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-05-29T15:22:44.566712Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3306: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-05-29T15:22:44.566837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888462187219392 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 
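The TEvStateChanged message just logged (State: 4, i.e. Offline per the next line) opens the shard retirement sequence traced below: the datashard reports Offline, the schemeshard frees the shard in Hive, the tablet is deleted, and later lookups warn "Can't find the tablet". A toy model of that ladder (illustrative names, not NKikimr types):

    enum class EShardFate { Ready, PreOffline, Offline, Freed, Deleted };

    // One step of the retirement sequence traced in this log; purely illustrative.
    EShardFate Retire(EShardFate s) {
        switch (s) {
            case EShardFate::Ready:      return EShardFate::PreOffline; // loans returned, no shared blobs
            case EShardFate::PreOffline: return EShardFate::Offline;    // datashard reports Offline (State: 4)
            case EShardFate::Offline:    return EShardFate::Freed;      // "Free shard ... hive ... at ss ..."
            case EShardFate::Freed:      return EShardFate::Deleted;    // "Deleted shardIdx", pipe closed
            case EShardFate::Deleted:    return EShardFate::Deleted;    // lookups warn "Can't find the tablet"
        }
        return s;
    }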
2025-05-29T15:22:44.566857Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:22:44.566939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:22:44.566965Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037889 state Offline Check that tablet 72075186224037888 was deleted 2025-05-29T15:22:44.567083Z node 2 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-05-29T15:22:44.567423Z node 2 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) 2025-05-29T15:22:44.567521Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-29T15:22:44.567526Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-29T15:22:44.567550Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-05-29T15:22:44.567552Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037891 Initiating switch from PreOffline to Offline state Check that tablet 72075186224037890 was deleted 2025-05-29T15:22:44.567907Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3306: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-05-29T15:22:44.567916Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3306: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-05-29T15:22:44.567939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-29T15:22:44.567943Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-05-29T15:22:44.567989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-29T15:22:44.568025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:22:44.568026Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037889 2025-05-29T15:22:44.568027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-29T15:22:44.568036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-29T15:22:44.568054Z node 2 :TX_DATASHARD INFO: 
datashard.cpp:1301: Change sender killed: at tablet: 72075186224037889 2025-05-29T15:22:44.568117Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-29T15:22:44.568140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888462187219672 RawX2: 4503608217307443 } TabletId: 72075186224037891 State: 4 2025-05-29T15:22:44.568157Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:22:44.568198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888462187219671 RawX2: 4503608217307442 } TabletId: 72075186224037890 State: 4 2025-05-29T15:22:44.568206Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:22:44.568221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:2 2025-05-29T15:22:44.568231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-29T15:22:44.568243Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-29T15:22:44.568332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:22:44.568345Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-05-29T15:22:44.568353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:22:44.568357Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-29T15:22:44.568990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-29T15:22:44.569039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-29T15:22:44.569052Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-05-29T15:22:44.569061Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-29T15:22:44.569076Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7509888462187219747:2544], serverId# [2:7509888462187219751:2548], sessionId# [0:0:0] 2025-05-29T15:22:44.569076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 
72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-29T15:22:44.569094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-29T15:22:44.569112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:22:44.569118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-29T15:22:44.569126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-29T15:22:44.569126Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037891 2025-05-29T15:22:44.569150Z node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037891 2025-05-29T15:22:44.569158Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-29T15:22:44.569161Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-29T15:22:44.569168Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037890 2025-05-29T15:22:44.569186Z node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037890 2025-05-29T15:22:44.569292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:4 2025-05-29T15:22:44.569302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-29T15:22:44.569328Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:3 2025-05-29T15:22:44.569333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-29T15:22:44.569337Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-29T15:22:44.868541Z node 2 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-05-29T15:22:44.868876Z node 2 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 [FAIL] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [FAIL] Test command err: 2025-05-29T15:22:40.446196Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:22:40.446234Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:22:40.446246Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ece/r3tmp/tmpbAac2W/pdisk_1.dat 2025-05-29T15:22:40.563903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:22:40.583662Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:40.588215Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532159951474 != 1748532159951478 2025-05-29T15:22:40.638215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:40.638265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:40.649342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:40.739801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:40.764335Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:22:40.764435Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:22:40.777231Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:22:40.777297Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:22:40.777523Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:22:40.777533Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:22:40.777559Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:22:40.777634Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:22:40.777659Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:22:40.777677Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:22:40.788079Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:22:40.794079Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:22:40.794223Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast 
registration request in state WaitScheme: missing processing params 2025-05-29T15:22:40.794269Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:22:40.794276Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:22:40.794282Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:22:40.794290Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:22:40.794507Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:22:40.794538Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:22:40.794559Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:22:40.794569Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:22:40.794580Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:22:40.794587Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:22:40.794724Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:22:40.794871Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:22:40.794952Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:22:40.794976Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:22:40.795433Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:22:40.805838Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:22:40.805899Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:22:40.954143Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:697:2587], serverId# [1:699:2589], sessionId# [0:0:0] 2025-05-29T15:22:40.955051Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:22:40.955079Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:22:40.955148Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 
72075186224037888 2025-05-29T15:22:40.955159Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:22:40.955173Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:22:40.955265Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:22:40.955305Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:22:40.955470Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:22:40.955489Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:22:40.955942Z node 1 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:22:40.956066Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:22:40.956539Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:22:40.956552Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:22:40.956790Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:22:40.956805Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:22:40.957049Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:22:40.957058Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:22:40.957064Z node 1 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:22:40.957084Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:411:2405], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:22:40.957098Z node 1 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:22:40.957129Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:22:40.958204Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:22:40.958564Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:22:40.958580Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 
72075186224037888 2025-05-29T15:22:40.959303Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:22:40.964165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:40.964201Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:40.964289Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:40.967677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:22:40.969204Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:22:41.153819Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:22:41.154355Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:22:41.224874Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:816:2661] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:41.298175Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:826:2670], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:41.299063Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Yzg1NWI5YS05NjJiYzM5ZC1iZDc3YWE1OS03NjI0YzI1OA==, ActorId: [1:729:2611], ActorState: ExecuteState, TraceId: 01jwea6te3e81anjb1rwtcck6f, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13A9C8BC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C501E9) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x262E49B4) NKikimr::NTestSuiteObjectStorageListingTest::TTestCaseFilterListing::Execute_(NUnitTest::TTestContext&)+954 (0x1399297A) NKikimr::NTestSuiteObjectStorageListingTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13997E07) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C5209E) NKikimr::NTestSuiteObjectStorageListingTest::TCurrentTest::Execute()+484 (0x13997644) NUnitTest::TTestFactory::Execute()+803 (0x13C52813) NUnitTest::RunMain(int, char**)+3021 (0x13C643BD) ??+0 (0x7FE52F10FD90) __libc_start_main+128 (0x7FE52F10FE40) _start+41 (0x129F5029) |61.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |61.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |61.3%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut >> TLocksTest::Range_CorrectNullDot >> CommitOffset::DistributedTxCommit_CheckSessionResetAfterCommit [FAIL] >> CommitOffset::DistributedTxCommit_CheckOffsetCommitForDifferentCases >> TObjectStorageListingTest::Split >> Balancing::Balancing_ManyTopics_PQv1 [FAIL] >> CommitOffset::Commit_Flat_WithWrongSession >> TPQTest::TestPQRead [GOOD] >> TPQTest::TestPQReadAhead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:42.733519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:42.733552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:42.733558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching 
config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:42.733563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:42.733573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:42.733576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:42.733586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:42.733599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:42.733677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:42.733739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:42.746789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:42.746812Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:42.751135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:42.751265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:42.751301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:42.752921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:42.753083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:42.753208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:42.753258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:42.753751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:42.753807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:42.754069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:42.754081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:42.754100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:42.754109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:42.754116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:42.754148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:42.755472Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:42.775817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:42.775924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:42.775993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:42.776044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:42.776056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:42.779269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:42.779318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:42.779378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:42.779390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:42.779397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:42.779404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:42.780031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:42.780045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:42.780051Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:42.780432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:42.780445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:42.780451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:42.780457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:42.781195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:42.781657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:42.781700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:42.781877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:42.781908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:42.781915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:42.781991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:42.782000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:42.782033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:42.782045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:42.782475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-05-29T15:22:42.782484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:42.782535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... X_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:22:45.866810Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:22:45.866814Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-05-29T15:22:45.866819Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:22:45.866823Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:22:45.866827Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:22:45.866858Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:22:45.866863Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-05-29T15:22:45.866867Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-05-29T15:22:45.866871Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-05-29T15:22:45.867021Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 274137603, Sender [9:206:2207], Recipient [9:125:2150]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2025-05-29T15:22:45.867028Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:22:45.867042Z node 9 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:22:45.867052Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:22:45.867056Z node 9 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:22:45.867061Z node 9 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:22:45.867066Z 
node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:22:45.867079Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:22:45.867224Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 274137603, Sender [9:206:2207], Recipient [9:125:2150]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 } 2025-05-29T15:22:45.867231Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:22:45.867239Z node 9 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:22:45.867250Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:22:45.867254Z node 9 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:22:45.867258Z node 9 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:22:45.867265Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:22:45.867278Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-05-29T15:22:45.867282Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:22:45.867436Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435084, Sender [9:125:2150], Recipient [9:125:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-05-29T15:22:45.867444Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5049: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-05-29T15:22:45.867451Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:22:45.867456Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:22:45.867468Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:45.867766Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects 
ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:22:45.867852Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:22:45.867857Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:22:45.868194Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:22:45.868203Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:22:45.868220Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-05-29T15:22:45.868266Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:22:45.868273Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:22:45.868320Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [9:447:2400], Recipient [9:125:2150]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:45.868325Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:45.868329Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:22:45.868351Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [9:365:2342], Recipient [9:125:2150]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2025-05-29T15:22:45.868356Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:22:45.868368Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:22:45.868383Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:22:45.868388Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [9:445:2398] 2025-05-29T15:22:45.868403Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [9:447:2400], Recipient [9:125:2150]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:45.868408Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:22:45.868412Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-05-29T15:22:45.868461Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [9:448:2401], Recipient [9:125:2150]: NKikimrSchemeOp.TDescribePath Path: 
"/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-05-29T15:22:45.868466Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:22:45.868476Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:45.868505Z node 9 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 27us result status StatusPathDoesNotExist 2025-05-29T15:22:45.868542Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> WithSDK::DescribeConsumer [FAIL] Test command err: 2025-05-29T15:22:35.144325Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888425100061252:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:35.144348Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001f6c/r3tmp/tmpZJs6dt/pdisk_1.dat 2025-05-29T15:22:35.173894Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:35.198778Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:35.198996Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888425100061233:2079] 1748532155144198 != 1748532155144201 TServer::EnableGrpc on GrpcPort 23245, node 1 2025-05-29T15:22:35.215140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001f6c/r3tmp/yandex8NTgSK.tmp 2025-05-29T15:22:35.215153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001f6c/r3tmp/yandex8NTgSK.tmp 2025-05-29T15:22:35.215212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001f6c/r3tmp/yandex8NTgSK.tmp 
2025-05-29T15:22:35.215261Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:35.221786Z INFO: TTestServer started on Port 26474 GrpcPort 23245 TClient is connected to server localhost:26474 PQClient connected to localhost:23245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:35.246953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:35.246984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:35.248154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:35.272490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:35.280639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:35.449725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888425100062044:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:35.449768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888425100062052:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:35.449780Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:35.450215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888425100062084:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:35.450233Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:35.450763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:35.452782Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888425100062058:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:22:35.485327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:35.494307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:35.510370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:35.533319Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888425100062356:2375], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:35.533415Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGM1NzdhYTctMzhjZDgzYTYtNTlhY2E1MTItZTQxZmViNzA=, ActorId: [1:7509888425100062353:2373], ActorState: ExecuteState, TraceId: 01jwea6n3z10egkchdwvzr9azk, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:22:35.549361Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888425100062368:2581] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:35.594710Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 27 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetup()+178 (0x26290EC2) NKikimr::NTestSuiteTopicAutoscaling::SimpleTest(NKikimr::NPQ::NTest::SdkVersion, bool)+40 (0x138EE848) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1393DC27) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()+419 (0x1393D483) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D) ??+0 (0x7F19D455BD90) __libc_start_main+128 (0x7F19D455BE40) _start+41 (0x12A4C029) 2025-05-29T15:22:36.056861Z node 2 :METADATA_PROVIDER WARN: log.cp ... 
R: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:41.382858Z node 7 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:41.384719Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [7:7509888447772141655:2079] 1748532161313928 != 1748532161313931 TServer::EnableGrpc on GrpcPort 10401, node 7 2025-05-29T15:22:41.422553Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001f6c/r3tmp/yandexXGwdPE.tmp 2025-05-29T15:22:41.422570Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001f6c/r3tmp/yandexXGwdPE.tmp 2025-05-29T15:22:41.422645Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001f6c/r3tmp/yandexXGwdPE.tmp 2025-05-29T15:22:41.422710Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:41.426604Z INFO: TTestServer started on Port 11576 GrpcPort 10401 2025-05-29T15:22:41.445738Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:41.445778Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:41.447188Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11576 PQClient connected to localhost:10401 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:41.486734Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:41.489029Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:41.504117Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-05-29T15:22:41.530772Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-05-29T15:22:41.836381Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888447772142463:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.836405Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888447772142474:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.836413Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.837155Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:41.840546Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888447772142506:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.840631Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.840739Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-05-29T15:22:41.841931Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7509888447772142477:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:22:41.842649Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:41.866103Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:41.886328Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:41.909633Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [7:7509888447772142774:2375], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:41.909768Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=7&id=YjAxNzQ5ZjMtMTllODM5MGMtNDdiZGJhYjMtOWU3NzdmZDU=, ActorId: [7:7509888447772142771:2373], ActorState: ExecuteState, TraceId: 01jwea6vb730rajd6ttqx10b93, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:22:41.917241Z node 7 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [7:7509888447772142786:2583] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:41.947860Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 18 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetup()+178 (0x26290EC2) NKikimr::NTestSuiteWithSDK::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&)+32 (0x139B1980) NKikimr::NTestSuiteWithSDK::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139C74D7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E) NKikimr::NTestSuiteWithSDK::TCurrentTest::Execute()+422 (0x139C6E96) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D) ??+0 (0x7F19D455BD90) __libc_start_main+128 (0x7F19D455BE40) _start+41 (0x12A4C029) >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning [FAIL] >> TopicAutoscaling::ControlPlane_CDC_Enable >> CommitOffset::Commit_WithSession_ParentNotFinished_SameSession [FAIL] >> CommitOffset::Commit_WithSession_ParentNotFinished_OtherSession_ParentCommittedToEnd >> TopicAutoscaling::ControlPlane_CDC_Disable [FAIL] >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [FAIL] >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter [FAIL] Test command err: 2025-05-29T15:22:41.276728Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:22:41.276766Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:22:41.276779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ead/r3tmp/tmp3dox92/pdisk_1.dat 2025-05-29T15:22:41.407900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:22:41.423084Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:41.435778Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532160581441 != 1748532160581445 2025-05-29T15:22:41.487399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:41.487445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:41.499259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:41.578963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:41.599296Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:22:41.599361Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:22:41.606731Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:22:41.606796Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:22:41.606964Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:22:41.606975Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:22:41.606982Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:22:41.607043Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:22:41.607066Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:22:41.607079Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:22:41.619070Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:22:41.627775Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:22:41.627902Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast 
registration request in state WaitScheme: missing processing params 2025-05-29T15:22:41.627940Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:22:41.627946Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:22:41.627951Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:22:41.627958Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:22:41.628167Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:22:41.628200Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:22:41.628216Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:22:41.628225Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:22:41.628236Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:22:41.628241Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:22:41.628371Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:22:41.628405Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:22:41.628473Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:22:41.628495Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:22:41.628874Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:22:41.641288Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:22:41.641350Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:22:41.790060Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:697:2587], serverId# [1:699:2589], sessionId# [0:0:0] 2025-05-29T15:22:41.791242Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:22:41.791273Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:22:41.791345Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 
72075186224037888 2025-05-29T15:22:41.791357Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:22:41.791371Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:22:41.791471Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:22:41.791520Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:22:41.791678Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:22:41.791697Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:22:41.792208Z node 1 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:22:41.792334Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:22:41.792870Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:22:41.792886Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:22:41.793142Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:22:41.793161Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:22:41.793486Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:22:41.793500Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:22:41.793508Z node 1 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:22:41.793528Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:411:2405], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:22:41.793554Z node 1 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:22:41.793588Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:22:41.794782Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:22:41.795114Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:22:41.795132Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 
72075186224037888 2025-05-29T15:22:41.795340Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:22:41.799329Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.799349Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.799412Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.800150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:22:41.800966Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:22:41.966875Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:22:41.967446Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:22:42.009378Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:816:2661] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:42.067550Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:826:2670], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:42.068580Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODFhZmM0NTEtNTJkOTI4M2ItYzlmZWQwMjAtMTNkYTFkZmE=, ActorId: [1:729:2611], ActorState: ExecuteState, TraceId: 01jwea6v87e5xd4shv2fv0jes0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13A9C8BC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C501E9) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x262E49B4) NKikimr::NTestSuiteObjectStorageListingTest::TTestCaseListingNoFilter::Execute_(NUnitTest::TTestContext&)+954 (0x1398D3DA) NKikimr::NTestSuiteObjectStorageListingTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13997E07) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C5209E) NKikimr::NTestSuiteObjectStorageListingTest::TCurrentTest::Execute()+484 (0x13997644) NUnitTest::TTestFactory::Execute()+803 (0x13C52813) NUnitTest::RunMain(int, char**)+3021 (0x13C643BD) ??+0 (0x7FD75869FD90) __libc_start_main+128 (0x7FD75869FE40) _start+41 (0x129F5029) >> TFlatTest::PathSorting >> TLocksFatTest::ShardLocks [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition >> PQCountersLabeled::NewConsumersCountersAppear [GOOD] >> PQCountersSimple::Partition >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [FAIL] >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions >> TObjectStorageListingTest::Split [GOOD] >> TObjectStorageListingTest::SuffixColumns >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] |61.4%| [TA] $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK [FAIL] >> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK >> PQCountersSimple::Partition [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_AutosplitByLoad_AfterAlter [FAIL] Test command err: 2025-05-29T15:22:35.525788Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888424081104494:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:35.525811Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001f7a/r3tmp/tmppPmx7D/pdisk_1.dat 2025-05-29T15:22:35.566907Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:35.587704Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888424081104473:2079] 1748532155525615 != 1748532155525618 2025-05-29T15:22:35.590164Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18177, node 1 2025-05-29T15:22:35.603085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001f7a/r3tmp/yandexqSVVgn.tmp 2025-05-29T15:22:35.603098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001f7a/r3tmp/yandexqSVVgn.tmp 2025-05-29T15:22:35.603165Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001f7a/r3tmp/yandexqSVVgn.tmp 2025-05-29T15:22:35.603213Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:35.608886Z INFO: TTestServer started on Port 12321 GrpcPort 18177 TClient is connected to server localhost:12321 2025-05-29T15:22:35.628107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:35.628152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:35.629218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:18177 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:35.665514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:35.672334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:35.878397Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888424081105262:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:35.878440Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:35.878649Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888424081105297:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:35.879564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:35.886932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-05-29T15:22:35.887028Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888424081105299:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:22:35.921317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:35.975883Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888424081105433:2465] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:35.980499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:35.993801Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888424081105443:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:35.993897Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTgxMjAyMjgtYzE4NjViYjktMzA4NWJkOGItZjMyMWJiMDQ=, ActorId: [1:7509888424081105259:2334], ActorState: ExecuteState, TraceId: 01jwea6nf52xt8bdgw72yg3x64, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:35.994420Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:35.998295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:36.029347Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888428376072912:2379], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:36.029525Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OGUwMzkyN2EtY2E1ZjBhMDMtZTk3NGY4Y2QtNzY4MzdkYmI=, ActorId: [1:7509888428376072909:2377], ActorState: ExecuteState, TraceId: 01jwea6nkc0wxd68et4emkynx9, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetup()+178 (0x26290EC2) NKikimr::NTestSuiteTopicAutoscaling::PartitionSplit_oldSDK(NKikimr::NPQ::NTest::SdkVersion)+34 (0x138F6C92) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1393DC27) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()+419 (0x1393D483) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D) ??+0 (0x7F88D299DD90) __libc_start_ma ... vered_message; 2025-05-29T15:22:41.690273Z node 7 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:41.692906Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [7:7509888450789895724:2079] 1748532161666262 != 1748532161666265 TServer::EnableGrpc on GrpcPort 7071, node 7 2025-05-29T15:22:41.719346Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001f7a/r3tmp/yandexzUROxy.tmp 2025-05-29T15:22:41.719359Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001f7a/r3tmp/yandexzUROxy.tmp 2025-05-29T15:22:41.719400Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001f7a/r3tmp/yandexzUROxy.tmp 2025-05-29T15:22:41.719447Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:41.724665Z INFO: TTestServer started on Port 1139 GrpcPort 7071 TClient is connected to server localhost:1139 PQClient connected to localhost:7071 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:41.766236Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:41.766271Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:41.766635Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:41.767228Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:41.767926Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:41.775331Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:22:41.808488Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:41.850816Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-05-29T15:22:42.012640Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888455084863802:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:42.012680Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:42.012866Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888455084863838:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:42.013571Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:42.020980Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:42.021104Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-05-29T15:22:42.021329Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7509888455084863840:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:22:42.033156Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:42.100517Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:42.105887Z node 7 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [7:7509888455084864107:2551] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:42.111166Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [7:7509888455084864116:2369], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:42.111816Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=7&id=NjZkMTlkOTctNzNjN2QzODgtNjc0NDhhMTEtYjRiMzI4NmM=, ActorId: [7:7509888455084863798:2332], ActorState: ExecuteState, TraceId: 01jwea6vev67jgevkysgr3q29x, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:42.111932Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:42.129474Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [7:7509888455084864155:2379], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:42.130313Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=7&id=NjE3OWFhZTktYTQ4YzQzYmItNmY2MzE3ZmQtNzBkZTcyYmE=, ActorId: [7:7509888455084864152:2377], ActorState: ExecuteState, TraceId: 01jwea6vj5bmj8wsb4km2e6qnr, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetup()+178 (0x26290EC2) NKikimr::NTestSuiteTopicAutoscaling::TTestCasePartitionSplit_AutosplitByLoad_AfterAlter::Execute_(NUnitTest::TTestContext&)+35 (0x1391F9C3) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1393DC27) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()+419 (0x1393D483) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D) ??+0 (0x7F88D299DD90) __libc_start_main+128 (0x7F88D299DE40) _start+41 (0x12A4C029) >> TFlatTest::PathSorting [GOOD] >> TFlatTest::PartBloomFilter >> CommitOffset::Commit_Flat_WithWrongSession [FAIL] >> CommitOffset::Commit_Flat_WithWrongSession_ToPast |61.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 >> TopicAutoscaling::ControlPlane_CDC_Enable [FAIL] >> TopicAutoscaling::MidOfRange [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] >> CommitOffset::DistributedTxCommit_CheckOffsetCommitForDifferentCases [FAIL] >> CommitOffset::DistributedTxCommit_Flat_CheckOffsetCommitForDifferentCases >> CommitOffset::Commit_WithSession_ParentNotFinished_OtherSession_ParentCommittedToEnd [FAIL] >> CommitOffset::Commit_WithSession_ToPastParentPartition ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] Test command err: =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-05-29T15:21:28.088557Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-05-29T15:21:28.088575Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:3:2050] Successful handshake: owner# 800, generation# 1 2025-05-29T15:21:28.088600Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [1:3:2050] Handle 
NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-05-29T15:21:28.088604Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [1:3:2050] Commit generation: owner# 800, generation# 1 2025-05-29T15:21:28.088608Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:35:2066] 2025-05-29T15:21:28.088611Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:6:2053] Successful handshake: owner# 800, generation# 1 2025-05-29T15:21:28.088630Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:35:2066] 2025-05-29T15:21:28.088633Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-05-29T15:21:28.088643Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:37:2068][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:21:28.088684Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:41:2068] 2025-05-29T15:21:28.088689Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [1:3:2050] Upsert description: path# /root/tenant 2025-05-29T15:21:28.088707Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:3:2050] Subscribe: subscriber# [1:41:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-05-29T15:21:28.088724Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:42:2068] 2025-05-29T15:21:28.088726Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [1:6:2053] Upsert description: path# /root/tenant 2025-05-29T15:21:28.088730Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:6:2053] Subscribe: subscriber# [1:42:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-05-29T15:21:28.088738Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:43:2068] 2025-05-29T15:21:28.088740Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [1:9:2056] Upsert description: path# /root/tenant 2025-05-29T15:21:28.088744Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:9:2056] Subscribe: subscriber# [1:43:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-05-29T15:21:28.088750Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:3:2050] 2025-05-29T15:21:28.088756Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2068] 2025-05-29T15:21:28.088760Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:42:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:6:2053] 2025-05-29T15:21:28.088764Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:42:2068] 2025-05-29T15:21:28.088769Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:43:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/root/tenant Version: 0 }: sender# [1:9:2056] 2025-05-29T15:21:28.088772Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2068] 2025-05-29T15:21:28.088779Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:38:2068] 2025-05-29T15:21:28.088788Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:39:2068] 2025-05-29T15:21:28.088794Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:37:2068][/root/tenant] Set up state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:21:28.088799Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:40:2068] 2025-05-29T15:21:28.088804Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:37:2068][/root/tenant] Ignore empty state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2025-05-29T15:21:28.088838Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:34:2065], cookie# 0, event size# 103 2025-05-29T15:21:28.088843Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [1:3:2050] Update description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-05-29T15:21:28.088856Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [1:3:2050] Upsert description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /root/tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-05-29T15:21:28.088880Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:3:2050] 2025-05-29T15:21:28.088884Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:41:2068] 2025-05-29T15:21:28.088889Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:38:2068] 2025-05-29T15:21:28.088894Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:842: [main][1:37:2068][/root/tenant] Update to strong state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { 
DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2025-05-29T15:21:28.493759Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [3:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:34:2065] 2025-05-29T15:21:28.493775Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [3:3:2050] Successful handshake: owner# 800, generation# 1 2025-05-29T15:21:28.493789Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [3:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:34:2065] 2025-05-29T15:21:28.493793Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [3:3:2050] Commit generation: owner# 800, generation# 1 2025-05-29T15:21:28.493798Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:35:2066] 2025-05-29T15:21:28.493800Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [3:6:2053] Successful handshake: owner# 900, generation# 1 2025-05-29T15:21:28.493813Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:35:2066] 2025-05-29T15:21:28.493815Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [3:6:2053] Commit generation: owner# 900, generation# 1 2025-05-29T15:21:28.493832Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][3:37:2068][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:21:28.493869Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:41:2068] 2025-05-29T15:21:28.493873Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [3:3:2050] Upsert description: path# /root/tenant 2025-05-29T15:21:28.493889Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:3:2050] Subscribe: subscriber# [3:41:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-05-29T15:21:28.493902Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:42:2068] 2025-05-29T15:21:28.493904Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [3:6:2053] Upsert description: path# /root/tenant 2025-05-29T15:21:28.493908Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:6:2053] Subscribe: subscriber# [3:42:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-05-29T15:21:28.493916Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:43:2068] 2025-05-29T15:21:28.493918Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [3:9:2056] Upsert description: path# /root/tenant 2025-05-29T15:21:28.493921Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:9:2056] Subscribe: subscriber# [3:43:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-05-29T15:21:28.493928Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][3:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:3:2050] 2025-05-29T15:21:28.493935Z node 3 :SCHEME_BOARD_REPLICA 
DEBUG: replica.cpp:1089: [3:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:41:2068] 2025-05-29T15:21:28.493940Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][3:42:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:6:2053] 2025-05-29T15:21:28.493943Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [3:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:42:2068] 2025-05-29T15: ... 2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:35:2066] 2025-05-29T15:22:46.149948Z node 397 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [397:6:2053] Successful handshake: owner# 910, generation# 1 2025-05-29T15:22:46.149956Z node 397 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [397:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:34:2065] 2025-05-29T15:22:46.149959Z node 397 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [397:3:2050] Commit generation: owner# 910, generation# 1 2025-05-29T15:22:46.149974Z node 397 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [397:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:35:2066] 2025-05-29T15:22:46.149977Z node 397 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [397:6:2053] Commit generation: owner# 910, generation# 1 2025-05-29T15:22:46.150000Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:22:46.150245Z node 397 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [397:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:41:2068] 2025-05-29T15:22:46.150253Z node 397 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [397:3:2050] Upsert description: path# /Root/Tenant/table_inside 2025-05-29T15:22:46.150277Z node 397 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [397:3:2050] Subscribe: subscriber# [397:41:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-05-29T15:22:46.150299Z node 397 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [397:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:42:2068] 2025-05-29T15:22:46.150303Z node 397 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [397:6:2053] Upsert description: path# /Root/Tenant/table_inside 2025-05-29T15:22:46.150309Z node 397 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [397:6:2053] Subscribe: subscriber# [397:42:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-05-29T15:22:46.150326Z node 397 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [397:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:43:2068] 2025-05-29T15:22:46.150330Z node 397 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [397:9:2056] Upsert description: path# /Root/Tenant/table_inside 2025-05-29T15:22:46.150336Z node 397 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [397:9:2056] Subscribe: subscriber# [397:43:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-05-29T15:22:46.150344Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][397:41:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/Root/Tenant/table_inside Version: 0 }: sender# [397:3:2050] 2025-05-29T15:22:46.150352Z node 397 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [397:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:41:2068] 2025-05-29T15:22:46.150357Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][397:42:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:6:2053] 2025-05-29T15:22:46.150366Z node 397 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [397:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:42:2068] 2025-05-29T15:22:46.150372Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][397:43:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:9:2056] 2025-05-29T15:22:46.150378Z node 397 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [397:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:43:2068] 2025-05-29T15:22:46.150391Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:38:2068] 2025-05-29T15:22:46.150406Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:39:2068] 2025-05-29T15:22:46.150415Z node 397 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][397:37:2068][/Root/Tenant/table_inside] Set up state: owner# [397:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:22:46.150424Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:40:2068] 2025-05-29T15:22:46.150432Z node 397 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][397:37:2068][/Root/Tenant/table_inside] Ignore empty state: owner# [397:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2025-05-29T15:22:46.566606Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [399:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:34:2065] 2025-05-29T15:22:46.566636Z node 399 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [399:3:2050] Successful handshake: owner# 910, generation# 1 2025-05-29T15:22:46.566657Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [399:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:35:2066] 2025-05-29T15:22:46.566662Z node 399 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [399:6:2053] Successful handshake: owner# 910, 
generation# 1 2025-05-29T15:22:46.566672Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [399:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:34:2065] 2025-05-29T15:22:46.566677Z node 399 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [399:3:2050] Commit generation: owner# 910, generation# 1 2025-05-29T15:22:46.566697Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [399:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:35:2066] 2025-05-29T15:22:46.566701Z node 399 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [399:6:2053] Commit generation: owner# 910, generation# 1 2025-05-29T15:22:46.566728Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:22:46.566817Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [399:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:41:2068] 2025-05-29T15:22:46.566824Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [399:3:2050] Upsert description: path# /Root/Tenant/table_inside 2025-05-29T15:22:46.566853Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [399:3:2050] Subscribe: subscriber# [399:41:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-05-29T15:22:46.566878Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [399:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:42:2068] 2025-05-29T15:22:46.566882Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [399:6:2053] Upsert description: path# /Root/Tenant/table_inside 2025-05-29T15:22:46.566890Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [399:6:2053] Subscribe: subscriber# [399:42:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-05-29T15:22:46.566910Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [399:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:43:2068] 2025-05-29T15:22:46.566914Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [399:9:2056] Upsert description: path# /Root/Tenant/table_inside 2025-05-29T15:22:46.566921Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [399:9:2056] Subscribe: subscriber# [399:43:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-05-29T15:22:46.566935Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][399:41:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:3:2050] 2025-05-29T15:22:46.566946Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [399:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:41:2068] 2025-05-29T15:22:46.566954Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][399:42:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:6:2053] 2025-05-29T15:22:46.566964Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [399:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:42:2068] 2025-05-29T15:22:46.566971Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: 
[replica][399:43:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:9:2056] 2025-05-29T15:22:46.566977Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [399:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:43:2068] 2025-05-29T15:22:46.566991Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:38:2068] 2025-05-29T15:22:46.567007Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:39:2068] 2025-05-29T15:22:46.567017Z node 399 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][399:37:2068][/Root/Tenant/table_inside] Set up state: owner# [399:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:22:46.567027Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:40:2068] 2025-05-29T15:22:46.567035Z node 399 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][399:37:2068][/Root/Tenant/table_inside] Ignore empty state: owner# [399:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() >> TObjectStorageListingTest::SuffixColumns [GOOD] >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [FAIL] >> TPersQueueMirrorer::ValidStartStream ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> PQCountersSimple::Partition [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:127:2057] recipient: [1:125:2158] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:127:2057] recipient: [1:125:2158] Leader for TabletID 72057594037927937 is [1:131:2162] sender: [1:132:2057] recipient: [1:125:2158] 2025-05-29T15:22:15.247462Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.247495Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:173:2057] recipient: [1:171:2193] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:173:2057] recipient: [1:171:2193] Leader for TabletID 72057594037927938 is [1:177:2197] sender: [1:178:2057] recipient: [1:171:2193] Leader for TabletID 72057594037927937 is [1:131:2162] sender: [1:203:2057] recipient: [1:14:2061] 2025-05-29T15:22:15.252174Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.255099Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [1:201:2215] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 
90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:22:15.255351Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:209:2221] 2025-05-29T15:22:15.256044Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:209:2221] 2025-05-29T15:22:15.256732Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:210:2222] 2025-05-29T15:22:15.257283Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:210:2222] 2025-05-29T15:22:15.259318Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3e927e5b-55713b5b-7684e85d-2475480c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:15.260721Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e3a1d204-47d40403-e88fc106-fd7834ad_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:15.261870Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|7ee7c1b5-6b35993c-5809d941-37f5f509_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Expected: { "sensors": [ { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": 
"PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesQuota" }, "value": 1000000000 }, { "kind": "RATE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadOffsetRewindSum" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadTimeLagMs" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByCommitted" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByLastRead" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TimeSinceLastReadMs" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TotalMessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TotalSizeLagByLastRead" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TotalTimeLagMsByLastRead" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/UserPartitionsAnswered" }, "value": 2 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/WriteTimeLagMsByLastRead" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/WriteTimeLagMsByLastReadOld" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { 
"user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counte ... ed in BillingMeteringConfig 2025-05-29T15:22:24.660376Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 5 actor [3:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 5 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/0/total ANS GROUP user/total/total ANS GROUP user/0/rt3.dc1--asdfgs--topic ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total/0/rt3.dc1--asdfgs--topic ANS GROUP total CHECKING GROUP user/0/rt3.dc1--asdfgs--topic 2025-05-29T15:22:26.632007Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:26.633374Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 6 actor [3:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 6 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 ReadRuleGenerations: 6 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } Consumers { Name: "user2" Generation: 6 Important: true } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user2/1/total ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user2/total/total ANS GROUP user/total/total ANS GROUP user2/1/rt3.dc1--asdfgs--topic ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic 
CHECKING GROUP user/1/rt3.dc1--asdfgs--topic CHECKING GROUP user2/1/rt3.dc1--asdfgs--topic 2025-05-29T15:22:28.661138Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:28.661995Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 7 actor [3:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 7 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 ReadRuleGenerations: 6 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } Consumers { Name: "user2" Generation: 6 Important: false } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user2/0/total ANS GROUP user/1/total ANS GROUP user2/total/total ANS GROUP user/total/total ANS GROUP user2/0/rt3.dc1--asdfgs--topic ANS GROUP total/total/total ANS GROUP total/0/rt3.dc1--asdfgs--topic ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic CHECKING GROUP user2/0/rt3.dc1--asdfgs--topic 2025-05-29T15:22:30.827155Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:30.828542Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 8 actor [3:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 8 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user/total/total ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic 2025-05-29T15:22:33.272253Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:33.272283Z node 4 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:33.277326Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:33.277567Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 9 actor [4:199:2213] txId 12345 config: 
CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 3600 ImportantClientId: "client" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 9 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 9 ReadRuleGenerations: 9 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 9 Important: false } Consumers { Name: "client" Generation: 9 Important: true } 2025-05-29T15:22:33.277738Z node 4 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:207:2219] 2025-05-29T15:22:33.277934Z node 4 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [4:207:2219] 2025-05-29T15:22:33.278201Z node 4 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [4:208:2220] 2025-05-29T15:22:33.278355Z node 4 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [4:208:2220] 2025-05-29T15:22:33.282899Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][] pipe [4:252:2250] connected; active server actors: 1 2025-05-29T15:22:33.283752Z node 4 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|7ff983ae-a1c145df-5bae2040-e58b20b8_0 generated for partition 0 topic 'topic' owner default 2025-05-29T15:22:33.284829Z node 4 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c972e0ea-614e6168-bae36332-5e1dbcb_1 generated for partition 0 topic 'topic' owner default 2025-05-29T15:22:33.285714Z node 4 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4882c801-cb7aecb0-c6465ff4-62b0194c_2 generated for partition 0 topic 'topic' owner default 2025-05-29T15:22:33.286570Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][topic] pipe [4:299:2291] connected; active server actors: 1 2025-05-29T15:22:39.477453Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][topic] pipe [4:426:2377] connected; active server actors: 1 2025-05-29T15:22:46.880845Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:46.880873Z node 5 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:46.885199Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:46.885430Z node 5 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 10 actor [5:199:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: 
"rt3.dc1--asdfgs--topic" Version: 10 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 10 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 10 Important: false } 2025-05-29T15:22:46.885625Z node 5 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:207:2219] 2025-05-29T15:22:46.886274Z node 5 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [5:207:2219] 2025-05-29T15:22:46.886643Z node 5 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [5:208:2220] 2025-05-29T15:22:46.887128Z node 5 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [5:208:2220] 2025-05-29T15:22:46.888625Z node 5 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|fdc65cd5-b943109f-648a3af5-d0f9d2f9_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:46.889633Z node 5 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|288a2736-38bcc501-bf824b7d-50dc91b4_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:46.890348Z node 5 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|918205f1-1f51bd4a-a157c6df-e195aa92_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:46.890968Z node 5 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|21134e65-56047b47-b117d136-2fffee69_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:46.891204Z node 5 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5400f7f2-8dd889e-3dacc766-a5291ed2_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default |61.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksFatTest::ShardLocks [GOOD] Test command err: 2025-05-29T15:22:43.172722Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888458693612141:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:43.172742Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002870/r3tmp/tmpuydwbT/pdisk_1.dat 2025-05-29T15:22:43.239782Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:43.240656Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888458693612118:2079] 1748532163172533 != 1748532163172536 TClient is connected to server localhost:1549 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:43.313091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:43.313127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:43.314247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:43.314267Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:43.318256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.323701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.397481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.408965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
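A minimal sketch (not part of the captured output): the TClient::Ls responses above are protobuf text format, so when the real schema is not at hand, scalar fields can be pulled out with a plain regex. The field names below are copied verbatim from the response above; the helper itself is illustrative only, not a ydb client API.

import re

# One of the Ls responses above, abbreviated to a few scalar fields.
ls_response = ('Status: 1 StatusCode: SUCCESS SchemeStatus: 0 '
               'PathDescription { Self { Name: "dc-1" PathVersion: 2 } }')

def scalar(field: str, text: str) -> str:
    # Grab the first whitespace-delimited token after "<field>: ".
    m = re.search(rf'\b{field}: (\S+)', text)
    return m.group(1) if m else ''

assert scalar('StatusCode', ls_response) == 'SUCCESS'
assert scalar('PathVersion', ls_response) == '2'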
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002870/r3tmp/tmpd5Tiq3/pdisk_1.dat 2025-05-29T15:22:44.493298Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888461194424795:2220];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:44.498697Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:44.507945Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:44.508138Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888461194424586:2079] 1748532164486945 != 1748532164486948 TClient is connected to server localhost:21819 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:44.596859Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:44.596893Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:44.597251Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:44.597751Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:44.598862Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:44.605737Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:44.640878Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-29T15:22:44.652742Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002870/r3tmp/tmpKkqXdr/pdisk_1.dat 2025-05-29T15:22:45.654105Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:45.654284Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:45.654547Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888466232671315:2079] 1748532165635529 != 1748532165635532 TClient is connected to server localhost:21106 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:45.743238Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:45.743270Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:45.743708Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:45.745099Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:45.746546Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:45.753404Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-29T15:22:45.768705Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:22:45.780765Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002870/r3tmp/tmpFfU3t8/pdisk_1.dat 2025-05-29T15:22:46.202052Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:46.202532Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888469383049139:2079] 1748532166179919 != 1748532166179922 TClient is connected to server localhost:63880 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:46.279690Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:46.279730Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:46.279976Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.280728Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:46.283053Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:22:46.284071Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.300070Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-29T15:22:46.316566Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition [FAIL] >> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 [FAIL] >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:46.930233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:46.930255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:46.930258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:46.930262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:46.930272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:46.930275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:46.930281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:46.930290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:46.930364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:46.930417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:46.939629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:46.939651Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:46.942120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:46.942254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:46.942296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:46.947252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:46.947390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:46.947504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:46.947557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:46.948098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:46.948135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:46.948424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:46.948435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:46.948455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:46.948465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:46.948472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:46.948506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.950314Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:46.965730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:46.965802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.965856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:46.965901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:46.965911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.966626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:46.966653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:46.966704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.966712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:46.966717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:46.966722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:46.967093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.967102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:46.967107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:46.967403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.967411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.967419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:46.967425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:46.967981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:46.968326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:46.968362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:46.968518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: 
TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:46.968540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:46.968547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:46.968610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:46.968617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:46.968648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:46.968659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:46.969039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:46.969048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:46.969086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:47.116723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:757:2058] recipient: [1:103:2137] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:760:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:761:2058] recipient: [1:759:2666] Leader for TabletID 72057594046678944 is [1:762:2667] sender: [1:763:2058] recipient: [1:759:2666] 2025-05-29T15:22:47.122548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:47.122571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:47.122574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:47.122578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:47.122582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:47.122585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:47.122590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:47.122599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
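A short check (illustration, not part of the test run) of the KeyRange bounds recorded in the SplitWithManyPartition output above: the bounds are octal-escaped byte strings ("UUUUUUUUUUUUUUUT" is fifteen 0x55 bytes plus 0x54, "\252...\251" is fifteen 0xAA bytes plus 0xA9, "\177" is the single byte 0x7F), and partition ranges compare lexicographically, so the one-byte 0x7F is a valid short separator inside parent partition 1's range. Plain Python bytes comparison confirms the ordering:

# Bounds copied from the partition descriptions above.
lo = bytes([0x55] * 15 + [0x54])   # ToBound of partition 0, FromBound of partition 1
mid = bytes([0x7F])                # split point between child partitions 3 and 4
hi = bytes([0xAA] * 15 + [0xA9])   # ToBound of partition 4

# Python compares bytes lexicographically, matching the key-range order:
# 0x55 < 0x7F < 0xAA on the first byte already decides both comparisons.
assert lo < mid < hi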
2025-05-29T15:22:47.122669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:47.122709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:47.123447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:47.123662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:47.123689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:47.123719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:47.123723Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:47.123742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:47.123802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-05-29T15:22:47.123815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:22:47.123819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:22:47.123827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1457: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.123832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1483: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.123854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:22:47.123875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1785: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.123881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_data_erasure_manager.cpp:452: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-05-29T15:22:47.123900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2033: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.123909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.123921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2151: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-05-29T15:22:47.123927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:22:47.123930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:22:47.123933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-05-29T15:22:47.123936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:22:47.123947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2237: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.123959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2303: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.123984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2453: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-05-29T15:22:47.124011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:22:47.124045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2832: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.124055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2911: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.124086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3409: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.124091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3445: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.124113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3659: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.124122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3804: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.124134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3821: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.124154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3981: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.124163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3997: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.124192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4282: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.124222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4587: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.124231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4645: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.124244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4740: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.124250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4767: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.124256Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4794: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.125309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:47.126018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:47.126034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:47.126106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:47.126116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:47.126123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:47.126478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 >> TFlatTest::PartBloomFilter [GOOD] >> PartitionStats::CollectorOverload >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] >> PartitionStats::CollectorOverload [GOOD] |61.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |61.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest >> TLocksTest::BrokenSameShardLock [GOOD] |61.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK [FAIL] >> TopicAutoscaling::ReadFromTimestamp_PQv1 >> TPQTest::TestReadSubscription [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge >> TLocksTest::CK_BrokenLock [GOOD] |61.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SuffixColumns [GOOD] Test command err: 2025-05-29T15:22:46.262298Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888469247921670:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:46.263639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002858/r3tmp/tmpQe3IY4/pdisk_1.dat 2025-05-29T15:22:46.327720Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:46.329252Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888469247921497:2079] 1748532166261531 != 1748532166261534 2025-05-29T15:22:46.364840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:46.364873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:46.365955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2280, node 1 2025-05-29T15:22:46.379911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:46.379923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:46.379925Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:46.379967Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25900 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
2025-05-29T15:22:46.409273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.433055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532166535 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path" ... (TRUNCATED) waiting... TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532166535 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path" ... 
(TRUNCATED) 2025-05-29T15:22:46.937145Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888470682302997:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:46.937165Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002858/r3tmp/tmpVNBi9m/pdisk_1.dat 2025-05-29T15:22:46.950586Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:46.950848Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888470682302977:2079] 1748532166936988 != 1748532166936991 TServer::EnableGrpc on GrpcPort 29447, node 2 2025-05-29T15:22:46.960939Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:46.960960Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:46.960964Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:46.961030Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4264 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:47.041643Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:47.041678Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:47.041930Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.042622Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:47.047745Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:47.151492Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269553163, Sender [2:7509888474977271605:2483], Recipient [2:7509888474977270952:2313]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 3 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\002\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3\010\000\000\000B\000\000\000\000\000\000\000" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 6 MaxKeys: 10 2025-05-29T15:22:47.151509Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3145: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-05-29T15:22:47.151546Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3") (type:4, value:"B\0\0\0\0\0\0\0")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-05-29T15:22:47.151616Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 77, String : ) 2025-05-29T15:22:47.151631Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 88, String : ) 2025-05-29T15:22:47.151641Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 666, String : ) 2025-05-29T15:22:47.151648Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, String : ) 2025-05-29T15:22:47.151657Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, String : ) 2025-05-29T15:22:47.151675Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 5 common prefixes: 0 2025-05-29T15:22:47.154156Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269553163, Sender [2:7509888474977271609:2484], Recipient [2:7509888474977270952:2313]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 3 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\001\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 5 MaxKeys: 10 2025-05-29T15:22:47.154173Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3145: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-05-29T15:22:47.154214Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot 
to Thrill.mp3")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-05-29T15:22:47.154275Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, Uint64 : 10) 2025-05-29T15:22:47.154286Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, Uint64 : 10) 2025-05-29T15:22:47.154306Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 0 >> TLocksTest::Range_CorrectDot [GOOD] >> CommitOffset::Commit_Flat_WithWrongSession_ToPast [FAIL] >> CommitOffset::Commit_WithSession_ParentNotFinished_OtherSession >> CommitOffset::DistributedTxCommit_Flat_CheckOffsetCommitForDifferentCases [FAIL] >> TPersQueueMirrorer::TestBasicRemote >> KqpScan::ScanRetryRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:46.929156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:46.929179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:46.929183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:46.929187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:46.929199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:46.929202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:46.929209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:46.929219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:46.929306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:46.929371Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:46.942476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:46.942497Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:46.945402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:46.945553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:46.945597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:46.948702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:46.948927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:46.949056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:46.949116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:46.949813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:46.949863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:46.950174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:46.950182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:46.950201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:46.950206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:46.950211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:46.950240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.951670Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:46.974221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:46.974305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, 
path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.974372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:46.974431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:46.974442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.975144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:46.975170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:46.975223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.975232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:46.975238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:46.975244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:46.975696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.975708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:46.975713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:46.976130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.976143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.976150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:46.976158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:46.976920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:46.977314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send 
tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:46.977347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:46.977531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:46.977570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:46.977578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:46.977655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:46.977663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:46.977700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:46.977713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:46.978203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:46.978212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:46.978258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-05-29T15:22:47.635182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-05-29T15:22:47.635197Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:624: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-05-29T15:22:47.635210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:265: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.635215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-05-29T15:22:47.635265Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 105:0 128 -> 240 2025-05-29T15:22:47.635307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:22:47.635863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.635957Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:47.635965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:22:47.636026Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:47.636032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:207:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-05-29T15:22:47.636117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-05-29T15:22:47.636125Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 105:0 ProgressState 2025-05-29T15:22:47.636139Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#105:0 progress is 1/1 2025-05-29T15:22:47.636144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-05-29T15:22:47.636148Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#105:0 progress is 1/1 2025-05-29T15:22:47.636152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 
2025-05-29T15:22:47.636157Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-05-29T15:22:47.636162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-05-29T15:22:47.636168Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 105:0 2025-05-29T15:22:47.636173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 105:0 2025-05-29T15:22:47.636206Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:22:47.636215Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-05-29T15:22:47.636219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-05-29T15:22:47.636324Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:22:47.636337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:22:47.636342Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:22:47.636347Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-05-29T15:22:47.636352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:22:47.636367Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-05-29T15:22:47.636374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:410:2375] 2025-05-29T15:22:47.637364Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:22:47.637389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-29T15:22:47.637396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:678:2598] TestWaitNotification: OK eventTxId 105 2025-05-29T15:22:47.641477Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true 
ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:22:47.641578Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 121us result status StatusSuccess 2025-05-29T15:22:47.641791Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 
72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TLocksFatTest::RangeSetBreak [GOOD] >> TLocksFatTest::RangeSetNotBreak >> TPQTest::TestPartitionPerConsumerQuota [GOOD] >> TPQTest::TestPartitionWriteQuota >> KqpScan::RemoteShardScan ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadSubscription [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:109:2057] recipient: [1:102:2135] 2025-05-29T15:21:56.048387Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:21:56.048417Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927938 is [1:154:2174] sender: [1:155:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:180:2057] recipient: [1:14:2061] 2025-05-29T15:21:56.060900Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:21:56.064092Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions 
{ PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } Consumers { Name: "another-user" Generation: 1 Important: false } 2025-05-29T15:21:56.064403Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2198] 2025-05-29T15:21:56.065098Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:186:2198] 2025-05-29T15:21:56.066092Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2199] 2025-05-29T15:21:56.066600Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:187:2199] 2025-05-29T15:21:56.067413Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [1:188:2200] 2025-05-29T15:21:56.067911Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [1:188:2200] 2025-05-29T15:21:56.070299Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:208:2213], now have 1 active actors on pipe 2025-05-29T15:21:56.070321Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-05-29T15:21:56.070330Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-05-29T15:21:56.070396Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2025-05-29T15:21:56.070403Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2025-05-29T15:21:56.070420Z node 1 :PERSQUEUE DEBUG: event_helpers.cpp:40: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2025-05-29T15:21:56.070437Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1424: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-05-29T15:21:56.070442Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:401: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-05-29T15:21:56.070480Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:210:2215], now have 1 active actors on pipe 2025-05-29T15:21:56.070492Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-05-29T15:21:56.070497Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-05-29T15:21:56.070518Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|7efb17bb-8cff78c3-d9ac17b-f45ce1e0_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:21:56.070539Z node 1 :PERSQUEUE DEBUG: 
partition_write.cpp:35: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-05-29T15:21:56.070553Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-05-29T15:21:56.070596Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:212:2217], now have 1 active actors on pipe 2025-05-29T15:21:56.070608Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-05-29T15:21:56.070613Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-05-29T15:21:56.070619Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2025-05-29T15:21:56.070625Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2025-05-29T15:21:56.070653Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 1 partNo 0 2025-05-29T15:21:56.070702Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 72 count 1 nextOffset 1 batches 1 2025-05-29T15:21:56.070712Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 2 partNo 0 2025-05-29T15:21:56.070719Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 112 count 2 nextOffset 2 batches 1 2025-05-29T15:21:56.070785Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1623: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--asdfgs--topic' partition 0 compactOffset 0,2 HeadOffset 0 endOffset 0 curOffset 2 d0000000000_00000000000000000000_00000_0000000002_00000| size 94 WTime 331 2025-05-29T15:21:56.070816Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:21:56.070823Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:21:56.070829Z node 1 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-05-29T15:21:56.070834Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:21:56.070839Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psourceid 2025-05-29T15:21:56.070844Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: 
StateIdle] d0000000000_00000000000000000000_00000_0000000002_00000| 2025-05-29T15:21:56.070848Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:21:56.070853Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:21:56.070857Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:21:56.070870Z node 1 :PERSQUEUE DEBUG: read.h:262: CacheProxy. Passthrough write request to KV 2025-05-29T15:21:56.070894Z node 1 :PERSQUEUE DEBUG: read.h:300: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 2 size 94 2025-05-29T15:21:56.071447Z node 1 :PERSQUEUE DEBUG: cache_eviction.h:315: Caching head blob in L1. Partition 0 offset 0 count 2 size 94 actorID [1:135:2160] 2025-05-29T15:21:56.071471Z node 1 :PERSQUEUE DEBUG: pq_l2_cache.cpp:120: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 2 parts 0 size 94 2025-05-29T15:21:56.071489Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 18 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:21:56.071499Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:21:56.071510Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid', Topic: 'rt3.dc1--asdfgs--topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-05-29T15:21:56.071518Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:21:56.071524Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid', Topic: 'rt3.dc1--asdfgs--topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-05-29T15:21:56.071545Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-05-29T15:21:56.071552Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:821: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-05-29T15:21:56.071559Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ ... 
Z node 86 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [86:186:2198] 2025-05-29T15:22:44.343313Z node 86 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [86:187:2199] 2025-05-29T15:22:44.343948Z node 86 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [86:187:2199] 2025-05-29T15:22:44.344555Z node 86 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [86:188:2200] 2025-05-29T15:22:44.344985Z node 86 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [86:188:2200] 2025-05-29T15:22:44.345520Z node 86 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [86:189:2201] 2025-05-29T15:22:44.345962Z node 86 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [86:189:2201] 2025-05-29T15:22:44.346454Z node 86 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [86:190:2202] 2025-05-29T15:22:44.346916Z node 86 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [86:190:2202] 2025-05-29T15:22:44.349141Z node 86 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ce2dea27-ba9f2180-17192962-8bae5ab5_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:45.363453Z node 86 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a03d8173-62dcd464-4368ac48-9a94c339_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:45.365165Z node 86 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8611d187-15d8081f-5c635250-6fa2ded4_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [87:104:2057] recipient: [87:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [87:104:2057] recipient: [87:102:2135] Leader for TabletID 72057594037927937 is [87:108:2139] sender: [87:109:2057] recipient: [87:102:2135] 2025-05-29T15:22:45.575858Z node 87 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:45.575889Z node 87 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [87:150:2057] recipient: [87:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [87:150:2057] recipient: [87:148:2170] Leader for TabletID 72057594037927938 is [87:154:2174] sender: [87:155:2057] recipient: [87:148:2170] Leader for TabletID 72057594037927937 is [87:108:2139] sender: [87:178:2057] recipient: [87:14:2061] 2025-05-29T15:22:45.579925Z node 87 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:45.580307Z node 87 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 
72057594037927937] Config applied version 90 actor [87:176:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 90 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 90 ReadRuleGenerations: 90 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 90 Important: false } Consumers { Name: "user1" Generation: 90 Important: true } 2025-05-29T15:22:45.580523Z node 87 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [87:184:2196] 2025-05-29T15:22:45.581324Z node 87 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [87:184:2196] 2025-05-29T15:22:45.582011Z node 87 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [87:185:2197] 2025-05-29T15:22:45.582474Z node 87 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [87:185:2197] 2025-05-29T15:22:45.583054Z node 87 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [87:186:2198] 2025-05-29T15:22:45.583492Z node 87 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [87:186:2198] 2025-05-29T15:22:45.583934Z node 87 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [87:187:2199] 2025-05-29T15:22:45.584366Z node 87 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [87:187:2199] 2025-05-29T15:22:45.584841Z node 87 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [87:188:2200] 2025-05-29T15:22:45.585257Z node 87 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [87:188:2200] 2025-05-29T15:22:45.588073Z node 87 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|dd768c7b-e2798ade-e0b3b31b-f0bf9318_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:46.599219Z node 87 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f98f895b-ef558614-b18d58e8-68c03e01_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:46.607794Z node 87 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|36ff916e-f435dc93-2cc7a2c0-1bcc1428_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner 
default Leader for TabletID 72057594037927937 is [0:0:0] sender: [88:104:2057] recipient: [88:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [88:104:2057] recipient: [88:102:2135] Leader for TabletID 72057594037927937 is [88:108:2139] sender: [88:109:2057] recipient: [88:102:2135] 2025-05-29T15:22:46.889123Z node 88 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:46.889147Z node 88 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [88:150:2057] recipient: [88:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [88:150:2057] recipient: [88:148:2170] Leader for TabletID 72057594037927938 is [88:154:2174] sender: [88:155:2057] recipient: [88:148:2170] Leader for TabletID 72057594037927937 is [88:108:2139] sender: [88:180:2057] recipient: [88:14:2061] 2025-05-29T15:22:46.893961Z node 88 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:46.894332Z node 88 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 91 actor [88:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 91 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 91 ReadRuleGenerations: 91 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 91 Important: false } Consumers { Name: "user1" Generation: 91 Important: true } 2025-05-29T15:22:46.894534Z node 88 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [88:186:2198] 2025-05-29T15:22:46.895290Z node 88 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [88:186:2198] 2025-05-29T15:22:46.895960Z node 88 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [88:187:2199] 2025-05-29T15:22:46.896473Z node 88 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [88:187:2199] 2025-05-29T15:22:46.897005Z node 88 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [88:188:2200] 2025-05-29T15:22:46.897506Z node 88 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [88:188:2200] 2025-05-29T15:22:46.898081Z node 88 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, 
Partition: 3, State: StateInit] bootstrapping 3 [88:189:2201] 2025-05-29T15:22:46.898582Z node 88 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [88:189:2201] 2025-05-29T15:22:46.899092Z node 88 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [88:190:2202] 2025-05-29T15:22:46.899594Z node 88 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [88:190:2202] 2025-05-29T15:22:46.902174Z node 88 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|2e998f5d-bff09170-ee7625e4-e01d565e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:47.893723Z node 88 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|784ee806-63165b03-6e577a4-2f11a04d_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:47.898540Z node 88 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|47e818b5-e852b982-3facdac4-43b9bb25_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
>> TTxAllocatorClientTest::ZeroRange [GOOD]
>> CommitOffset::Commit_WithSession_ToPastParentPartition [FAIL]
>> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [FAIL]
>> TGroupMapperTest::MakeDisksUnusable [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest
>> TFlatTest::PartBloomFilter [GOOD]
Test command err:
2025-05-29T15:22:46.803812Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888470106570585:2058];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:46.803849Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002854/r3tmp/tmpXJLubt/pdisk_1.dat 2025-05-29T15:22:46.880916Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888470106570568:2079] 1748532166803546 != 1748532166803549 2025-05-29T15:22:46.882495Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:12665 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success.
2025-05-29T15:22:46.956871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:46.956901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-29T15:22:46.957630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:46.957870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... waiting... waiting... TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532167011 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "A" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715663 CreateStep: 1748532167060 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "B" PathId: 4 Sche... (TRUNCATED) 2025-05-29T15:22:47.011361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715663, at schemeshard: 72057594046644480 2025-05-29T15:22:47.231138Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888476842353493:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:47.231176Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002854/r3tmp/tmpO40ShA/pdisk_1.dat 2025-05-29T15:22:47.242309Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:47.242533Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888476842353471:2079] 1748532167231037 != 1748532167231040 TClient is connected to server localhost:13732 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:47.335254Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:47.335300Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:47.335653Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.336226Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:47.346786Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.515191Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 waiting...
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest
>> TLocksTest::BrokenSameShardLock [GOOD]
Test command err:
2025-05-29T15:22:42.537474Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888454406998632:2262];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:42.537527Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002886/r3tmp/tmpULkEJV/pdisk_1.dat 2025-05-29T15:22:42.613149Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:42.613659Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888454406998398:2079] 1748532162535475 != 1748532162535478 TClient is connected to server localhost:29092 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 TClient::Ls response: 2025-05-29T15:22:42.640456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:42.640491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:42.641665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:42.688565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:42.695354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:42.737237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:22:42.744044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:42.773806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:42.791777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:43.089708Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888456137715774:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:43.089761Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002886/r3tmp/tmp2IZB7m/pdisk_1.dat 2025-05-29T15:22:43.101873Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:43.102110Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888456137715750:2079] 1748532163089533 != 1748532163089536 TClient is connected to server localhost:11244 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:43.192374Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:43.192405Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:43.192873Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.193467Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:22:43.204645Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.219482Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:43.245708Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:22:43.650652Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888458329456429:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:43.650730Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002886/r3tmp/tmpWbcfbo/pdisk_1.dat 2025-05-29T15:22:43.685676Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:43.694862Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888458329456254:2079] 1748532163649181 != 1748532163649184 TClient is connected to server localhost:13679 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:43.763548Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:43.763596Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:43.764017Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.765951Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:43.766453Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.778372Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:22:43.779923Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:43.810915Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.822901Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002886/r3tmp/tmp3kVKQa/pdisk_1.dat 2025-05-29T15:22:44.172913Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888463033590637:2133];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:44.173899Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15 ... emeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:45.972975Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:45.984849Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.310100Z node 8 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7509888470663924426:2142];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:46.310384Z node 8 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002886/r3tmp/tmpfJav6q/pdisk_1.dat 2025-05-29T15:22:46.324434Z node 8 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:46.324656Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [8:7509888470663924311:2079] 1748532166308791 != 1748532166308794 TClient is connected to server localhost:8414 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:46.410864Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:46.410889Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:46.411979Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:46.413286Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.414862Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.431025Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:22:46.432367Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.447660Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.460049Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:46.837620Z node 9 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7509888472482099530:2156];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002886/r3tmp/tmpLnM4SU/pdisk_1.dat 2025-05-29T15:22:46.843104Z node 9 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:46.853533Z node 9 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:46.853889Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [9:7509888472482099383:2079] 1748532166835930 != 1748532166835933 TClient is connected to server localhost:14676 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:46.940780Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:46.940807Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:46.941181Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:46.941922Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:46.943109Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.952035Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.972629Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:46.987724Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.358416Z node 10 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7509888477412174557:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:47.358443Z node 10 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002886/r3tmp/tmpYiKfN5/pdisk_1.dat 2025-05-29T15:22:47.373950Z node 10 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:47.374282Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [10:7509888477412174534:2079] 1748532167358286 != 1748532167358289 TClient is connected to server localhost:1587 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:47.463067Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:47.463100Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:47.463400Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.464067Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:22:47.474493Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:22:47.489714Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:47.504441Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting...
>> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK [FAIL]
>> KqpScan::ScanDuringSplit10
>> TLocksTest::Range_CorrectNullDot [GOOD]
>> TLocksTest::Range_EmptyKey
>> TPersQueueMirrorer::ValidStartStream [FAIL]
|61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest
>> TLocksTest::Range_CorrectDot [GOOD]
Test command err:
2025-05-29T15:22:42.988729Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888454633400571:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:42.988750Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002873/r3tmp/tmpaMhP2L/pdisk_1.dat 2025-05-29T15:22:43.054419Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:43.054627Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888454633400548:2079] 1748532162988382 != 1748532162988385 TClient is connected to server localhost:19733 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:43.092347Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:43.092378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:43.093199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:43.130188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting...
2025-05-29T15:22:43.146038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.216028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.227775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.468356Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888459351121587:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:43.468617Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002873/r3tmp/tmp9kJllx/pdisk_1.dat 2025-05-29T15:22:43.488018Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:6391 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-29T15:22:43.572604Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:43.572630Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:43.572974Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:43.573428Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:43.574974Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:43.583952Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:22:43.588115Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.603523Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.613811Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:44.141993Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888463551746000:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:44.142028Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002873/r3tmp/tmpeiTGHe/pdisk_1.dat 2025-05-29T15:22:44.166470Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:44.169793Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888463551745980:2079] 1748532164141868 != 1748532164141871 TClient is connected to server localhost:27009 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:22:44.250410Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:44.250440Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:44.250809Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:44.254513Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:44.258645Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:44.263278Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:22:44.265116Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:44.287551Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:44.307209Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:22:44.630670Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888460689882566:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:44.630691Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002873/r3tmp/tmpHdNRPC/pdisk_1.dat 2025-05-29T15:22:44.651601Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:44.651816Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cooki ... 15:22:46.297984Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.313292Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.330793Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002873/r3tmp/tmplA8fNe/pdisk_1.dat 2025-05-29T15:22:46.710866Z node 8 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:46.711281Z node 8 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:46.711516Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [8:7509888472529717758:2079] 1748532166692202 != 1748532166692205 TClient is connected to server localhost:28882 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:46.797011Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:46.797037Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:46.797423Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:46.797907Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:46.800170Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.812198Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:46.833109Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:46.848190Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:22:47.185482Z node 9 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7509888475563329302:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:47.185505Z node 9 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002873/r3tmp/tmpj9nAti/pdisk_1.dat 2025-05-29T15:22:47.201065Z node 9 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:47.201312Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [9:7509888475563329282:2079] 1748532167185401 != 1748532167185404 TClient is connected to server localhost:12404 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:47.288919Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:47.288954Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:47.289751Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:47.290056Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:47.290895Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.315140Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:22:47.316437Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:47.330378Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.342519Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002873/r3tmp/tmpkpuoGt/pdisk_1.dat 2025-05-29T15:22:47.711096Z node 10 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:47.737556Z node 10 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:47.740643Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [10:7509888475175318411:2079] 1748532167692754 != 1748532167692757 TClient is connected to server localhost:61332 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:47.811256Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:47.811286Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:47.811821Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:47.813092Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:47.813659Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.819697Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:22:47.820842Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:47.848408Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.875949Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::CK_BrokenLock [GOOD] Test command err: 2025-05-29T15:22:42.870939Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888453051551707:2091];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:42.871328Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00287c/r3tmp/tmpaeQNpz/pdisk_1.dat 2025-05-29T15:22:42.930947Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888453051551626:2079] 1748532162864283 != 1748532162864286 2025-05-29T15:22:42.937738Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:2747 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:43.001876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:43.001913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:43.002696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:43.002714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:43.015416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:43.086387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.099163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.400338Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888458614475360:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:43.400351Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00287c/r3tmp/tmpVdQw3r/pdisk_1.dat 2025-05-29T15:22:43.418183Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888458614475338:2079] 1748532163400106 != 1748532163400109 2025-05-29T15:22:43.420501Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:10572 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:43.505575Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:43.505607Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:43.506015Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:43.508330Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 
2025-05-29T15:22:43.515065Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:43.521191Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:43.535199Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:43.593739Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00287c/r3tmp/tmpNdpytC/pdisk_1.dat 2025-05-29T15:22:43.951918Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:43.954626Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:18020 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:44.040484Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:44.040531Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:44.041532Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:44.043524Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:44.049155Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:44.062077Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:44.119783Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:44.132592Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:44.461466Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888461686433627:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:44.461486Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00287c/r3tmp/tmpbdBnIe/pdisk_1.dat 2025-05-29T15:22:44.481961Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:44.482375Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888461686433607:2079] 1748532164461359 != 1748532164461362 TClient is connected to server localhost:9469 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPath ... MESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.169254Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:22:46.170666Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.187495Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.199377Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:46.564633Z node 8 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7509888471585347675:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:46.565015Z node 8 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00287c/r3tmp/tmpkyrbE2/pdisk_1.dat 2025-05-29T15:22:46.581181Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [8:7509888471585347652:2079] 1748532166564261 != 1748532166564264 2025-05-29T15:22:46.582628Z node 8 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:30062 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:46.669796Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:46.669829Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:46.670106Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.672397Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:46.675623Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.685149Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.745518Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:46.762785Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.103109Z node 9 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7509888474156797397:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:47.103143Z node 9 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00287c/r3tmp/tmpBLDNR5/pdisk_1.dat 2025-05-29T15:22:47.115816Z node 9 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:47.116056Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [9:7509888474156797376:2079] 1748532167102945 != 1748532167102948 TClient is connected to server localhost:10174 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:47.207198Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:47.207223Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:47.207577Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.208199Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:47.215292Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.230454Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:47.243930Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00287c/r3tmp/tmpFjmpSa/pdisk_1.dat 2025-05-29T15:22:47.660971Z node 10 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:47.662499Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [10:7509888476707044129:2079] 1748532167625395 != 1748532167625398 2025-05-29T15:22:47.678862Z node 10 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:3575 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:47.731308Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:47.731348Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:47.731911Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.733647Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.735160Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:47.740310Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.757535Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:47.777430Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange [GOOD] Test command err: 2025-05-29T15:22:39.222943Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1925: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-05-29T15:22:39.223070Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:911: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-05-29T15:22:39.223185Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:225: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-05-29T15:22:39.223624Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.223728Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-05-29T15:22:39.226017Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.226076Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.226101Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1396: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-05-29T15:22:39.226131Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.226147Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.226172Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-05-29T15:22:39.226201Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1009: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-05-29T15:22:39.226335Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:71:2105] requested range size#5000 2025-05-29T15:22:39.226443Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.226452Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:22:39.226464Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-05-29T15:22:39.226471Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:71:2105] TEvAllocateResult from# 0 to# 5000 |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest >> TopicAutoscaling::ReadFromTimestamp_PQv1 [FAIL] |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |61.5%| [TA] $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksUnusable [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dc >> CommitOffset::Commit_WithSession_ParentNotFinished_OtherSession [FAIL] >> CommitOffset::Commit_FromSession_ToNewChild_WithoutCommitToParent |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] >> TGroupMapperTest::Mirror3dc3Nodes [GOOD] |61.5%| [TA] $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc3Nodes [GOOD] >> TPersQueueMirrorer::TestBasicRemote [FAIL] |61.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] >> TGroupMapperTest::NonUniformCluster2 >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] >> TGroupMapperTest::Block42_1disk >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] |61.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] >> TGroupMapperTest::ReassignGroupTest3dc |61.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksNonoperational [GOOD] >> CommitOffset::Commit_FromSession_ToNewChild_WithoutCommitToParent [FAIL] |61.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] |61.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] |61.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:46.890083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:46.890113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:46.890120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:46.890126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:46.890144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:46.890148Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:46.890159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:46.890174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:46.890289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:46.890369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:46.904216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:46.904242Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:46.907093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:46.907229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:46.907270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:46.908988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:46.909128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:46.909217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:46.909262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:46.909743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:46.909791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:46.910043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:46.910050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:46.910066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:46.910072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:46.910076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:46.910105Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.911265Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:46.930146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:46.930225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.930298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:46.930351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:46.930362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.931074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:46.931104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:46.931171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.931183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:46.931189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:46.931194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:46.931652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.931663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:46.931668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:46.932001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:46.932013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-05-29T15:22:46.932020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:46.932028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:46.932709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:46.933100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:46.933141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:46.933331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:46.933356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:46.933364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:46.933438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:46.933445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:46.933483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:46.933495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:46.933915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:46.933928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:46.933985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
eshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:207:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-05-29T15:22:49.105387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-05-29T15:22:49.105397Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 105:0 ProgressState 2025-05-29T15:22:49.105414Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#105:0 progress is 1/1 2025-05-29T15:22:49.105419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-05-29T15:22:49.105424Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#105:0 progress is 1/1 2025-05-29T15:22:49.105428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-05-29T15:22:49.105433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-05-29T15:22:49.105438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-05-29T15:22:49.105444Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 105:0 2025-05-29T15:22:49.105448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 105:0 2025-05-29T15:22:49.105482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:22:49.105488Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-05-29T15:22:49.105492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-05-29T15:22:49.105632Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:22:49.105646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:22:49.105652Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:22:49.105657Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-05-29T15:22:49.105662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:22:49.105678Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: 
Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-05-29T15:22:49.106697Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-05-29T15:22:49.108257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-05-29T15:22:49.108270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-05-29T15:22:49.108353Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-05-29T15:22:49.108375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-29T15:22:49.108380Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:758:2667] TestWaitNotification: OK eventTxId 105 2025-05-29T15:22:49.691410Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:49.691533Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72057594046678944 describe pathId 3 took 136us result status StatusSuccess 2025-05-29T15:22:49.691745Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: 
"\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:49.773725Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:22:49.773847Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 150us result status StatusSuccess 2025-05-29T15:22:49.774051Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 
72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Verify partition 0 >>>>> Verify partition 1 >>>>> Verify partition 2 |61.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] |61.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksNonoperational [GOOD] |61.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |61.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |61.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |61.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] |61.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TLocksFatTest::RangeSetNotBreak [GOOD] >> TGroupMapperTest::MapperSequentialCalls >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain >> TGroupMapperTest::Mirror3dc >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] >> TMultiversionObjectMap::MonteCarlo >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge >> TGroupMapperTest::Mirror3dc [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] >> TLocksTest::Range_EmptyKey [GOOD] >> TGroupMapperTest::MonteCarlo >> TGroupMapperTest::SanitizeGroupTest3dc |61.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster |61.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksFatTest::RangeSetNotBreak [GOOD] Test command err: 2025-05-29T15:22:45.953992Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888467187529556:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:45.954013Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00285d/r3tmp/tmpY3KvI6/pdisk_1.dat 2025-05-29T15:22:46.024419Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:46.026810Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888467187529536:2079] 1748532165953855 != 1748532165953858 TClient is connected to server localhost:30215 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-05-29T15:22:46.058513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:46.058552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:46.062575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:22:46.097383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:46.102920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.110834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:22:46.186575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.206017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:48.447289Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888480228031871:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:48.447463Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00285d/r3tmp/tmpPJXfMU/pdisk_1.dat 2025-05-29T15:22:48.463980Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:48.464178Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888480228031841:2079] 1748532168447126 != 1748532168447129 TClient is connected to server localhost:10940 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:22:48.551589Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:48.551632Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:48.552003Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:48.552504Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:48.559145Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:48.574498Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:48.588375Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> TPartitionTests::TestTxBatchInFederation [GOOD] >> TGroupMapperTest::InterlacedRacksWithoutInterlacedNodes [GOOD] >> KqpScan::ScanRetryRead [FAIL] >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message >> KqpScan::ScanRetryReadRanges |61.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] |61.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc [GOOD] |61.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] |61.7%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |61.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest |61.7%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.7%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::MidOfRange [GOOD] Test command err: 2025-05-29T15:22:41.508312Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888448554635660:2131];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:41.508332Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001f94/r3tmp/tmpFhm3JQ/pdisk_1.dat 2025-05-29T15:22:41.555992Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:41.581160Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888448554635569:2079] 1748532161507296 != 1748532161507299 2025-05-29T15:22:41.585616Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8332, node 1 2025-05-29T15:22:41.614935Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001f94/r3tmp/yandexRPi98u.tmp 2025-05-29T15:22:41.614950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001f94/r3tmp/yandexRPi98u.tmp 2025-05-29T15:22:41.619119Z INFO: TTestServer started on Port 4535 GrpcPort 8332 TClient is connected to server localhost:4535 2025-05-29T15:22:41.638874Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001f94/r3tmp/yandexRPi98u.tmp 2025-05-29T15:22:41.638998Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration PQClient connected to localhost:8332 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:22:41.650955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:41.650986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:41.652701Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:22:41.668489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:41.671603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:41.679342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:41.751148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:41.894465Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888448554636388:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.894473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888448554636378:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.894486Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.895088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:41.897466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-05-29T15:22:41.897514Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888448554636392:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:22:41.949038Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888448554636461:2435] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:41.952659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:41.969492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:41.980048Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888448554636483:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:41.980801Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWJhMDVkMjctNzAyZDc0OWItY2I4NmFiMGYtMWVjMGZjODg=, ActorId: [1:7509888448554636375:2334], ActorState: ExecuteState, TraceId: 01jwea6vb31gar5v2mtxa81664, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:41.981324Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:41.996169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:42.049865Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888452849604002:2379], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:42.049937Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTYzYTI5NWEtYzcyOGM3M2MtNWFkOGQwYjEtYjg5ZGEyYjc=, ActorId: [1:7509888452849603999:2377], ActorState: ExecuteState, TraceId: 01jwea6vfj646s9bbykz3yqcr5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetup()+178 (0x26290EC2) NKikimr::NTestSuiteTopicAutoscaling::PartitionMerge_PreferedPartition(NKikimr::NPQ::NTest::SdkVersion, bool)+41 (0x13902EB9) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()::'lambda'()::operator()( ... 6 (0x13C7A46E) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()+419 (0x1393D483) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D) ??+0 (0x7F0D151F7D90) __libc_start_main+128 (0x7F0D151F7E40) _start+41 (0x12A4C029) 2025-05-29T15:22:46.503422Z node 6 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7509888469558152189:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:46.503445Z node 6 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001f94/r3tmp/tmptNSjYL/pdisk_1.dat 2025-05-29T15:22:46.509308Z node 6 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:46.515769Z node 6 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:46.516037Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [6:7509888469558152166:2079] 1748532166503217 != 1748532166503220 TServer::EnableGrpc on GrpcPort 11233, node 6 2025-05-29T15:22:46.527765Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001f94/r3tmp/yandexfkPqQe.tmp 2025-05-29T15:22:46.527776Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001f94/r3tmp/yandexfkPqQe.tmp 2025-05-29T15:22:46.533783Z INFO: TTestServer started on Port 11524 GrpcPort 11233 2025-05-29T15:22:46.540955Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001f94/r3tmp/yandexfkPqQe.tmp 2025-05-29T15:22:46.541118Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11524 PQClient connected to localhost:11233 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:22:46.607619Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:46.607656Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:46.608091Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:46.608536Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:46.613825Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.622699Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:46.901548Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509888469558152977:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:46.901567Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509888469558152967:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:46.901592Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:46.902362Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:46.905797Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-05-29T15:22:46.905836Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7509888469558152987:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:22:46.908073Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:46.915749Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:46.934892Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:46.966397Z node 6 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [6:7509888469558153283:2374], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:46.966630Z node 6 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=6&id=ZTRiMTY4ODEtYTdhOGYyOWMtZTFkZDM0YS04MjgzMTVmMA==, ActorId: [6:7509888469558153280:2372], ActorState: ExecuteState, TraceId: 01jwea70924vzx89q85hkdgvm0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:22:47.001318Z node 6 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [6:7509888473853120591:2579] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:47.035066Z node 6 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 21 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetup()+178 (0x26290EC2) NKikimr::NTestSuiteTopicAutoscaling::TTestCaseControlPlane_CDC_Enable::Execute_(NUnitTest::TTestContext&)+33 (0x13929C41) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1393DC27) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()+419 (0x1393D483) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D) ??+0 (0x7F0D151F7D90) __libc_start_main+128 (0x7F0D151F7E40) _start+41 (0x12A4C029) |61.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::InterlacedRacksWithoutInterlacedNodes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_EmptyKey [GOOD] Test command err: 2025-05-29T15:22:46.226392Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888470895408706:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:46.226445Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002859/r3tmp/tmpOakTiz/pdisk_1.dat 2025-05-29T15:22:46.293460Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:46.293753Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888470895408685:2079] 1748532166226172 != 1748532166226175 TClient is connected to server localhost:15353 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:46.320587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.323749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:22:46.329793Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:46.329827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:46.330953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:22:46.372463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-05-29T15:22:46.373505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:22:46.436806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.450219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
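The SCHEME_ERROR reported earlier for 'db.[/Root/PQ/Config/V2/Versions]' fires while the test fixture is still creating its legacy PQ config tables, and the cluster tracker's later successful read exposes their column names and types verbatim (C.name Utf8, C.balancer Utf8, C.local Bool, C.enabled Bool, C.weight Uint64, V.version Int64). A minimal YQL sketch of those tables and a tracker-style read follows; the PRIMARY KEY choices, the Versions key column, and the CROSS JOIN shape are assumptions, not taken from this log:

    -- Sketch only: column names/types inferred from the UPSERT statements and
    -- the result-set metadata visible in this log; keys and join shape are assumed.
    CREATE TABLE `/Root/PQ/Config/V2/Cluster` (
        name Utf8,
        balancer Utf8,
        `local` Bool,
        enabled Bool,
        weight Uint64,
        PRIMARY KEY (name)            -- assumed key
    );
    CREATE TABLE `/Root/PQ/Config/V2/Versions` (
        name Utf8,                    -- assumed; only `version` appears in the log
        version Int64,
        PRIMARY KEY (name)            -- assumed key
    );
    SELECT C.name, C.balancer, C.`local`, C.enabled, C.weight, V.version
    FROM `/Root/PQ/Config/V2/Cluster` AS C
    CROSS JOIN `/Root/PQ/Config/V2/Versions` AS V;   -- join shape assumed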
2025-05-29T15:22:46.773177Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888469543803118:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:46.773203Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002859/r3tmp/tmp1TvbDp/pdisk_1.dat 2025-05-29T15:22:46.785904Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:46.786335Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888469543803098:2079] 1748532166773060 != 1748532166773063 TClient is connected to server localhost:25994 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:46.877625Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:46.877675Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:46.878048Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:46.878585Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:22:46.886521Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.901146Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:46.915182Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:47.268468Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888477270676757:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:47.268488Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002859/r3tmp/tmp2dHIg1/pdisk_1.dat 2025-05-29T15:22:47.285238Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:47.285520Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888477270676738:2079] 1748532167268343 != 1748532167268346 TClient is connected to server localhost:16323 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:47.372507Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:47.372543Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:47.372911Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:47.373238Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:47.374688Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.386108Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.400582Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:47.412680Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002859/r3tmp/tmp2PiSqQ/pdisk_1.dat 2025-05-29T15:22:47.781536Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888474102578145:2156];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:47.792245Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:47.823460Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888474102577998:2079] 1748532167779855 != 1748532167779858 ... ting... 2025-05-29T15:22:49.504307Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:49.518503Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:49.533372Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002859/r3tmp/tmp0i2szK/pdisk_1.dat 2025-05-29T15:22:49.847048Z node 8 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:49.847312Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [8:7509888482038767254:2079] 1748532169816942 != 1748532169816945 TClient is connected to server localhost:7003 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
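The recurring sequence around /Root/.metadata/workload_manager/pools/default in this log (TPoolFetcherActor reports NOT_FOUND, an ESchemeOpCreateResourcePool suboperation runs, TPoolCreatorActor logs "Scheduled retry ... doublechecking", and a later txid fails with "path exist, request accepts it") is a first-query bootstrap race: several sessions try to create the default resource pool at once, and the "path exist" outcome is accepted as success. The object being created corresponds roughly to the YQL below; the parameter names come from YDB's workload-manager feature and the values are placeholders for illustration, not anything observed here:

    -- Illustrative only: the server auto-creates this pool on first use;
    -- settings are placeholders, not values taken from this log.
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );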
2025-05-29T15:22:49.917146Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:49.917180Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:49.917568Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:49.918935Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:49.923698Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:49.939212Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:49.947754Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:22:49.967760Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:49.983761Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:22:50.290586Z node 9 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7509888487823391468:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:50.290613Z node 9 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002859/r3tmp/tmpQSkgwf/pdisk_1.dat 2025-05-29T15:22:50.302310Z node 9 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:50.302511Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [9:7509888487823391447:2079] 1748532170290491 != 1748532170290494 TClient is connected to server localhost:13316 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:50.393743Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:50.393778Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:50.394375Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:50.395088Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:50.395776Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:50.400266Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:50.415398Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:50.431488Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:50.749552Z node 10 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7509888490242029221:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:50.749576Z node 10 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002859/r3tmp/tmpSUKU35/pdisk_1.dat 2025-05-29T15:22:50.765977Z node 10 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:50.766479Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [10:7509888490242029197:2079] 1748532170749405 != 1748532170749408 TClient is connected to server localhost:4362 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:22:50.855010Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:50.855065Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:50.855410Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:50.855845Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:50.864585Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:50.875285Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:22:50.876531Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:50.896184Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:50.923637Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> KqpScan::ScanDuringSplit10 [FAIL] >> KqpScan::ScanDuringSplitThenMerge >> KqpScan::RemoteShardScan [FAIL] >> KqpScan::ScanDuringSplit >> TGroupMapperTest::MapperSequentialCalls [GOOD] >> TGroupMapperTest::NonUniformCluster2 [GOOD] >> PartitionStats::Collector >> PartitionStats::Collector [GOOD] |61.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message [GOOD] >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] |61.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster2 [GOOD] |61.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MapperSequentialCalls [GOOD] |61.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [FAIL] Test command err: 2025-05-29T15:22:41.693382Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888448270042570:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:41.693531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:41.743969Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001f86/r3tmp/tmpflAOfd/pdisk_1.dat 2025-05-29T15:22:41.779681Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:41.779823Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888448270042545:2079] 1748532161693152 != 1748532161693155 TServer::EnableGrpc on GrpcPort 29142, node 1 2025-05-29T15:22:41.791877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001f86/r3tmp/yandexzjYhxD.tmp 2025-05-29T15:22:41.791885Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001f86/r3tmp/yandexzjYhxD.tmp 2025-05-29T15:22:41.795598Z INFO: TTestServer started on Port 3639 GrpcPort 29142 TClient is connected to server localhost:3639 2025-05-29T15:22:41.814052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001f86/r3tmp/yandexzjYhxD.tmp 2025-05-29T15:22:41.814120Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration PQClient connected to localhost:29142 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:41.834330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:41.850793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:22:41.854973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:41.855001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:41.859111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:22:42.159520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888452565010632:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:42.159560Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:42.159821Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888452565010667:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:42.160789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:42.163666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-05-29T15:22:42.163761Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888452565010669:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-05-29T15:22:42.214261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:42.244063Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888452565010801:2465] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:42.270260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:42.271476Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888452565010809:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:42.272277Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjJjYzFhZDQtZDdmMGIxZGQtZjY2MDM0MWUtNGFhMDk3OGI=, ActorId: [1:7509888452565010629:2334], ActorState: ExecuteState, TraceId: 01jwea6vkc0w8zzk5d1cg5pm1c, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:42.272799Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:42.290255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:42.372880Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888452565010987:2379], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:42.373811Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmZlNWNiOTAtMTRiYjcxMzUtNjY5YTYzY2YtMmNmMzg4Yg==, ActorId: [1:7509888452565010984:2377], ActorState: ExecuteState, TraceId: 01jwea6vskcm7zv24dnjy5pbwc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetup()+178 (0x26290EC2) NKikimr::NTestSuiteTopicAutoscaling::PartitionSplit_oldSDK(NKikimr::NPQ::NTest::SdkVersion)+34 (0x138F6C92) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1393DC27) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()+419 (0x1393D483) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D) ??+0 (0x7F358B0A6D90) __libc_start_main ... p()+178 (0x26290EC2) NKikimr::NTestSuiteTopicAutoscaling::PartitionSplit_ManySession(NKikimr::NPQ::NTest::SdkVersion)+35 (0x1390B553) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1393DC27) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()+419 (0x1393D483) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D) ??+0 (0x7F358B0A6D90) __libc_start_main+128 (0x7F358B0A6E40) _start+41 (0x12A4C029) test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001f86/r3tmp/tmp64dpMM/pdisk_1.dat 2025-05-29T15:22:47.799088Z node 7 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:47.801229Z node 7 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:47.812245Z node 7 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:47.820933Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [7:7509888477109918241:2079] 1748532167788215 != 1748532167788218 TServer::EnableGrpc on GrpcPort 16916, node 7 2025-05-29T15:22:47.838472Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001f86/r3tmp/yandexnPPHBl.tmp 2025-05-29T15:22:47.838494Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001f86/r3tmp/yandexnPPHBl.tmp 2025-05-29T15:22:47.838558Z node 7 
:NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001f86/r3tmp/yandexnPPHBl.tmp 2025-05-29T15:22:47.838585Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:47.846149Z INFO: TTestServer started on Port 17444 GrpcPort 16916 TClient is connected to server localhost:17444 PQClient connected to localhost:16916 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:47.889592Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:47.889625Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:47.890077Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:47.890533Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:47.892171Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.901936Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:48.256353Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888481404886334:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:48.256387Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:48.256495Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888481404886361:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:48.257411Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:48.259870Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7509888481404886363:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:22:48.266271Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:48.276966Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:48.312075Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:48.343729Z node 7 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [7:7509888481404886661:2578] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:48.357122Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [7:7509888481404886659:2374], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:48.357909Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=7&id=YzdlMWMzNjctYjQwMzg0MjctNzJjMWExODQtMzlmNmUwOTk=, ActorId: [7:7509888481404886656:2372], ActorState: ExecuteState, TraceId: 01jwea71mnfkt5prtyjwgt0vz5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:22:48.378286Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 21 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetup()+178 (0x26290EC2) NKikimr::NTestSuiteTopicAutoscaling::TTestCasePartitionSplit_ManySession_existed_AutoscaleAwareSDK::Execute_(NUnitTest::TTestContext&)+32 (0x1390EE50) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1393DC27) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()+419 (0x1393D483) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D) ??+0 (0x7F358B0A6D90) __libc_start_main+128 (0x7F358B0A6E40) _start+41 (0x12A4C029) |61.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |61.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] |61.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |61.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] >> Secret::SimpleQueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message [GOOD] Test command err: 2025-05-29T15:22:15.514199Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.514227Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:15.518327Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:180:2194] 2025-05-29T15:22:15.518558Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:180:2194] Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: 
"i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: 
"\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: 
"\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient ... 
:22:51.447680Z node 4 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-05-29T15:22:51.447685Z node 4 :PERSQUEUE DEBUG: partition.cpp:3322: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-05-29T15:22:51.447701Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1126: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'rt3.dc1--account--topic' Partition: 0 SourceId: 'src4'. Message seqNo: 7. Committed seqNo: (NULL). Writing seqNo: 7. EndOffset: 50. CurOffset: 50. Offset: 50 2025-05-29T15:22:51.447720Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 8 partNo 0 2025-05-29T15:22:51.447756Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 8 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 84 count 1 nextOffset 52 batches 1 2025-05-29T15:22:51.447761Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 9 partNo 0 2025-05-29T15:22:51.447765Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 136 count 2 nextOffset 53 batches 1 2025-05-29T15:22:51.447769Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 10 partNo 0 2025-05-29T15:22:51.447772Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 188 count 3 nextOffset 54 batches 1 2025-05-29T15:22:51.447778Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 11 partNo 0 2025-05-29T15:22:51.447785Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 11 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 240 count 4 nextOffset 55 batches 1 2025-05-29T15:22:51.447788Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 12 partNo 0 2025-05-29T15:22:51.447792Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 12 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 292 count 5 
nextOffset 56 batches 1 2025-05-29T15:22:51.447797Z node 4 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-05-29T15:22:51.447800Z node 4 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:51.447803Z node 4 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 51 PartNo 0 PackedSize 292 count 5 nextOffset 56 batches 1 2025-05-29T15:22:51.447806Z node 4 :PERSQUEUE DEBUG: partition.cpp:3322: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-05-29T15:22:51.447810Z node 4 :PERSQUEUE DEBUG: partition.cpp:2424: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 10 2025-05-29T15:22:51.447813Z node 4 :PERSQUEUE DEBUG: partition.cpp:2451: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-05-29T15:22:51.447816Z node 4 :PERSQUEUE DEBUG: partition.cpp:2452: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 51 PartNo 0 PackedSize 292 count 5 nextOffset 56 batches 1 2025-05-29T15:22:51.447858Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1623: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--account--topic' partition 0 compactOffset 51,5 HeadOffset 50 endOffset 50 curOffset 56 d0000000000_00000000000000000051_00000_0000000005_00000| size 189 WTime 21151 2025-05-29T15:22:51.447882Z node 4 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:51.447885Z node 4 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:51.447889Z node 4 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-05-29T15:22:51.447892Z node 4 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:51.447895Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc2 2025-05-29T15:22:51.447897Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc4 2025-05-29T15:22:51.447899Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-05-29T15:22:51.447901Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000051_00000_0000000005_00000| 2025-05-29T15:22:51.447904Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:51.447907Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:51.447909Z node 4 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-05-29T15:22:51.447911Z node 4 
:PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:51.447914Z node 4 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got KV request Got KV request Got KV request Got KV request Got KV request Got KV request Got KV request Wait tx committed for tx 0 2025-05-29T15:22:51.478551Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 85 WriteNewSizeFromSupportivePartitions# 4 2025-05-29T15:22:51.478582Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:51.478599Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 50 is already written 2025-05-29T15:22:51.478608Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:51.478614Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 50 is already written 2025-05-29T15:22:51.478619Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:51.478624Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 50 is already written 2025-05-29T15:22:51.478628Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:51.478633Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 50 is already written 2025-05-29T15:22:51.478637Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:22:51.478642Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 11, partNo: 0, Offset: 50 is already written 2025-05-29T15:22:51.478647Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-05-29T15:22:51.478652Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 12, partNo: 0, Offset: 50 is already written Wait immediate tx complete 3 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 3 Wait immediate tx complete 6 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 6 Wait tx committed for tx 10 2025-05-29T15:22:51.617745Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:51.617770Z node 5 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:22:51.621047Z node 5 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: {1, {2, 3}, 4}, State: StateInit] bootstrapping {1, {2, 3}, 4} [5:178:2192] 2025-05-29T15:22:51.621226Z node 5 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: {1, {2, 3}, 4}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {1, {2, 3}, 4} generation 0 [5:178:2192] Got cmd write: CmdDeleteRange { Range { From: "M0000000004" IncludeFrom: true To: "M0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "D0000000004" IncludeFrom: true To: "D0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "X0000000004" IncludeFrom: true To: "X0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "J0000000004" IncludeFrom: true To: "J0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "K0000000004" IncludeFrom: true To: "K0000000005" IncludeTo: false } } |61.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadFromTimestamp_PQv1 [FAIL] Test command err: 2025-05-29T15:22:42.248209Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888455056099712:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001f4e/r3tmp/tmpyr8LXm/pdisk_1.dat 2025-05-29T15:22:42.281734Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:42.283296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:42.306336Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:42.306429Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888455056099503:2079] 1748532162237609 != 1748532162237612 TServer::EnableGrpc on GrpcPort 21722, node 1 2025-05-29T15:22:42.320783Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001f4e/r3tmp/yandexjdRyzE.tmp 2025-05-29T15:22:42.320794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001f4e/r3tmp/yandexjdRyzE.tmp 2025-05-29T15:22:42.320851Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from
file: /home/runner/.ya/build/build_root/ciyv/001f4e/r3tmp/yandexjdRyzE.tmp 2025-05-29T15:22:42.320887Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:42.325123Z INFO: TTestServer started on Port 15094 GrpcPort 21722 TClient is connected to server localhost:15094 PQClient connected to localhost:21722 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:42.388323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:42.388349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:42.388895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:42.391017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:42.394936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:42.400385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:42.646173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888455056100325:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:42.646193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888455056100291:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:42.646220Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:42.646995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:42.649218Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888455056100328:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:22:42.686895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:42.698888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:42.714801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:42.741698Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888455056100633:2375], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:42.742287Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjU1MTUxYTYtMzlhODU3ZDYtYTcyYzAzYzQtOTEyM2M2NTQ=, ActorId: [1:7509888455056100630:2373], ActorState: ExecuteState, TraceId: 01jwea6w532yxfe9tvfn6pvn8m, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:22:42.743108Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888455056100645:2582] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:42.791612Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 30 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetup()+178 (0x26290EC2) NKikimr::NTestSuiteTopicAutoscaling::ReadingAfterSplitTest(NKikimr::NPQ::NTest::SdkVersion, bool, bool)+40 (0x138F1658) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1393DC27) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()+419 (0x1393D483) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D) ??+0 (0x7F988F916D90) __libc_start_main+128 (0x7F988F916E40) _start+41 (0x12A4C029) test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001f4e/r3tmp/tmpzncmp8/pdisk_1.dat 2025-05-29T15:22:43.143629Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888456812387058:2154];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:43.146395Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:43.148374Z node 2 :METADATA_PRO ... 
t=undelivered;self_id=[7:7509888477626406849:2205];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001f4e/r3tmp/tmpa8uEvV/pdisk_1.dat 2025-05-29T15:22:48.159184Z node 7 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:48.159223Z node 7 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:48.175531Z node 7 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:48.176860Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [7:7509888477626406681:2079] 1748532168155033 != 1748532168155036 TServer::EnableGrpc on GrpcPort 2179, node 7 2025-05-29T15:22:48.186991Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001f4e/r3tmp/yandexkBZq9o.tmp 2025-05-29T15:22:48.187004Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001f4e/r3tmp/yandexkBZq9o.tmp 2025-05-29T15:22:48.187058Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001f4e/r3tmp/yandexkBZq9o.tmp 2025-05-29T15:22:48.187098Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:48.195435Z INFO: TTestServer started on Port 18347 GrpcPort 2179 TClient is connected to server localhost:18347 PQClient connected to localhost:2179 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:48.262564Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:48.262598Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:48.262958Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:48.264011Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-29T15:22:48.264717Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:48.272680Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:48.523936Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888477626407463:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:48.523954Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888477626407484:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:48.523960Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:48.524411Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888477626407528:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:48.524427Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:48.524669Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:48.526698Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7509888477626407501:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:22:48.528833Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:48.538508Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:48.558122Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:48.580639Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [7:7509888477626407800:2375], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:48.580743Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=7&id=NzE1YzM2ZDItYzQ2YjY4MjEtNzYyMDRhYjktOGNkZWY2NjU=, ActorId: [7:7509888477626407797:2373], ActorState: ExecuteState, TraceId: 01jwea71vrdb7e25y89s78gvq5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:22:48.599499Z node 7 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [7:7509888477626407812:2580] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:48.648709Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 25 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetup()+178 (0x26290EC2) NKikimr::NTestSuiteTopicAutoscaling::ReadFromTimestamp(NKikimr::NPQ::NTest::SdkVersion, bool)+44 (0x1393658C) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1393DC27) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()+419 (0x1393D483) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D) ??+0 (0x7F988F916D90) __libc_start_main+128 (0x7F988F916E40) _start+41 (0x12A4C029) |61.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |61.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> CommitOffset::Commit_WithSession_ToPastParentPartition [FAIL] Test command err: 2025-05-29T15:22:41.499015Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888451031493593:2079];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:41.499249Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002209/r3tmp/tmpe5Q8E7/pdisk_1.dat 2025-05-29T15:22:41.562498Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:41.591642Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65463, node 1 2025-05-29T15:22:41.610237Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/002209/r3tmp/yandexNDNgNA.tmp 2025-05-29T15:22:41.610250Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/002209/r3tmp/yandexNDNgNA.tmp 2025-05-29T15:22:41.610317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/002209/r3tmp/yandexNDNgNA.tmp 2025-05-29T15:22:41.610372Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:41.616824Z INFO: TTestServer started on Port 11667 GrpcPort 65463 TClient is connected to server localhost:11667 PQClient connected to localhost:65463 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:22:41.660696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:41.660730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:41.661341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:41.667757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:41.671819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:41.679048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-05-29T15:22:41.682899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:41.948410Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888451031494321:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.948444Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.948741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888451031494357:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.949625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:41.951152Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888451031494396:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.951171Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.958134Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888451031494359:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-05-29T15:22:42.004361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:42.018151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:42.026808Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888455326461841:2507] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:42.039946Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888455326461850:2359], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:42.040551Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDRkOGNiODMtOTRiMDcwYTQtOTM3OWQ4Ni1lYzgzMGQyZA==, ActorId: [1:7509888451031494318:2333], ActorState: ExecuteState, TraceId: 01jwea6vcsaerpfq7jvv44htm6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:42.040950Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:42.094846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:42.129716Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888455326461975:2380], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:42.130497Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWQ5M2FmYWItY2YyZmNhMDAtNDIyYmJiOS1hNzZhZmMyMQ==, ActorId: [1:7509888455326461972:2378], ActorState: ExecuteState, TraceId: 01jwea6vj05xkpmv4y4cep9ey5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetu ... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002209/r3tmp/tmpx4gjmG/pdisk_1.dat 2025-05-29T15:22:47.492306Z node 7 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:47.499650Z node 7 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:47.499989Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [7:7509888476929572966:2079] 1748532167487202 != 1748532167487205 TServer::EnableGrpc on GrpcPort 19199, node 7 2025-05-29T15:22:47.510688Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/002209/r3tmp/yandexd1DlJE.tmp 2025-05-29T15:22:47.510705Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/002209/r3tmp/yandexd1DlJE.tmp 2025-05-29T15:22:47.510794Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/002209/r3tmp/yandexd1DlJE.tmp 2025-05-29T15:22:47.510848Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:47.517869Z INFO: TTestServer started on Port 14255 GrpcPort 19199 TClient is connected to server localhost:14255 PQClient connected to localhost:19199 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
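
[annotation] The repeated "assertion failed at ydb/core/testlib/test_pq_client.h:537 ... (result.IsSuccess())" entries above all come from the same setup path visible in the backtraces: TTopicSdkTestSetup -> TFlatMsgBusPQClient::FullInit -> InitDCs -> RunYqlDataQuery. A minimal sketch of that check follows; the helper below is illustrative, not the actual test_pq_client.h code, and it assumes the in-repo YDB C++ SDK headers and NUnitTest macros:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>  // assumed in-repo SDK path
    #include <library/cpp/testing/unittest/registar.h>      // UNIT_ASSERT_C
    #include <util/generic/maybe.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    // Runs a YQL data query and asserts success, mirroring the failing check:
    // a compile-time INTERNAL_ERROR such as "yql_expr.h:1874: index out of range"
    // makes IsSuccess() false, so the suite aborts during server setup.
    TDataQueryResult RunYqlDataQueryOrFail(TTableClient& client, const TString& query) {
        TMaybe<TDataQueryResult> result;
        client.RetryOperationSync([&](TSession session) {
            auto res = session.ExecuteDataQuery(
                query,
                TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
            ).GetValueSync();
            result = res;
            return res;  // returned as TStatus, so retryable codes are retried
        });
        // On failure the attached issues ("Execution, code: 1060" /
        // "index out of range, code: 1") are printed, as in the log above.
        UNIT_ASSERT_C(result->IsSuccess(), result->GetIssues().ToString());
        return *result;
    }
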
2025-05-29T15:22:47.592638Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:47.592665Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:47.592677Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.594400Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:47.594615Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.679314Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:22:47.680854Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:47.718970Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-05-29T15:22:47.924266Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888476929573749:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:47.924300Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:47.924495Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888476929573770:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:47.925435Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:47.929333Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7509888476929573772:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:22:47.941864Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:47.961629Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:47.991737Z node 7 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [7:7509888476929574004:2525] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:47.997308Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [7:7509888476929574012:2363], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:47.997830Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=7&id=MTRlMDAzMjItOTQzNjcwZC00ZDMyOWIzYy02NjkxNWIyMQ==, ActorId: [7:7509888476929573745:2332], ActorState: ExecuteState, TraceId: 01jwea717j4hv4t81zjmw79y26, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:47.998002Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:47.998764Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:48.068429Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [7:7509888481224541398:2379], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:48.069220Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=7&id=MzgyN2UyMjUtOWM3MDI0ZGMtYzE2MWNkNDgtM2U1YzhlOTM=, ActorId: [7:7509888481224541395:2377], ActorState: ExecuteState, TraceId: 01jwea71b61km9jhps5egkak2h, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetup()+178 (0x26290EC2) NKikimr::NTestSuiteCommitOffset::TTestCaseCommit_WithSession_ToPastParentPartition::Execute_(NUnitTest::TTestContext&)+35 (0x13969B93) NKikimr::NTestSuiteCommitOffset::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1397FD47) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E) NKikimr::NTestSuiteCommitOffset::TCurrentTest::Execute()+429 (0x1397F70D) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D) ??+0 (0x7F2655377D90) __libc_start_main+128 (0x7F2655377E40) _start+41 (0x12A4C029) >> TGroupMapperTest::MakeDisksForbidden [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK [FAIL] Test command err: 2025-05-29T15:22:41.603899Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888449526741121:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:41.609017Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0021b0/r3tmp/tmp1kAoMN/pdisk_1.dat 2025-05-29T15:22:41.645860Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:41.672580Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888449526740961:2079] 1748532161597882 != 1748532161597885 2025-05-29T15:22:41.675340Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28021, node 1 2025-05-29T15:22:41.694930Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/0021b0/r3tmp/yandexgJxTsS.tmp 2025-05-29T15:22:41.694942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/0021b0/r3tmp/yandexgJxTsS.tmp 2025-05-29T15:22:41.694996Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: 
/home/runner/.ya/build/build_root/ciyv/0021b0/r3tmp/yandexgJxTsS.tmp 2025-05-29T15:22:41.695029Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:41.698294Z INFO: TTestServer started on Port 15046 GrpcPort 28021 TClient is connected to server localhost:15046 PQClient connected to localhost:28021 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:41.741368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:41.741394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:41.743726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:41.744308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:41.746509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:41.756413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:41.851926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-05-29T15:22:42.122106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888453821709053:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:42.122238Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:42.122960Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888453821709080:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:42.123810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:42.126263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-05-29T15:22:42.126318Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888453821709082:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:22:42.217955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:42.228383Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888453821709212:2463] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:42.241857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:42.259442Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888453821709234:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:42.259673Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjA3NzNmNTItMWQ0YTY0ODItZjY0OGE4YzUtNzdlNWRiMDU=, ActorId: [1:7509888453821709050:2334], ActorState: ExecuteState, TraceId: 01jwea6vj6am5eff6sv6xfrcre, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:42.260158Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:42.273146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:42.315286Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888453821709395:2379], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:42.315723Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzQwN2YwY2MtZmQ2NThjM2QtNTI4OTQ1MjQtODAwZGUwZmQ=, ActorId: [1:7509888453821709392:2377], ActorState: ExecuteState, TraceId: 01jwea6vqv6jhtzkv4vpg4vg6w, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetup()+178 (0x26290EC2) NKikimr::NTestSuiteTopicAutoscaling::PartitionSplit_ReadEmptyPartitions(NKikimr::NPQ::NTest::SdkVersion, bool)+37 (0x13908A15) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()::'lambda'()::oper ... 37Z node 7 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:47.625895Z node 7 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0021b0/r3tmp/tmpiBCeC7/pdisk_1.dat 2025-05-29T15:22:47.638418Z node 7 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:47.641096Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [7:7509888475666387892:2079] 1748532167621922 != 1748532167621925 TServer::EnableGrpc on GrpcPort 5722, node 7 2025-05-29T15:22:47.649904Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/0021b0/r3tmp/yandexBsYs9F.tmp 2025-05-29T15:22:47.649919Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/0021b0/r3tmp/yandexBsYs9F.tmp 2025-05-29T15:22:47.649983Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/0021b0/r3tmp/yandexBsYs9F.tmp 2025-05-29T15:22:47.650044Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:47.663456Z INFO: TTestServer started on Port 23993 GrpcPort 5722 TClient is connected to server localhost:23993 PQClient connected to localhost:5722 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:47.727805Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:47.727828Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:47.728172Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:47.729454Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:47.731355Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:47.743852Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:48.095647Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888479961355972:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:48.095684Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:48.095817Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888479961356007:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:48.096712Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:48.099425Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-05-29T15:22:48.101781Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7509888479961356009:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:22:48.105454Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:48.118141Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:48.189490Z node 7 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [7:7509888479961356227:2526] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:48.197529Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:22:48.198145Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [7:7509888479961356235:2363], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:48.198684Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=7&id=MTc1MzIwOGItMmFkZmNhYWYtZmFmNDY3OWItYmY5MTQxYWY=, ActorId: [7:7509888479961355969:2334], ActorState: ExecuteState, TraceId: 01jwea71cy5y9h94zxa9gpd0qn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:48.198839Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:48.229280Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [7:7509888479961356325:2379], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:48.230083Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=7&id=NzI5ZjAzYjAtODk3NDE5YzAtYTEzMjAyODUtMTRlMWI1MDg=, ActorId: [7:7509888479961356322:2377], ActorState: ExecuteState, TraceId: 01jwea71gpb9a6ksy00g3qwz5r, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetup()+178 (0x26290EC2) NKikimr::NTestSuiteTopicAutoscaling::ReadFromTimestamp(NKikimr::NPQ::NTest::SdkVersion, bool)+44 (0x1393658C) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1393DC27) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()+419 (0x1393D483) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D) ??+0 (0x7FE2EC281D90) __libc_start_main+128 (0x7FE2EC281E40) _start+41 (0x12A4C029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TPersQueueMirrorer::ValidStartStream [FAIL] Test command err: 2025-05-29T15:22:41.535219Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888449208264736:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:41.535236Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0021bd/r3tmp/tmpP8VRBF/pdisk_1.dat 2025-05-29T15:22:41.558796Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:41.610029Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:41.611913Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888449208264715:2079] 1748532161535084 != 1748532161535087 TServer::EnableGrpc on GrpcPort 29525, node 1 2025-05-29T15:22:41.638998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/0021bd/r3tmp/yandexn6hBRB.tmp 2025-05-29T15:22:41.639018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/0021bd/r3tmp/yandexn6hBRB.tmp 2025-05-29T15:22:41.639093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/0021bd/r3tmp/yandexn6hBRB.tmp 2025-05-29T15:22:41.639144Z node 1 :NET_CLASSIFIER 
ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:41.641991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:41.642019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:41.643146Z INFO: TTestServer started on Port 19021 GrpcPort 29525 2025-05-29T15:22:41.645119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19021 PQClient connected to localhost:29525 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:41.715745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:41.719245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:41.732540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:41.988098Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888449208265517:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.988136Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.988322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888449208265538:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:41.989250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-05-29T15:22:41.991377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-05-29T15:22:41.991459Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888449208265540:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-05-29T15:22:42.040052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:42.051870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:42.073646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:22:42.083596Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888453503233123:2571] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:42.112324Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888453503233141:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:42.113524Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTExYzdkMzAtNGVmZDliY2QtMWFiOWJhZi03MWNhMmEzYg==, ActorId: [1:7509888453503233138:2374], ActorState: ExecuteState, TraceId: 01jwea6vh7bdcf331bzj4rsx0h, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:22:42.144738Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 37 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetup()+178 (0x26290EC2) NKikimr::NTestSuiteTopicAutoscaling::TTestCaseControlPlane_BackCompatibility::Execute_(NUnitTest::TTestContext&)+35 (0x13917253) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1393DC27) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E) NKikimr::NTestSuiteTopicAutoscaling::TCurrentTest::Execute()+419 (0x1393D483) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D) ??+0 (0x7F04E3C1CD90) __libc_start_main+128 (0x7F04E3C1CE40) _start+41 (0x12A4C029) 2025-05-29T15:22:42.662784Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888453405186509:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:42.662802Z node 2 :METADATA_PROVIDER ERROR: ... } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:47.674188Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:47.674230Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:47.675301Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:47.676178Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:47.676198Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:47.679624Z node 7 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-05-29T15:22:47.679702Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:47.680122Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-29T15:22:47.693771Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:48.004257Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888479004883284:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:48.004285Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:48.004394Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888479004883297:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:48.005143Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480
2025-05-29T15:22:48.009000Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7509888479004883331:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:48.009026Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:48.013075Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:22:48.016684Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7509888479004883299:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking }
2025-05-29T15:22:48.050144Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [8:7509888480497723797:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:48.050714Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=8&id=YTk0YTdkMDktYTRjZGJiZTAtYTliNWUxNDAtZDIyYTJkMzM=, ActorId: [8:7509888480497723740:2307], ActorState: ExecuteState, TraceId: 01jwea71bb138dzmg3n9yjdbvf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:48.050883Z node 8 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:48.059353Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:22:48.107249Z node 7 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [7:7509888479004883568:2865] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:48.112371Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [7:7509888479004883579:2358], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:48.112947Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=7&id=MTU2OTViZDItODY2MjgxZDUtMzMzNzhjODktM2Q1ZWQ5Y2M=, ActorId: [7:7509888474709915963:2334], ActorState: ExecuteState, TraceId: 01jwea71a3d5g9kf5hjcm9f5aa, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:48.113087Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:48.196958Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:48.258167Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [7:7509888479004883774:2382], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:48.258853Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=7&id=ODAzZjc0M2EtMjQ0NjQxZTYtMmFlYmZlNDctM2JlYzE5MWU=, ActorId: [7:7509888479004883771:2380], ActorState: ExecuteState, TraceId: 01jwea71hk9g4pn3kt9w9d14m8, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x1399B5FB) NPersQueue::TTestServer::TTestServer(bool)+134 (0x13992FC6) NKikimr::NPersQueueTests::NTestSuiteTPersQueueMirrorer::TTestCaseValidStartStream::Execute_(NUnitTest::TTestContext&)+37 (0x13994E65) NKikimr::NPersQueueTests::NTestSuiteTPersQueueMirrorer::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1399A2D7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E) NKikimr::NPersQueueTests::NTestSuiteTPersQueueMirrorer::TCurrentTest::Execute()+425 (0x13999C99) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D) ??+0 (0x7F04E3C1CD90) __libc_start_main+128 (0x7F04E3C1CE40) _start+41 (0x12A4C029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> CommitOffset::Commit_FromSession_ToNewChild_WithoutCommitToParent [FAIL] Test command err: 2025-05-29T15:22:42.295852Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888453542023916:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:42.295885Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001f60/r3tmp/tmp3nUUeQ/pdisk_1.dat 2025-05-29T15:22:42.331933Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:42.370696Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888453542023896:2079] 1748532162295648 != 1748532162295651 2025-05-29T15:22:42.374793Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30387, node 1 2025-05-29T15:22:42.387995Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001f60/r3tmp/yandexRGKKou.tmp 2025-05-29T15:22:42.388013Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001f60/r3tmp/yandexRGKKou.tmp 2025-05-29T15:22:42.388087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: 
successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001f60/r3tmp/yandexRGKKou.tmp 2025-05-29T15:22:42.388143Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:42.395955Z INFO: TTestServer started on Port 28889 GrpcPort 30387 2025-05-29T15:22:42.398889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:42.398920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:42.400021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28889 PQClient connected to localhost:30387 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:42.452399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:22:42.455661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:22:42.461882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:42.740528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888453542024710:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:42.740557Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:42.740814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888453542024722:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:42.741473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888453542024727:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:42.741488Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:42.741775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480
2025-05-29T15:22:42.745765Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888453542024725:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking }
2025-05-29T15:22:42.786448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:22:42.807786Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888453542024844:2461] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:22:42.818318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:22:42.835856Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888453542024855:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:42.836130Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2M0OWU0NGItMjQwNGJmMmEtNWZiNDliZmQtNGQ1NmQ4ZDI=, ActorId: [1:7509888453542024707:2334], ActorState: ExecuteState, TraceId: 01jwea6w5h8eq7c9m9fpdrqe31, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:42.836613Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:42.841793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:42.921815Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888453542025041:2380], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:42.922629Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzEzMjNlNWMtNTU4OGQyMmItMjVmM2U3MDgtYTRjODU1Mg==, ActorId: [1:7509888453542025038:2378], ActorState: ExecuteState, TraceId: 01jwea6wat2qymmc849jn3ken3, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTe ... 8484176963039:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:49.267743Z node 8 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001f60/r3tmp/tmpky9S7l/pdisk_1.dat 2025-05-29T15:22:49.274917Z node 8 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:49.281156Z node 8 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:49.281419Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [8:7509888484176963017:2079] 1748532169267515 != 1748532169267518 TServer::EnableGrpc on GrpcPort 21997, node 8 2025-05-29T15:22:49.290788Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001f60/r3tmp/yandexDtKakM.tmp 2025-05-29T15:22:49.290804Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001f60/r3tmp/yandexDtKakM.tmp 2025-05-29T15:22:49.290894Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001f60/r3tmp/yandexDtKakM.tmp 2025-05-29T15:22:49.290951Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:49.296933Z INFO: TTestServer started on Port 20422 GrpcPort 21997 TClient is connected to server localhost:20422 PQClient connected to localhost:21997 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:49.371888Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:49.371927Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:49.372442Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:49.373065Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:49.374835Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:49.386176Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:49.597192Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7509888484176963834:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:49.597213Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7509888484176963823:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:49.597230Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:49.597757Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480
2025-05-29T15:22:49.598272Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7509888484176963867:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:49.598343Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:49.599226Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7509888484176963837:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking }
2025-05-29T15:22:49.602188Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:22:49.608747Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:22:49.623112Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
=== Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);
2025-05-29T15:22:49.646384Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [8:7509888484176964134:2375], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:49.646544Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=8&id=MjNkNjVkOGMtNzQ1ZWQwZTctN2U0MTkyZDctNTU1ODE0NWI=, ActorId: [8:7509888484176964131:2373], ActorState: ExecuteState, TraceId: 01jwea72x10c1241087d6mtdd5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:22:49.653641Z node 8 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [8:7509888484176964146:2580] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:49.688092Z node 8 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 21 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetup()+178 (0x26290EC2) NKikimr::NTestSuiteCommitOffset::TTestCaseCommit_FromSession_ToNewChild_WithoutCommitToParent::Execute_(NUnitTest::TTestContext&)+32 (0x1396DE10) NKikimr::NTestSuiteCommitOffset::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1397FD47) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E) NKikimr::NTestSuiteCommitOffset::TCurrentTest::Execute()+429 (0x1397F70D) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D) ??+0 (0x7F890FAF8D90) __libc_start_main+128 (0x7F890FAF8E40) _start+41 (0x12A4C029) |61.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |61.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksForbidden [GOOD] |61.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService |61.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut >> Secret::Deactivated |61.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |61.8%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |61.8%| [TA] $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TPersQueueMirrorer::TestBasicRemote [FAIL] Test command err: 2025-05-29T15:22:42.268745Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888453696283336:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:42.268829Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001f41/r3tmp/tmpf1G2qs/pdisk_1.dat 2025-05-29T15:22:42.308534Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:22:42.345249Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63878, node 1 2025-05-29T15:22:42.362944Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001f41/r3tmp/yandexLzOoyH.tmp 2025-05-29T15:22:42.362966Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001f41/r3tmp/yandexLzOoyH.tmp 2025-05-29T15:22:42.367019Z INFO: TTestServer started on Port 31342 GrpcPort 63878 2025-05-29T15:22:42.370970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:42.371009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:42.372681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31342 2025-05-29T15:22:42.399632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001f41/r3tmp/yandexLzOoyH.tmp 2025-05-29T15:22:42.399854Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration PQClient connected to localhost:63878 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:42.430158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:22:42.433696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:42.441859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:42.669509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888453696283969:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:42.669508Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888453696283957:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:42.669529Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:42.670403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480
2025-05-29T15:22:42.672129Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888453696284001:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:42.672892Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:42.673036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480
2025-05-29T15:22:42.678817Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888453696283971:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking }
2025-05-29T15:22:42.713349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:22:42.721020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:22:42.734086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:22:42.735268Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888453696284209:2536] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
=== Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);
2025-05-29T15:22:42.766538Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888453696284287:2381], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:42.768364Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTI2YTg3MDQtMzdhMTJhZGYtMjQyYWUwOWEtYjI4YWVhN2E=, ActorId: [1:7509888453696284284:2379], ActorState: ExecuteState, TraceId: 01jwea6w5z53tthdfgsjwdyy76, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:22:42.789043Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 32 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13AC482C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262E6316) NKikimr::NPQ::NTest::CreateSetup()+178 (0x26290EC2) NKikimr::NTestSuiteCommitOffset::TTestCasePartitionSplit_OffsetCommit::Execute_(NUnitTest::TTestContext&)+35 (0x1396F683) NKikimr::NTestSuiteCommitOffset::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1397FD47) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E) NKikimr::NTestSuiteCommitOffset::TCurrentTest::Execute()+429 (0x1397F70D) NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3) NUnitTest::RunMain(int ... arentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:48.513909Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:48.513944Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:48.515491Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:48.517841Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:48.518057Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:48.518078Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2025-05-29T15:22:48.519299Z node 7 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-05-29T15:22:48.519563Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:48.532896Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:22:48.818816Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7509888481770753831:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:48.818855Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:48.821177Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7509888481770753858:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:22:48.823118Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480
2025-05-29T15:22:48.831966Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7509888481770753860:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking }
2025-05-29T15:22:48.853413Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [7:7509888481579042408:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:48.853998Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=7&id=YmUyNjQxODItNWI0ZDc0ODgtYTM1MDNhODYtYzE0MDcxMDA=, ActorId: [7:7509888481579042367:2334], ActorState: ExecuteState, TraceId: 01jwea724g7q1fgcc6bwp2k2f2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:48.854131Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:48.866380Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-05-29T15:22:48.902870Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-05-29T15:22:48.912088Z node 8 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [8:7509888481770753887:2166] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:48.918828Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [8:7509888481770753917:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:22:48.919387Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=8&id=OGQwY2UwNzQtMTE0NGZkMy00MDE0NTI2Yy1iMTU2MTY3Yw==, ActorId: [8:7509888481770753829:2309], ActorState: ExecuteState, TraceId: 01jwea723f58f0d0etvnkyhkwg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:22:48.919526Z node 8 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:22:48.978187Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:22:49.025142Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [7:7509888485874010092:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:49.025930Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=7&id=OGRiZjU3MWMtNTM5ODcwYTgtOTUyNDc2MWYtYzc3MTY3YWM=, ActorId: [7:7509888485874010089:2374], ActorState: ExecuteState, TraceId: 01jwea729m1rprmrrgkrb630xt, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
TBackTrace::Capture()+28 (0x13AC482C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C785B9)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139A34A4)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139A2A58)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x139A1CA2)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x1399FC57)
NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x1399E948)
NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x1399B5FB)
NPersQueue::TTestServer::TTestServer(bool)+134 (0x13992FC6)
NKikimr::NPersQueueTests::NTestSuiteTPersQueueMirrorer::TTestCaseTestBasicRemote::Execute_(NUnitTest::TTestContext&)+37 (0x13987965)
NKikimr::NPersQueueTests::NTestSuiteTPersQueueMirrorer::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1399A2D7)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7A46E)
NKikimr::NPersQueueTests::NTestSuiteTPersQueueMirrorer::TCurrentTest::Execute()+425 (0x13999C99)
NUnitTest::TTestFactory::Execute()+803 (0x13C7ABE3)
NUnitTest::RunMain(int, char**)+3021 (0x13C8C78D)
??+0 (0x7F7A37B0AD90)
__libc_start_main+128 (0x7F7A37B0AE40)
_start+41 (0x12A4C029)
|61.8%| [TA] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ...
results_accumulator.log} |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> TPQTest::TestWritePQ [GOOD] >> TPQTest::TestWriteOffsetWithBigMessage |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> TNodeBrokerTest::Test1001NodesSubscribers [GOOD] |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> TGroupMapperTest::ReassignGroupTest3dc [GOOD] >> KqpScan::ScanRetryReadRanges [FAIL] >> Secret::Validation |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::ReassignGroupTest3dc [GOOD] |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test1001NodesSubscribers [GOOD] Test command err: 2025-05-29T15:22:18.066154Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.066194Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.066218Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.066243Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.066266Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.066283Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.072035Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.072163Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.072204Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.072235Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.072272Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 
2025-05-29T15:22:18.072304Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.072360Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.072383Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.072641Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.072664Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.072679Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.072692Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.072709Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.072725Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.072755Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.077632Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.077710Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.077750Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.079043Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.079106Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.079152Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.079287Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.079322Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.079357Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.079393Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.079426Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:18.079729Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.079774Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.079804Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.079851Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.079886Z node 7 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.080042Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.080180Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.080259Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.081268Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.081305Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.081317Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.081330Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.081341Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.081352Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.081363Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.087239Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.087621Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.087676Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.087693Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.088502Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.088578Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.089364Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.089837Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.089961Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.090146Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.090319Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.090530Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.090842Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.090989Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.091503Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.091947Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.092532Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.094284Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.094935Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.095061Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.095668Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:18.127331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:18.127360Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:18.132745Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:18.133248Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:18.133316Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:18.133544Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:18.134477Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:18.134504Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:18.134568Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:18.134584Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Start ... 
025-05-29T15:22:52.847837Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:52.859770Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:564:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.859796Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.859808Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5588:7170] 2025-05-29T15:22:52.863995Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9698:11227], Recipient [1:564:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:52.864054Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:632:2214], Recipient [1:564:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:52.864062Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:52.864076Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:52.876841Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:564:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.876884Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.876898Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5593:7175] 2025-05-29T15:22:52.880504Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9700:11229], Recipient [1:564:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:52.880574Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:632:2214], Recipient [1:564:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:52.880579Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:52.880590Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:52.892591Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:564:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.892614Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.892624Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5598:7180] 2025-05-29T15:22:52.895895Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9702:11231], Recipient [1:564:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:52.895950Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:632:2214], 
Recipient [1:564:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:52.895954Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:52.895964Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:52.907893Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:564:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.907924Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.907943Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5603:7185] 2025-05-29T15:22:52.912331Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9704:11233], Recipient [1:564:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:52.912409Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:632:2214], Recipient [1:564:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:52.912417Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:52.912431Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:52.925311Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:564:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.925335Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.925346Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5608:7190] 2025-05-29T15:22:52.929445Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9706:11235], Recipient [1:564:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:52.929520Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:632:2214], Recipient [1:564:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:52.929526Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:52.929552Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:52.941978Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:564:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.942003Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.942015Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5613:7195] 2025-05-29T15:22:52.946090Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender 
[1:9708:11237], Recipient [1:564:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:52.946166Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:632:2214], Recipient [1:564:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:52.946173Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:52.946185Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:52.959246Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:564:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.959269Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.959285Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5618:7200] 2025-05-29T15:22:52.963330Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9710:11239], Recipient [1:564:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:52.963416Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:632:2214], Recipient [1:564:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:52.963422Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:52.963437Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:52.976036Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:564:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.976062Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.976074Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5623:7205] 2025-05-29T15:22:52.979071Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9712:11241], Recipient [1:564:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:52.979137Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:632:2214], Recipient [1:564:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:52.979142Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:52.979151Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:52.990899Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:564:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.990928Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:52.990945Z node 1 
:NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5628:7210] 2025-05-29T15:22:52.995410Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9714:11243], Recipient [1:564:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:52.995483Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:632:2214], Recipient [1:564:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:52.995490Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:52.995504Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:53.008617Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:564:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:53.008649Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:53.008669Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5633:7215] |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> TMultiversionObjectMap::MonteCarlo [GOOD] >> KqpScan::ScanDuringSplitThenMerge [FAIL] >> Secret::DeactivatedQueryService |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> KqpScan::ScanPg >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] >> KqpScan::ScanDuringSplit [FAIL] >> KqpScan::ScanAfterSplitSlowMetaRead >> ConvertMiniKQLValueToYdbValueTest::SimpleBool [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalString [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::List [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/mind/bscontroller/ut/unittest >> TMultiversionObjectMap::MonteCarlo [GOOD] |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.0%| [TA] $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} >> ConvertYdbValueToMiniKQLValueTest::Void [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD] >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> CellsFromTupleTest::CellsFromTupleSuccess >> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] |62.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] |62.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::List [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccess [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD] >> CellsFromTupleTest::CellsFromTupleFails [GOOD] >> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD] >> CompressionTests::Zstd [GOOD] >> CompressionTests::Unsupported [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::List [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD] >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Void [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD] >> ConvertTableDescription::StorageSettings [GOOD] >> ConvertTableDescription::ColumnFamilies [GOOD] >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] |62.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] |62.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> 
ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |62.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] |62.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] |62.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] |62.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] |62.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |62.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |62.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.1%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} |62.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.1%| [TA] $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::Simple |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> ColumnBuildTest::BaseCase >> TNodeBrokerTest::Test1000NodesSubscribers [GOOD] |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> ColumnBuildTest::CancelBuild |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> ColumnBuildTest::ValidDefaultValue >> KqpScan::ScanPg [FAIL] >> KqpScan::ScanAfterSplitSlowMetaRead [FAIL] |62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest |62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanRetryReadRanges [FAIL] Test command err: 2025-05-29T15:22:49.266919Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:701:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:22:49.267028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:49.267054Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:22:49.267087Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:698:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:22:49.267133Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:49.267137Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0021c0/r3tmp/tmpvynmji/pdisk_1.dat 2025-05-29T15:22:49.383158Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:49.460523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:22:49.548951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:49.548984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:49.550596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:49.550631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:49.562203Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:22:49.562359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:49.562457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:49.844865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:50.335825Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1389:2830], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:50.335854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1400:2835], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:50.335865Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:50.336984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:22:50.689960Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1403:2838], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-05-29T15:22:50.790290Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:1526:2906] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:22:50.956733Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:1539:2918], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:50.959007Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDhkMjc4ZWYtMWYwYTY0YmMtYjQ0ZDJlNDItZjgyODcxOWY=, ActorId: [1:1387:2828], ActorState: ExecuteState, TraceId: 01jwea73jzb5q3xsf3kcyvtd8d, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x15DE701C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x15F99F49)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x2862A574)
NKikimr::NKqp::NTestSuiteKqpScan::TTestCaseScanRetryRead::Execute_(NUnitTest::TTestContext&)+1215 (0x15CC360F)
NKikimr::NKqp::NTestSuiteKqpScan::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x15CD79E7)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x15F9BDFE)
NKikimr::NKqp::NTestSuiteKqpScan::TCurrentTest::Execute()+422 (0x15CD7246)
NUnitTest::TTestFactory::Execute()+803 (0x15F9C573)
NUnitTest::RunMain(int, char**)+3021 (0x15FAE11D)
??+0 (0x7FCACF783D90)
__libc_start_main+128 (0x7FCACF783E40)
_start+41 (0x14D2B029)
2025-05-29T15:22:52.327388Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:701:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:22:52.327458Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:52.327470Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:22:52.327704Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:698:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:22:52.327745Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:52.327773Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0021c0/r3tmp/tmpeKn4Uh/pdisk_1.dat 2025-05-29T15:22:52.412515Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:52.533044Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:22:52.621382Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:52.621419Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:52.622047Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:52.622066Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:52.633677Z node 3 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-05-29T15:22:52.633814Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:52.633918Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:52.897317Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:53.354010Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1390:2830], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:53.354034Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1401:2835], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:53.354043Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:53.354918Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:22:53.730513Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1404:2838], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-05-29T15:22:53.814829Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:1527:2906] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:22:53.881648Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:1540:2918], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:22:53.883730Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=OTBmYzNmYmQtNDE5MWY1ZDYtZmE2YzkyMC0yODkxODhkYQ==, ActorId: [3:1388:2828], ActorState: ExecuteState, TraceId: 01jwea76h917fbcabhaxkjqe7x, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x15DE701C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x15F99F49)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x2862A574)
NKikimr::NKqp::NTestSuiteKqpScan::TTestCaseScanRetryReadRanges::Execute_(NUnitTest::TTestContext&)+1227 (0x15CCBB1B)
NKikimr::NKqp::NTestSuiteKqpScan::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x15CD79E7)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x15F9BDFE)
NKikimr::NKqp::NTestSuiteKqpScan::TCurrentTest::Execute()+422 (0x15CD7246)
NUnitTest::TTestFactory::Execute()+803 (0x15F9C573)
NUnitTest::RunMain(int, char**)+3021 (0x15FAE11D)
??+0 (0x7FCACF783D90)
__libc_start_main+128 (0x7FCACF783E40)
_start+41 (0x14D2B029)
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test1000NodesSubscribers [GOOD]
Test command err:
2025-05-29T15:22:19.987863Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s }
2025-05-29T15:22:19.987911Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s }
2025-05-29T15:22:19.987935Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s }
2025-05-29T15:22:19.987967Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s }
2025-05-29T15:22:19.988001Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s }
2025-05-29T15:22:19.988032Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s }
2025-05-29T15:22:19.995139Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:19.995259Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:19.995302Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:19.995351Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes
2025-05-29T15:22:19.995400Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle
NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.995435Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.995496Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.995522Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:19.995800Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.995834Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.995860Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.995893Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.995919Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.995939Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:19.995978Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.000175Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.000255Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.000299Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.001375Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.001430Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.001474Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.001611Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.001642Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.001669Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.001703Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.001729Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:20.001958Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.001995Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.002023Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.002055Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 
2025-05-29T15:22:20.002087Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.002197Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.002313Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.002374Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.003219Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.003253Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.003263Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.003273Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.003283Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.003293Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.003304Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.006610Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.006801Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.006814Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.006834Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.006893Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.007493Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.007735Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.007968Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.008167Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.008178Z node 1 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.008340Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.008416Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.008504Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.009442Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.009508Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.009543Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.009755Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.009792Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.009855Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.010454Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.010900Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.013146Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.013411Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:20.040552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:20.040578Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:20.045358Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:20.045844Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:20.045923Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:20.046126Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:20.046600Z node 1 :NODE_BROKER DEBUG: node_ ... 
025-05-29T15:22:55.843588Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:55.855841Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:565:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.855872Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.855887Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5584:7164] 2025-05-29T15:22:55.859922Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9690:11217], Recipient [1:565:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:55.860005Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:633:2213], Recipient [1:565:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:55.860013Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:55.860029Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:55.872824Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:565:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.872859Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.872876Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5589:7169] 2025-05-29T15:22:55.877441Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9692:11219], Recipient [1:565:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:55.877557Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:633:2213], Recipient [1:565:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:55.877565Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:55.877582Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:55.890999Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:565:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.891026Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.891049Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5594:7174] 2025-05-29T15:22:55.895547Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9694:11221], Recipient [1:565:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:55.895608Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:633:2213], 
Recipient [1:565:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:55.895636Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:55.895652Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:55.907850Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:565:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.907875Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.907886Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5599:7179] 2025-05-29T15:22:55.911176Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9696:11223], Recipient [1:565:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:55.911256Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:633:2213], Recipient [1:565:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:55.911261Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:55.911272Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:55.923239Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:565:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.923263Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.923276Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5604:7184] 2025-05-29T15:22:55.926341Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9698:11225], Recipient [1:565:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:55.926409Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:633:2213], Recipient [1:565:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:55.926414Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:55.926425Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:55.938172Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:565:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.938198Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.938209Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5609:7189] 2025-05-29T15:22:55.941236Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender 
[1:9700:11227], Recipient [1:565:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:55.941304Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:633:2213], Recipient [1:565:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:55.941309Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:55.941320Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:55.953203Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:565:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.953225Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.953236Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5614:7194] 2025-05-29T15:22:55.956318Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9702:11229], Recipient [1:565:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:55.956374Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:633:2213], Recipient [1:565:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:55.956379Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:55.956392Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:55.968126Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:565:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.968154Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.968173Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5619:7199] 2025-05-29T15:22:55.972861Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9704:11231], Recipient [1:565:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:55.972955Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:633:2213], Recipient [1:565:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:55.972963Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:55.972979Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:55.985839Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:565:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.985868Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:55.985883Z node 1 
:NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5624:7204] 2025-05-29T15:22:55.990608Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9706:11233], Recipient [1:565:2183]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:22:55.990693Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:633:2213], Recipient [1:565:2183]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:22:55.990701Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:22:55.990718Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-05-29T15:22:56.003621Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:565:2183]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:56.003654Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:22:56.003674Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5629:7209]
>> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows
|62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest
|62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest
>> ColumnBuildTest::AlreadyExists
>> ColumnBuildTest::CancelBuild [GOOD]
|62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest
>> ColumnBuildTest::AlreadyExists [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlreadyExists [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:58.131544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:58.131567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:58.131573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:58.131578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:58.131589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:58.131593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:58.131601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:58.131612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:58.131703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:58.131777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:58.141500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:58.141519Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:58.143709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:58.143827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:58.143862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:22:58.145887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:58.146149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:58.146314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:58.146382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:58.147125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:58.147175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:58.147443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:58.147454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:58.147474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:58.147486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:58.147494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:58.147526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:58.149010Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:58.175091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:58.175158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:58.175213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:58.175253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:58.175267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:22:58.175934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:58.175955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:58.175996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:58.176005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:58.176011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:58.176016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:58.176498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:58.176520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:58.176527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:58.177069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:58.177083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:58.177091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:58.177099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:58.177856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:58.178531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:58.178580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:58.178824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2025-05-29T15:22:58.178862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:58.178872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:58.178954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:58.178985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:58.179024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:58.179038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:58.179578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:58.179590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:58.179634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
ull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 2 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2025-05-29T15:22:59.636840Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__create.cpp:23: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 106 DatabaseName: "/MyRoot/ServerLessDB" Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } 2025-05-29T15:22:59.637514Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 AlterMainTable 2025-05-29T15:22:59.637560Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 AlterMainTable TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1147:3016], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:22:59.637571Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.cpp:186: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: AllocateTxId 106 2025-05-29T15:22:59.637608Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 106, at schemeshard: 72075186233409549 2025-05-29T15:22:59.637618Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2623: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 106, txId# 281474976725757 2025-05-29T15:22:59.637627Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2630: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1147:3016], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:22:59.638026Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 AlterMainTable 2025-05-29T15:22:59.638045Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 AlterMainTable TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1147:3016], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:22:59.638102Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:429: AlterMainTablePropose 106 AlterMainTable Transaction { WorkingDir: "/MyRoot/ServerLessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" Columns { Name: "value" Type: "Uint64" DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: true } } Internal: true } TxId: 281474976725757 TabletId: 72075186233409549 FailOnExist: true 2025-05-29T15:22:59.638804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerLessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" Columns { Name: "value" Type: "Uint64" DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: true } } Internal: true } TxId: 281474976725757 TabletId: 72075186233409549 
FailOnExist: true , at schemeshard: 72075186233409549 2025-05-29T15:22:59.638851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:508: TAlterTable Propose, path: /MyRoot/ServerLessDB/Table, pathId: , opId: 281474976725757:0, at schemeshard: 72075186233409549 2025-05-29T15:22:59.638916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976725757:1, propose status:StatusInvalidParameter, reason: Cannot alter type for column 'value', at schemeshard: 72075186233409549 2025-05-29T15:22:59.639418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976725757, response: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2025-05-29T15:22:59.639449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976725757, database: /MyRoot/ServerLessDB, subject: , status: StatusInvalidParameter, reason: Cannot alter type for column 'value', operation: ALTER TABLE, path: /MyRoot/ServerLessDB/Table 2025-05-29T15:22:59.639474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6697: Handle: TEvModifySchemeTransactionResult: txId# 281474976725757, status# StatusInvalidParameter 2025-05-29T15:22:59.639481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6699: Message: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2025-05-29T15:22:59.639492Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2467: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 106, cookie: 106, txId: 281474976725757, status: StatusInvalidParameter 2025-05-29T15:22:59.639507Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2471: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1147:3016], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2025-05-29T15:22:59.639773Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:2437: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuilder::TTxReply: ReplyOnCreation, BuildIndexId: 106, status: BAD_REQUEST, error: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column 'value', replyTo: [1:1147:3016], message: TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got 
unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } }
BUILDCOLUMN RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } }
|62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::CancelBuild [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:57.646694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:57.646723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:57.646729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:57.646761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:57.646773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:57.646778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:57.646788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:57.646802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s,
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:57.646934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:57.647022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:57.661023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:57.661046Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:57.663443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:57.663561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:57.663600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:57.665280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:57.665464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:57.665597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:57.665650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:57.666179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:57.666230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:57.666454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:57.666461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:57.666476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:57.666481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:57.666485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:57.666510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.667781Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:57.687269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:57.687350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.687408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:57.687452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:57.687463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.688266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:57.688295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:57.688353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.688364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:57.688369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:57.688374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:57.688830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.688844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:57.688850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:57.689288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.689300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.689306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:57.689312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:57.690077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:57.690531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:57.690575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:57.690785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:57.690814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:57.690821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:57.690888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:57.690896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:57.690929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:57.690941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:57.691390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:57.691399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:57.691441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
TICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:59.487097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:59.487105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710761:0 HandleReply TEvOperationPlan: step# 5000007 2025-05-29T15:22:59.487112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976710761:0 128 -> 240 2025-05-29T15:22:59.487572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-29T15:22:59.487583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 281474976710761:0 ProgressState 2025-05-29T15:22:59.487596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-29T15:22:59.487600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:22:59.487605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-29T15:22:59.487610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:22:59.487615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: true 2025-05-29T15:22:59.487625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:128:2152] message: TxId: 281474976710761 2025-05-29T15:22:59.487632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:22:59.487638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-29T15:22:59.487642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710761:0 2025-05-29T15:22:59.487654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-29T15:22:59.488144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-29T15:22:59.488163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710761 2025-05-29T15:22:59.488174Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2338: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfoId: 102 2025-05-29T15:22:59.488191Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2341: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1178:3029], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:22:59.488582Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking 2025-05-29T15:22:59.488600Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1178:3029], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:22:59.488611Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:25: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-05-29T15:22:59.488977Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled 2025-05-29T15:22:59.488993Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1178:3029], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, 
ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:22:59.488999Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-05-29T15:22:59.489023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:22:59.489030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1200:3051] TestWaitNotification: OK eventTxId 102 2025-05-29T15:22:59.489418Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-05-29T15:22:59.489511Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:93: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2025-05-29T15:22:59.489764Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:22:59.489819Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 60us result status StatusSuccess 2025-05-29T15:22:59.489977Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "DefaultValue" Type: "Uint64" TypeId: 4 Id: 4 NotNull: false DefaultFromLiteral { type { type_id: 
UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> ColumnBuildTest::BaseCase [GOOD] |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> ColumnBuildTest::ValidDefaultValue [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:57.590033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:57.590057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:57.590061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:57.590065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:57.590073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:57.590075Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:57.590082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:57.590093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:57.590176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:57.590244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:57.602075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:57.602104Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:57.604840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:57.604973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:57.605017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:57.606902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:57.607075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:57.607221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:57.607279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:57.608477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:57.608595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:57.608957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:57.608972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:57.608994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:57.609004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:57.609010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:57.609051Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.610933Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:57.627253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:57.627323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.627382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:57.627417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:57.627425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.628138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:57.628163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:57.628230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.628241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:57.628247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:57.628253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:57.628730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.628745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:57.628759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:57.629205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.629219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-05-29T15:22:57.629225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:57.629233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:57.629730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:57.630174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:57.630226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:57.630445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:57.630471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:57.630479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:57.630546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:57.630553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:57.630588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:57.630599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:57.631088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:57.631098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:57.631143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2025-05-29T15:22:59.990509Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Unlocking 2025-05-29T15:22:59.990529Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1147:3016], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-05-29T15:22:59.990605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2025-05-29T15:22:59.990632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2025-05-29T15:22:59.990664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2025-05-29T15:22:59.990670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2025-05-29T15:22:59.990677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2025-05-29T15:23:00.001487Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [1:1817:3678], Recipient [1:755:2643]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409549 ClientId: [1:1817:3678] ServerId: [1:1821:3682] } 2025-05-29T15:23:00.001509Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:23:00.053938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2025-05-29T15:23:00.053988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 0 RawX2: 0 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2025-05-29T15:23:00.054001Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_drop_lock.cpp:44: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2025-05-29T15:23:00.054009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976725761:0 128 -> 240 2025-05-29T15:23:00.054579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-05-29T15:23:00.054592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-05-29T15:23:00.054606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976725761:0 progress is 1/1 2025-05-29T15:23:00.054611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-05-29T15:23:00.054616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976725761:0 progress is 1/1 2025-05-29T15:23:00.054620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-05-29T15:23:00.054625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-05-29T15:23:00.054639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:564:2501] message: TxId: 281474976725761 2025-05-29T15:23:00.054646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-05-29T15:23:00.054651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976725761:0 2025-05-29T15:23:00.054656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976725761:0 2025-05-29T15:23:00.054671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-05-29T15:23:00.055506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-05-29T15:23:00.055524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976725761 2025-05-29T15:23:00.055540Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2338: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2025-05-29T15:23:00.055568Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2341: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1147:3016], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, 
InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-05-29T15:23:00.055976Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Unlocking 2025-05-29T15:23:00.055989Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1147:3016], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-05-29T15:23:00.055996Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:25: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-05-29T15:23:00.056266Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Done 2025-05-29T15:23:00.056275Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Done TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1147:3016], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-05-29T15:23:00.056278Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2025-05-29T15:23:00.056299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- 
TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-05-29T15:23:00.056303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1167:3036] TestWaitNotification: OK eventTxId 106 2025-05-29T15:23:00.056615Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-05-29T15:23:00.056688Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:93: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [FAIL] Test command err: 2025-05-29T15:22:49.390714Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:701:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:22:49.390857Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:49.390889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:22:49.390928Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:698:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:22:49.390977Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:49.390982Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001f43/r3tmp/tmpwutzH7/pdisk_1.dat 2025-05-29T15:22:49.496687Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:49.574814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:22:49.662514Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:210:2173] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:22:49.663248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:49.663295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:49.664067Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:22:49.664340Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [2:240:2129] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:22:49.664783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:49.664805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:49.665253Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2025-05-29T15:22:49.676597Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:22:49.676778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:49.676896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:49.972654Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:210:2173] Handle TEvProposeTransaction 2025-05-29T15:22:49.972676Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:210:2173] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:22:49.972761Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:210:2173] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1227:2742] 2025-05-29T15:22:49.995279Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:1227:2742] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" 
Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 7 } } } ExecTimeoutPeriod: 18446744073709551615 2025-05-29T15:22:49.995320Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:1227:2742] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:22:49.995606Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:1227:2742] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:22:49.995620Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:1227:2742] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:22:49.995719Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:1227:2742] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:22:49.995756Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:1227:2742] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:22:49.995770Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:1227:2742] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:22:49.995855Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:1227:2742] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:22:49.996241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:49.997204Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:1227:2742] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:22:49.997225Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:1227:2742] txid# 281474976715657 SEND to# [1:1133:2683] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-05-29T15:22:50.036836Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:1307:2802] 2025-05-29T15:22:50.036910Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:22:50.050206Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:1309:2803] 2025-05-29T15:22:50.050263Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:22:50.052519Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:22:50.052587Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:22:50.052763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-05-29T15:22:50.052772Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037889 2025-05-29T15:22:50.052780Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037889 2025-05-29T15:22:50.052836Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 
2025-05-29T15:22:50.052925Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:22:50.052938Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037889 persisting started state actor id [1:1398:2802] in generation 1 2025-05-29T15:22:50.053116Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:1315:2805] 2025-05-29T15:22:50.053152Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:22:50.057866Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:22:50.057935Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:22:50.058096Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-05-29T15:22:50.058106Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037894 2025-05-29T15:22:50.058114Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037894 2025-05-29T15:22:50.058161Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:22:50.058222Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:22:50.058232Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037894 persisting started state actor id [1:1427:2803] in generation 1 2025-05-29T15:22:50.058353Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:22:50.058367Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:22:50.058488Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037892 2025-05-29T15:22:50.058496Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037892 2025-05-29T15:22:50.058502Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037892 2025-05-29T15:22:50.058535Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:22:50.058577Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:22:50.058584Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037892 persisting started state actor id [1:1429:2805] in generation 1 2025-05-29T15:22:50.062561Z node 2 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1411:2399] 2025-05-29T15:22:50.062624Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:22:50.073596Z node 2 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [2:1416:2400] 2025-05-29T15:22:50.073672Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:22:50.075869Z node 2 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037893 actor [2:1419:2401] 2025-05-29T15:22:50.075919Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:22:50.077843Z node 2 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [2:1424:2403] 2025-05-29T15:22:50.077893Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:630: 
TxInitSchema.Execute 2025-05-29T15:22:50.080882Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:22:50.080912Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:22:50.080924Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:22:50.081085Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRec ... 010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:22:56.816000Z node 5 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [5:1404:2839] txid# 281474976715658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:22:56.816006Z node 5 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [5:1404:2839] txid# 281474976715658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:22:56.816532Z node 5 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [5:1404:2839] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:22:56.816548Z node 5 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [5:1404:2839] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:22:56.816642Z node 5 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [5:1404:2839] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:22:56.816669Z node 5 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [5:1404:2839] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:22:56.816681Z node 5 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [5:1404:2839] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:22:56.816751Z node 5 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [5:1404:2839] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:22:56.816989Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:22:56.817859Z node 5 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [5:1404:2839] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-05-29T15:22:56.817875Z node 5 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [5:1404:2839] txid# 281474976715658 SEND to# [5:1403:2838] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-05-29T15:22:56.819575Z node 6 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:22:56.947336Z node 6 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:645: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 0 2025-05-29T15:22:56.947363Z node 5 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:645: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 0 2025-05-29T15:22:57.014194Z node 5 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:479: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-05-29T15:22:57.014288Z node 5 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:501: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-05-29T15:22:57.014345Z node 6 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:526: Get resources info from node: 5 2025-05-29T15:22:57.193380Z node 6 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:22:57.194510Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1403:2838], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:22:57.236135Z node 6 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:479: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-05-29T15:22:57.236215Z node 6 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:501: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-05-29T15:22:57.236245Z node 5 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:526: Get resources info from node: 6 2025-05-29T15:22:57.236290Z node 6 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:526: Get resources info from node: 5 2025-05-29T15:22:57.277360Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [5:210:2173] Handle TEvProposeTransaction 2025-05-29T15:22:57.277398Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [5:210:2173] TxId# 281474976715659 ProcessProposeTransaction 2025-05-29T15:22:57.277429Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [5:210:2173] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [5:1526:2906] 2025-05-29T15:22:57.278409Z node 5 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [5:1526:2906] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:22:57.278441Z node 5 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [5:1526:2906] txid# 281474976715659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:22:57.278446Z node 5 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [5:1526:2906] txid# 281474976715659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:22:57.278835Z node 5 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [5:1526:2906] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:22:57.278858Z node 5 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [5:1526:2906] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:22:57.278959Z node 5 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [5:1526:2906] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:22:57.278992Z node 5 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [5:1526:2906] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 
2025-05-29T15:22:57.279010Z node 5 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [5:1526:2906] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-05-29T15:22:57.279099Z node 5 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [5:1526:2906] txid# 281474976715659 HANDLE EvClientConnected 2025-05-29T15:22:57.280029Z node 5 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [5:1526:2906] txid# 281474976715659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:22:57.280069Z node 5 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [5:1526:2906] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:57.280076Z node 5 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [5:1526:2906] txid# 281474976715659 SEND to# [5:1403:2838] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48} 2025-05-29T15:22:57.346552Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [5:1537:2916], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:57.347756Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=5&id=ZmM1MmU5YmQtMTJkN2E4Y2QtOTlmNzc3MTItMWE5ZjYwYjg=, ActorId: [5:1387:2828], ActorState: ExecuteState, TraceId: 01jwea79xe1b9tj73q1ccvqjdb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x15DE701C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x15F99F49) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x2862A574) NKikimr::NKqp::NTestSuiteKqpScan::TTestCaseScanAfterSplitSlowMetaRead::Execute_(NUnitTest::TTestContext&)+1380 (0x15CCD2E4) NKikimr::NKqp::NTestSuiteKqpScan::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x15CD79E7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x15F9BDFE) NKikimr::NKqp::NTestSuiteKqpScan::TCurrentTest::Execute()+422 (0x15CD7246) NUnitTest::TTestFactory::Execute()+803 (0x15F9C573) NUnitTest::RunMain(int, char**)+3021 (0x15FAE11D) ??+0 (0x7F385E128D90) __libc_start_main+128 (0x7F385E128E40) _start+41 (0x14D2B029) |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::ValidDefaultValue [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:57.857121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:57.857145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:57.857149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:57.857153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:57.857159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:22:57.857162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:57.857168Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:57.857178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:57.857250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:57.857304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:57.866967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:57.866985Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:57.868822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:57.868899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:57.868926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:57.870320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:57.870447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:57.870559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:57.870603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:57.871108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:57.871149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:57.871361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:57.871368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:57.871383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:57.871388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:57.871392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:22:57.871414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.872283Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:57.885191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:57.885245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.885287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:57.885316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:57.885323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.885838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:57.885856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:57.885891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.885896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:57.885900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:57.885903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:57.886192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.886199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:57.886202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:57.886846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.886858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:57.886862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:57.886866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:57.887318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:57.887698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:57.887728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:57.887852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:57.887869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:57.887874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:57.887920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:57.887925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:57.887949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:57.887956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:57.888275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:57.888281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:57.888314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
ffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2025-05-29T15:23:00.234093Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Unlocking 2025-05-29T15:23:00.234114Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1147:3016], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-05-29T15:23:00.234176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2025-05-29T15:23:00.234203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2025-05-29T15:23:00.234232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2025-05-29T15:23:00.234238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2025-05-29T15:23:00.234244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2025-05-29T15:23:00.245172Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [1:1817:3678], Recipient [1:755:2643]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409549 ClientId: [1:1817:3678] ServerId: [1:1821:3682] } 2025-05-29T15:23:00.245199Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:23:00.297813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2025-05-29T15:23:00.297868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 0 RawX2: 0 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2025-05-29T15:23:00.297880Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_drop_lock.cpp:44: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2025-05-29T15:23:00.297889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976725761:0 128 -> 240 2025-05-29T15:23:00.298546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-05-29T15:23:00.298566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-05-29T15:23:00.298583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976725761:0 progress is 1/1 2025-05-29T15:23:00.298589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-05-29T15:23:00.298596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976725761:0 progress is 1/1 2025-05-29T15:23:00.298600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-05-29T15:23:00.298605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-05-29T15:23:00.298622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:564:2501] message: TxId: 281474976725761 2025-05-29T15:23:00.298629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-05-29T15:23:00.298634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976725761:0 2025-05-29T15:23:00.298639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976725761:0 2025-05-29T15:23:00.298656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-05-29T15:23:00.299694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-05-29T15:23:00.299718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976725761 2025-05-29T15:23:00.299736Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2338: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2025-05-29T15:23:00.299763Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2341: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1147:3016], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, 
InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-05-29T15:23:00.300446Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Unlocking 2025-05-29T15:23:00.300471Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1147:3016], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-05-29T15:23:00.300479Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:25: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-05-29T15:23:00.300844Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Done 2025-05-29T15:23:00.300859Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Done TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1147:3016], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-05-29T15:23:00.300864Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2025-05-29T15:23:00.300888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- 
TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-05-29T15:23:00.300894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1167:3036] TestWaitNotification: OK eventTxId 106 2025-05-29T15:23:00.301197Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-05-29T15:23:00.301270Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:93: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } >> TPQTest::TestPQReadAhead [GOOD] >> TPQTest::TestOwnership >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanPg [FAIL] Test command err: 2025-05-29T15:22:49.678312Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:701:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:22:49.678409Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:49.678435Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:22:49.678467Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:698:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:22:49.678515Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:49.678520Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001f36/r3tmp/tmp7qVuU9/pdisk_1.dat 2025-05-29T15:22:49.834989Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:49.926511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:22:50.026239Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:210:2173] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:22:50.026849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:50.026886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:50.027545Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:22:50.027735Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [2:240:2129] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:22:50.028167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:50.028190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:50.028581Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2025-05-29T15:22:50.039814Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:22:50.040080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:50.040202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:50.317254Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:210:2173] Handle TEvProposeTransaction 2025-05-29T15:22:50.317278Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:210:2173] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:22:50.317313Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:210:2173] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1226:2741] 2025-05-29T15:22:50.333359Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:1226:2741] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" 
Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-05-29T15:22:50.333402Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:1226:2741] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:22:50.333671Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:1226:2741] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:22:50.333688Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:1226:2741] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:22:50.333779Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:1226:2741] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:22:50.333836Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:1226:2741] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:22:50.333859Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:1226:2741] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:22:50.333960Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:1226:2741] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:22:50.334334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:50.335525Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:1226:2741] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:22:50.335559Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:1226:2741] txid# 281474976715657 SEND to# [1:1133:2683] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-05-29T15:22:50.370560Z node 2 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1277:2390] 2025-05-29T15:22:50.370648Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:22:50.383596Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:22:50.383646Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:22:50.383865Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:22:50.383877Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:22:50.383886Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:22:50.383957Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:22:50.383980Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:22:50.383998Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [2:1297:2390] in 
generation 1 2025-05-29T15:22:50.396526Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:22:50.402615Z node 2 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:22:50.402731Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:22:50.402792Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [2:1302:2406] 2025-05-29T15:22:50.402800Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:22:50.402805Z node 2 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:22:50.402813Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:22:50.403048Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:22:50.403081Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:22:50.403102Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:22:50.403112Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:22:50.403125Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:22:50.403132Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:22:50.467096Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:1261:2771], serverId# [2:1306:2407], sessionId# [0:0:0] 2025-05-29T15:22:50.467251Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:22:50.467324Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:22:50.467365Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:22:50.467940Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:22:50.479585Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:22:50.479677Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:22:50.799667Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:1330:2788], serverId# [2:1332:2414], sessionId# [0:0:0] 2025-05-29T15:22:50.807885Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 
281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:22:50.807928Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:22:50.808069Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 7207518622 ... : Actor# [3:1528:2907] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:22:54.369366Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [3:1528:2907] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:22:54.369406Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [3:1528:2907] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:22:54.369425Z node 3 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [3:1528:2907] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-05-29T15:22:54.369828Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [3:1528:2907] txid# 281474976715659 HANDLE EvClientConnected 2025-05-29T15:22:54.370652Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [3:1528:2907] txid# 281474976715659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:22:54.370695Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:1528:2907] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:54.370708Z node 3 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [3:1528:2907] txid# 281474976715659 SEND to# [3:1403:2837] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48} 2025-05-29T15:22:54.450102Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:1540:2918], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:54.452450Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=ODZiYTNhZGUtOTNkYmQxNzctYWNhYmE5ZDAtNGQ0ODZiMjg=, ActorId: [3:1387:2827], ActorState: ExecuteState, TraceId: 01jwea772k8b1661wx9esbp6dd, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x15DE701C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x15F99F49) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x2862A574) NKikimr::NKqp::NTestSuiteKqpScan::TTestCaseScanDuringSplitThenMerge::Execute_(NUnitTest::TTestContext&)+1389 (0x15CC91BD) NKikimr::NKqp::NTestSuiteKqpScan::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x15CD79E7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x15F9BDFE) NKikimr::NKqp::NTestSuiteKqpScan::TCurrentTest::Execute()+422 (0x15CD7246) NUnitTest::TTestFactory::Execute()+803 (0x15F9C573) NUnitTest::RunMain(int, char**)+3021 (0x15FAE11D) ??+0 (0x7F1F06331D90) __libc_start_main+128 (0x7F1F06331E40) _start+41 (0x14D2B029) 2025-05-29T15:22:55.798416Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:22:55.798503Z node 5 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:55.798535Z node 5 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:22:55.798559Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:22:55.798575Z node 6 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:22:55.798605Z node 6 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001f36/r3tmp/tmpVjQhyp/pdisk_1.dat 2025-05-29T15:22:55.883975Z node 5 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:55.989886Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:22:56.076385Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:56.076430Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:56.078125Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:56.078153Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:56.089397Z node 5 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-05-29T15:22:56.089532Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:56.089694Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:22:56.350712Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:56.700896Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1390:2830], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:56.700929Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1401:2835], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:56.701011Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:56.702073Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:22:57.084079Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1404:2838], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:22:57.168786Z node 5 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [5:1529:2908] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:57.229697Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [5:1541:2919], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:57.231371Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=5&id=MzljMzM0ODEtNGVhZmQ1MGYtZDE2MWFhNWMtYmI4OTdmZjk=, ActorId: [5:1388:2828], ActorState: ExecuteState, TraceId: 01jwea79swa7eqwztche26vnak, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x15DE701C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x15F99F49) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x2862A574) NKikimr::NKqp::NTestSuiteKqpScan::TTestCaseScanPg::Execute_(NUnitTest::TTestContext&)+3018 (0x15CD31DA) NKikimr::NKqp::NTestSuiteKqpScan::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x15CD79E7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x15F9BDFE) NKikimr::NKqp::NTestSuiteKqpScan::TCurrentTest::Execute()+422 (0x15CD7246) NUnitTest::TTestFactory::Execute()+803 (0x15F9C573) NUnitTest::RunMain(int, char**)+3021 (0x15FAE11D) ??+0 (0x7F1F06331D90) __libc_start_main+128 (0x7F1F06331E40) _start+41 (0x14D2B029) |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:22:58.080443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:22:58.080463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:58.080467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:22:58.080471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:22:58.080479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-05-29T15:22:58.080481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:22:58.080487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:22:58.080496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:22:58.080572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:22:58.080626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:22:58.089571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:58.089589Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:58.091685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:22:58.091773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:22:58.091801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:22:58.093170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:22:58.093310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:22:58.093394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:58.093428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:22:58.093812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:58.093846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:22:58.094017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:58.094023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:22:58.094035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:22:58.094043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:58.094047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 
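For readers following the schemeshard trace that continues below: each suboperation part advances through numeric internal states (this dump, like the ValidDefaultValue one above, logs "Change state for txid 1:0 2 -> 3", then "3 -> 128", then "128 -> 240" once the plan step arrives), and the operation finishes when the "ready parts" counter reaches N/N. A minimal sketch of that bookkeeping, using assumed names rather than the real schemeshard types:

#include <vector>

constexpr int StateDone = 240; // final state seen in the log ("128 -> 240")

struct TOpPart {
    int State = 2; // freshly ignited part, per "Change state for txid 1:0 2 -> 3"
};

// Mirrors the "TOperation IsReadyToDone ... ready parts: 1/1" accounting:
// the operation completes only when every part has reached the final state,
// at which point DoNotify sends TEvNotifyTxCompletionResult to subscribers.
bool IsReadyToNotify(const std::vector<TOpPart>& parts) {
    for (const auto& part : parts) {
        if (part.State != StateDone) {
            return false;
        }
    }
    return true;
}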
2025-05-29T15:22:58.094069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:22:58.095139Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:22:58.108305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:22:58.108365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:58.108407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:22:58.108435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:22:58.108442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:58.108964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:58.108983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:22:58.109020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:58.109026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:22:58.109030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:22:58.109034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:22:58.109355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:58.109363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:22:58.109366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:22:58.109653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:58.109661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:22:58.109666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:58.109671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:22:58.110096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:22:58.110381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:22:58.110407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:22:58.110526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:22:58.110543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:22:58.110548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:58.110591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:22:58.110596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:22:58.110619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:22:58.110627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:22:58.110975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:22:58.110981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:22:58.111013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... lMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'28))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.923562Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2056:3917], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'29))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.924016Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2057:3918], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'30))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.924462Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2058:3919], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'31))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.924885Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2059:3920], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'32))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.925330Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2060:3921], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'33))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.925784Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2061:3922], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'34))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.926233Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2062:3923], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'35))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.926703Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2063:3924], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'36))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.927492Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2064:3925], Recipient [1:755:2643]: 
NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'37))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.928272Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2065:3926], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'38))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.928797Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2066:3927], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'39))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.929219Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2067:3928], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'40))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.929905Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2068:3929], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'41))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.930561Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2069:3930], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'42))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.931113Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2070:3931], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'43))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.931630Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2071:3932], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'44))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.932119Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2072:3933], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'45))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.932572Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2073:3934], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { 
Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'46))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.933132Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2074:3935], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'47))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.933630Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2075:3936], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'48))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.934164Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2076:3937], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'49))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-05-29T15:23:00.934642Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830210, Sender [1:2077:3938], Recipient [1:755:2643]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'50))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions >> TGroupMapperTest::NonUniformCluster [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster [GOOD] |62.6%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTopicSplitMergeTest::Boot >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary |62.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |62.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |62.6%| [TA] {RESULT} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTopicSplitMergeTest::MargePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] |62.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |62.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] >> TLocksTest::CK_Range_BrokenLock >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] >> TObjectStorageListingTest::MaxKeysAndSharding >> TLocksTest::GoodDupLock >> TLocksFatTest::PointSetBreak >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] >> TFlatTest::CopyCopiedTableAndRead >> TObjectStorageListingTest::TestFilter >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] >> TFlatTest::SelectRangeForbidNullArgs2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:01.723496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:01.723522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:01.723526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:01.723530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:01.723541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:01.723543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, 
limit 10000 2025-05-29T15:23:01.723550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:01.723561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:01.723672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:01.723741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:01.732773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:01.732793Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:01.734891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:01.734984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:01.735009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:01.736453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:01.736673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:01.736753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:01.736790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:01.737198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:01.737230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:01.737420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:01.737426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:01.737442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:01.737449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:01.737454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:01.737479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.738492Z 
node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:01.752994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:01.753070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.753132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:01.753183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:01.753194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.753933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:01.753954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:01.754010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.754017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:01.754021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:01.754025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:01.754356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.754363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:01.754367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:01.754765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.754775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.754781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:01.754788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:01.755326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:01.755674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:01.755705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:01.755846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:01.755864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:01.755869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:01.755932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:01.755938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:01.755960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:01.755968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:01.756313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:01.756320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:01.756361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
AT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:648: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-05-29T15:23:02.250430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:754: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-05-29T15:23:02.250689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-05-29T15:23:02.250730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-05-29T15:23:02.250734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-05-29T15:23:02.250805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-05-29T15:23:02.250809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-05-29T15:23:02.250811Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-05-29T15:23:02.292149Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:02.292187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:02.292199Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:662: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2025-05-29T15:23:02.292208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:754: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-05-29T15:23:02.298002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-05-29T15:23:02.298044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-05-29T15:23:02.298052Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:624: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-05-29T15:23:02.298060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:265: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.298063Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-05-29T15:23:02.298100Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 105:0 128 -> 240 2025-05-29T15:23:02.298126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:23:02.298618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.298670Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:02.298675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:23:02.298721Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:02.298725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:207:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-05-29T15:23:02.298821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.298832Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 105:0 ProgressState 2025-05-29T15:23:02.298844Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#105:0 progress is 1/1 2025-05-29T15:23:02.298849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-05-29T15:23:02.298855Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#105:0 progress is 1/1 2025-05-29T15:23:02.298859Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-05-29T15:23:02.298864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-05-29T15:23:02.298870Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-05-29T15:23:02.298876Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 105:0 2025-05-29T15:23:02.298881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 105:0 2025-05-29T15:23:02.298917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:23:02.298922Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-05-29T15:23:02.298927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 105, [OwnerId: 
72057594046678944, LocalPathId: 3], 3 2025-05-29T15:23:02.299082Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:23:02.299097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:23:02.299102Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:23:02.299109Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-05-29T15:23:02.299114Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:23:02.299128Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-05-29T15:23:02.299134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:410:2375] 2025-05-29T15:23:02.300199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:23:02.300224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-29T15:23:02.300229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:674:2596] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2025-05-29T15:23:02.302808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:02.302843Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:509: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.302871Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:02.303230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:02.303253Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-05-29T15:23:02.303295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-05-29T15:23:02.303300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-05-29T15:23:02.303395Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-05-29T15:23:02.303408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-05-29T15:23:02.303411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:768:2677] TestWaitNotification: OK eventTxId 106 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:02.008227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:02.008245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:02.008248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:02.008251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:02.008261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:02.008264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:02.008271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:02.008280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:02.008350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 
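The rejected txId 106 above shows the wire shape of a topic split: an ESchemeOpAlterPersQueueGroup transaction whose Split block names the source partition and a boundary key. Schemeshard refuses it because the target partition was already split by txId 105 and is evidently no longer Active ("Invalid partition status: 2"). The sketch below round-trips the transaction text exactly as dumped in the log; the header path for NKikimrSchemeOp is an assumption about its location in the tree.

```cpp
// Parses the AlterPersQueueGroup transaction verbatim from the log dump.
// Header path assumed; field names are taken from the transaction above.
#include <google/protobuf/text_format.h>
#include <ydb/core/protos/flat_scheme_op.pb.h>

int main() {
    const char* text = R"pb(
        WorkingDir: "/MyRoot/USER_1"
        OperationType: ESchemeOpAlterPersQueueGroup
        AlterPersQueueGroup {
            Name: "Topic1"
            PQTabletConfig { PartitionConfig { } }
            Split { Partition: 1 SplitBoundary: "W" }
        }
    )pb";
    NKikimrSchemeOp::TModifyScheme tx;
    // ParseFromString returns false if any field name is wrong, which makes
    // this a cheap schema check for hand-written requests.
    return google::protobuf::TextFormat::ParseFromString(text, &tx) ? 0 : 1;
}
```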
2025-05-29T15:23:02.008428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:02.017718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:02.017732Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:02.019416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:02.019494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:02.019521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:02.020520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:02.020654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:02.020738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:02.020779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:02.021218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:02.021245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:02.021407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:02.021413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:02.021426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:02.021432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:02.021436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:02.021457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.022308Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:02.035038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:02.035090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.035129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:02.035160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:02.035167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.035633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:02.035653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:02.035688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.035694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:02.035698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:02.035702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:02.035973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.035983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:02.035989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:02.036342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.036355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.036361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:02.036381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:02.036810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:02.037132Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:02.037154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:02.037273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:02.037290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:02.037295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:02.037342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:02.037347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:02.037368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:02.037379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:02.037721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:02.037729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:02.037765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
WakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:02.173991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:02.174027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:02.174724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:02.175007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:02.175031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:02.175048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:02.175051Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:02.175070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:02.175123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:02.175137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:23:02.175143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1457: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1483: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:02.175186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1785: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_data_erasure_manager.cpp:452: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-05-29T15:23:02.175209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2033: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2151: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 
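The restart sequence here (TTxInit re-reading Paths, Shards, and ChannelsBinding) is how the test proves the split survived a schemeshard reboot: the describe result further below shows the parent partition Inactive and two Active children whose KeyRange meets at "\177". That boundary is plausibly just the midpoint of the one-byte key space; the toy check below covers only the arithmetic and the octal escape, under that assumption.

```cpp
// Toy arithmetic only: assumes the default split boundary is the midpoint
// of the parent's key range, which matches the "\177" in the describe output.
#include <cstdio>

int main() {
    const unsigned lo = 0x00, hi = 0xFF;  // full one-byte key space
    const unsigned mid = (lo + hi) / 2;   // 127 == 0x7F
    // protobuf text format renders byte 0x7F with an octal escape: \177
    std::printf("mid = %u = 0x%X = \\%o\n", mid, mid, mid);
    return 0;
}
```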
2025-05-29T15:23:02.175232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:02.175234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-05-29T15:23:02.175236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:23:02.175244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2237: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2303: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2453: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:23:02.175317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2832: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2911: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3409: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3445: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3659: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3804: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3821: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3981: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3997: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4282: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4587: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4645: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4740: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.175461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4767: SnapshotSteps: snapshots: 0, at 
schemeshard: 72057594046678944 2025-05-29T15:23:02.175465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4794: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.176404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:02.176646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:02.176654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:02.176663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:02.176668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:02.176671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:02.177016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:748:2659] sender: [1:807:2058] recipient: [1:15:2062] 2025-05-29T15:23:02.238153Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:23:02.238207Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 65us result status StatusSuccess 2025-05-29T15:23:02.238309Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { 
LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:02.030708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:02.030731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:02.030754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:02.030758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:02.030772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:02.030775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:02.030783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:02.030795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s,
IsManualStartup# false 2025-05-29T15:23:02.030883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:02.030950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:02.041041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:02.041062Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:02.043057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:02.043152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:02.043191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:02.044692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:02.044849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:02.044921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:02.044959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:02.045412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:02.045454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:02.045688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:02.045696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:02.045715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:02.045720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:02.045724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:02.045750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.047059Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:02.061485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:02.061570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.061627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:02.061667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:02.061675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.062332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:02.062350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:02.062393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.062401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:02.062405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:02.062410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:02.062696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.062702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:02.062706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:02.063073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.063087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.063094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:02.063103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:02.063784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 
0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:02.064231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:02.064267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:02.064407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:02.064426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:02.064431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:02.064485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:02.064491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:02.064518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:02.064527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:02.064836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:02.064842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:02.064881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
pp:14: TxInitSchema.Execute 2025-05-29T15:23:02.377661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:02.378043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:02.378083Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:02.378119Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:02.378125Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:02.378159Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:02.378255Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:02.378280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:23:02.378289Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1457: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378298Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1483: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:02.378368Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1785: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378378Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_data_erasure_manager.cpp:452: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-05-29T15:23:02.378409Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2033: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378419Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378433Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2151: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:02.378443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:02.378447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 
72057594046678944, LocalPathId: 3] was 0 2025-05-29T15:23:02.378451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:23:02.378464Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2237: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378476Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2303: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378499Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2453: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378535Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:23:02.378577Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2832: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378594Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2911: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378653Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3409: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378661Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3445: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378688Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3659: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378698Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3804: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378708Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3821: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378730Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3981: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378756Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3997: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378789Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4282: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378823Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4587: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378832Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4645: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378849Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4740: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378855Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4767: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.378862Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4794: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.380120Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 
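The TTxInit sequence just completed (repeated here for node 2 after the tablet restart) rebuilds schemeshard's in-memory state by reading each local-DB table in a fixed order, Paths through LongLocks, logging the row count per table. A rough sketch of that staged-replay pattern, assuming hypothetical loader callbacks in place of the real per-table readers:

    #include <cstddef>
    #include <cstdio>
    #include <functional>
    #include <string>
    #include <vector>

    // One TTxInit stage: a named local-DB table plus a loader that reads
    // all of its rows into memory and returns how many it saw.
    struct TInitStage {
        std::string Name;
        std::function<size_t()> Load;
    };

    // Replays the stages in declaration order. Order matters: Paths must be
    // loaded before Shards so shard rows can attach to already-known paths.
    void RunTTxInit(const std::vector<TInitStage>& stages) {
        for (const auto& stage : stages) {
            const size_t records = stage.Load();
            std::printf("TTxInit for %s, read records: %zu\n",
                        stage.Name.c_str(), records);
        }
    }

    int main() {
        RunTTxInit({
            {"Paths",  [] { return size_t(3); }},  // stand-in loaders
            {"Shards", [] { return size_t(4); }},
        });
    }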
2025-05-29T15:23:02.380563Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:02.380576Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:02.380594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:02.380602Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:02.380607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:02.380662Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:650:2568] sender: [2:709:2058] recipient: [2:15:2062] 2025-05-29T15:23:02.442209Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:23:02.442285Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 95us result status StatusSuccess 2025-05-29T15:23:02.442413Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 1 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 
72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TFlatTest::SelectRangeBytesLimit
>> TLocksTest::Range_IncorrectDot1
>> TLocksTest::Range_Pinhole
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:02.369386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:02.369410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:02.369415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:02.369420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:02.369433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:02.369437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:02.369445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:02.369459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s,
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:02.369572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:02.369650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:02.378187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:02.378204Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:02.380448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:02.380548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:02.380583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:02.382211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:02.382351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:02.382425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:02.382456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:02.382917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:02.382946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:02.383119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:02.383124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:02.383138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:02.383143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:02.383147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:02.383168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.384070Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:02.396127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { 
Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:02.396199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.396254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:02.396291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:02.396299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.396949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:02.396969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:02.397011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.397018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:02.397023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:02.397026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:02.397302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.397309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:02.397312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:02.397590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.397597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.397601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:02.397605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:02.398041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:02.398327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:02.398354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:02.398478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:02.398495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:02.398500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:02.398546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:02.398551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:02.398574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:02.398582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:02.398909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:02.398914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:02.398943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
satisfy waiter [1:640:2563] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } TestModificationResults wait txId: 106 2025-05-29T15:23:02.477042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:02.477063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:509: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.477103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2025-05-29T15:23:02.477443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'01\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:02.477459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-05-29T15:23:02.477492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-05-29T15:23:02.477496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-05-29T15:23:02.477554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-05-29T15:23:02.477569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-05-29T15:23:02.477573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:647:2570] TestWaitNotification: OK eventTxId 106 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } TestModificationResults wait txId: 107 2025-05-29T15:23:02.477984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } } } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:02.477999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:509: TAlterPQ Propose, path: 
/MyRoot/USER_1/Topic1, pathId: , opId: 107:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.478022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 107:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2025-05-29T15:23:02.478330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 107, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:02.478346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 107, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-05-29T15:23:02.478378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-05-29T15:23:02.478381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-05-29T15:23:02.478419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-05-29T15:23:02.478430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-05-29T15:23:02.478432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:654:2577] TestWaitNotification: OK eventTxId 107 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } TestModificationResults wait txId: 108 2025-05-29T15:23:02.478859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:02.478879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:509: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 108:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.478902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 108:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2025-05-29T15:23:02.479212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: 
"Split boundary greate or equals ToBound of partition: \'AD\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:02.479227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-05-29T15:23:02.479262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-05-29T15:23:02.479266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-05-29T15:23:02.479309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-05-29T15:23:02.479319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-05-29T15:23:02.479322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:661:2584] TestWaitNotification: OK eventTxId 108 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } TestModificationResults wait txId: 109 2025-05-29T15:23:02.479698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:02.479713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:509: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 109:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.479735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2025-05-29T15:23:02.480038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:02.480052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, 
database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-05-29T15:23:02.480086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-05-29T15:23:02.480089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-05-29T15:23:02.480127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-05-29T15:23:02.480137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-05-29T15:23:02.480140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:668:2591] TestWaitNotification: OK eventTxId 109
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:02.299997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:02.300022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:02.300027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:02.300032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:02.300048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:02.300052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:02.300062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:02.300076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:02.300183Z node 1 :FLAT_TX_SCHEMESHARD
NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:02.300240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:02.310279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:02.310298Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:02.312633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:02.312735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:02.312769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:02.314086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:02.314231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:02.314306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:02.314346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:02.314734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:02.314781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:02.314989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:02.314995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:02.315011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:02.315018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:02.315022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:02.315049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.316079Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:02.329626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-05-29T15:23:02.329694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.329750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:02.329789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:02.329797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.330431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:02.330452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:02.330499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.330508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:02.330512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:02.330516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:02.330938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.330955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:02.330959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:02.331312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.331321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.331328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:02.331333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:02.331837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: 
true } CoordinatorID: 72057594046316545 2025-05-29T15:23:02.332123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:02.332159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:02.332331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:02.332353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:02.332359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:02.332414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:02.332419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:02.332445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:02.332453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:02.332790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:02.332796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:02.332834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
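The StatusInvalidParameter rejections in this log (txId 108 and 109 above, and txId 105 later in this test, which targets a partition that does not exist) all exercise the schemeshard's split validation: the target partition must exist, and the split boundary must lie strictly inside that partition's key range. A minimal illustrative sketch of that invariant follows; TKeyRange and ValidateSplit are hypothetical names, not the real YDB types, and the actual check lives in schemeshard__operation_alter_pq.cpp per the NOTICE lines in this log.

#include <map>
#include <optional>
#include <string>

// Illustrative types only; not the schemeshard implementation.
struct TKeyRange {
    std::optional<std::string> FromBound; // unset: range starts at -inf
    std::optional<std::string> ToBound;   // unset: range ends at +inf
};

// Returns an error reason, or nullopt if the split is acceptable.
std::optional<std::string> ValidateSplit(
        const std::map<unsigned, TKeyRange>& partitions,
        unsigned partitionId, const std::string& boundary) {
    auto it = partitions.find(partitionId);
    if (it == partitions.end()) {
        return "Splitting partition does not exist";      // cf. txId 105
    }
    const TKeyRange& range = it->second;
    if (range.FromBound && boundary <= *range.FromBound) {
        return "Split boundary <= FromBound of partition";
    }
    if (range.ToBound && boundary >= *range.ToBound) {
        return "Split boundary >= ToBound of partition";  // cf. txId 108/109
    }
    return std::nullopt; // boundary lies strictly inside the key range
}

Note how txId 109 above fails the second range check: its SplitBoundary (AA..A9) is byte-for-byte equal to the partition's ToBound, and equality is rejected just like an out-of-range value.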
46678944 2025-05-29T15:23:02.404124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:754: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-05-29T15:23:02.409286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-05-29T15:23:02.409334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-05-29T15:23:02.409344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:624: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-05-29T15:23:02.409352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:265: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.409356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-05-29T15:23:02.409391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 104:0 128 -> 240 2025-05-29T15:23:02.409416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:23:02.409423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:23:02.409904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.409981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:02.409987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:02.410017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:23:02.410044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:02.410047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-05-29T15:23:02.410051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-05-29T15:23:02.410115Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.410121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 104:0 ProgressState 2025-05-29T15:23:02.410130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1 2025-05-29T15:23:02.410133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:23:02.410137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1 2025-05-29T15:23:02.410139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:23:02.410142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-05-29T15:23:02.410146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:23:02.410150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:0 2025-05-29T15:23:02.410153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:0 2025-05-29T15:23:02.410170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:23:02.410174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2025-05-29T15:23:02.410177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-05-29T15:23:02.410179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:23:02.410305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:23:02.410315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:23:02.410318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-05-29T15:23:02.410321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-05-29T15:23:02.410324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:02.410394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:23:02.410400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:23:02.410405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-05-29T15:23:02.410407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:23:02.410410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:23:02.410415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-05-29T15:23:02.410418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:410:2375] 2025-05-29T15:23:02.411504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-05-29T15:23:02.411992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-05-29T15:23:02.412060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-29T15:23:02.412069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:544:2478] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } TestModificationResults wait txId: 105 2025-05-29T15:23:02.414143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:02.414181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:509: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.414216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 7, at schemeshard: 72057594046678944 2025-05-29T15:23:02.414655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 7" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 
72057594046678944 2025-05-29T15:23:02.414680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 7, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-05-29T15:23:02.414729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-05-29T15:23:02.414735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-05-29T15:23:02.414818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-05-29T15:23:02.414834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-29T15:23:02.414839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:640:2563] TestWaitNotification: OK eventTxId 105 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:01.939764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:01.939789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:01.939794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:01.939798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:01.939810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:01.939814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:01.939822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:01.939832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:01.939927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources 
configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:01.939992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:01.952102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:01.952125Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:01.954582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:01.954689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:01.954727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:01.956503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:01.956668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:01.956775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:01.956825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:01.957373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:01.957409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:01.957664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:01.957677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:01.957695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:01.957705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:01.957710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:01.957743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.959070Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:01.977699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:01.977772Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.977831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:01.977872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:01.977881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.978667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:01.978693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:01.978763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.978773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:01.978778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:01.978783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:01.979228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.979241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:01.979245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:01.979601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.979612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.979620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:01.979626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:01.980173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-05-29T15:23:01.980548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:01.980583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:01.980740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:01.980762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:01.980768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:01.980839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:01.980846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:01.980872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:01.980883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:01.981320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:01.981329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:01.981364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
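The merge rejection shown later in this test (txId 105, Merge { Partition: 0 AdjacentPartition: 2 }, reason "You cannot merge non-contiguous partitions") reduces to an adjacency precondition. A hypothetical sketch follows, under the assumption that adjacency means one key range ends exactly where the other begins; the names are illustrative and not the YDB API.

#include <map>
#include <optional>
#include <string>

// Illustrative types only; not the schemeshard implementation.
struct TKeyRange {
    std::optional<std::string> FromBound;
    std::optional<std::string> ToBound;
};

// Hypothetical adjacency check: two partitions can be merged only when
// the left range's ToBound equals the right range's FromBound.
std::optional<std::string> ValidateMerge(
        const std::map<unsigned, TKeyRange>& partitions,
        unsigned left, unsigned right) {
    auto l = partitions.find(left);
    auto r = partitions.find(right);
    if (l == partitions.end() || r == partitions.end()) {
        return "Merging partition does not exist";
    }
    if (!l->second.ToBound || !r->second.FromBound ||
        *l->second.ToBound != *r->second.FromBound) {
        return "You cannot merge non-contiguous partitions"; // cf. txId 105
    }
    return std::nullopt;
}

In the test below, partitions 0 and 2 do not share a bound, so the check fails before any transaction state is created, which is why the schemeshard answers the propose immediately with StatusInvalidParameter.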
-29T15:23:02.481907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:754: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-05-29T15:23:02.489370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-05-29T15:23:02.489428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-05-29T15:23:02.489439Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:624: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-05-29T15:23:02.489451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:265: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.489455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-05-29T15:23:02.489502Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 104:0 128 -> 240 2025-05-29T15:23:02.489553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:23:02.489566Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:23:02.490272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.490353Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:02.490363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:02.490404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:23:02.490438Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:02.490443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:207:2208], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-05-29T15:23:02.490448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:207:2208], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-05-29T15:23:02.490563Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.490574Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 104:0 ProgressState 2025-05-29T15:23:02.490588Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1 2025-05-29T15:23:02.490592Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:23:02.490598Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1 2025-05-29T15:23:02.490605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:23:02.490610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-05-29T15:23:02.490616Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:23:02.490621Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:0 2025-05-29T15:23:02.490626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:0 2025-05-29T15:23:02.490656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:23:02.490662Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2025-05-29T15:23:02.490666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-05-29T15:23:02.490669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:23:02.490923Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:23:02.490941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:23:02.490946Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-05-29T15:23:02.490951Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-05-29T15:23:02.490955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:02.491285Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:23:02.491302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:23:02.491307Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-05-29T15:23:02.491311Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:23:02.491315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:23:02.491329Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-05-29T15:23:02.491336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:410:2375] 2025-05-29T15:23:02.492207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-05-29T15:23:02.492485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-05-29T15:23:02.492503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-29T15:23:02.492509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:547:2481] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } TestModificationResults wait txId: 105 2025-05-29T15:23:02.494534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:02.494571Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:509: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-05-29T15:23:02.494603Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, at schemeshard: 72057594046678944 2025-05-29T15:23:02.495182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "You cannot merge non-contiguous partitions" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 
72057594046678944 2025-05-29T15:23:02.495222Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-05-29T15:23:02.495281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-05-29T15:23:02.495288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-05-29T15:23:02.495358Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-05-29T15:23:02.495376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-29T15:23:02.495381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:645:2568] TestWaitNotification: OK eventTxId 105 >> TLocksTest::SetLockFail >> TLocksTest::Range_BrokenLock0 >> TFlatTest::LargeProxyReply >> TLocksTest::NoLocksSet >> TFlatTest::CopyTableAndRead >> TFlatTest::SelectRangeBytesLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs1 >> TFlatTest::CopyCopiedTableAndRead [GOOD] >> TFlatTest::CopyTableAndAddFollowers >> TObjectStorageListingTest::TestFilter [GOOD] >> TObjectStorageListingTest::TestSkipShards >> TFlatTest::SelectRangeForbidNullArgs2 [GOOD] >> TFlatTest::SelectRangeForbidNullArgs3 >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] >> TLocksTest::UpdateLockedKey >> TFlatTest::CopyTableAndRead [GOOD] >> TFlatTest::CopyTableAndDropOriginal >> TLocksTest::SetLockFail [GOOD] >> TLocksTest::SetEraseSet >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] >> TObjectStorageListingTest::TestSkipShards [GOOD] >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] >> TFlatTest::CopyTableAndAddFollowers [GOOD] >> TFlatTest::CopyCopiedTableAndDropFirstCopy ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:01.248122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:01.248151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:01.248155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:01.248159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default 
configuration 2025-05-29T15:23:01.248173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:01.248176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:01.248184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:01.248195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:01.248281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:01.248347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:01.260141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:01.260165Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:01.262455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:01.262595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:01.262639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:01.264527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:01.264684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:01.264770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:01.264814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:01.265293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:01.265328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:01.265577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:01.265585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:01.265605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:01.265610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-05-29T15:23:01.265614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:01.265643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.266723Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:01.282923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:01.283034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.283118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:01.283179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:01.283191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.284219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:01.284251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:01.284326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.284338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:01.284345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:01.284351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:01.284877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.284891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:01.284897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:01.285273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 
1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.285284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:01.285292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:01.285300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:01.286041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:01.286493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:01.286540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:01.286765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:01.286797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:01.286805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:01.286889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:01.286897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:01.286936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:01.286951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:01.287414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:01.287424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-05-29T15:23:01.287478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... ation_side_effects.cpp:982: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-05-29T15:23:02.438485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-05-29T15:23:02.438628Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:23:02.438643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:23:02.438649Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:23:02.438654Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-05-29T15:23:02.438663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:23:02.438678Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-05-29T15:23:02.440316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-05-29T15:23:02.441453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-05-29T15:23:02.441464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-05-29T15:23:02.441559Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-05-29T15:23:02.441578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-29T15:23:02.441583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:752:2663] TestWaitNotification: OK eventTxId 105 2025-05-29T15:23:03.095074Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:03.095172Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72057594046678944 describe pathId 3 took 109us result status StatusSuccess 2025-05-29T15:23:03.095355Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: 
Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:03.177022Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:23:03.177125Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 126us result status StatusSuccess 2025-05-29T15:23:03.177301Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: 
EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } TestModificationResults wait txId: 106 2025-05-29T15:23:03.178244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:03.178288Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:509: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-05-29T15:23:03.178317Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Can`t disable 
auto partitioning., at schemeshard: 72057594046678944 2025-05-29T15:23:03.179288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:03.179326Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-05-29T15:23:03.179406Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-05-29T15:23:03.179414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-05-29T15:23:03.179494Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-05-29T15:23:03.179514Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-05-29T15:23:03.179519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:767:2677] TestWaitNotification: OK eventTxId 106 >> TFlatTest::ShardFreezeUnfreezeAlreadySet >> TFlatTest::SplitEmptyToMany >> TNodeBrokerTest::Test999NodesSubscribers [GOOD] >> TLocksTest::BrokenLockUpdate >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestSkipShards [GOOD] Test command err: 2025-05-29T15:23:02.602912Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888541903124412:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:02.602941Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002848/r3tmp/tmpjl7VCQ/pdisk_1.dat 2025-05-29T15:23:02.670625Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888541903124392:2079] 1748532182602723 != 1748532182602726 2025-05-29T15:23:02.673099Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21609, node 1 2025-05-29T15:23:02.683246Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:02.683257Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:02.683259Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:02.683311Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11604 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-05-29T15:23:02.705515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:02.705544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:02.706584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:02.747378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:02.765987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002848/r3tmp/tmpaHna2i/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29151, node 2 TClient is connected to server localhost:11984 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... waiting... waiting... 
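The block above shows the bootstrap that every ydb/core/client unittest in this log repeats: a single-node server comes up, hive 72057594037968897 walks the node through Unknown -> Disconnected -> Connecting -> Connected, the root subdomain is altered (txId 281474976715657), and a test table is created (txId 281474976715659), with each "waiting..." line marking a transaction-completion wait. A minimal sketch of that sequence against the testlib client from test_client.cpp — method names here are assumptions for illustration, not verified API:

    // Hedged sketch of the per-test bootstrap recorded above; names are assumptions.
    NKikimr::Tests::TServerSettings settings(msgBusPort);
    NKikimr::Tests::TServer server(settings);   // starts schemeshard 72057594046644480 and hive 72057594037968897
    NKikimr::Tests::TClient client(settings);
    client.InitRootScheme();                    // ESchemeOpAlterSubDomain, logged as txId 281474976715657
    client.MkDir("/dc-1", "Dir");               // each step ends with a "waiting..." completion wait
    client.CreateTable("/dc-1/Dir", tableSchemeText); // ESchemeOpCreateTable, txId 281474976715659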
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] Test command err: 2025-05-29T15:23:02.658928Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888538047279689:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:02.658945Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002840/r3tmp/tmpFF8Ngv/pdisk_1.dat 2025-05-29T15:23:02.701210Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888538047279665:2079] 1748532182658728 != 1748532182658731 2025-05-29T15:23:02.702947Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:5841 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:02.761030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:02.761064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:02.762177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:02.784411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:02.819329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:03.112754Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888543866993889:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.112771Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002840/r3tmp/tmpcfKDOh/pdisk_1.dat 2025-05-29T15:23:03.127127Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.127293Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888543866993868:2079] 1748532183112644 != 1748532183112647 TClient is connected to server localhost:10662 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.215675Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.215703Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.215928Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.216739Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:03.224097Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
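Each "TClient::Ls request / TClient::Ls response" pair above is one scheme-describe round-trip that WaitRootIsUp loops on until the root path reports ready. A sketch of reading the truncated PathDescription shown in those responses, again with assumed testlib accessors:

    // Hedged sketch: poll the scheme root the way WaitRootIsUp appears to.
    auto res = client.Ls("/dc-1");                        // emits "TClient::Ls request: dc-1"
    const auto& self = res->Record.GetPathDescription().GetSelf();
    bool rootUp = self.GetCreateFinished()                // CreateFinished: true in the log
               && self.GetName() == "dc-1";               // then "WaitRootIsUp 'dc-1' success."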
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] Test command err: 2025-05-29T15:23:02.697668Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888538843975357:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:02.697870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002839/r3tmp/tmpEIliS2/pdisk_1.dat 2025-05-29T15:23:02.750908Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888538843975336:2079] 1748532182697514 != 1748532182697517 2025-05-29T15:23:02.750925Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:17294 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:02.825088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:02.825114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:02.826182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:02.827200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:02.831786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:03.179996Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888545370655443:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.180018Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002839/r3tmp/tmpqxYKPf/pdisk_1.dat 2025-05-29T15:23:03.194984Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.195286Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888545370655416:2079] 1748532183179860 != 1748532183179863 TClient is connected to server localhost:2147 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.284815Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.284852Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.285192Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:03.285897Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:03.286694Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.294190Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
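The StatusInvalidParameter for txId 106 earlier in this section ("Can`t disable auto partitioning.") is propose-time validation in schemeshard__operation_alter_pq.cpp: once a topic runs with PartitionStrategyType CAN_SPLIT_AND_MERGE, an alter that requests DISABLED is rejected before any state changes. A hedged sketch of the shape of that guard — field and enum names only approximate the protobuf text printed in the log, not the real headers:

    // Hedged sketch of the propose-time guard, not the actual implementation.
    const auto requested = alter.GetPQTabletConfig().GetPartitionStrategy().GetPartitionStrategyType();
    const auto current   = topicInfo.GetPQTabletConfig().GetPartitionStrategy().GetPartitionStrategyType();
    if (requested == NKikimrPQ::TPQTabletConfig::DISABLED &&
        current   != NKikimrPQ::TPQTabletConfig::DISABLED) {
        result->SetError(NKikimrScheme::StatusInvalidParameter,
                         "Can`t disable auto partitioning.");  // exact message seen at txId 106
        return result;
    }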
>> TFlatTest::CopyTableAndDropOriginal [GOOD] >> TFlatTest::SelectRangeNullArgs3 >> TLocksTest::GoodSameKeyLock >> TFlatTest::WriteSplitKillRead >> TFlatTest::ShardFreezeUnfreezeAlreadySet [GOOD] >> TFlatTest::ShardFreezeUnfreeze >> TLocksFatTest::PointSetBreak [GOOD] >> TLocksFatTest::LocksLimit >> TLocksTest::SetEraseSet [GOOD] >> TGroupMapperTest::Block42_1disk [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test999NodesSubscribers [GOOD] Test command err: 2025-05-29T15:22:28.175235Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.175290Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.175322Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.175360Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.175392Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.175416Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.181120Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.181212Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.181249Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.181279Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.181315Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.181343Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.181399Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.181421Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.181679Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.181702Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.181719Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.181734Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.181752Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.181770Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.181804Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.185123Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.185167Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.185197Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.186011Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.186040Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.186062Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.186125Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.186139Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.186153Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.186168Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.186182Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:28.186316Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.186334Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.186348Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.186368Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.186384Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.186439Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.186496Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.186528Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.187037Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.187048Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.187053Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.187058Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 
2025-05-29T15:22:28.187064Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.187155Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.190063Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.190095Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.190187Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.190210Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.190223Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.190956Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.191014Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.191272Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.191367Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.191452Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.191501Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.191636Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.192181Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.192190Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.192397Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.192424Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.192533Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.192549Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.192654Z node 7 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.192765Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:28.213105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:22:28.213124Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:28.216603Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:28.216898Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:28.216943Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:28.217088Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:28.217673Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:28.217693Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:28.217729Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:28.217739Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:28.217742Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:28.217751Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Compl ... 
025-05-29T15:23:02.539738Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:23:02.552204Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:562:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.552237Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.552250Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5576:7160] 2025-05-29T15:23:02.556021Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9678:11209], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:02.556107Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:23:02.556115Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:23:02.556130Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:23:02.568845Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:562:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.568874Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.568887Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5581:7165] 2025-05-29T15:23:02.572848Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9680:11211], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:02.572934Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:23:02.572942Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:23:02.572957Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:23:02.585641Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:562:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.585677Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.585690Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5586:7170] 2025-05-29T15:23:02.589444Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9682:11213], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:02.589570Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], 
Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:23:02.589577Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:23:02.589592Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:23:02.603117Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:562:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.603149Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.603164Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5591:7175] 2025-05-29T15:23:02.606371Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9684:11215], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:02.606462Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:23:02.606475Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:23:02.606491Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:23:02.627043Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:562:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.627074Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.627089Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5596:7180] 2025-05-29T15:23:02.630279Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9686:11217], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:02.630325Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:23:02.630330Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:23:02.630341Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:23:02.641972Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:562:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.641993Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.642004Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5601:7185] 2025-05-29T15:23:02.645050Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender 
[1:9688:11219], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:02.645117Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:23:02.645122Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:23:02.645133Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:23:02.656631Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:562:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.656655Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.656666Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5606:7190] 2025-05-29T15:23:02.659106Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9690:11221], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:02.659173Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:23:02.659191Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:23:02.659201Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:23:02.670673Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:562:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.670697Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.670708Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5611:7195] 2025-05-29T15:23:02.673135Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9692:11223], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:02.673201Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:23:02.673208Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:23:02.673219Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:23:02.685485Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:562:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.685508Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.685520Z node 1 
:NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5616:7200] 2025-05-29T15:23:02.688553Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:9694:11225], Recipient [1:562:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:02.688628Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039936, Sender [1:630:2214], Recipient [1:562:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-05-29T15:23:02.688646Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:246: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-05-29T15:23:02.688656Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:23:02.700344Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:562:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.700375Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-05-29T15:23:02.700389Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5621:7205] >> TFlatTest::SelectRangeNullArgs3 [GOOD] >> TFlatTest::SelectRangeNullArgs4 >> TFlatTest::ShardFreezeUnfreeze [GOOD] >> TFlatTest::LargeProxyReply [GOOD] >> TFlatTest::WriteSplitKillRead [GOOD] >> TFlatTest::WriteSplitWriteSplit >> TFlatTest::LargeProxyReplyRW ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] Test command err: 2025-05-29T15:23:02.672183Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888538427310634:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:02.672196Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002842/r3tmp/tmpWzifjA/pdisk_1.dat 2025-05-29T15:23:02.731998Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888538427310613:2079] 1748532182672067 != 1748532182672070 2025-05-29T15:23:02.732011Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:3282 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:02.802354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:02.802391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:02.803485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:02.804487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:02.820357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:23:02.822098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:02.892842Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-29T15:23:02.893006Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-29T15:23:02.897410Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-29T15:23:02.898111Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-05-29T15:23:02.919174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976715676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:23:02.919252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:383: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-05-29T15:23:02.919373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-29T15:23:02.919388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-05-29T15:23:02.919390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-29T15:23:02.919397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-29T15:23:02.919400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-29T15:23:02.919444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-05-29T15:23:02.919472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:23:02.919652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-29T15:23:02.919666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-05-29T15:23:02.919831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose 
Complete, txId: 281474976715676, response: Status: StatusAccepted TxId: 281474976715676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-05-29T15:23:02.919860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-05-29T15:23:02.919906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-29T15:23:02.919911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-05-29T15:23:02.919968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-05-29T15:23:02.919989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-29T15:23:02.919996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7509888538427311140:2239], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 2 2025-05-29T15:23:02.920003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7509888538427311140:2239], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 4 2025-05-29T15:23:02.920010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715676:0, at schemeshard: 72057594046644480 2025-05-29T15:23:02.920018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 281474976715676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-05-29T15:23:02.920082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:357: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-29T15:23:02.920099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:357: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } waiting... 
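The trace above captures the complete schemeshard copy-table flow: TCopyTable Propose accepts an ESchemeOpCreateTable transaction with CopyFromTable, the new path id is published to the scheme board, and TCreateParts asks Hive to create the destination DataShard tablets. A minimal sketch of driving the same operation from outside, through the public YDB C++ SDK, follows; the endpoint, database, and table paths are placeholders, and a local single-node cluster like the one these tests spin up is assumed.

#include <util/system/yassert.h>
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

using namespace NYdb;
using namespace NYdb::NTable;

int main() {
    TDriver driver(TDriverConfig()
        .SetEndpoint("grpc://localhost:2136")  // placeholder endpoint
        .SetDatabase("/dc-1"));                // placeholder database
    TTableClient client(driver);

    // CopyTable proposes the same ESchemeOpCreateTable/CopyFromTable
    // transaction that the trace above logs as TCopyTable Propose.
    TStatus status = client.RetryOperationSync([](TSession session) {
        return session.CopyTable("/dc-1/Dir/TableOld", "/dc-1/Dir/Table")
            .GetValueSync();
    });
    Y_ABORT_UNLESS(status.IsSuccess(), "%s", status.GetIssues().ToString().c_str());

    driver.Stop(true);
    return 0;
}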
2025-05-29T15:23:02.920447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-29T15:23:02.920476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-29T15:23:02.920483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2025-05-29T15:23:02.920486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-05-29T15:23:02.920490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-05-29T15:23:02.920534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-29T15:23:02.920547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-29T15:23:02.920548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId ... 
t# 72075186224037889, clientId# [3:7509888542759754621:2392], serverId# [3:7509888542759754630:2838], sessionId# [0:0:0] 2025-05-29T15:23:03.821485Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3635: Client pipe to tablet 72075186224037893 from 72075186224037891 is reset 2025-05-29T15:23:03.821494Z node 3 :TX_DATASHARD INFO: datashard_impl.h:3306: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-05-29T15:23:03.821522Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888542759753882 RawX2: 4503612512274682 } TabletId: 72075186224037888 State: 4 2025-05-29T15:23:03.821553Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:03.821600Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888542759753885 RawX2: 4503612512274683 } TabletId: 72075186224037889 State: 4 2025-05-29T15:23:03.821608Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:03.821622Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888542759754189 RawX2: 4503612512274741 } TabletId: 72075186224037890 State: 4 2025-05-29T15:23:03.821630Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:03.821643Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888542759754191 RawX2: 4503612512274742 } TabletId: 72075186224037891 State: 4 2025-05-29T15:23:03.821650Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:03.821679Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:03.821682Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-05-29T15:23:03.821700Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:03.821700Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-05-29T15:23:03.821708Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-29T15:23:03.821708Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:03.821715Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult 
datashard 72075186224037891 state Offline 2025-05-29T15:23:03.821716Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 Check that tablet 72075186224037892 was deleted 2025-05-29T15:23:03.821873Z node 3 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2025-05-29T15:23:03.822026Z node 3 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2025-05-29T15:23:03.822131Z node 3 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-05-29T15:23:03.822219Z node 3 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-05-29T15:23:03.822293Z node 3 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-05-29T15:23:03.822361Z node 3 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) 2025-05-29T15:23:03.822652Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-29T15:23:03.822690Z node 3 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-05-29T15:23:03.822717Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-29T15:23:03.822765Z node 3 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037888 2025-05-29T15:23:03.822789Z node 3 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037888 2025-05-29T15:23:03.822789Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-29T15:23:03.822812Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-29T15:23:03.822838Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-29T15:23:03.822864Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-29T15:23:03.822882Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-29T15:23:03.822906Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: 
DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-29T15:23:03.822928Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:23:03.822935Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-29T15:23:03.822945Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-29T15:23:03.822952Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-29T15:23:03.822955Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-29T15:23:03.823005Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-05-29T15:23:03.823171Z node 3 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-05-29T15:23:03.823181Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037889, clientId# [3:7509888542759754008:2391], serverId# [3:7509888542759754009:2392], sessionId# [0:0:0] 2025-05-29T15:23:03.823185Z node 3 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-05-29T15:23:03.823190Z node 3 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-29T15:23:03.823240Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:1 2025-05-29T15:23:03.823240Z node 3 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037889 2025-05-29T15:23:03.823250Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-29T15:23:03.823252Z node 3 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037889 2025-05-29T15:23:03.823258Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:2 2025-05-29T15:23:03.823259Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-29T15:23:03.823322Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:3 2025-05-29T15:23:03.823328Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-29T15:23:03.823332Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:4 2025-05-29T15:23:03.823334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-29T15:23:03.823391Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 
Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-05-29T15:23:03.823404Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-29T15:23:03.823431Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-05-29T15:23:03.823438Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-05-29T15:23:03.823548Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037890, clientId# [3:7509888542759754266:2561], serverId# [3:7509888542759754267:2562], sessionId# [0:0:0] 2025-05-29T15:23:03.823599Z node 3 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037890 2025-05-29T15:23:03.823613Z node 3 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037890 2025-05-29T15:23:03.823986Z node 3 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037891 2025-05-29T15:23:03.824003Z node 3 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037891 |62.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Block42_1disk [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropOriginal [GOOD] Test command err: 2025-05-29T15:23:02.995182Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888537791973113:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:02.995204Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00282b/r3tmp/tmpgCrhOQ/pdisk_1.dat 2025-05-29T15:23:03.044380Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.044573Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888537791973089:2079] 1748532182995015 != 1748532182995018 TClient is connected to server localhost:25516 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
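The CopyCopiedTableAndDropFirstCopy run above finishes with "Check that tablet ... was deleted" probes, answered by Hive's "Can't find the tablet from RequestHiveInfo" warnings; that check is only available to an in-process test harness. From a regular client, the closest equivalent is verifying that the dropped table's path is gone from the scheme. A sketch under the same placeholder assumptions, using DescribePath from the public scheme client:

#include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>

using namespace NYdb;
using namespace NYdb::NScheme;

// True while the path is still visible. Once the drop completes and
// TTxCleanDroppedPaths has removed the path (as in the trace above),
// DescribePath comes back with a non-success status instead.
bool PathExists(TSchemeClient& client, const TString& path) {
    auto result = client.DescribePath(path).GetValueSync();
    return result.IsSuccess();
}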
2025-05-29T15:23:03.097424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.097456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.098505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:03.123866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:03.163322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.231485Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-29T15:23:03.231900Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-29T15:23:03.235788Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-29T15:23:03.236528Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-05-29T15:23:03.253307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976715676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:23:03.253385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:383: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-05-29T15:23:03.253504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-29T15:23:03.253521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-05-29T15:23:03.253523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-29T15:23:03.253530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-29T15:23:03.253545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount 
reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-29T15:23:03.253576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-05-29T15:23:03.253602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:23:03.253810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-29T15:23:03.253823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-05-29T15:23:03.253966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976715676, response: Status: StatusAccepted TxId: 281474976715676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-05-29T15:23:03.253994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-05-29T15:23:03.254032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-29T15:23:03.254039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-05-29T15:23:03.254069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-05-29T15:23:03.254086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-29T15:23:03.254092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7509888542086940913:2239], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 2 2025-05-29T15:23:03.254096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7509888542086940913:2239], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 4 2025-05-29T15:23:03.254102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715676:0, at schemeshard: 72057594046644480 2025-05-29T15:23:03.254114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 281474976715676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-05-29T15:23:03.254189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:357: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { 
StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-29T15:23:03.254211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:357: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } waiting... 2025-05-29T15:23:03.254588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-29T15:23:03.254613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-29T15:23:03.254615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2025-05-29T15:23:03.254619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-05-29T15:23:03.254623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-05-29T15:23:03.254677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-29T15:23:03.254684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-29T15:23:03.254685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2025-05-29T15:23:03.254687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976 ... 
DATASHARD INFO: datashard_impl.h:3306: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-05-29T15:23:03.638416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888542817843090 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4 2025-05-29T15:23:03.638434Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:03.638494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888542817843399 RawX2: 4503608217307446 } TabletId: 72075186224037891 State: 4 2025-05-29T15:23:03.638501Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:03.638519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888542817843098 RawX2: 4503608217307387 } TabletId: 72075186224037888 State: 4 2025-05-29T15:23:03.638528Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:03.638544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888542817843397 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-05-29T15:23:03.638547Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:03.638561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888542817843397 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-05-29T15:23:03.638565Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:03.638601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:03.638633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:03.638642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:03.638649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:03.638656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:3 hive 72057594037968897 at ss 
72057594046644480 2025-05-29T15:23:03.638775Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-05-29T15:23:03.638786Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-05-29T15:23:03.638789Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-05-29T15:23:03.638792Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-29T15:23:03.638794Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-29T15:23:03.639510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-29T15:23:03.639580Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-05-29T15:23:03.639581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-29T15:23:03.639592Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-05-29T15:23:03.639596Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-29T15:23:03.639630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-29T15:23:03.639660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-29T15:23:03.639682Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-05-29T15:23:03.639683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-29T15:23:03.639702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-29T15:23:03.639724Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-29T15:23:03.639745Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037891 2025-05-29T15:23:03.639759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-29T15:23:03.639780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-29T15:23:03.639796Z node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 
72075186224037891 2025-05-29T15:23:03.639800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:23:03.639803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-29T15:23:03.639814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-29T15:23:03.639824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-29T15:23:03.639827Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-29T15:23:03.640003Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-29T15:23:03.640012Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-05-29T15:23:03.640013Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-29T15:23:03.640015Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-29T15:23:03.640029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:2 2025-05-29T15:23:03.640032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-29T15:23:03.640040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:4 2025-05-29T15:23:03.640041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-29T15:23:03.640044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:1 2025-05-29T15:23:03.640045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-29T15:23:03.640048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:3 2025-05-29T15:23:03.640050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-29T15:23:03.640052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:3 2025-05-29T15:23:03.640058Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-29T15:23:03.640216Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037888 2025-05-29T15:23:03.640234Z 
node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037888 2025-05-29T15:23:03.640551Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037890 2025-05-29T15:23:03.640563Z node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037890 2025-05-29T15:23:03.640810Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037889 2025-05-29T15:23:03.640823Z node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037889 Check that tablet 72075186224037889 was deleted 2025-05-29T15:23:03.938238Z node 2 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-05-29T15:23:03.938337Z node 2 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-05-29T15:23:03.938435Z node 2 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-05-29T15:23:03.938482Z node 2 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TLocksTest::CK_Range_BrokenLock [GOOD] >> TLocksTest::CK_Range_BrokenLockInf >> TFlatTest::ReadOnlyMode >> TLocksTest::GoodDupLock [GOOD] >> TLocksTest::CK_Range_GoodLock ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::SetEraseSet [GOOD] Test command err: 2025-05-29T15:23:03.019441Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888544100628332:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.019464Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00281c/r3tmp/tmpo1vuVm/pdisk_1.dat 2025-05-29T15:23:03.067249Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888544100628310:2079] 1748532183019259 != 1748532183019262 2025-05-29T15:23:03.068846Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:19081 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:03.121649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.121678Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.122793Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:03.146205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:03.163945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.228153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.236719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.246263Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1570: Shard 72075186224037888 cannot parse tx 281474976715662: Validate (783): Key validation status: 3 2025-05-29T15:23:03.246301Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7509888544100629209:2493] txid# 281474976715662 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-29T15:23:03.246330Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7509888544100629209:2493] txid# 281474976715662 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-29T15:23:03.246335Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7509888544100629209:2493] txid# 281474976715662 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-05-29T15:23:03.246770Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1570: Shard 72075186224037888 cannot parse tx 281474976715663: Validate (783): Key validation status: 3 2025-05-29T15:23:03.246792Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7509888544100629231:2500] txid# 281474976715663 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-29T15:23:03.246804Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7509888544100629231:2500] txid# 281474976715663 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-29T15:23:03.246815Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7509888544100629231:2500] txid# 281474976715663 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-05-29T15:23:03.247241Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1570: Shard 
72075186224037888 cannot parse tx 281474976715664: Validate (783): Key validation status: 3 2025-05-29T15:23:03.247262Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7509888544100629238:2504] txid# 281474976715664 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-29T15:23:03.247271Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7509888544100629238:2504] txid# 281474976715664 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-29T15:23:03.247274Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7509888544100629238:2504] txid# 281474976715664 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-05-29T15:23:03.247650Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1570: Shard 72075186224037888 cannot parse tx 281474976715665: Validate (783): Key validation status: 3 2025-05-29T15:23:03.247674Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7509888544100629244:2507] txid# 281474976715665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-29T15:23:03.247685Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7509888544100629244:2507] txid# 281474976715665 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-29T15:23:03.247688Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7509888544100629244:2507] txid# 281474976715665 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-05-29T15:23:03.494033Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888544955184379:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.494052Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00281c/r3tmp/tmpSzA1ZX/pdisk_1.dat 2025-05-29T15:23:03.510890Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.511163Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888544955184359:2079] 1748532183493978 != 1748532183493981 TClient is connected to server localhost:9634 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.596755Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.596782Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.597530Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:03.597830Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:23:03.601500Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.617645Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.633091Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.978211Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888543937722963:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.978227Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00281c/r3tmp/tmpYNmrjs/pdisk_1.dat 2025-05-29T15:23:03.992352Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.992559Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888543937722938:2079] 1748532183978040 != 1748532183978043 TClient is connected to server localhost:22914 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:04.082225Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.082260Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.082535Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.083291Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:04.083968Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.091915Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.106892Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.120867Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... |62.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut >> TLocksTest::Range_Pinhole [GOOD] >> TFlatTest::SelectRangeNullArgs4 [GOOD] >> TLocksTest::SetBreakSetEraseBreak >> TFlatTest::WriteSplitWriteSplit [GOOD] >> TLocksTest::Range_IncorrectDot1 [GOOD] >> TLocksTest::Range_IncorrectDot2 >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets >> TLocksTest::Range_BrokenLock0 [GOOD] >> TLocksTest::Range_BrokenLock1 |62.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |62.8%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |62.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution >> TLocksFatTest::LocksLimit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreeze [GOOD] Test command err: 2025-05-29T15:23:03.864854Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888543647932495:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.864875Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002811/r3tmp/tmpWcEmp1/pdisk_1.dat 2025-05-29T15:23:03.910930Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888543647932473:2079] 1748532183864675 != 1748532183864678 2025-05-29T15:23:03.913062Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:9883 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.966559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.966588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.967666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:03.990825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:03.994232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.015041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:04.023374Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888547942900508:2388] txid# 281474976715660, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-05-29T15:23:04.023876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.029743Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888547942900548:2422] txid# 281474976715662, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-05-29T15:23:04.275623Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888546407627191:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.275645Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002811/r3tmp/tmpw3WxNm/pdisk_1.dat 2025-05-29T15:23:04.290819Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.291079Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888546407627171:2079] 1748532184275501 != 1748532184275504 TClient is connected to server localhost:26171 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:04.379659Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.379700Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.379989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:04.380856Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:04.381728Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.393924Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.403705Z node 2 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1570: Shard 72075186224037888 cannot parse tx 281474976715660: 2025-05-29T15:23:04.403747Z node 2 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [2:7509888546407627919:2390] txid# 281474976715660 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-29T15:23:04.403769Z node 2 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [2:7509888546407627919:2390] txid# 281474976715660 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-29T15:23:04.403780Z node 2 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [2:7509888546407627919:2390] txid# 281474976715660 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-05-29T15:23:04.404261Z node 2 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1570: Shard 72075186224037888 cannot parse tx 281474976715661: 2025-05-29T15:23:04.404277Z node 2 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [2:7509888546407627927:2395] txid# 281474976715661 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-29T15:23:04.404288Z node 2 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [2:7509888546407627927:2395] txid# 281474976715661 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-29T15:23:04.404293Z node 2 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [2:7509888546407627927:2395] txid# 281474976715661 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-05-29T15:23:04.405810Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 waiting... 
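
The stream above interleaves three kinds of records: build/link progress markers ("|62.7%| [LD] ..."), test verdict markers (">> Suite::Test [GOOD]"), and per-node actor log entries ("<ISO8601 timestamp> node N :COMPONENT LEVEL: ..."). A minimal summarizing sketch follows, assuming the console output has been captured to a file; the file path, function names, and regular expressions are illustrative, inferred from the visible format rather than taken from any documented ya tooling.

#!/usr/bin/env python3
# Illustrative log summarizer -- assumptions: the console output above is
# saved to a file passed as argv[1]; the marker formats are inferred from
# the visible log, not from any documented ya interface.
import re
import sys
from collections import Counter

# ">> TLocksTest::Range_Pinhole [GOOD]" -- counts completed tests only;
# queued entries without a bracketed verdict are deliberately skipped.
TEST_RE = re.compile(r">> (\S+) \[(\w+)\]")

# "2025-05-29T15:23:04.023374Z node 2 :TX_PROXY ERROR: ..." -- keep only
# WARN/ERROR entries, grouped by the emitting component.
ENTRY_RE = re.compile(
    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z node \d+ :(\w+) (WARN|ERROR):")

def summarize(text):
    verdicts = Counter(status for _, status in TEST_RE.findall(text))
    noisy = Counter("%s/%s" % pair for pair in ENTRY_RE.findall(text))
    print("test verdicts:", dict(verdicts))
    print("top WARN/ERROR sources:")
    for key, n in noisy.most_common(10):
        print("%6d  %s" % (n, key))

if __name__ == "__main__":
    with open(sys.argv[1], encoding="utf-8", errors="replace") as fh:
        summarize(fh.read())

Run against an excerpt like the one above, this would report mostly [GOOD] verdicts and attribute the bulk of the WARN traffic to FLAT_TX_SCHEMESHARD and HIVE, which matches a manual read-through of the entries.
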
>> TLocksTest::NoLocksSet [GOOD] >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::SchemeLimitsRejects >> TFlatTest::ReadOnlyMode [GOOD] >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false >> TSchemeShardSubDomainTest::Restart >> TLocksTest::UpdateLockedKey [GOOD] >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDefine >> TLocksTest::SetBreakSetEraseBreak [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets >> TLocksTest::MultipleLocks >> TFlatTest::RejectByIncomingReadSetSize >> TLocksTest::SetLockNothing >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] >> TSchemeShardSubDomainTest::Restart [GOOD] >> TLocksTest::MultipleLocks [GOOD] |62.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} |62.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSubDomainTest::DiskSpaceUsage >> TFlatTest::LargeProxyReplyRW [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] >> TLocksTest::BrokenLockUpdate [GOOD] >> Secret::Deactivated [GOOD] >> TPQTest::TestOwnership [GOOD] >> TPQTest::TestPQCacheSizeManagement >> TLocksTest::GoodSameKeyLock [GOOD] >> TLocksTest::SetLockNothing [GOOD] >> TLocksTest::CK_Range_BrokenLockInf [GOOD] >> TLocksTest::BrokenNullLock >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] >> TLocksTest::GoodSameShardLock ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:05.981186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:05.981208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:05.981212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:05.981216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:05.981225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:05.981228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:05.981234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-05-29T15:23:05.981249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:05.981344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:05.981396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:05.992213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:05.992232Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.994443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:05.994566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:05.994616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:05.995972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:05.996123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:05.996226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.996293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:05.996712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.996744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:05.996977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:05.996985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.997002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:05.997007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:05.997011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:05.997037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.998201Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 
2025-05-29T15:23:06.011428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:06.011475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.011519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:06.011552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:06.011559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.012106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:06.012130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:06.012164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.012173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:06.012179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:06.012184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:06.012539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.012549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:06.012552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:06.012873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.012885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.012889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:06.012893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:06.013329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:06.013640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:06.013667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:06.013781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:06.013799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:06.013804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:06.013840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:06.013844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:06.013867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:06.013874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:06.014217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:06.014223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:06.014251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409548, partId: 0 2025-05-29T15:23:06.033706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409548 2025-05-29T15:23:06.033711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:84: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-05-29T15:23:06.033715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:120: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-05-29T15:23:06.033722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 101:0 3 -> 128 2025-05-29T15:23:06.034318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.034476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.034506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.034513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.034531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-05-29T15:23:06.034540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-05-29T15:23:06.034574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:06.035004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-05-29T15:23:06.035029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-05-29T15:23:06.035087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:06.035103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 
AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:06.035108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-05-29T15:23:06.035168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 101:0 128 -> 240 2025-05-29T15:23:06.035177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-05-29T15:23:06.035207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:06.035222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-05-29T15:23:06.035654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:06.035661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:06.035695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:06.035699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-05-29T15:23:06.035756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.035762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState 2025-05-29T15:23:06.035771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:06.035774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:06.035777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:06.035779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:06.035783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-05-29T15:23:06.035786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:06.035790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:23:06.035793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:23:06.035821Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-05-29T15:23:06.035826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 1, subscribers: 1 2025-05-29T15:23:06.035828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-05-29T15:23:06.035907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:06.035916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:06.035919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:06.035923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-05-29T15:23:06.035929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:06.035937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-05-29T15:23:06.035940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:309:2299] 2025-05-29T15:23:06.036374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:06.036390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:23:06.036395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:316:2306] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-05-29T15:23:06.036495Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:06.036526Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 35us result status StatusSuccess 2025-05-29T15:23:06.036594Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 
PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:05.235440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:05.235469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:05.235476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:05.235481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:05.235496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:05.235501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:05.235507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:05.235518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:05.235616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2025-05-29T15:23:05.235671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:05.249652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:05.249676Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.252370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:05.252477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:05.252532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:05.254264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:05.254438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:05.254562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.254639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:05.255106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.255142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:05.255367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:05.255375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.255391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:05.255396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:05.255400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:05.255428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.256562Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:05.271212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:05.271306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.271398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:05.271453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:05.271464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.272364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.272390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:05.272436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.272444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:05.272447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:05.272451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:05.272807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.272815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:05.272820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:05.273071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.273078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.273082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.273088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:05.273494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:05.273856Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:05.273901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:05.274048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.274068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:05.274074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.274132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:05.274140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.274166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:05.274175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:05.274552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:05.274558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:05.274596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.274600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-29T15:23:05.274661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.274667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-29T15:23:05.274676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:23:05.274679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: 
TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:23:05.274683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:23:05.274685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:23:05.274688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-29T15:23:05.274694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:23:05.274697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1:0 2025-05-29T15:23:05.274700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1:0 2025-05-29T15:23:05.274708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:05.274713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-29T15:23:05.274715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-29T15:23:05.274949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:23:05.274961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:23:05.274964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-05-29T15:23:05.274968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-05-29T15:23:05.274971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:05.274981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-05-29T15:23:05.275506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-05-29T15:23:05.275586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-05-29T15:23:05.275720Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [1:272:2262] Bootstrap 2025-05-29T15:23:05.277006Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [1:272:2262] Become StateWork (SchemeCache [1:277:2267]) 2025-05-29T15:23:05.277518Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:05.277568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.277579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.277693Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:272:2262] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:23:05.278206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: TimeCastBucketsPerMediator is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:05.278229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2025-05-29T15:23:05.278293Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-05-29T15:23:05.278332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-05-29T15:23:05.278345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-05-29T15:23:05.278380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-05-29T15:23:05.278393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-05-29T15:23:05.278396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:287:2277] TestWaitNotification: OK eventTxId 100 2025-05-29T15:23:05.278461Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:05.278487Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 31us result status StatusPathDoesNotExist 2025-05-29T15:23:05.278520Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: 
\'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated [GOOD] Test command err: 2025-05-29T15:22:54.278505Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00173c/r3tmp/tmpjFAjUL/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5690, node 1 TClient is connected to server localhost:28225 2025-05-29T15:22:54.414948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:22:54.430473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:54.430499Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:54.430504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:54.430591Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:54.431534Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:54.431612Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532173840570 != 1748532173840574 2025-05-29T15:22:54.473075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:54.473103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:54.483554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-05-29T15:23:06.104398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:686:2576], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:06.104429Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:06.124538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:06.124559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:06.124563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:06.124567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:06.124576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:06.124579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:06.124586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:06.124596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:06.124681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:06.124733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:06.135094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:06.135114Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:06.138091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:06.138209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:06.138259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:06.140744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:06.140899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 
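Two subdomain creations appear in this excerpt. The one at the top of the section is rejected at propose time with StatusInvalidParameter because the request sets Coordinators and Mediators but leaves TimeCastBucketsPerMediator at 0; the one unfolding below (TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets) ignites and completes. A minimal C++ sketch of the kind of validation that separates the two cases, under assumed names (the real check runs inside the schemeshard propose path shown in the log, TCreateSubDomain Propose / IgniteOperation, not in a free function like this):

#include <cassert>
#include <cstdint>
#include <optional>
#include <string>

// Illustrative stand-in for the settings carried by an
// ESchemeOpCreateSubDomain transaction; field names are copied from the
// requests printed above, but the struct itself is hypothetical.
struct TSubDomainRequest {
    uint64_t PlanResolution = 0;
    uint32_t Coordinators = 0;
    uint32_t Mediators = 0;
    uint32_t TimeCastBucketsPerMediator = 0; // 0 in the rejected request
};

// Returns a rejection reason, or std::nullopt when the request is
// well-formed.
std::optional<std::string> ValidateSubDomainRequest(const TSubDomainRequest& req) {
    if (req.Mediators > 0 && req.TimeCastBucketsPerMediator == 0) {
        return "Malformed subdomain request: TimeCastBucketsPerMediator is 0";
    }
    return std::nullopt;
}

int main() {
    TSubDomainRequest req;
    req.PlanResolution = 50;
    req.Coordinators = 1;
    req.Mediators = 1; // TimeCastBucketsPerMediator left at 0, as in the log
    assert(ValidateSubDomainRequest(req).has_value()); // request is rejected
}

Because the failing operation never passes propose, no path is created, and the subsequent DescribeScheme on /MyRoot/USER_0 returns StatusPathDoesNotExist with LastExistedPrefixPath /MyRoot, exactly as printed above.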
2025-05-29T15:23:06.141020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:06.141084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:06.141568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:06.141603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:06.141881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:06.141892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:06.141915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:06.141923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:06.141929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:06.141964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.143548Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:06.159551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:06.159647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.159733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:06.159793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:06.159806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.160581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:06.160605Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:06.160648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.160656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:06.160660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:06.160664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:06.160983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.160991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:06.160995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:06.161262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.161272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.161276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:06.161281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:06.161721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:06.162020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:06.162052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:06.162201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:06.162219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:06.162225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:06.162268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:06.162273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:06.162297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:06.162307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:06.162638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:06.162645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:06.162683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:06.168201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-05-29T15:23:06.168510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:06.168516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:06.168540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:06.168550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:06.168553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-05-29T15:23:06.168557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-05-29T15:23:06.168598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 100:0, 
at schemeshard: 72057594046678944 2025-05-29T15:23:06.168602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 100:0 ProgressState 2025-05-29T15:23:06.168612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#100:0 progress is 1/1 2025-05-29T15:23:06.168615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:06.168619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#100:0 progress is 1/1 2025-05-29T15:23:06.168621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:06.168624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-05-29T15:23:06.168627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:06.168631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 100:0 2025-05-29T15:23:06.168633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 100:0 2025-05-29T15:23:06.168641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:06.168646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-05-29T15:23:06.168648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-05-29T15:23:06.168651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-05-29T15:23:06.168740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:06.168749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:06.168753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-05-29T15:23:06.168756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:23:06.168758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:06.168821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 
72057594046678944, cookie: 100 2025-05-29T15:23:06.168827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:06.168829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-05-29T15:23:06.168832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-29T15:23:06.168834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:06.168839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-05-29T15:23:06.169280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-05-29T15:23:06.169296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-05-29T15:23:06.169335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-05-29T15:23:06.169348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-05-29T15:23:06.169388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-05-29T15:23:06.169401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-05-29T15:23:06.169405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:310:2300] TestWaitNotification: OK eventTxId 100 2025-05-29T15:23:06.169467Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:06.169498Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 38us result status StatusSuccess 2025-05-29T15:23:06.169589Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:06.169643Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:06.169654Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 12us result status StatusSuccess 2025-05-29T15:23:06.169684Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::MultipleLocks [GOOD] Test command err: 2025-05-29T15:23:02.997147Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888537978825524:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:02.997177Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002833/r3tmp/tmpCKJF5u/pdisk_1.dat 2025-05-29T15:23:03.048821Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.049441Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888537978825502:2079] 1748532182997007 != 1748532182997010 TClient is connected to server localhost:14581 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.127713Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.127737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.128404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:03.128693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:03.132985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.164108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.227391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.236360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
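Every node bootstrap in these client tests logs the same Hive lifecycle for the joining node: VolatileState moves Unknown -> Disconnected -> Connecting -> Connected. A small model of that observed ordering follows; the enum and transition function are assumptions for illustration, not the real Hive code:

#include <cassert>

// Node lifecycle states as they appear in the HIVE WARN lines above.
enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

// Each WARN line per node corresponds to one hop along this chain.
EVolatileState Next(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return EVolatileState::Disconnected;
        case EVolatileState::Disconnected: return EVolatileState::Connecting;
        case EVolatileState::Connecting:   return EVolatileState::Connected;
        case EVolatileState::Connected:    return EVolatileState::Connected;
    }
    return s; // unreachable
}

int main() {
    EVolatileState s = EVolatileState::Unknown;
    s = Next(s); // Unknown -> Disconnected
    s = Next(s); // Disconnected -> Connecting
    s = Next(s); // Connecting -> Connected
    assert(s == EVolatileState::Connected);
}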
2025-05-29T15:23:03.476822Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888543974243420:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.476849Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002833/r3tmp/tmpMedWWl/pdisk_1.dat 2025-05-29T15:23:03.490945Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.491231Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888543974243397:2079] 1748532183476669 != 1748532183476672 TClient is connected to server localhost:21999 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.580358Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.580389Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.580659Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.581321Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:03.588286Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.603229Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.617001Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
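The METADATA_PROVIDER warnings that open each node's log ("cannot detect path existence; path=//dc-1/.metadata/initialization/migrations; error=scheme_cache_undelivered_message", "Scheduled retry for error: Retry LookupError") are transient bootstrap noise: the provider probes the metadata path before the scheme cache is serving, fails, and schedules a retry. A hedged sketch of such a retry-with-backoff loop; the probe function, its signature, and the timings are assumptions, not the real actor API:

#include <chrono>
#include <optional>
#include <string>
#include <thread>

// Hypothetical probe: std::nullopt models a transient failure such as
// scheme_cache_undelivered_message; a value is a definite answer.
std::optional<bool> CheckPathExists(const std::string& /*path*/) {
    return std::nullopt; // stub: cache not ready yet, caller must retry
}

// Retry-with-backoff loop implied by the "Scheduled retry" warnings above.
bool WaitForPath(const std::string& path, int maxAttempts) {
    auto delay = std::chrono::milliseconds(50);
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        if (auto exists = CheckPathExists(path)) {
            return *exists; // definite yes/no from the scheme cache
        }
        std::this_thread::sleep_for(delay); // transient error: back off
        delay *= 2;                         // and retry with a longer wait
    }
    return false; // gave up; in these tests the cache comes up first
}

int main() {
    WaitForPath("//dc-1/.metadata/initialization/migrations", 3);
}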
2025-05-29T15:23:03.977144Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888543511038684:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.977176Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002833/r3tmp/tmpiWr6SX/pdisk_1.dat 2025-05-29T15:23:03.992066Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.992310Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888543511038664:2079] 1748532183977035 != 1748532183977038 TClient is connected to server localhost:28753 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:04.081452Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.081484Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.081794Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.082659Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:23:04.082686Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:04.092269Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.106910Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:04.120866Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.371500Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888549067595280:2078];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.371720Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002833/r3tmp/tmpaUcMia/pdisk_1.dat 2025-05-29T15:23:04.388093Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.388332Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888549067595239:2079] 1748532184370811 != 1748532184370814 TClient is connected to server localhost:27760 WaitRootIsUp 'dc-1'... TClient::L ... 5: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.476685Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.477355Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:04.483807Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.498855Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.512946Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.787383Z node 5 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7509888547818984025:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.787422Z node 5 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002833/r3tmp/tmpJSM9Bz/pdisk_1.dat 2025-05-29T15:23:04.800861Z node 5 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.801213Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [5:7509888547818984005:2079] 1748532184787294 != 1748532184787297 TClient is connected to server localhost:14228 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:04.892022Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.892043Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.892308Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.893041Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:04.896765Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.911849Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.925724Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.294662Z node 6 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7509888551407875835:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:05.294751Z node 6 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002833/r3tmp/tmpa7Q1ar/pdisk_1.dat 2025-05-29T15:23:05.307550Z node 6 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.308015Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [6:7509888551407875813:2079] 1748532185294508 != 1748532185294511 TClient is connected to server localhost:12257 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:05.398451Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:05.398486Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:05.398768Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.399402Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:05.408054Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.422529Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.436999Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.803572Z node 7 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7509888554775788560:2177];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:05.803702Z node 7 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002833/r3tmp/tmpSZry4J/pdisk_1.dat 2025-05-29T15:23:05.814774Z node 7 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.815013Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [7:7509888554775788412:2079] 1748532185802869 != 1748532185802872 TClient is connected to server localhost:17734 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:05.906899Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:05.906928Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:05.907220Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.907862Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:05.912086Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.927383Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.941058Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::SetBreakSetEraseBreak [GOOD] Test command err: 2025-05-29T15:23:02.763358Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888537753022519:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:02.763392Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002835/r3tmp/tmphpTRIM/pdisk_1.dat 2025-05-29T15:23:02.824985Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888537753022501:2079] 1748532182763240 != 1748532182763243 2025-05-29T15:23:02.827132Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:2792 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:02.899804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:02.899831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:02.900375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:02.900894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:23:02.909189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:02.927913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:02.937910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.240015Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888544296883336:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.240054Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002835/r3tmp/tmp0Uxggp/pdisk_1.dat 2025-05-29T15:23:03.251154Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.251368Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888544296883314:2079] 1748532183239897 != 1748532183239900 TClient is connected to server localhost:26849 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.344440Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.344471Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.344875Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.345321Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:03.349749Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.365111Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.378870Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.700066Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888542723365795:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.700086Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002835/r3tmp/tmptZatws/pdisk_1.dat 2025-05-29T15:23:03.713265Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.716114Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888542723365775:2079] 1748532183699930 != 1748532183699933 TClient is connected to server localhost:29939 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.804055Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.804087Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.804378Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.805075Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:03.812555Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:03.826993Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:23:03.840882Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.108423Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888549390013197:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.108443Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002835/r3tmp/tmpWPo5Xy/pdisk_1.dat 2025-05-29T15:23:04.122087Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.123859Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888549390013177:2079] 1748532184108307 != 1748532184108310 TClient is connected to server localhost:13511 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: " ... 78: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:04.212622Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:04.218234Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.236666Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.247460Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.587756Z node 5 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7509888547790178914:2059];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.587778Z node 5 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002835/r3tmp/tmpdEhN8x/pdisk_1.dat 2025-05-29T15:23:04.604973Z node 5 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.605251Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [5:7509888547790178897:2079] 1748532184587625 != 1748532184587628 TClient is connected to server localhost:3286 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:04.692905Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.692936Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.693176Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.693916Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:04.737120Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.751019Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.765061Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.071457Z node 6 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7509888553863151479:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:05.071470Z node 6 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002835/r3tmp/tmpJyWPKw/pdisk_1.dat 2025-05-29T15:23:05.081727Z node 6 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.081968Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [6:7509888553863151447:2079] 1748532185071319 != 1748532185071322 TClient is connected to server localhost:23883 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:05.174145Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:05.174173Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:05.174911Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:05.175230Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:05.176214Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.184141Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.198998Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.212835Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.562350Z node 7 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7509888552695465527:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:05.562367Z node 7 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002835/r3tmp/tmp8KNGId/pdisk_1.dat 2025-05-29T15:23:05.575097Z node 7 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.575327Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [7:7509888552695465504:2079] 1748532185562210 != 1748532185562213 TClient is connected to server localhost:22126 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:05.666214Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:05.666245Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:05.666468Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.667254Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:05.688811Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.703065Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.717011Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> TFlatTest::RejectByIncomingReadSetSize [GOOD] >> TLocksTest::CK_Range_GoodLock [GOOD] >> TPQTest::TestPQCacheSizeManagement [GOOD] >> TLocksTest::Range_BrokenLock1 [GOOD] >> TLocksTest::Range_IncorrectDot2 [GOOD] >> Secret::DeactivatedQueryService [GOOD] >> TPQTest::TestOffsetEstimation [GOOD] |62.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_GoodLock [GOOD] Test command err: 2025-05-29T15:23:02.563937Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888541760095475:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:02.563964Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002845/r3tmp/tmpaLcAWy/pdisk_1.dat 2025-05-29T15:23:02.635667Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888541760095450:2079] 1748532182563669 != 1748532182563672 2025-05-29T15:23:02.638527Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:12988 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: 2025-05-29T15:23:02.666775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:02.666810Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:02.667857Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:02.668797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:02.678380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:02.698830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:02.708588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.025213Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888543851196707:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.025233Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002845/r3tmp/tmpiuXagR/pdisk_1.dat 2025-05-29T15:23:03.039537Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.041298Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888543851196687:2079] 1748532183025107 != 1748532183025110 TClient is connected to server localhost:18832 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.129303Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:03.129505Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.129546Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-29T15:23:03.130622Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:23:03.162801Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:23:03.172842Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.183114Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.522851Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888545840159790:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.522870Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002845/r3tmp/tmpBKoh8B/pdisk_1.dat 2025-05-29T15:23:03.533652Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.533883Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888545840159771:2079] 1748532183522736 != 1748532183522739 TClient is connected to server localhost:5639 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.626794Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.626825Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.627164Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:03.627764Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:03.629744Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.638154Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.652570Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.708003Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:04.021175Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888547838972356:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.021200Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002845/r3tmp/tmpAkfW17/pdisk_1.dat 2025-05-29T15:23:04.033569Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.033784Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888547838972335:2079] 1748532184021043 != 1748532184021046 TClient is connected to server localhost:19254 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathTyp ... d ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.521203Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:05.522236Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.527290Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.541720Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.557525Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.911507Z node 8 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7509888552601095475:2059];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:05.911551Z node 8 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002845/r3tmp/tmp8M7Xlj/pdisk_1.dat 2025-05-29T15:23:05.925288Z node 8 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.925565Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [8:7509888552601095458:2079] 1748532185911414 != 1748532185911417 TClient is connected to server localhost:8755 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:06.015227Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:06.015254Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:06.015584Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.016185Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:06.023869Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.038597Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.052398Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.375864Z node 9 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7509888557380011086:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:06.375881Z node 9 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002845/r3tmp/tmpAUxWCQ/pdisk_1.dat 2025-05-29T15:23:06.387103Z node 9 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:06.387260Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [9:7509888557380011066:2079] 1748532186375798 != 1748532186375801 TClient is connected to server localhost:5963 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:06.478604Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:06.478655Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:06.478923Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.479680Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:06.486288Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.500334Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.514454Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.773248Z node 10 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7509888555769008233:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:06.773278Z node 10 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002845/r3tmp/tmpRFYjhx/pdisk_1.dat 2025-05-29T15:23:06.785430Z node 10 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:06.785803Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [10:7509888555769008212:2079] 1748532186773150 != 1748532186773153 TClient is connected to server localhost:32423 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:06.877007Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:06.877049Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:06.877295Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.877853Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:06.885253Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.899509Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.913836Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::RejectByIncomingReadSetSize [GOOD] Test command err: 2025-05-29T15:23:04.959534Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888546706822586:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.959555Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027df/r3tmp/tmpSq8BBI/pdisk_1.dat 2025-05-29T15:23:05.028912Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888546706822561:2079] 1748532184959285 != 1748532184959288 2025-05-29T15:23:05.032403Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:17203 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:05.097245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:05.097283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:05.098381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:05.098587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:05.115456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpMkDir MkDir { Name: "Dir1" } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:23:05.115519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /dc-1/Dir1, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:23:05.115540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: dc-1, child name: Dir1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-05-29T15:23:05.115551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-05-29T15:23:05.115562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:23:05.115614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-29T15:23:05.115625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-29T15:23:05.116331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 2, at 
schemeshard: 72057594046644480 2025-05-29T15:23:05.116363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /dc-1/Dir1 2025-05-29T15:23:05.116409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-29T15:23:05.116418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-29T15:23:05.116452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-05-29T15:23:05.116477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-29T15:23:05.116485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7509888551001790543:2384], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2025-05-29T15:23:05.116492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7509888551001790543:2384], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2025-05-29T15:23:05.116499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:23:05.116504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:63: MkDir::TPropose operationId# 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:23:05.116523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 281474976715658 ready parts: 1/1 waiting... 
2025-05-29T15:23:05.117196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:05.117469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-05-29T15:23:05.117484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-05-29T15:23:05.117486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-05-29T15:23:05.117489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-05-29T15:23:05.117492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-05-29T15:23:05.117544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-05-29T15:23:05.117558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-05-29T15:23:05.117559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-05-29T15:23:05.117561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-05-29T15:23:05.117563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-29T15:23:05.117570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-05-29T15:23:05.117588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:23:05.117594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-05-29T15:23:05.117598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 
2025-05-29T15:23:05.117759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976715658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715658 msg type: 269090816 2025-05-29T15:23:05.117791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976715658, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:23:05.117899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2025-05-29T15:23:05.117915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2025-05-29T15:23:05.118155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 1748532185162, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-29T15:23:05.118182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1748532185162 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-05-29T15:23:05.118192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 17485 ... 281474976715661 2025-05-29T15:23:05.163503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2025-05-29T15:23:05.163506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 4 2025-05-29T15:23:05.163522Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{37, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-05-29T15:23:05.163525Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:23:05.163540Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046316545:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:23:05.163542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-05-29T15:23:05.163545Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-05-29T15:23:05.163547Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:23:05.163551Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put 
Result: TEvPutResult {Id# [72057594046316545:2:13:1:24576:107:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:23:05.163553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-05-29T15:23:05.163554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715661 2025-05-29T15:23:05.163556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-05-29T15:23:05.163557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-29T15:23:05.163563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715661, subscribers: 1 2025-05-29T15:23:05.163565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7509888551001790897:2302] 2025-05-29T15:23:05.163572Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{38, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-05-29T15:23:05.163578Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:23:05.163591Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:14} commited cookie 1 for step 13 2025-05-29T15:23:05.163849Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046644480:4:16:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:23:05.163857Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046644480:4:16:1:24576:122:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:23:05.163865Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046644480:4:17:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:23:05.163868Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046644480:4:17:1:24576:119:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:23:05.163869Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046644480:4:18:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:23:05.163875Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046644480:4:18:1:24576:132:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:23:05.163879Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 16 2025-05-29T15:23:05.163904Z node 1 
:TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 17 2025-05-29T15:23:05.163913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2025-05-29T15:23:05.163918Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 18 2025-05-29T15:23:05.163920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2025-05-29T15:23:05.163985Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:397: TClient[72057594046644480] received poison pill [1:7509888551001790899:2302] 2025-05-29T15:23:05.164003Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:505: TClient[72057594046644480] notify reset [1:7509888551001790899:2302] 2025-05-29T15:23:05.164022Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:182: [72057594046644480] Got PeerClosed from# [1:7509888551001790899:2302] 2025-05-29T15:23:05.168199Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:14} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-05-29T15:23:05.168218Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:14} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:23:05.168251Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:14} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-05-29T15:23:05.168258Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:14} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:23:05.168277Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:155: TClient[72057594046382081] send [1:7509888551001790516:2358] 2025-05-29T15:23:05.168288Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:629: TClient[72057594046382081] push event to server [1:7509888551001790516:2358] 2025-05-29T15:23:05.168303Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594046382081] HandleSend Sender# [1:7509888551001790513:2358] EventType# 269156352 2025-05-29T15:23:05.314991Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:155: TClient[72057594037936129] send [1:7509888546706822833:2079] 2025-05-29T15:23:05.315007Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:629: TClient[72057594037936129] push event to server [1:7509888546706822833:2079] 2025-05-29T15:23:05.315071Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037936129] HandleSend Sender# [1:7509888546706822561:2079] EventType# 272039936 2025-05-29T15:23:05.432469Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888552926246114:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:05.432494Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027df/r3tmp/tmpvVtF6s/pdisk_1.dat 2025-05-29T15:23:05.444877Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 
2025-05-29T15:23:05.445097Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888552926246093:2079] 1748532185432389 != 1748532185432392 TClient is connected to server localhost:1886 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:05.536189Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:05.536227Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:05.536655Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.537131Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:05.539756Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:23:05.540728Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:06.913701Z node 2 :TX_PROXY ERROR: datareq.cpp:2829: Actor# [2:7509888557221214518:2595] txid# 281474976715700 FailProposedRequest: Transaction incoming read set size 1000080 for tablet 72075186224037889 exceeded limit 1000 Status# ExecError 2025-05-29T15:23:06.913735Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7509888557221214518:2595] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock1 [GOOD] Test command err: 2025-05-29T15:23:02.981092Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888539825499727:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:02.981422Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002825/r3tmp/tmpS9BsnW/pdisk_1.dat 2025-05-29T15:23:03.042755Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888539825499702:2079] 1748532182980834 != 1748532182980837 2025-05-29T15:23:03.049841Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:11938 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.124694Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.124732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.125805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:03.125892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:03.162662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:03.225071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.234346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.428869Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888543489659034:2059];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.428892Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002825/r3tmp/tmpyoFJnx/pdisk_1.dat 2025-05-29T15:23:03.444372Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.444713Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888543489659017:2079] 1748532183428808 != 1748532183428811 TClient is connected to server localhost:24831 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.532265Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.532292Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.532480Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.533380Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:03.538735Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:03.553461Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.567692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.805294Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888542354657404:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.805316Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002825/r3tmp/tmpNJf2Po/pdisk_1.dat 2025-05-29T15:23:03.818952Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.819174Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888542354657383:2079] 1748532183805166 != 1748532183805169 TClient is connected to server localhost:28405 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.909670Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.909699Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.909997Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.910639Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:03.917314Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:03.931734Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.945333Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.218626Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888549197738786:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.218649Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002825/r3tmp/tmpi9T67y/pdisk_1.dat 2025-05-29T15:23:04.234457Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.234759Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888549197738766:2079] 1748532184218518 != 1748532184218521 TClient is connected to server localhost:29969 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: ... 4037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:05.752129Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.752923Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:05.757752Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.773274Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.829451Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:06.114084Z node 8 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7509888557303780026:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:06.114107Z node 8 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002825/r3tmp/tmpWZQfBh/pdisk_1.dat 2025-05-29T15:23:06.131980Z node 8 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:06.132267Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [8:7509888557303779999:2079] 1748532186113928 != 1748532186113931 TClient is connected to server localhost:2854 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:06.217703Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:06.217730Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:06.218056Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.218676Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:06.226765Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.241908Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.255085Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:06.456501Z node 9 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7509888558192917861:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:06.456517Z node 9 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002825/r3tmp/tmpencmVm/pdisk_1.dat 2025-05-29T15:23:06.466805Z node 9 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:06.467075Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [9:7509888558192917839:2079] 1748532186456400 != 1748532186456403 TClient is connected to server localhost:20916 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:06.559723Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:06.559745Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:06.560026Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.560732Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:06.562418Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.577326Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.591525Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:06.805779Z node 10 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7509888557615550228:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:06.805796Z node 10 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002825/r3tmp/tmpvDU58a/pdisk_1.dat 2025-05-29T15:23:06.816915Z node 10 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:06.817041Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [10:7509888557615550204:2079] 1748532186805615 != 1748532186805618 TClient is connected to server localhost:23963 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:06.909405Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:06.909433Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:06.909746Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.910423Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:06.912237Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.927693Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.941793Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
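Aside: most of the TLocksTest::Range_BrokenLock1 stderr above is routine per-node bring-up; the decisive records are the two TX_PROXY ERROR lines from datareq.cpp, where the proxy refuses to propose a transaction whose incoming read set (1000080 bytes here) exceeds the configured limit (1000 in this run) and answers Status# ExecError. A minimal stand-alone C++ sketch of such a size guard follows; TReadSetGuard and its members are invented names for illustration, not the actual types in ydb/core/tx/tx_proxy/datareq.cpp.

#include <cstdint>
#include <iostream>
#include <sstream>
#include <string>

// Hypothetical sketch of the size check behind the
// "FailProposedRequest: Transaction incoming read set size ... exceeded limit"
// record above; names are assumptions, not the real proxy code.
struct TReadSetGuard {
    uint64_t LimitBytes; // the "limit 1000" printed in the log

    bool Accept(uint64_t incomingBytes, uint64_t tabletId, std::string& error) const {
        if (incomingBytes > LimitBytes) {
            std::ostringstream out;
            out << "Transaction incoming read set size " << incomingBytes
                << " for tablet " << tabletId
                << " exceeded limit " << LimitBytes;
            error = out.str();
            return false; // the caller then replies with Status# ExecError
        }
        return true;
    }
};

int main() {
    TReadSetGuard guard{1000};
    std::string error;
    if (!guard.Accept(1000080, 72075186224037889ULL, error)) {
        std::cout << error << '\n'; // same shape as the datareq.cpp:2829 record
    }
    return 0;
}

Run locally, this prints the same message shape as the log record, which makes the ExecError outcome in the test output easy to recognize.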
>> TLocksTest::BrokenNullLock [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectDot2 [GOOD] Test command err: 2025-05-29T15:23:02.764298Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888540735465823:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:02.764328Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00283d/r3tmp/tmpv1xrjk/pdisk_1.dat 2025-05-29T15:23:02.815975Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:02.816962Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888540735465803:2079] 1748532182764158 != 1748532182764161 TClient is connected to server localhost:5638 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:02.866822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:02.866850Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:02.867947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:02.895322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:02.902016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:02.963770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:02.973178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:03.251535Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888542711726611:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.251574Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00283d/r3tmp/tmpuioKkI/pdisk_1.dat 2025-05-29T15:23:03.264559Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.264895Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888542711726590:2079] 1748532183251413 != 1748532183251416 TClient is connected to server localhost:14739 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.355808Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.355836Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.356171Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.356836Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:03.364135Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.379068Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.392688Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:03.713142Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888545909704304:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.713420Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00283d/r3tmp/tmpt1LB8I/pdisk_1.dat 2025-05-29T15:23:03.730450Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.730763Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888545909704284:2079] 1748532183713007 != 1748532183713010 TClient is connected to server localhost:11719 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.817113Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.817134Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.817434Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.818119Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:23:03.826809Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:23:03.841399Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.854267Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:04.106147Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888548557531805:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.106165Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00283d/r3tmp/tmpbWhgMr/pdisk_1.dat 2025-05-29T15:23:04.124748Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.124923Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888548557531784:2079] 1748532184106047 != 1748532184106050 TClient is connected to server localhost:11448 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: " ... 037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:05.717521Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.718153Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:05.723061Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.739713Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:05.751515Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:23:06.025666Z node 8 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7509888558867262538:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:06.025696Z node 8 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00283d/r3tmp/tmpS7mWG2/pdisk_1.dat 2025-05-29T15:23:06.036884Z node 8 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:06.037113Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [8:7509888558867262517:2079] 1748532186025588 != 1748532186025591 TClient is connected to server localhost:25704 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:06.129243Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:06.129269Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:06.129466Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.130327Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:06.135780Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.151063Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.164642Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.429221Z node 9 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7509888558414775808:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:06.429237Z node 9 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00283d/r3tmp/tmpDx0dLT/pdisk_1.dat 2025-05-29T15:23:06.442061Z node 9 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:06.442219Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [9:7509888558414775783:2079] 1748532186429079 != 1748532186429082 TClient is connected to server localhost:28517 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:06.533246Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:06.533268Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:06.533486Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.534387Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:06.541585Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.556845Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.570057Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.903971Z node 10 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7509888558807584833:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:06.903985Z node 10 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00283d/r3tmp/tmpSBV1MT/pdisk_1.dat 2025-05-29T15:23:06.913573Z node 10 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:06.913745Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [10:7509888558807584807:2079] 1748532186903882 != 1748532186903885 TClient is connected to server localhost:25674 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:07.006868Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:07.006893Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:07.007132Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.007914Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:07.010302Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.018518Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.032158Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
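Aside: every node in the lock-test blocks above traces the same Hive volatile-state progression, Unknown -> Disconnected -> Connecting -> Connected, bracketing the ESchemeOpAlterSubDomain and ESchemeOpCreateTable warnings. The stand-alone sketch below models that progression as a linear state machine; the enum and the Next helper are assumptions for illustration, not the node_info.cpp implementation.

#include <iostream>

// Hypothetical model of the node state progression logged by the
// HIVE WARN records above (node_info.cpp:25).
enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

const char* ToString(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return "Unknown";
        case EVolatileState::Disconnected: return "Disconnected";
        case EVolatileState::Connecting:   return "Connecting";
        case EVolatileState::Connected:    return "Connected";
    }
    return "?";
}

// Advance one step along the only path the log ever shows.
EVolatileState Next(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return EVolatileState::Disconnected;
        case EVolatileState::Disconnected: return EVolatileState::Connecting;
        default:                           return EVolatileState::Connected;
    }
}

int main() {
    EVolatileState s = EVolatileState::Unknown;
    while (s != EVolatileState::Connected) {
        EVolatileState next = Next(s);
        // Same shape as: "VolatileState: Unknown -> Disconnected"
        std::cout << "VolatileState: " << ToString(s) << " -> " << ToString(next) << '\n';
        s = next;
    }
    return 0;
}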
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:05.331590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:05.331617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:05.331623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:05.331628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:05.331640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:05.331644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:05.331654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:05.331669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:05.331784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:05.331854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:05.344590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:05.344612Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.348154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:05.348278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:05.348326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:05.349950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:05.350109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:05.350204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2025-05-29T15:23:05.350278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:05.350778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.350823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:05.351109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:05.351117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.351135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:05.351143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:05.351149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:05.351184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.352470Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:05.366787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:05.366875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.366953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:05.367000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:05.367010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.367939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.367969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-05-29T15:23:05.368025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.368037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:05.368042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:05.368048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:05.368466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.368478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:05.368485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:05.368843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.368855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.368862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.368870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:05.369482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:05.369889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:05.369924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:05.370071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.370089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:05.370098Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.370139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:05.370144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.370169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:05.370178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:05.370532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:05.370538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:05.370577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.370580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-29T15:23:05.370640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.370645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-29T15:23:05.370655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:23:05.370659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:23:05.370663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:23:05.370665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:23:05.370668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-29T15:23:05.370672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:23:05.370675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1:0 2025-05-29T15:23:05.370678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1:0 2025-05-29T15:23:05.370686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:05.370691Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-29T15:23:05.370694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-29T15:23:05.370929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:23:05.370941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:23:05.370944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-05-29T15:23:05.370948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-05-29T15:23:05.370951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:05.370960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-05-29T15:23:05.371461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-05-29T15:23:05.371543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-05-29T15:23:05.371687Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [1:272:2262] Bootstrap 2025-05-29T15:23:05.373174Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [1:272:2262] Become StateWork (SchemeCache [1:277:2267]) 2025-05-29T15:23:05.373683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { Coordinators: 1 Mediators: 1 Name: "USER_0" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:05.373723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.373734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.373850Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:272:2262] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:23:05.374457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: 
TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: plan resolution is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:05.374492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2025-05-29T15:23:05.374587Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-05-29T15:23:05.374627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-05-29T15:23:05.374641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-05-29T15:23:05.374690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-05-29T15:23:05.374710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-05-29T15:23:05.374715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:287:2277] TestWaitNotification: OK eventTxId 100 2025-05-29T15:23:05.374815Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:05.374838Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 28us result status StatusPathDoesNotExist 2025-05-29T15:23:05.374870Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:23:05.407284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:05.407311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:05.407316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:05.407321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:05.407332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:05.407336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:05.407345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:05.407359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:05.407457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:05.407525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:05.418384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:05.418408Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.421149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:05.421280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:05.421355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:05.424129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:05.424339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:05.424451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.424531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:05.425041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.425082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:05.425327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:05.425336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.425363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:05.425369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:05.425373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:05.425399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.426429Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:05.441119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:05.441191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.441260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:05.441311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:05.441322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.442156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.442180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:05.442223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.442231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:05.442235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:05.442239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:05.442589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.442597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:05.442600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:05.442936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.442949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.442955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.442962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:05.443365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:05.443712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:05.443742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:05.443885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.443902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:05.443907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.443951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:05.443957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-05-29T15:23:05.443983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:05.443992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:05.444349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:05.444355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:05.444390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 14Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:06.759720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:15 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:06.759723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:14 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:06.759726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:06.759729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:16 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:06.760219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-05-29T15:23:06.760496Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:23:06.760723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:06.760798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-05-29T15:23:06.760951Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 15 TabletID: 72075186233409556 2025-05-29T15:23:06.761106Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 14 TabletID: 72075186233409555 2025-05-29T15:23:06.761349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 15 ShardOwnerId: 72057594046678944 ShardLocalIdx: 15, at schemeshard: 72057594046678944 
2025-05-29T15:23:06.761387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 3 Forgetting tablet 72075186233409556 2025-05-29T15:23:06.761685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 14 ShardOwnerId: 72057594046678944 ShardLocalIdx: 14, at schemeshard: 72057594046678944 2025-05-29T15:23:06.761716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 2 2025-05-29T15:23:06.761743Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409555 2025-05-29T15:23:06.761895Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 16 TabletID: 72075186233409557 Forgetting tablet 72075186233409547 2025-05-29T15:23:06.762411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:06.762454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409557 2025-05-29T15:23:06.762623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 16 ShardOwnerId: 72057594046678944 ShardLocalIdx: 16, at schemeshard: 72057594046678944 2025-05-29T15:23:06.762653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 2025-05-29T15:23:06.762949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-05-29T15:23:06.762995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-05-29T15:23:06.763025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:06.763029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2025-05-29T15:23:06.763040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:06.763063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:06.763070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 
72057594046678944 2025-05-29T15:23:06.763089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:06.763181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:23:06.763190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:23:06.763207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:15 2025-05-29T15:23:06.763211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409556 2025-05-29T15:23:06.763983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:14 2025-05-29T15:23:06.764008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:14 tabletId 72075186233409555 2025-05-29T15:23:06.764027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:23:06.764030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:23:06.764062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:16 2025-05-29T15:23:06.764067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409557 2025-05-29T15:23:06.764100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:06.764119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:06.764128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:06.764132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:06.764146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:06.764561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 139, wait until txId: 139 TestWaitNotification wait txId: 139 2025-05-29T15:23:06.764768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 139: send EvNotifyTxCompletion 2025-05-29T15:23:06.764774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 139 
2025-05-29T15:23:06.764884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 139, at schemeshard: 72057594046678944 2025-05-29T15:23:06.764906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 139: got EvNotifyTxCompletionResult 2025-05-29T15:23:06.764910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 139: satisfy waiter [1:2273:4043] TestWaitNotification: OK eventTxId 139 2025-05-29T15:23:06.765062Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:06.765098Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 50us result status StatusSuccess 2025-05-29T15:23:06.765165Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 5 ShardsInside: 0 ShardsLimit: 6 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 20 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService [GOOD]
Test command err:
2025-05-29T15:22:55.418342Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001705/r3tmp/tmpp0Llcv/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27719, node 1 TClient is connected to server localhost:14420 2025-05-29T15:22:55.542345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:22:55.558470Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:55.558501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:55.558507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:55.558629Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:55.559686Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:55.559769Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532175005665 != 1748532175005669 2025-05-29T15:22:55.601575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:55.601605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:55.612073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-05-29T15:23:07.204176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:678:2570], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:07.204227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:686:2575], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:07.204241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:07.205404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-05-29T15:23:07.208982Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:692:2578], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-05-29T15:23:07.240120Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:743:2610] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:07.268119Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:753:2619], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled 2025-05-29T15:23:07.268823Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzJkYzBlYTItM2EwMzFiNTMtOTExYWYwMzMtZGM1MDQwMWQ=, ActorId: [1:676:2568], ActorState: ExecuteState, TraceId: 01jwea7m221ssdsma41k0jqzyj, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled
;EXPECTATION=0
FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Restart [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:05.808706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:05.808739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:05.808745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:05.808751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:05.808764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:05.808768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:05.808779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:05.808794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:05.808908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:05.808981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:05.819736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:05.819758Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.821804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:05.821897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:05.821941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:05.823795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:05.823970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:23:05.824088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.824172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:05.824721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.824786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:05.825060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:05.825070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.825088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:05.825094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:05.825099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:05.825128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.826278Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:05.840118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:05.840190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.840262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:05.840303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:05.840312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.840936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.840964Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:05.841006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.841016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:05.841022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:05.841028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:05.841365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.841375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:05.841380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:05.841678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.841688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.841692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.841698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:05.842160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:05.842430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:05.842459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:05.842592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.842609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:05.842614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.842652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:05.842656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.842678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:05.842687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:05.843013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:05.843019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:05.843043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... HEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:05.865152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:05.865950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:05.866158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:05.866192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:05.866218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:05.866224Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.866247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:05.866303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:05.866322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1457: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1483: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866355Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-05-29T15:23:05.866384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1785: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_data_erasure_manager.cpp:452: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-05-29T15:23:05.866409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2033: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2151: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:05.866431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:05.866433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:05.866445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2237: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2303: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2453: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2832: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2911: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3409: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3445: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3659: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3804: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3821: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3981: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:3997: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4282: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4587: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4645: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4740: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4767: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4794: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.867841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:05.868546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:05.868562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.868618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:05.868626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:05.868631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:05.869025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:469:2418] sender: [1:530:2058] recipient: [1:15:2062] 2025-05-29T15:23:05.932146Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:05.932215Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 92us result status StatusSuccess 2025-05-29T15:23:05.932293Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 
} } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:05.932347Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:05.932358Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 12us result status StatusSuccess 2025-05-29T15:23:05.932396Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestOffsetEstimation [GOOD]
Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:109:2057] recipient: [1:102:2135] 2025-05-29T15:22:13.603177Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ:
72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:13.603208Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927938 is [1:154:2174] sender: [1:155:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:180:2057] recipient: [1:14:2061] 2025-05-29T15:22:13.607084Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:13.608881Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "aaa" Generation: 1 Important: true } 2025-05-29T15:22:13.609033Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2198] 2025-05-29T15:22:13.609461Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:186:2198] 2025-05-29T15:22:13.610052Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2199] 2025-05-29T15:22:13.610577Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:187:2199] 2025-05-29T15:22:13.613418Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|314d5650-151b1b66-4138c439-dfc2d0d1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [1:178:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:104:2057] recipient: [2:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:104:2057] recipient: [2:102:2135] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:109:2057] recipient: [2:102:2135] 2025-05-29T15:22:13.856401Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:13.856432Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:150:2057] recipient: [2:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:150:2057] recipient: 
[2:148:2170] Leader for TabletID 72057594037927938 is [2:154:2174] sender: [2:155:2057] recipient: [2:148:2170] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:178:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:108:2139]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:180:2057] recipient: [2:100:2134] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:183:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:184:2057] recipient: [2:182:2193] Leader for TabletID 72057594037927937 is [2:185:2194] sender: [2:186:2057] recipient: [2:182:2193] 2025-05-29T15:22:13.871556Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:13.871585Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:108:2139]) rebooted! !Reboot 72057594037927937 (actor [2:108:2139]) tablet resolver refreshed! new actor is[2:185:2194] Leader for TabletID 72057594037927937 is [2:185:2194] sender: [2:265:2057] recipient: [2:14:2061] 2025-05-29T15:22:15.415497Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.415685Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 2 actor [2:176:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Consumers { Name: "aaa" Generation: 2 Important: true } 2025-05-29T15:22:15.415831Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:271:2256] 2025-05-29T15:22:15.416420Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:271:2256] 2025-05-29T15:22:15.417030Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:272:2257] 2025-05-29T15:22:15.417503Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:272:2257] 2025-05-29T15:22:15.420061Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|13931fb0-d0938be1-fb04194-42f0f241_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [2:176:2190] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:104:2057] recipient: [3:102:2135] IGNORE Leader for TabletID 
72057594037927937 is [0:0:0] sender: [3:104:2057] recipient: [3:102:2135] Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:109:2057] recipient: [3:102:2135] 2025-05-29T15:22:15.493421Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.493446Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:150:2057] recipient: [3:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:150:2057] recipient: [3:148:2170] Leader for TabletID 72057594037927938 is [3:154:2174] sender: [3:155:2057] recipient: [3:148:2170] Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:180:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:108:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:182:2057] recipient: [3:100:2134] Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:185:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:186:2057] recipient: [3:184:2195] Leader for TabletID 72057594037927937 is [3:187:2196] sender: [3:188:2057] recipient: [3:184:2195] 2025-05-29T15:22:15.505780Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.505801Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [3:108:2139]) rebooted! !Reboot 72057594037927937 (actor [3:108:2139]) tablet resolver refreshed! new actor is[3:187:2196] Leader for TabletID 72057594037927937 is [3:187:2196] sender: [3:267:2057] recipient: [3:14:2061] 2025-05-29T15:22:17.052866Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:17.053073Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 3 actor [3:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } Consumers { Name: "aaa" Generation: 3 Important: true } 2025-05-29T15:22:17.053212Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:273:2258] 2025-05-29T15:22:17.053685Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [3:273:2258] 2025-05-29T15:22:17.054105Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:274:2259] 2025-05-29T15:22:17.054386Z node 3 :PERSQUEUE INFO: 
partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [3:274:2259] 2025-05-29T15:22:17.056889Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e3f0954f-fb6d7e2e-a3701b4a-b9c3395b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [3:178:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:104:2057] recipient: [4:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:104:2057] recipient: [4:102:2135] Leader for TabletID 72057594037927937 is [4:108:2139] sender: [4:109:2057] recipient: [4:102: ... on 0 generation 7 [77:657:2549] 2025-05-29T15:23:07.715127Z node 77 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [77:178:2192] Leader for TabletID 72057594037927937 is [77:600:2500] sender: [77:688:2057] recipient: [77:14:2061] Leader for TabletID 72057594037927937 is [77:600:2500] sender: [77:691:2057] recipient: [77:100:2134] Leader for TabletID 72057594037927937 is [77:600:2500] sender: [77:694:2057] recipient: [77:14:2061] Leader for TabletID 72057594037927937 is [77:600:2500] sender: [77:695:2057] recipient: [77:693:2570] Leader for TabletID 72057594037927937 is [77:696:2571] sender: [77:697:2057] recipient: [77:693:2570] 2025-05-29T15:23:07.722049Z node 77 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:07.722067Z node 77 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:23:07.722147Z node 77 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [77:755:2622] 2025-05-29T15:23:07.722510Z node 77 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [77:756:2623] 2025-05-29T15:23:07.723699Z node 77 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:07.723709Z node 77 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 8 [77:756:2623] 2025-05-29T15:23:07.726089Z node 77 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:07.726106Z node 77 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 8 [77:755:2622] 2025-05-29T15:23:07.730427Z node 77 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [77:178:2192] Leader for TabletID 72057594037927937 is [77:696:2571] sender: [77:786:2057] recipient: [77:14:2061] Leader for TabletID 72057594037927937 is [77:696:2571] sender: [77:789:2057] recipient: [77:100:2134] Leader for TabletID 72057594037927937 is [77:696:2571] sender: [77:791:2057] recipient: [77:14:2061] Leader for TabletID 72057594037927937 is [77:696:2571] sender: [77:793:2057] recipient: [77:792:2643] Leader for TabletID 72057594037927937 is [77:794:2644] sender: [77:795:2057] recipient: [77:792:2643] 2025-05-29T15:23:07.737149Z node 77 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:07.737168Z node 77 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:23:07.737241Z node 77 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [77:855:2697] 2025-05-29T15:23:07.737696Z node 77 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [77:856:2698] 2025-05-29T15:23:07.739229Z node 77 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:07.739246Z node 77 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 9 [77:856:2698] 2025-05-29T15:23:07.741646Z node 77 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:07.741664Z node 77 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 9 [77:855:2697] 2025-05-29T15:23:07.745810Z node 77 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [77:178:2192] Leader for TabletID 72057594037927937 is [77:794:2644] sender: [77:886:2057] recipient: [77:14:2061] Leader for TabletID 72057594037927937 is [77:794:2644] sender: [77:889:2057] recipient: [77:100:2134] Leader for TabletID 72057594037927937 is [77:794:2644] sender: [77:892:2057] recipient: [77:14:2061] Leader for TabletID 72057594037927937 is [77:794:2644] sender: [77:893:2057] recipient: [77:891:2718] Leader for TabletID 72057594037927937 is [77:894:2719] sender: [77:895:2057] recipient: [77:891:2718] 2025-05-29T15:23:07.753238Z node 77 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:07.753260Z node 77 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:23:07.753364Z node 77 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [77:957:2774] 2025-05-29T15:23:07.753741Z node 77 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [77:958:2775] 2025-05-29T15:23:07.755056Z node 77 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:07.755071Z node 77 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 10 [77:958:2775] 2025-05-29T15:23:07.757672Z node 77 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:07.757691Z node 77 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 10 [77:957:2774] 2025-05-29T15:23:07.762267Z node 77 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [77:178:2192] Leader for TabletID 72057594037927937 is [77:894:2719] sender: [77:990:2057] recipient: [77:14:2061] Leader for TabletID 72057594037927937 is [77:894:2719] sender: [77:993:2057] recipient: [77:100:2134] Leader for TabletID 72057594037927937 is [77:894:2719] sender: [77:995:2057] recipient: [77:14:2061] Leader for TabletID 72057594037927937 is [77:894:2719] sender: [77:997:2057] recipient: [77:996:2797] Leader for TabletID 72057594037927937 is [77:998:2798] sender: [77:999:2057] recipient: [77:996:2797] 2025-05-29T15:23:07.769343Z node 77 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:07.769361Z node 77 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:23:07.769442Z node 77 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [77:1063:2855] 2025-05-29T15:23:07.769826Z node 77 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [77:1064:2856] 2025-05-29T15:23:07.771182Z node 77 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:07.771196Z node 77 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 11 [77:1064:2856] 2025-05-29T15:23:07.773629Z node 77 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:07.773652Z node 77 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 11 [77:1063:2855] 2025-05-29T15:23:07.777945Z node 77 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [77:178:2192] Leader for TabletID 72057594037927937 is [77:998:2798] sender: [77:1096:2057] recipient: [77:14:2061] Leader for TabletID 72057594037927937 is [77:998:2798] sender: [77:1099:2057] recipient: [77:100:2134] Leader for TabletID 72057594037927937 is [77:998:2798] sender: [77:1102:2057] recipient: [77:14:2061] Leader for TabletID 72057594037927937 is [77:998:2798] sender: [77:1103:2057] recipient: [77:1101:2878] Leader for TabletID 72057594037927937 is [77:1104:2879] sender: [77:1105:2057] recipient: [77:1101:2878] 2025-05-29T15:23:07.786176Z node 77 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:07.786192Z node 77 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:23:07.786279Z node 77 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [77:1171:2938] 2025-05-29T15:23:07.786694Z node 77 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [77:1172:2939] 2025-05-29T15:23:07.788374Z node 77 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:07.788390Z node 77 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 12 [77:1172:2939] 2025-05-29T15:23:07.790841Z node 77 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:07.790860Z node 77 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 12 [77:1171:2938] 2025-05-29T15:23:07.794795Z node 77 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507
>> TSchemeShardSubDomainTest::SchemeLimitsCreatePq
>> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables
>> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeNullArgs4 [GOOD]
Test command err: 2025-05-29T15:23:04.167095Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888549914487007:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.167837Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027ed/r3tmp/tmpaCSIbK/pdisk_1.dat 2025-05-29T15:23:04.222686Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.222908Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888549914486845:2079] 1748532184166220 != 1748532184166223 TClient is connected to server localhost:23003 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:04.268440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.268474Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.269542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:04.301133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.305143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.308800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting...
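The trace above interleaves entries from several schemeshard and client actors in one flattened stream. Below is a minimal sketch for re-splitting such a stream into one entry per line, assuming only that every entry begins with an ISO-8601 UTC timestamp of the shape seen here (e.g. 2025-05-29T15:23:04.167095Z); Python standard library only.

import re
import sys

# Zero-width split just before each ISO-8601 timestamp such as
# "2025-05-29T15:23:04.167095Z " (assumed entry delimiter).
TS = re.compile(r"(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z )")

def split_entries(blob):
    # Yield individual log entries from a flattened stream; any
    # non-timestamped preamble is yielded as its own chunk.
    for chunk in TS.split(blob):
        chunk = chunk.strip()
        if chunk:
            yield chunk

if __name__ == "__main__":
    for entry in split_entries(sys.stdin.read()):
        print(entry)

Splitting on a lookahead keeps each timestamp attached to its own entry instead of consuming it as a delimiter.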
2025-05-29T15:23:04.618444Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888547154220587:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.618470Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027ed/r3tmp/tmpXk5OWX/pdisk_1.dat 2025-05-29T15:23:04.630911Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.631104Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888547154220566:2079] 1748532184618275 != 1748532184618278 TClient is connected to server localhost:26699 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:04.722069Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.722094Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.722390Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.723056Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:04.745359Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:23:04.746344Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
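Each test section in this report opens with a "------- [TM] ..." header naming the suite binary, plus a ">> Suite::Test [VERDICT]" marker. The following sketch rolls those markers up into a verdict summary; the marker grammar is inferred from the excerpts in this log, not from a documented ya report format.

import re
from collections import Counter

# ">> TFlatTest::SelectRangeNullArgs4 [GOOD]" ->
#   ("TFlatTest::SelectRangeNullArgs4", "GOOD")
VERDICT = re.compile(r">> ([\w:]+) \[(\w+)\]")

def summarize(log_text):
    # Count verdicts (GOOD, FAIL, TIMEOUT, ...) across all markers;
    # queued ">> Suite::Test" markers without a bracket are ignored.
    return Counter(status for _, status in VERDICT.findall(log_text))

# Usage sketch: summarize(open("ya_test.log").read()).most_common()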
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitWriteSplit [GOOD] Test command err: 2025-05-29T15:23:04.166930Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888550107235805:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.167067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027f6/r3tmp/tmpxrP4ow/pdisk_1.dat 2025-05-29T15:23:04.230651Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.231996Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888550107235774:2079] 1748532184166673 != 1748532184166676 TClient is connected to server localhost:27135 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:04.266551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.268827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:23:04.268934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.268958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-29T15:23:04.269988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:04.274707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
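Every entry in a section carries a microsecond-resolution UTC timestamp, so the wall-clock span of one test's output can be estimated by diffing its first and last stamps. A sketch follows, under the assumption that the section text has already been isolated (for instance by splitting on the "------- [TM]" headers).

import re
from datetime import datetime

TS = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}Z")
FMT = "%Y-%m-%dT%H:%M:%S.%fZ"

def section_duration(section):
    # Seconds between the first and last timestamp in one test section.
    stamps = TS.findall(section)
    if len(stamps) < 2:
        return 0.0
    first = datetime.strptime(stamps[0], FMT)
    last = datetime.strptime(stamps[-1], FMT)
    return (last - first).total_seconds()

For the WriteSplitWriteSplit section above, this would measure from roughly 15:23:04.166930Z to the section's final entry.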
2025-05-29T15:23:04.345728Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-29T15:23:04.346091Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-29T15:23:04.350870Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-29T15:23:04.351454Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-05-29T15:23:04.358244Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 8r (max 9), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1748532184378 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... 
(TRUNCATED) 2025-05-29T15:23:04.391754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } } } TxId: 281474976710680 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:23:04.391830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:798: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710680:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2025-05-29T15:23:04.391941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-29T15:23:04.391954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-29T15:23:04.391957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-05-29T15:23:04.392009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-05-29T15:23:04.392076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1077: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710680:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000d\000\000\000" ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000d\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\310\000\000\000" ShardIdx: 4 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000\310\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" ShardIdx: 5 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2025-05-29T15:23:04.392089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976710680:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:23:04.392393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976710680, response: Status: StatusAccepted TxId: 281474976710680 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-05-29T15:23:04.392413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710680, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-05-29T15:23:04.392479Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710680:0, at schemeshard: 72057594046644480 2025-05-29T15:23:04.392491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 281474976710680:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-05-29T15:23:04.392565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:357: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-29T15:23:04.392594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:357: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-29T15:23:04.392610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:357: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 5 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-29T15:23:04.392709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-05-29T15:23:04.392748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-05-29T15:23:04.392757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:5 msg type: 268697601 2025-05-29T15:23:04.392769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710680 ... 
AT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888546574154597 RawX2: 4503608217307454 } TabletId: 72075186224037891 State: 4 2025-05-29T15:23:04.857670Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:04.857736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:04.857758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 TClient::Ls request: /dc-1/Dir/TableOld 2025-05-29T15:23:04.858631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-29T15:23:04.858690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-05-29T15:23:04.858722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-29T15:23:04.858763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-05-29T15:23:04.859008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:1 2025-05-29T15:23:04.859015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-29T15:23:04.859022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:4 2025-05-29T15:23:04.859023Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-29T15:23:04.859208Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-29T15:23:04.859268Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-05-29T15:23:04.859302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888546574154792 RawX2: 4503608217307482 } TabletId: 72075186224037893 State: 4 2025-05-29T15:23:04.859313Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:04.859332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888546574154598 RawX2: 4503608217307455 } TabletId: 72075186224037892 State: 4 
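The teardown entries above pair "Free shard <owner>:<idx>" requests with later "Free tablet reply" and "Deleted shardIdx <owner>:<idx>" confirmations. A small cross-check sketch makes a shard that was freed but never deleted stand out; the patterns mirror these excerpts and may need widening for other schemeshard message variants.

import re

FREED = re.compile(r"Free shard (\d+:\d+)")
DELETED = re.compile(r"Deleted shardIdx (\d+:\d+)")

def unmatched_shards(log_text):
    # Shard ids that were freed to Hive but never reported deleted.
    return set(FREED.findall(log_text)) - set(DELETED.findall(log_text))

In this section every freed shard (72057594046644480:1 through :7) eventually gets a matching "Deleted shardIdx" entry, so the set comes back empty.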
2025-05-29T15:23:04.859338Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:04.859345Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888546574154260 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-05-29T15:23:04.859350Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:04.859439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:04.859458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:04.859465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:04.859502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888546574154601 RawX2: 4503608217307456 } TabletId: 72075186224037890 State: 4 2025-05-29T15:23:04.859509Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 TClient::Ls response: 2025-05-29T15:23:04.859549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:04.859570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888546574154798 RawX2: 4503608217307483 } TabletId: 72075186224037894 State: 4 2025-05-29T15:23:04.859576Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-05-29T15:23:04.859612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:04.860220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-05-29T15:23:04.860262Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-05-29T15:23:04.860294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 
72057594046644480 2025-05-29T15:23:04.860315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-29T15:23:04.860334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-29T15:23:04.860353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-29T15:23:04.860371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-29T15:23:04.860389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-29T15:23:04.860407Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-29T15:23:04.860408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-05-29T15:23:04.860422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-29T15:23:04.860452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:23:04.860458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-29T15:23:04.860467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-29T15:23:04.860687Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2025-05-29T15:23:04.860693Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-05-29T15:23:04.860694Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-29T15:23:04.860695Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-05-29T15:23:04.860747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:6 2025-05-29T15:23:04.860757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-05-29T15:23:04.860765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:5 2025-05-29T15:23:04.860770Z node 
2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-05-29T15:23:04.860790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:2 2025-05-29T15:23:04.860796Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-29T15:23:04.860802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:3 2025-05-29T15:23:04.860806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-29T15:23:04.860809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:7 2025-05-29T15:23:04.860811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-05-29T15:23:04.860816Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::SetLockNothing [GOOD] Test command err: 2025-05-29T15:23:03.423266Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888545125177996:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.423283Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002817/r3tmp/tmpjCiudT/pdisk_1.dat 2025-05-29T15:23:03.478147Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888545125177975:2079] 1748532183423138 != 1748532183423141 2025-05-29T15:23:03.480795Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:6158 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:03.525570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.525613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.526627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:03.552434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:03.559672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.622600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.632143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.922427Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888542848870136:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.922452Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002817/r3tmp/tmppo2y8F/pdisk_1.dat 2025-05-29T15:23:03.938893Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.939117Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888542848870112:2079] 1748532183922289 != 1748532183922292 TClient is connected to server localhost:10271 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:04.026253Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.026277Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.026495Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.027338Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:04.036049Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.050794Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.065263Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.336816Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888549550815496:2140];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002817/r3tmp/tmpIGoQxL/pdisk_1.dat 2025-05-29T15:23:04.342979Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:23:04.350077Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.350308Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888549550815383:2079] 1748532184335932 != 1748532184335935 TClient is connected to server localhost:25946 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:04.440985Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.441021Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.441313Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.441986Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:04.449405Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.463826Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.477684Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.803914Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888546835762297:2058];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.803950Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002817/r3tmp/tmphs0fHn/pdisk_1.dat 2025-05-29T15:23:04.816411Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.816600Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888546835762281:2079] 1748532184803813 != 1748532184803816 TClient is connected to server localhost:19127 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: " ... p:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.907320Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.908609Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-29T15:23:04.910597Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.925657Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.939594Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.294035Z node 5 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7509888553176578694:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:05.294070Z node 5 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002817/r3tmp/tmp3xt1xU/pdisk_1.dat 2025-05-29T15:23:05.310293Z node 5 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.310572Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [5:7509888553176578674:2079] 1748532185293930 != 1748532185293933 TClient is connected to server localhost:2102 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:05.398984Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:05.399012Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:05.399259Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.399968Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-29T15:23:05.408296Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.422913Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.437074Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.770645Z node 6 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7509888553063603687:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:05.770906Z node 6 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002817/r3tmp/tmpEg9CH5/pdisk_1.dat 2025-05-29T15:23:05.785294Z node 6 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.785541Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [6:7509888553063603658:2079] 1748532185770491 != 1748532185770494 TClient is connected to server localhost:1192 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:05.801556Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:05.814120Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:05.874021Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:05.874042Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:05.875162Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:05.978566Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.989926Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.232978Z node 7 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7509888557985443018:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:06.233001Z node 7 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002817/r3tmp/tmpyRYfHf/pdisk_1.dat 2025-05-29T15:23:06.242500Z node 7 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:06.242609Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [7:7509888557985442998:2079] 1748532186232861 != 1748532186232864 TClient is connected to server localhost:1852 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:06.335873Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:06.335893Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:06.336193Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.336933Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-29T15:23:06.345818Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.360580Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.374867Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::LargeProxyReplyRW [GOOD] Test command err: 2025-05-29T15:23:02.983984Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888541385085148:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:02.984007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002830/r3tmp/tmpVhC5PQ/pdisk_1.dat 2025-05-29T15:23:03.036142Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.036310Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888541385085129:2079] 1748532182983838 != 1748532182983841 TClient is connected to server localhost:18253 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.114660Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.114687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.115862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:03.116505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:03.119264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.126258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.513150Z node 1 :TX_PROXY ERROR: datareq.cpp:2703: Actor# [1:7509888549975023293:4121] txid# 281474976716010 MergeResult Result too large TDataReq marker# P18 2025-05-29T15:23:04.513181Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7509888549975023293:4121] txid# 281474976716010 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. (71692241 > 50331648) proxy error code: ExecResultUnavailable 2025-05-29T15:23:04.658671Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888548028739187:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.658704Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002830/r3tmp/tmpJxRD2A/pdisk_1.dat 2025-05-29T15:23:04.674261Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.674429Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888548028739166:2079] 1748532184658561 != 1748532184658564 TClient is connected to server localhost:26127 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:04.762853Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.762886Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.763190Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.763837Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-29T15:23:04.771147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.088398Z node 2 :TX_PROXY ERROR: datareq.cpp:2703: Actor# [2:7509888552323710041:4125] txid# 281474976716011 MergeResult Result too large TDataReq marker# P18 2025-05-29T15:23:06.088431Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7509888552323710041:4125] txid# 281474976716011 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. (71692241 > 50331648) proxy error code: ExecResultUnavailable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksFatTest::LocksLimit [GOOD] Test command err: 2025-05-29T15:23:02.542037Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888537716576063:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:02.542077Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00284b/r3tmp/tmp0ONuMb/pdisk_1.dat 2025-05-29T15:23:02.595658Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888537716576042:2079] 1748532182541856 != 1748532182541859 2025-05-29T15:23:02.601758Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:63069 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:02.644462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:02.644497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:02.645648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:02.676047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-29T15:23:02.685334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:02.705994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:02.715521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.239298Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888547373296245:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.239466Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00284b/r3tmp/tmp0mSIoa/pdisk_1.dat 2025-05-29T15:23:04.259983Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.260308Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888547373296225:2079] 1748532184239134 != 1748532184239137 TClient is connected to server localhost:11554 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:04.344302Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.344329Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.345067Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:04.345346Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 
2025-05-29T15:23:04.351228Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.366454Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.383306Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.753566Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888547381573217:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.753581Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00284b/r3tmp/tmpMM1rkZ/pdisk_1.dat 2025-05-29T15:23:04.766168Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.766454Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888547381573199:2079] 1748532184753452 != 1748532184753455 TClient is connected to server localhost:18174 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:04.857355Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.857376Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.857621Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.858380Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-29T15:23:04.872585Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.883968Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.897922Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:109:2057] recipient: [1:102:2135] 2025-05-29T15:22:12.316764Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:12.316794Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927938 is [1:154:2174] sender: [1:155:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:180:2057] recipient: [1:14:2061] 2025-05-29T15:22:12.320710Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:179:2193], now have 1 active actors on pipe 2025-05-29T15:22:12.320738Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:22:12.323719Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-05-29T15:22:12.324591Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 
SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-05-29T15:22:12.324623Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:12.324920Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-05-29T15:22:12.324952Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:22:12.324961Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:22:12.324967Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--asdfgs--topic:2:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:22:12.324972Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--asdfgs--topic:3:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:22:12.325095Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:12.325184Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2198] 2025-05-29T15:22:12.325846Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 
2025-05-29T15:22:12.325859Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:186:2198] 2025-05-29T15:22:12.325867Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:22:12.326385Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:22:12.326404Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-05-29T15:22:12.326409Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-05-29T15:22:12.326414Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit request with generation 1 2025-05-29T15:22:12.326418Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit with generation 1 done 2025-05-29T15:22:12.326445Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:12.326467Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:12.326471Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:12.326477Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:12.326481Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-05-29T15:22:12.326484Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-05-29T15:22:12.326487Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000ctest 2025-05-29T15:22:12.326491Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000utest 2025-05-29T15:22:12.326494Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:12.326499Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:22:12.326519Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:12.326524Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 
2025-05-29T15:22:12.326553Z node 1 :PERSQUEUE DEBUG: read.h:262: CacheProxy. Passthrough write request to KV 2025-05-29T15:22:12.326637Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:12.326680Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2199] 2025-05-29T15:22:12.327134Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 2025-05-29T15:22:12.327144Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:187:2199] 2025-05-29T15:22:12.327151Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:22:12.327553Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-05-29T15:22:12.327565Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2025-05-29T15:22:12.327570Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2025-05-29T15:22:12.327575Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit request with generation 1 2025-05-29T15:22:12.327579Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit with generation 1 done 2025-05-29T15:22:12.327595Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:12.327599Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:12.327604Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-05-29T15:22:12.327608Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-05-29T15:22:12.327611Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 7205759403792793 ... ing 2 [82:265:2259] 2025-05-29T15:23:04.083967Z node 82 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:04.083985Z node 82 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [82:263:2257] 2025-05-29T15:23:04.084604Z node 82 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-05-29T15:23:04.084616Z node 82 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [82:264:2258]
2025-05-29T15:23:04.084639Z node 82 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized.
2025-05-29T15:23:04.084643Z node 82 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 3 [82:265:2259]
!Reboot 72057594037927937 (actor [82:108:2139]) rebooted!
!Reboot 72057594037927937 (actor [82:108:2139]) tablet resolver refreshed! new actor is[82:210:2212]
Leader for TabletID 72057594037927937 is [82:210:2212] sender: [82:324:2057] recipient: [82:14:2061]
2025-05-29T15:23:05.307156Z node 82 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|12706ac4-70dc6a82-25bbfba7-993f1e57_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:23:05.325269Z node 82 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e3a4372a-83873b78-e210fc5f-1916678_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:23:05.340276Z node 82 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ca0ed03a-5cf26242-a91e7120-172607a0_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:23:05.352055Z node 82 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|6af76e72-3a07228-c30c1b80-b3d13874_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:23:05.359596Z node 82 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0
2025-05-29T15:23:05.360509Z node 82 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1d0f231d-e6fd820-f920b6a6-84144d74_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:23:05.367437Z node 82 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0
Leader for TabletID 72057594037927937 is [0:0:0] sender: [83:104:2057] recipient: [83:102:2135]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [83:104:2057] recipient: [83:102:2135]
Leader for TabletID 72057594037927937 is [83:108:2139] sender: [83:109:2057] recipient: [83:102:2135]
2025-05-29T15:23:05.516633Z node 83 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-05-29T15:23:05.516658Z node 83 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info
Leader for TabletID 72057594037927938 is [0:0:0] sender: [83:150:2057] recipient: [83:148:2170]
IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [83:150:2057] recipient: [83:148:2170]
Leader for TabletID 72057594037927938 is [83:154:2174] sender: [83:155:2057] recipient: [83:148:2170]
Leader for TabletID 72057594037927937 is [83:108:2139] sender: [83:180:2057] recipient: [83:14:2061]
2025-05-29T15:23:05.520832Z node 83 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-05-29T15:23:05.521076Z node 83 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 81 actor [83:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 81 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 81 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 81 Important: true }
2025-05-29T15:23:05.521267Z node 83 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [83:186:2198]
2025-05-29T15:23:05.521955Z node 83 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [83:186:2198]
2025-05-29T15:23:05.522346Z node 83 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [83:187:2199]
2025-05-29T15:23:05.522860Z node 83 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [83:187:2199]
2025-05-29T15:23:05.523236Z node 83 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [83:188:2200]
2025-05-29T15:23:05.523709Z node 83 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [83:188:2200]
2025-05-29T15:23:05.528739Z node 83 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|60886ec4-c2e7a943-4f74bce-bb3eeb75_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:23:05.543206Z node 83 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a1cceb8b-7b646955-bb2fe21a-7ceb3e6_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:23:05.560828Z node 83 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4f11732b-70dd8b5e-bfdac505-8011b987_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:23:05.574019Z node 83 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|847bee00-6bd25b02-462b5fcc-bbb7840c_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:23:05.582120Z node 83 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0
2025-05-29T15:23:05.583030Z node 83 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|2bae1d00-1b63b79d-66694475-bff4ff33_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:23:05.590597Z node 83 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0
Leader for TabletID 72057594037927937 is [0:0:0] sender: [84:104:2057] recipient: [84:102:2135]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [84:104:2057] recipient: [84:102:2135]
Leader for TabletID 72057594037927937 is [84:108:2139] sender: [84:109:2057] recipient: [84:102:2135]
2025-05-29T15:23:05.772052Z node 84 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-05-29T15:23:05.772081Z node 84 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info
Leader for TabletID 72057594037927938 is [0:0:0] sender: [84:150:2057] recipient: [84:148:2170]
IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [84:150:2057] recipient: [84:148:2170]
Leader for TabletID 72057594037927938 is [84:154:2174] sender: [84:155:2057] recipient: [84:148:2170]
Leader for TabletID 72057594037927937 is [84:108:2139] sender: [84:180:2057] recipient: [84:14:2061]
2025-05-29T15:23:05.776658Z node 84 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-05-29T15:23:05.776938Z node 84 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 82 actor [84:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 82 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 82 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 82 Important: true }
2025-05-29T15:23:05.777112Z node 84 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [84:186:2198]
2025-05-29T15:23:05.777837Z node 84 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [84:186:2198]
2025-05-29T15:23:05.778278Z node 84 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [84:187:2199]
2025-05-29T15:23:05.778804Z node 84 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [84:187:2199]
2025-05-29T15:23:05.779194Z node 84 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [84:188:2200]
2025-05-29T15:23:05.779700Z node 84 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [84:188:2200]
2025-05-29T15:23:05.784737Z node 84 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c559a988-40c57b5b-363adbc2-5182b45f_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:23:05.797623Z node 84 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d09feaac-252fba52-d53f46ec-9510ca12_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:23:05.813891Z node 84 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|26dfdea7-d29fd131-b63d6f73-348a163c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:23:05.827403Z node 84 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3beb348e-548d5ffc-199c64d7-ef454e0d_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:23:05.836434Z node 84 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0
2025-05-29T15:23:05.837127Z node 84 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|59d5796d-592c35f7-64dc50a9-14ead43c_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:23:05.842546Z node 84 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_BrokenLockInf [GOOD]
Test command err:
2025-05-29T15:23:02.526473Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888539294258876:2063];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:23:02.526496Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00284e/r3tmp/tmpyJTNkX/pdisk_1.dat
2025-05-29T15:23:02.580482Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888539294258855:2079] 1748532182526362 != 1748532182526365
2025-05-29T15:23:02.581017Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TClient is connected to server localhost:10611
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:02.655667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:02.655689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:02.656720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:02.656859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:02.663553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:02.725569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:02.734017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:02.978099Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888538922223787:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:02.978147Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00284e/r3tmp/tmpGVUXNN/pdisk_1.dat 2025-05-29T15:23:02.990660Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:02.990903Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888538922223767:2079] 1748532182977958 != 1748532182977961 TClient is connected to server localhost:24154 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.082206Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.082237Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.082583Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.084098Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:03.091363Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.106234Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.119724Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.451016Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888544200732081:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.451053Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00284e/r3tmp/tmpeNzZHe/pdisk_1.dat 2025-05-29T15:23:03.466972Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.467210Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888544200732056:2079] 1748532183450884 != 1748532183450887 TClient is connected to server localhost:8050 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:03.554873Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:03.554901Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:03.555118Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.555825Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:03.559787Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.574968Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.589225Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:03.843769Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888544932414915:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.843839Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00284e/r3tmp/tmpMoYMcW/pdisk_1.dat 2025-05-29T15:23:03.860168Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:03.860399Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888544932414895:2079] 1748532183843577 != 1748532183843580 TClient is connected to server localhost:13895 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: " ... ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.387333Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:05.393851Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.408908Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.422720Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.747537Z node 8 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7509888551668688033:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:05.747774Z node 8 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00284e/r3tmp/tmp5CxzQc/pdisk_1.dat 2025-05-29T15:23:05.762260Z node 8 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.762589Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [8:7509888551668688012:2079] 1748532185747109 != 1748532185747112 TClient is connected to server localhost:27833 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:05.852138Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:05.852162Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:05.852975Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:05.853224Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:23:05.862981Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.878072Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.891732Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.139299Z node 9 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7509888556010190475:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:06.139343Z node 9 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00284e/r3tmp/tmpcWGGKa/pdisk_1.dat 2025-05-29T15:23:06.154226Z node 9 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:06.154426Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [9:7509888556010190453:2079] 1748532186139180 != 1748532186139183 TClient is connected to server localhost:4527 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:06.243342Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:06.243369Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:06.243646Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.244384Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:06.244521Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.247686Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.255542Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.269142Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.511718Z node 10 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7509888555257493675:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:06.511742Z node 10 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00284e/r3tmp/tmp4NXaDw/pdisk_1.dat 2025-05-29T15:23:06.526894Z node 10 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:06.527411Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [10:7509888555257493657:2079] 1748532186511595 != 1748532186511598 TClient is connected to server localhost:29235 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success.
2025-05-29T15:23:06.615548Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:23:06.615571Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:23:06.615816Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:23:06.616570Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected
waiting...
2025-05-29T15:23:06.618382Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:23:06.633694Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:23:06.647950Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
>> TSchemeShardSubDomainTest::RestartAtInFly
>> TSchemeShardSubDomainTest::DeleteAndRestart
>> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe
>> TLocksTest::GoodSameShardLock [GOOD]
>> TSchemeShardSubDomainTest::RestartAtInFly [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::BrokenNullLock [GOOD]
Test command err:
2025-05-29T15:23:04.107216Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888548027388348:2061];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:23:04.107252Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027ff/r3tmp/tmpMNiJfv/pdisk_1.dat
2025-05-29T15:23:04.162919Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888548027388329:2079] 1748532184107094 != 1748532184107097
2025-05-29T15:23:04.165241Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TClient is connected to server localhost:4920
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:04.209309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.209341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.210487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:04.239370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.242286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.247561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.313100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.322013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:04.560612Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888548032484420:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.560645Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027ff/r3tmp/tmpMAkTNR/pdisk_1.dat 2025-05-29T15:23:04.574707Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.577116Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888548032484401:2079] 1748532184560526 != 1748532184560529 TClient is connected to server localhost:6259 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:04.665036Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.665065Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.665334Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.666033Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:04.673111Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.687661Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.700830Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:05.051650Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888554045070181:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:05.051677Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027ff/r3tmp/tmpIhbxSh/pdisk_1.dat 2025-05-29T15:23:05.067370Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.067609Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888554045070148:2079] 1748532185051486 != 1748532185051489 TClient is connected to server localhost:6435 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:05.156529Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:05.156562Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:05.156806Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.157579Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:05.162993Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.178712Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.191730Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:05.509396Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888554218197578:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:05.509428Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027ff/r3tmp/tmpLhoR85/pdisk_1.dat 2025-05-29T15:23:05.521723Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.521929Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888554218197558:2079] 1748532185509305 != 1748532185509308 TClient is connected to server localhost:5595 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: ... 4037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:06.955011Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.955753Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:06.961593Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.976901Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:06.990653Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.236341Z node 8 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7509888560196111421:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:07.236363Z node 8 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027ff/r3tmp/tmpQ3nMmU/pdisk_1.dat 2025-05-29T15:23:07.252039Z node 8 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:07.254342Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [8:7509888560196111402:2079] 1748532187236255 != 1748532187236258 TClient is connected to server localhost:27646 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:07.340216Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:07.340247Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:07.340958Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:07.341294Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:23:07.346726Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.362084Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.375583Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.613134Z node 9 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7509888561353005524:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:07.613154Z node 9 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027ff/r3tmp/tmpwucV8b/pdisk_1.dat 2025-05-29T15:23:07.625193Z node 9 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:07.625435Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [9:7509888561353005500:2079] 1748532187613000 != 1748532187613003 TClient is connected to server localhost:1611 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:07.716550Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:07.716581Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:07.716887Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.717541Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:07.725174Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.739732Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.753504Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.982164Z node 10 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7509888560062776036:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:07.982180Z node 10 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027ff/r3tmp/tmp3U5Oaq/pdisk_1.dat 2025-05-29T15:23:07.996831Z node 10 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:07.997069Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [10:7509888560062776017:2079] 1748532187982093 != 1748532187982096 TClient is connected to server localhost:14991 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:08.085676Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:08.085700Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:08.085951Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:08.086677Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:08.095999Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:08.110877Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:08.124337Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
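For reference, the TClient::Ls response that recurs in the runs above, shown once in indented protobuf text form. Every field value is copied verbatim from the log; the source's own (TRUNCATED) cut is preserved, so the tail of DomainDescription is not recoverable:

    Status: 1
    StatusCode: SUCCESS
    SchemeStatus: 0
    PathDescription {
      Self {
        Name: "dc-1"
        PathId: 1
        SchemeshardId: 72057594046644480
        PathType: EPathTypeDir
        CreateFinished: true
        CreateTxId: 1
        CreateStep: 0
        ParentPathId: 1
        PathState: EPathStateNoChanges
        Owner: "root@builtin"
        ACL: ""
        EffectiveACL: ""
        PathVersion: 2
        PathSubType: EPathSubTypeEmpty
        Version {
          GeneralVersion: 2
          ACLVersion: 0
          EffectiveACLVersion: 0
          UserAttrsVersion: 1
          ChildrenVersion: 1
          SubDomainVersion: 0
          SecurityStateVersion: 0
        }
        ChildrenExist: false
      }
      Children {
        Name: ".sys"
        PathId: 18446744073709551615
        SchemeshardId: 72057594046644480
        PathType: EPathTypeDir
        CreateFinished: true
        CreateTxId: 0
        CreateStep: 0
        ParentPathId: 18446744073709551615
      }
      DomainDescription {
        SchemeShardId_Depricated: 72057594046644480
        PathId_Depricated: 1
        ProcessingParams {
          Version: 0
          Pl... (TRUNCATED)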
>> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe [GOOD]
>> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects
>> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD]
>> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD]
>> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD]
>> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::GoodSameShardLock [GOOD]
Test command err:
2025-05-29T15:23:04.178034Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888547915818856:2062];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:23:04.178247Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027e5/r3tmp/tmprLGDNf/pdisk_1.dat
2025-05-29T15:23:04.236975Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888547915818835:2079] 1748532184177828 != 1748532184177831
2025-05-29T15:23:04.239303Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TClient is connected to server localhost:20981
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-05-29T15:23:04.269528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
waiting...
2025-05-29T15:23:04.281421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:23:04.311056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.311086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.312184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:04.345304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.354539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.644396Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888550099232054:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:04.644649Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027e5/r3tmp/tmptKARSi/pdisk_1.dat 2025-05-29T15:23:04.657338Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.657553Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888550099232031:2079] 1748532184643942 != 1748532184643945 TClient is connected to server localhost:4697 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:04.748735Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.748769Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.749036Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.749875Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:04.756965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.772085Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:04.785776Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.122632Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888551751237144:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:05.122667Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027e5/r3tmp/tmpD8vbqj/pdisk_1.dat 2025-05-29T15:23:05.138714Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.138788Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888551751237122:2079] 1748532185122498 != 1748532185122501 TClient is connected to server localhost:24855 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:05.226983Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:05.227014Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:05.227378Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.228101Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:05.233025Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.250603Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.261732Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:05.626635Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888554737037328:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:05.626864Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027e5/r3tmp/tmp7kaG9j/pdisk_1.dat 2025-05-29T15:23:05.641513Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.641788Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888554737037303:2079] 1748532185626411 != 1748532185626414 TClient is connected to server localhost:18624 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: " ... 037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:07.070427Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.071262Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-29T15:23:07.073491Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.088483Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.102168Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.332789Z node 8 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7509888559562269604:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:07.332810Z node 8 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027e5/r3tmp/tmplebX8P/pdisk_1.dat 2025-05-29T15:23:07.347835Z node 8 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:07.347986Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [8:7509888559562269582:2079] 1748532187332656 != 1748532187332659 TClient is connected to server localhost:64071 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:07.436254Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:07.436278Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:07.436968Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.437319Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-29T15:23:07.445121Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.460056Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.473856Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.812876Z node 9 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7509888561003675969:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:07.812907Z node 9 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027e5/r3tmp/tmpbCY7gI/pdisk_1.dat 2025-05-29T15:23:07.824774Z node 9 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:07.824920Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [9:7509888561003675948:2079] 1748532187812776 != 1748532187812779 TClient is connected to server localhost:11622 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:07.916581Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:07.916609Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:07.916911Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.917601Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-29T15:23:07.920771Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.935611Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:07.949279Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:08.138543Z node 10 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7509888565562200220:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:08.138563Z node 10 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027e5/r3tmp/tmpPlV4Rl/pdisk_1.dat 2025-05-29T15:23:08.153712Z node 10 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:08.153964Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [10:7509888565562200197:2079] 1748532188138416 != 1748532188138419 TClient is connected to server localhost:15152 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:08.242581Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:08.242605Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:08.242967Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:08.243713Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-29T15:23:08.305577Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:23:08.313074Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:23:08.327435Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout
>> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:23:08.717057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:23:08.717078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:08.717082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:23:08.717086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:23:08.717095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:23:08.717098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:23:08.717105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:08.717115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:08.717204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:23:08.717264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:23:08.727396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:23:08.727423Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:08.730320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:08.730472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:08.730524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:08.732721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:08.732944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:08.733041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:08.733100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:08.733592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:08.733627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:08.733874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:08.733885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:08.733903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:08.733909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:08.733913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:08.733941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.735111Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:08.753013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:08.753089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.753146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:08.753182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:08.753191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.753921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:08.753943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:08.753980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.753987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:08.753991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:08.753995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:08.754383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.754395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:08.754401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:08.754769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.754783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.754789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:08.754796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:08.755517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:08.755978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:08.756009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:08.756155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:08.756172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:08.756178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:08.756221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:08.756226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:08.756250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:08.756258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:08.756660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:08.756669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:08.756711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
de 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:08.783061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:08.783082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1457: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1483: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-05-29T15:23:08.783146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1785: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_data_erasure_manager.cpp:452: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-05-29T15:23:08.783170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2033: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2151: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:08.783192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:08.783195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:08.783207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2237: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2303: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2453: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2832: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2911: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3409: TTxInit 
for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3445: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3659: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3804: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3821: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3981: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3997: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4282: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4587: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4645: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4740: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4767: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.783443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4794: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.784423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:08.785017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:08.785033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:08.785081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:08.785090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:08.785096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:08.785127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 100 2025-05-29T15:23:08.846398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-05-29T15:23:08.846432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 100 Leader for TabletID 72057594046678944 is [1:463:2412] sender: [1:524:2058] recipient: [1:15:2062] 2025-05-29T15:23:08.846605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-05-29T15:23:08.846636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-05-29T15:23:08.846642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:522:2457] TestWaitNotification: OK eventTxId 100 2025-05-29T15:23:08.846753Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:08.846804Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 83us result status StatusSuccess 2025-05-29T15:23:08.846913Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:08.846996Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:08.847017Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 23us result status StatusSuccess 2025-05-29T15:23:08.847071Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:23:08.565876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:23:08.565910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:08.565916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:23:08.565922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:23:08.565934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:23:08.565938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:23:08.565947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:08.565960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:08.566084Z
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:08.566161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:08.584168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:08.584196Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:08.588374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:08.588509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:08.588561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:08.591373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:08.591534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:08.591642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:08.591719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:08.592150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:08.592193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:08.592468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:08.592480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:08.592504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:08.592512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:08.592518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:08.592552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.593862Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:08.616313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , 
at schemeshard: 72057594046678944 2025-05-29T15:23:08.616391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.616464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:08.616513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:08.616522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.619229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:08.619274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:08.619336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.619348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:08.619355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:08.619361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:08.620162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.620174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:08.620178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:08.620658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.620681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.620687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:08.620693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:08.621208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:08.621667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:08.621706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:08.621845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:08.621869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:08.621877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:08.621940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:08.621949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:08.621978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:08.621987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:08.622510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:08.622523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:08.622575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
esult> execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409551 TxId: 104 Status: OK
2025-05-29T15:23:08.761567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:643: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409551 TxId: 104 Status: OK
2025-05-29T15:23:08.761572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:648: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false
2025-05-29T15:23:08.761576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:754: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2
2025-05-29T15:23:08.762544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944
FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003
2025-05-29T15:23:08.763289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944
FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003
FAKE_COORDINATOR: Erasing txId 104
2025-05-29T15:23:08.783350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0
2025-05-29T15:23:08.783413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003
2025-05-29T15:23:08.783425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:624: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003
2025-05-29T15:23:08.783438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:265: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.783443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false
2025-05-29T15:23:08.783448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:754: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1
2025-05-29T15:23:08.783670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0
2025-05-29T15:23:08.783693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003
2025-05-29T15:23:08.783700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:624: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003
2025-05-29T15:23:08.783708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:265: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.783712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true
2025-05-29T15:23:08.783768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 104:0 128 -> 240
2025-05-29T15:23:08.783800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-05-29T15:23:08.783812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5
2025-05-29T15:23:08.784927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.784963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.784997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:08.785004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:08.785040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-05-29T15:23:08.785080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:08.785086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:339:2314], at schemeshard: 72057594046678944, txId: 104, path id: 1
2025-05-29T15:23:08.785091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:339:2314], at schemeshard: 72057594046678944, txId: 104, path id: 3
2025-05-29T15:23:08.785194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.785202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 104:0 ProgressState
2025-05-29T15:23:08.785216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1
2025-05-29T15:23:08.785220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1
2025-05-29T15:23:08.785225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1
2025-05-29T15:23:08.785228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1
2025-05-29T15:23:08.785233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false
2025-05-29T15:23:08.785238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1
2025-05-29T15:23:08.785245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:0
2025-05-29T15:23:08.785249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:0
2025-05-29T15:23:08.785279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6
2025-05-29T15:23:08.785285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 104, publications: 2, subscribers: 0
2025-05-29T15:23:08.785289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7
2025-05-29T15:23:08.785293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2
2025-05-29T15:23:08.785449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104
2025-05-29T15:23:08.785463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104
2025-05-29T15:23:08.785468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104
2025-05-29T15:23:08.785476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7
2025-05-29T15:23:08.785480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-05-29T15:23:08.785648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104
2025-05-29T15:23:08.785660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104
2025-05-29T15:23:08.785664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104
2025-05-29T15:23:08.785668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2
2025-05-29T15:23:08.785671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5
2025-05-29T15:23:08.785681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0
2025-05-29T15:23:08.787422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104
2025-05-29T15:23:08.788960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104
TestModificationResult got TxId: 104, wait until txId: 104
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly
>> TSchemeShardSubDomainTest::DeclareDefineAndDelete
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:23:08.741945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:23:08.741967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:08.741973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:23:08.741978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:23:08.741990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:23:08.741994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:23:08.742004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:08.742025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:08.742145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:23:08.742215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:23:08.754389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:23:08.754408Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:08.756433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:23:08.756508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:23:08.756552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:23:08.758708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:23:08.758941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:23:08.759073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:08.759153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:23:08.759736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:08.759792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:23:08.760047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:08.760061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:08.760084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:23:08.760093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:08.760100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:23:08.760134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.761469Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:23:08.782547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:23:08.782601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.782662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:23:08.782699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:23:08.782709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.783252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:08.783270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:23:08.783297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.783304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:23:08.783307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:23:08.783311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:23:08.783603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.783611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:23:08.783614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:23:08.783858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.783865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.783869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:08.783874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:23:08.784288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:23:08.784749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:23:08.784797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:23:08.784984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:08.785013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:08.785021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:08.785080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:23:08.785087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:08.785118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:23:08.785131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:23:08.785622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:08.785632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:08.785667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ...
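The txId 104 records above show how a publication is retired: the operation registers one expected (pathId, version) pair per published path ("Publication details"), each TEvUpdateAck from the scheme board retires one entry ("Publication in-flight, count: 2" then "count: 1"), and only at zero does "Publication complete, notify & remove" fire. A hedged C++ sketch of that bookkeeping; the types and names here are illustrative, not YDB's actual classes:

#include <cstdint>
#include <cstdio>
#include <map>

struct TPublication {
    std::map<uint64_t, uint64_t> InFlight; // localPathId -> expected version
};

// Returns true when the last outstanding ack arrives ("Publication complete").
static bool HandleUpdateAck(TPublication& pub, uint64_t pathId, uint64_t version) {
    auto it = pub.InFlight.find(pathId);
    if (it == pub.InFlight.end() || version < it->second) {
        return false; // stale or unknown ack: keep waiting
    }
    pub.InFlight.erase(it); // AckPublish: this path is now published
    std::printf("AckPublish pathId %lu version %lu, in-flight: %zu\n",
                (unsigned long)pathId, (unsigned long)version, pub.InFlight.size());
    return pub.InFlight.empty();
}

int main() {
    // publications: 2, matching the "Publication details" records for tx 104.
    TPublication tx104{{{1, 7}, {3, 2}}};
    HandleUpdateAck(tx104, 1, 7); // in-flight count drops from 2 to 1
    if (HandleUpdateAck(tx104, 3, 2)) {
        std::puts("Publication complete, notify & remove"); // subscribers: 0 here
    }
}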
r TabletID 72057594046678944 is [1:622:2535] sender: [1:623:2058] recipient: [1:619:2534]
2025-05-29T15:23:08.898770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:23:08.898797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:08.898802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:23:08.898805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:23:08.898810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:23:08.898813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:23:08.898822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:08.898833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:08.898903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:23:08.898955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:23:08.899815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:23:08.900099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:23:08.900134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:23:08.900158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:23:08.900165Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:08.900220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:23:08.900274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1457: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1483: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1785: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_data_erasure_manager.cpp:452: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0
2025-05-29T15:23:08.900363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2033: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2151: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2237: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2303: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2453: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2832: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2911: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3409: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3445: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3659: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3804: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3821: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3981: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3997: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4282: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4587: IndexBuild , records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4645: KMeansTreeSample records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4740: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.900614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4767: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944
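The TTxInit records above are the restart half of DeleteAndRestart: after the tablet comes back as leader [1:622:2535], the schemeshard reloads its state from the local database table by table, logging how many rows each table contributed (only the root path survives the delete, hence "Paths, read records: 1"). A toy C++ version of that recovery pass, assuming a simple in-memory store; the table names match the log, everything else is illustrative:

#include <cstdio>
#include <string>
#include <vector>

struct TTable {
    std::string Name; // persisted local-DB table
    size_t Rows;      // rows surviving the restart
};

int main() {
    // After DeleteAndRestart only the root path remains persisted.
    std::vector<TTable> persisted = {
        {"Paths", 1}, {"UserAttributes", 0}, {"Tables", 0},
        {"Columns", 0}, {"Shards", 0}, {"Publications", 0},
    };
    for (const auto& t : persisted) {
        // One pass per "TTxInit for <Table>, read records: N" record.
        std::printf("TTxInit for %s, read records: %zu\n", t.Name.c_str(), t.Rows);
    }
}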
2025-05-29T15:23:08.900618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4794: LongLocks: records: 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.901444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:23:08.901922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:08.901935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:08.902134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:23:08.902142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:08.902148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:23:08.902405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594046678944 is [1:622:2535] sender: [1:681:2058] recipient: [1:15:2062]
2025-05-29T15:23:08.933250Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:08.933324Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 95us result status StatusPathDoesNotExist
2025-05-29T15:23:08.933372Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-05-29T15:23:08.933491Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:08.933526Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 37us result status StatusSuccess
2025-05-29T15:23:08.933634Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:23:08.649105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:23:08.649134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:08.649140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:23:08.649146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:23:08.649157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:23:08.649162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:23:08.649171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:08.649185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:08.649298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:23:08.649372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:23:08.663279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:23:08.663308Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:08.666064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:23:08.666221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:23:08.666283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:23:08.668288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:23:08.668460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:23:08.668563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:08.668623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:23:08.669095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:08.669130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:23:08.669350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:08.669363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:08.669383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:23:08.669391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:08.669397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:23:08.669429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.670723Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:23:08.691768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:23:08.691831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.691893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:23:08.691941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:23:08.691952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.692537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:08.692562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:23:08.692595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.692605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:23:08.692610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:23:08.692616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:23:08.693037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.693050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:23:08.693055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:23:08.693452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.693466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.693472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:08.693479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:23:08.694156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:23:08.694629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:23:08.694666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:23:08.694855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:08.694880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:08.694887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:08.694943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:23:08.694950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:08.694975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:23:08.694986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:23:08.695440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:08.695450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:08.695477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ...
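The tail of SchemeLimitsRejectsWithIndexedTables, shown below, ends with a StatusResourceExhausted rejection: creating Table7 with five indexes would exceed the children limit of 4 for the directory, and would also add 6 shards to a subdomain already holding 4 against a limit of 7. A minimal C++ sketch of that kind of admission check, with hypothetical names; only the numbers and status strings come from the log:

#include <cstdio>

struct TLimits {
    int MaxChildren; // max new children per dir in the subdomain
    int MaxShards;   // max shards inside the subdomain
};

// Mirrors the order seen in the log: the children/indexes check fires first,
// before any shard is actually created.
static const char* CheckCreateIndexedTable(const TLimits& lim,
                                           int childrenInside, int childrenToCreate,
                                           int shardsInside, int shardsToCreate) {
    if (childrenInside + childrenToCreate > lim.MaxChildren)
        return "StatusResourceExhausted: children limit for dir in domain exceeded";
    if (shardsInside + shardsToCreate > lim.MaxShards)
        return "StatusResourceExhausted: shards limit exceeded";
    return "StatusAccepted";
}

int main() {
    TLimits lim{/*MaxChildren=*/4, /*MaxShards=*/7};
    // Table7 plus five index children, 6 new shards over 4 existing ones.
    std::puts(CheckCreateIndexedTable(lim, 0, 5, 4, 6));
}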
CHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 610 RawX2: 4294969843 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2
2025-05-29T15:23:08.957456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0
2025-05-29T15:23:08.957469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 610 RawX2: 4294969843 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2
2025-05-29T15:23:08.957473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944
2025-05-29T15:23:08.957478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 610 RawX2: 4294969843 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2
2025-05-29T15:23:08.957484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:08.957487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.957490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 107:0, datashard: 72075186233409548, at schemeshard: 72057594046678944
2025-05-29T15:23:08.957493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 107:0 129 -> 240
2025-05-29T15:23:08.958105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944
2025-05-29T15:23:08.958131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107
2025-05-29T15:23:08.958691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107
2025-05-29T15:23:08.958720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944
2025-05-29T15:23:08.958795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944
2025-05-29T15:23:08.958801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 107:2 ProgressState
2025-05-29T15:23:08.958812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#107:2 progress is 2/3
2025-05-29T15:23:08.958815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 107 ready parts: 2/3
2025-05-29T15:23:08.958818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#107:2 progress is 2/3
2025-05-29T15:23:08.958820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 107 ready parts: 2/3
2025-05-29T15:23:08.958823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 107, ready parts: 2/3, is published: true
2025-05-29T15:23:08.958844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107
2025-05-29T15:23:08.958853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107
2025-05-29T15:23:08.958864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.958881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.958889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.958892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 107:0 ProgressState
2025-05-29T15:23:08.958897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#107:0 progress is 3/3
2025-05-29T15:23:08.958899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 107 ready parts: 3/3
2025-05-29T15:23:08.958901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#107:0 progress is 3/3
2025-05-29T15:23:08.958903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 107 ready parts: 3/3
2025-05-29T15:23:08.958906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 107, ready parts: 3/3, is published: true
2025-05-29T15:23:08.958916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:485:2432] message: TxId: 107
2025-05-29T15:23:08.958920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 107 ready parts: 3/3
2025-05-29T15:23:08.958926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 107:0
2025-05-29T15:23:08.958930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 107:0
2025-05-29T15:23:08.958947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4
2025-05-29T15:23:08.958951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 107:1
2025-05-29T15:23:08.958953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 107:1
2025-05-29T15:23:08.958956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3
2025-05-29T15:23:08.958958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 107:2
2025-05-29T15:23:08.958960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 107:2
2025-05-29T15:23:08.958965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3
2025-05-29T15:23:08.959477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult
2025-05-29T15:23:08.959491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:541:2488]
TestWaitNotification: OK eventTxId 107
TestModificationResults wait txId: 108
2025-05-29T15:23:08.960273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "Table7" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value0" Type: "Utf8" } Columns { Name: "Value1" Type: "Utf8" } Columns { Name: "Value2" Type: "Utf8" } Columns { Name: "Value3" Type: "Utf8" } Columns { Name: "Value4" Type: "Utf8" } KeyColumnNames: "RowId" } IndexDescription { Name: "UserDefinedIndexByValue0" KeyColumnNames: "Value0" } IndexDescription { Name: "UserDefinedIndexByValue1" KeyColumnNames: "Value1" } IndexDescription { Name: "UserDefinedIndexByValue2" KeyColumnNames: "Value2" } IndexDescription { Name: "UserDefinedIndexByValue3" KeyColumnNames: "Value3" } IndexDescription { Name: "UserDefinedIndexByValue4" KeyColumnNames: "Value4" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:23:08.960336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:101: TCreateTableIndex construct operation table path: /MyRoot/USER_0/Table7 domain path id: [OwnerId: 72057594046678944, LocalPathId: 2] domain path: /MyRoot/USER_0 shardsToCreate: 6 GetShardsInside: 4 MaxShards: 7
2025-05-29T15:23:08.960347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 108:0, explain: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944
2025-05-29T15:23:08.960352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 108:1, propose status:StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944
2025-05-29T15:23:08.960760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 108, response: Status: StatusResourceExhausted Reason: "indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:08.960792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/USER_0/Table7
TestModificationResult got TxId: 108, wait until txId: 108
TestWaitNotification wait txId: 108
2025-05-29T15:23:08.960848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion
2025-05-29T15:23:08.960853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108
2025-05-29T15:23:08.960904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944
2025-05-29T15:23:08.960919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult
2025-05-29T15:23:08.960922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:738:2652]
TestWaitNotification: OK eventTxId 108
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:23:08.649105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:23:08.649133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:08.649139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:23:08.649145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:23:08.649155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:23:08.649160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:23:08.649170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:08.649185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:08.649298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:23:08.649372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:23:08.663683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:23:08.663705Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:08.666137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:23:08.666248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:23:08.666292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:23:08.668218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:23:08.668392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:23:08.668505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:08.668585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:23:08.669033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:08.669071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:23:08.669318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:08.669329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:08.669351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:23:08.669359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:08.669365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:23:08.669396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.670495Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:23:08.684232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:23:08.684289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.684341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:23:08.684378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:23:08.684386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.684908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:08.684931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:23:08.684966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.684975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:23:08.684980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:23:08.684985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:23:08.685335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.685344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:23:08.685350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:23:08.685685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.685696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:08.685702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:08.685709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:23:08.686135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:23:08.686423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId:
1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:08.686448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:08.686569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:08.686590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:08.686597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:08.686636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:08.686640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:08.686661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:08.686669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:08.687034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:08.687041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:08.687073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
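The FAKE_COORDINATOR lines just above ("advance: minStep5000001 State->FrontStep: 0", then "Add transaction: 1 at step: 5000001") record a transaction being planned into a concrete step after it was proposed with a MinStep/MaxStep window. Below is a minimal sketch of one plausible assignment rule, assuming steps are handed out monotonically from a 5000000 base, as the step numbers in this log (5000001, 5000002, ...) suggest; the struct and member names are invented for illustration and are not the coordinator's real implementation.

#include <algorithm>
#include <cstdint>
#include <iostream>

struct TFakeCoordinator {
    std::uint64_t FrontStep = 5000000;  // assumed base; the first plan step in this log is 5000001

    // Assign a plan step: strictly after the last planned step and not below
    // the transaction's MinStep (tx 1 above was proposed with MinStep: 0,
    // MaxStep: 18446744073709551615 and planned at step 5000001).
    std::uint64_t Plan(std::uint64_t txId, std::uint64_t minStep) {
        const std::uint64_t step = std::max(FrontStep + 1, minStep);
        std::cout << "plan txId " << txId << " at step " << step
                  << " (FrontStep was " << FrontStep << ")\n";
        FrontStep = step;
        return step;
    }
};

int main() {
    TFakeCoordinator coordinator;
    coordinator.Plan(1, 0);    // -> 5000001, matching "Add transaction: 1 at step: 5000001"
    coordinator.Plan(101, 0);  // later transactions advance to 5000002, 5000003, ...
}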
EMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:23:08.812885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:23:08.812890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:23:08.812892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:23:08.812897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-05-29T15:23:08.812902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:23:08.812907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:23:08.812911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:23:08.812947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 9 2025-05-29T15:23:08.812953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 102, publications: 2, subscribers: 1 2025-05-29T15:23:08.812959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2025-05-29T15:23:08.812962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 4], 3 2025-05-29T15:23:08.813060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:23:08.813071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:23:08.813075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:23:08.813080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-05-29T15:23:08.813084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:08.813210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:23:08.813222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:23:08.813225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:23:08.813229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-05-29T15:23:08.813233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 8 2025-05-29T15:23:08.813242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 1 2025-05-29T15:23:08.813246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:899:2730] 2025-05-29T15:23:08.813942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:23:08.814177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:23:08.814197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:23:08.814203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:900:2731] TestWaitNotification: OK eventTxId 102 2025-05-29T15:23:08.814315Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:08.814348Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_0" took 41us result status StatusSuccess 2025-05-29T15:23:08.814448Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:08.814527Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:08.814542Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_1" took 17us result status StatusSuccess 2025-05-29T15:23:08.814580Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_1" PathDescription { Self { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 4 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:08.814620Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:08.814632Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains" took 13us result status StatusSuccess 2025-05-29T15:23:08.814676Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains" PathDescription { Self { Name: "SubDomains" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout [GOOD] >> 
TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:08.785622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:08.785644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:08.785650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:08.785656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:08.785666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:08.785671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:08.785680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:08.785693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:08.785780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:08.785839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:08.795718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:08.795738Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:08.797570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:08.797650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:08.797695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:08.799345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:08.799517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:08.799615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:08.799683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:08.800051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:08.800082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:08.800277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:08.800284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:08.800301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:08.800307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:08.800311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:08.800337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.801394Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for 
TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:08.819594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:08.819668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.819732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:08.819782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:08.819796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.820406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:08.820435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:08.820473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.820484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:08.820490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:08.820496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:08.820885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.820896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:08.820902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:08.821259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.821270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:08.821276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-05-29T15:23:08.821283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:08.821980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:08.822363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:08.822402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:08.822588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:08.822612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:08.822620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:08.822676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:08.822683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:08.822712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:08.822725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:08.823159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:08.823168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:08.823211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
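Two proposals in this file are refused before any state changes: txId 108 above with StatusResourceExhausted ("indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5") and txId 109 further below with StatusSchemeError ("Too many columns. new: 4. Limit: 3"). Both follow the same shape: compute the size the operation would produce, compare it with the effective scheme limit, and reject at propose time. Here is a condensed sketch of that pattern; the type and function names are invented, and only the two message formats are taken from the log.

#include <cstdint>
#include <iostream>
#include <optional>
#include <sstream>
#include <string>

enum class EStatus { Accepted, SchemeError, ResourceExhausted };

struct TSchemeLimits {
    std::uint32_t MaxColumns = 3;        // per the "Limit: 3" rejection of txId 109
    std::uint32_t MaxChildrenInDir = 4;  // per the "children limit ... 4" rejection of txId 108
};

struct TReject { EStatus Status; std::string Reason; };

// Hypothetical condensation of the column-count check seen above.
std::optional<TReject> CheckColumnLimit(const TSchemeLimits& limits, std::uint32_t newColumnCount) {
    if (newColumnCount > limits.MaxColumns) {
        std::ostringstream reason;
        reason << "Too many columns. new: " << newColumnCount
               << ". Limit: " << limits.MaxColumns;
        return TReject{EStatus::SchemeError, reason.str()};
    }
    return std::nullopt;  // proposal may proceed
}

// Hypothetical condensation of the children-count check seen above.
std::optional<TReject> CheckChildrenLimit(const TSchemeLimits& limits, std::uint32_t intendedChildren) {
    if (intendedChildren > limits.MaxChildrenInDir) {
        std::ostringstream reason;
        reason << "children limit for dir in domain: " << limits.MaxChildrenInDir
               << ", intention to create new children: " << intendedChildren;
        return TReject{EStatus::ResourceExhausted, reason.str()};
    }
    return std::nullopt;
}

int main() {
    TSchemeLimits limits;
    if (auto reject = CheckColumnLimit(limits, 4)) {
        std::cout << "StatusSchemeError: " << reject->Reason << "\n";  // mirrors txId 109
    }
}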
: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 127 RawX2: 8589936743 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:09.520304Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:199: TAlterOlapStore TPropose operationId# 108:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000004 2025-05-29T15:23:09.520350Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 108:0 128 -> 129 2025-05-29T15:23:09.520388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:09.520404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:23:09.520490Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186233409549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=108;fline=tx_controller.cpp:214;event=finished_tx;tx_id=108; FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-05-29T15:23:09.520902Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:09.520911Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:09.520949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:23:09.520977Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:09.520983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:337:2312], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-05-29T15:23:09.520989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:337:2312], at schemeshard: 72057594046678944, txId: 108, path id: 5 2025-05-29T15:23:09.521106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.521129Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:305: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-05-29T15:23:09.521137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: alter_store.cpp:332: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409549 2025-05-29T15:23:09.521263Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-05-29T15:23:09.521276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-05-29T15:23:09.521281Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-05-29T15:23:09.521286Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-05-29T15:23:09.521292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:23:09.521463Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-05-29T15:23:09.521480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-05-29T15:23:09.521487Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-05-29T15:23:09.521492Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-05-29T15:23:09.521497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:23:09.521511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-05-29T15:23:09.522339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 108:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275382275 2025-05-29T15:23:09.522521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-05-29T15:23:09.522713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 FAKE_COORDINATOR: Erasing txId 108 2025-05-29T15:23:09.533490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6146: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-05-29T15:23:09.533504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2025-05-29T15:23:09.533524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-05-29T15:23:09.533545Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 108:0 129 -> 240 2025-05-29T15:23:09.534003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.534037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.534043Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 108:0 ProgressState 2025-05-29T15:23:09.534053Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#108:0 progress is 1/1 2025-05-29T15:23:09.534056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-05-29T15:23:09.534060Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#108:0 progress is 1/1 2025-05-29T15:23:09.534062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-05-29T15:23:09.534066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-05-29T15:23:09.534080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:492:2441] message: TxId: 108 2025-05-29T15:23:09.534085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-05-29T15:23:09.534092Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 108:0 2025-05-29T15:23:09.534096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 108:0 2025-05-29T15:23:09.534121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:23:09.534495Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-05-29T15:23:09.534504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:880:2790] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2025-05-29T15:23:09.535227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore1" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "comment2" Type: "Utf8" } } } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:09.535268Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: alter_store.cpp:465: TAlterOlapStore Propose, path: /MyRoot/OlapStore1, opId: 109:0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.535349Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 109:1, propose status:StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:09.535854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 109, response: Status: StatusSchemeError Reason: "Too many columns. new: 4. 
Limit: 3" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:09.535880Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-05-29T15:23:09.535942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-05-29T15:23:09.535946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-05-29T15:23:09.536006Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-05-29T15:23:09.536020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-05-29T15:23:09.536025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:915:2825] TestWaitNotification: OK eventTxId 109 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:09.482564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:09.482591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:09.482596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:09.482601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:09.482613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:09.482618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:09.482627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:09.482639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:09.482766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:09.482826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:09.493271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:09.493288Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:09.495156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:09.495230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:09.495264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:09.496682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:09.496901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:09.496972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:09.497018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:09.497402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:09.497430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:09.497637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:09.497646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:09.497662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:09.497667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:09.497671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:09.497694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.498667Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:09.515809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
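The repeated "Change state for txid N:0 2 -> 3", "3 -> 128", "128 -> 240" entries trace the sub-operation state machine every schemeshard transaction in this log walks: create shards, configure parts, propose to the coordinator, done. The numeric codes below are read off the log; the enum labels and the Advance helper are assumptions inferred from the surrounding ProgressState messages (TCreateParts, TConfigureParts, TPropose, TDone), not YDB's actual TTxState definitions.

#include <cstdint>
#include <iostream>

// State codes as they appear in "Change state for txid 1:0 2 -> 3 -> 128 -> 240".
enum class ESubOpState : std::uint8_t {
    CreateParts    = 2,    // TCreateParts ProgressState ("no shards to create, do next state")
    ConfigureParts = 3,    // NSubDomainState::TConfigureParts ProgressState
    Propose        = 128,  // NSubDomainState::TPropose, waits for TEvOperationPlan
    Done           = 240,  // TDone ProgressState, operation part finished
};

// Hypothetical helper: advance one step along the progression seen in the log.
ESubOpState Advance(ESubOpState s) {
    switch (s) {
        case ESubOpState::CreateParts:    return ESubOpState::ConfigureParts;
        case ESubOpState::ConfigureParts: return ESubOpState::Propose;
        case ESubOpState::Propose:        return ESubOpState::Done;
        case ESubOpState::Done:           return ESubOpState::Done;
    }
    return s;
}

int main() {
    ESubOpState s = ESubOpState::CreateParts;
    while (s != ESubOpState::Done) {
        ESubOpState next = Advance(s);
        std::cout << "Change state for txid 1:0 "
                  << static_cast<int>(s) << " -> " << static_cast<int>(next) << "\n";
        s = next;  // prints 2 -> 3, 3 -> 128, 128 -> 240, as in the log
    }
}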
2025-05-29T15:23:09.515877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.515939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:09.515981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:09.515992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.516663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:09.516687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:09.516730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.516740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:09.516745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:09.516751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:09.517153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.517164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:09.517170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:09.517546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.517559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.517566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:09.517572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:09.518016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2025-05-29T15:23:09.518330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:09.518358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:09.518483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:09.518500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:09.518505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:09.518543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:09.518548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:09.518572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:09.518581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:09.518974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:09.518981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:09.519013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
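
The entries above show one ALTER DATABASE sub-operation being driven through its lifecycle by repeated TTxOperationProgress executions: TCreateParts (state 2, "no shards to create"), NSubDomainState::TConfigureParts (state 3), NSubDomainState::TPropose (state 128), and completion (state 240) once the coordinator plans the transaction. The numeric codes are quoted from the "Change state for txid" entries; the state names are a reading of the neighbouring ProgressState messages, not YDB's actual enum. A minimal, self-contained C++ sketch of that progression, illustrative only:

// Toy replay of the sub-operation state progression seen above
// (Change state for txid 1:0: 2 -> 3 -> 128 -> 240). The numeric codes are
// taken from the log; the enumerator names are inferred from the adjacent
// ProgressState entries, not from YDB headers.
#include <cstdio>

enum class ETxState : int {
    CreateParts    = 2,    // "TCreateParts opId# 1:0 ProgressState"
    ConfigureParts = 3,    // "NSubDomainState::TConfigureParts ... ProgressState"
    Propose        = 128,  // "NSubDomainState::TPropose ProgressState"
    Done           = 240,  // reached on TEvOperationPlan from the coordinator
};

static ETxState Next(ETxState s) {
    switch (s) {
        case ETxState::CreateParts:    return ETxState::ConfigureParts;
        case ETxState::ConfigureParts: return ETxState::Propose;
        case ETxState::Propose:        return ETxState::Done;
        default:                       return s;
    }
}

int main() {
    for (ETxState s = ETxState::CreateParts; s != ETxState::Done; s = Next(s)) {
        std::printf("Change state for txid 1:0 %d -> %d\n", (int)s, (int)Next(s));
    }
    return 0;
}

The same 2 -> 3 -> 128 -> 240 sequence recurs for every subdomain operation in this report, so it is a useful landmark when scanning the log.
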
Id: 2] was 2 Forgetting tablet 72075186233409549 2025-05-29T15:23:09.548579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:09.548585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:09.548603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:09.548740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:5 2025-05-29T15:23:09.548747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-05-29T15:23:09.548774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5739: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2025-05-29T15:23:09.549261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:7 2025-05-29T15:23:09.549283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:09.549294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:09.549297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:09.549306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:09.549347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:23:09.549351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:23:09.549357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:23:09.549360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-29T15:23:09.549464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:6 2025-05-29T15:23:09.549467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-05-29T15:23:09.549488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5739: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2025-05-29T15:23:09.549498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:23:09.549501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to 
deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:23:09.549562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:23:09.549566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-29T15:23:09.549586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:09.549782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2025-05-29T15:23:09.549820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-05-29T15:23:09.549832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-05-29T15:23:09.549841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:23:09.549846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-05-29T15:23:09.549852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:23:09.549854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:23:09.549903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-05-29T15:23:09.549919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:23:09.549924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-05-29T15:23:09.549928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:644:2545] 2025-05-29T15:23:09.549947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:23:09.549950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:644:2545] 2025-05-29T15:23:09.549966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:23:09.549974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:23:09.549976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:644:2545] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 
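
The TestWaitNotification exchange just above is the harness's completion barrier: for each txId it sends EvNotifyTxCompletion to the schemeshard, and because transactions 100-102 have already finished, the schemeshard replies "unknown transaction", which the subscriber treats as proof of completion and immediately satisfies the waiter. A toy model of that handshake follows; the names are invented for illustration, the real helper being the TTxNotificationSubscriber in the test environment sources:

// Schematic of the TestWaitNotification handshake: waiters for already
// completed transactions are satisfied immediately, mirroring the
// "unknown transaction" -> EvNotifyTxCompletionResult pairs in the log.
#include <cstdio>
#include <functional>
#include <map>
#include <set>

struct TNotificationSubscriber {
    std::set<unsigned long long> Completed;  // txIds that already finished
    std::map<unsigned long long, std::function<void()>> Waiters;

    void WaitTx(unsigned long long txId, std::function<void()> waiter) {
        std::printf("txId %llu: send EvNotifyTxCompletion\n", txId);
        if (Completed.count(txId)) {
            // The tx is long gone, so the waiter is satisfied right away.
            waiter();
        } else {
            Waiters[txId] = std::move(waiter);
        }
    }

    void OnTxDone(unsigned long long txId) {
        Completed.insert(txId);
        if (auto it = Waiters.find(txId); it != Waiters.end()) {
            it->second();
            Waiters.erase(it);
        }
    }
};

int main() {
    TNotificationSubscriber s;
    s.Completed = {100, 101, 102};
    for (unsigned long long tx : {100ULL, 101ULL, 102ULL}) {
        s.WaitTx(tx, [tx] { std::printf("TestWaitNotification: OK eventTxId %llu\n", tx); });
    }
    return 0;
}

Running it prints the same "TestWaitNotification: OK eventTxId 100/101/102" tail seen above.
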
2025-05-29T15:23:09.550026Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:09.550053Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 33us result status StatusPathDoesNotExist 2025-05-29T15:23:09.550089Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:23:09.550140Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:09.550149Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 10us result status StatusPathDoesNotExist 2025-05-29T15:23:09.550162Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:23:09.550196Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:09.550212Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 17us result status StatusSuccess 2025-05-29T15:23:09.550265Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName
>> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD]
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:09.478004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:09.478033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:09.478039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:09.478044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:09.478056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:09.478060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:09.478069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:09.478083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:09.478205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:09.478263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:09.488730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:09.488749Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:09.490850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:09.490932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:09.490969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:09.492354Z
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:09.492531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:09.492602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:09.492645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:09.493055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:09.493084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:09.493258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:09.493264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:09.493279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:09.493285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:09.493289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:09.493311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.494344Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:09.507612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:09.507675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.507732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:09.507767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:09.507775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.508554Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:09.508578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:09.508607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.508614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:09.508618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:09.508622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:09.509006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.509017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:09.509021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:09.509371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.509382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:09.509388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:09.509395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:09.509888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:09.510241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:09.510275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:09.510443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
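
Between DoPropose and TTxOperationPlanStep the test's FAKE_COORDINATOR stands in for coordinator tablet 72057594046316545: it accepts the proposed txId 1, advances its step to 5000001, and sends the plan back to schemeshard 72057594046678944, which is what later flips the operation from state 128 to 240. The sketch below reproduces only the visible event sequence; how the real coordinator chooses plan steps (PlanResolution, mediators) is not modelled, and the minStep argument is contrived so the arithmetic lands on the logged step:

// Toy model of the propose/plan round-trip with the test's FAKE_COORDINATOR.
// This illustrates the order of log events, not YDB's coordination protocol.
#include <algorithm>
#include <cstdint>
#include <cstdio>

struct TFakeCoordinator {
    uint64_t FrontStep = 0;

    // Accept a proposed transaction, pick the next plan step, and "send" the
    // plan back to the participant tablet.
    uint64_t Plan(uint64_t txId, uint64_t tabletId, uint64_t minStep) {
        uint64_t step = std::max(FrontStep, minStep) + 1;
        std::printf("FAKE_COORDINATOR: Add transaction: %llu at step: %llu\n",
                    (unsigned long long)txId, (unsigned long long)step);
        std::printf("FAKE_COORDINATOR: advance: minStep%llu State->FrontStep: %llu\n",
                    (unsigned long long)step, (unsigned long long)FrontStep);
        std::printf("FAKE_COORDINATOR: Send Plan to tablet %llu for txId: %llu at step: %llu\n",
                    (unsigned long long)tabletId, (unsigned long long)txId,
                    (unsigned long long)step);
        FrontStep = step;
        return step;
    }
};

int main() {
    TFakeCoordinator coordinator;
    // minStep is an assumed input chosen so the output matches the logged 5000001.
    coordinator.Plan(/*txId=*/1, /*tabletId=*/72057594046678944ULL, /*minStep=*/5000000);
    return 0;
}
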
2025-05-29T15:23:09.510464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:09.510469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:09.510509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:09.510513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:09.510539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:09.510547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:09.511123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:09.511134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:09.511169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
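
Each finished operation is then published to the scheme board: TTxPublishToSchemeBoard describes the affected paths, after which TEvUpdateAck messages drain an in-flight counter until "Publication complete, notify & remove". In this report a version of 18446744073709551615 (max uint64) accompanies paths that are being dropped; that reading is taken from the log itself, not from a specification. A toy tracker of the ack bookkeeping, with illustrative names:

// Minimal sketch of the publish/ack bookkeeping visible in the log: count
// outstanding (pathId, version) publications per txId and report completion
// once every publication has been acknowledged.
#include <cstdint>
#include <cstdio>
#include <map>

struct TPublicationTracker {
    // txId -> number of publications still waiting for TEvUpdateAck
    std::map<uint64_t, int> InFlight;

    void Publish(uint64_t txId, uint64_t pathId, uint64_t version) {
        ++InFlight[txId];
        std::printf("DescribePath, txId: %llu, path id: %llu, version: %llu\n",
                    (unsigned long long)txId, (unsigned long long)pathId,
                    (unsigned long long)version);
    }

    void Ack(uint64_t txId) {
        auto it = InFlight.find(txId);
        if (it == InFlight.end()) return;
        std::printf("Publication in-flight, count: %d, txId: %llu\n",
                    it->second, (unsigned long long)txId);
        if (--it->second == 0) {
            std::printf("Publication complete, notify & remove, txId: %llu\n",
                        (unsigned long long)txId);
            InFlight.erase(it);
        }
    }
};

int main() {
    TPublicationTracker t;
    t.Publish(102, 1, 7);                        // root path, bumped version
    t.Publish(102, 2, 18446744073709551615ULL);  // dropped path, max version
    t.Ack(102);
    t.Ack(102);
    return 0;
}
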
chemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-05-29T15:23:09.538684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:23:09.538687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:23:09.538690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:23:09.538717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-05-29T15:23:09.538723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-05-29T15:23:09.538727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-05-29T15:23:09.538730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-05-29T15:23:09.538861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:23:09.538873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:23:09.538879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:23:09.538883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:23:09.538890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:09.538982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:23:09.538992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:23:09.538996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:23:09.539000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:23:09.539003Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:09.539011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-05-29T15:23:09.539486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:09.539497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:09.539501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:09.539808Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:23:09.539853Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-05-29T15:23:09.539889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:09.539946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:23:09.540209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:09.540244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409546 2025-05-29T15:23:09.540406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:23:09.540446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Forgetting tablet 72075186233409548 2025-05-29T15:23:09.540492Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2025-05-29T15:23:09.540639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:09.540672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:09.540794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at 
schemeshard: 72057594046678944 2025-05-29T15:23:09.540802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:09.540825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:09.540922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:09.540928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:09.540939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:09.541431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:23:09.541444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:23:09.541496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:23:09.541500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-29T15:23:09.541861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:23:09.541873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:23:09.541895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:09.541918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-05-29T15:23:09.541981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:23:09.541988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:23:09.542053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:23:09.542072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:23:09.542081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:522:2475] TestWaitNotification: OK eventTxId 102 2025-05-29T15:23:09.542160Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:09.542191Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 43us result status StatusPathDoesNotExist 2025-05-29T15:23:09.542241Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
|63.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq
|63.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq
|63.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest
>> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:05.577053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:05.577085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:05.577091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:05.577096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:05.577108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:05.577112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:05.577122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:05.577137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:05.577259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:05.577335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:05.589810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:05.589842Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.594015Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:05.594162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:05.594221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:05.597322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:05.602900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:05.603061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.603174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:05.604033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.604081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:05.604347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:05.604355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.604372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:05.604378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:05.604383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:05.604412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.606303Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:05.627318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:05.627413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.627494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:05.627547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:05.627560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.628521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.628553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:05.628610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.628621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:05.628627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:05.628633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:05.629056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.629067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:05.629073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:05.629437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.629448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.629455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.629462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:05.630155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:05.630631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:05.630678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:05.630896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.630920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:05.630928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.630988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:05.630995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.631028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:05.631041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:05.631459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:05.631469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:05.631519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T1 ... 
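
Much of the DEBUG noise in the remainder of this block is path reference counting: every transaction state, publication, or shard that touches a path calls IncrementPathDbRefCount with a reason, and teardown (publication acks, shard deletion, TTxCleanDroppedPaths/TTxCleanDroppedSubDomains) decrements it until the path record can be removed. A toy version with reason strings follows, purely to make the pattern explicit; the printed "was N" is the value before the change, matching the log's convention:

// Sketch of the Increment/DecrementPathDbRefCount pattern: a per-path
// counter of live references, released as teardown steps complete.
#include <cstdint>
#include <cstdio>
#include <map>
#include <string>

struct TPathRefCounts {
    std::map<uint64_t, int> Refs;  // LocalPathId -> live references

    void Inc(uint64_t pathId, const std::string& reason) {
        int was = Refs[pathId]++;
        std::printf("IncrementPathDbRefCount reason %s for pathId %llu was %d\n",
                    reason.c_str(), (unsigned long long)pathId, was);
    }

    void Dec(uint64_t pathId, const std::string& reason) {
        int was = Refs[pathId]--;
        std::printf("DecrementPathDbRefCount reason %s for pathId %llu was %d\n",
                    reason.c_str(), (unsigned long long)pathId, was);
        if (Refs[pathId] <= 0) {
            Refs.erase(pathId);  // nothing references the path record any more
        }
    }
};

int main() {
    TPathRefCounts rc;
    rc.Inc(2, "transaction target path");
    rc.Inc(2, "publish path");
    rc.Dec(2, "remove publishing");
    rc.Dec(2, "remove txstate target path");
    return 0;
}
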
_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-05-29T15:23:09.742609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-05-29T15:23:09.743192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-05-29T15:23:09.743550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-05-29T15:23:09.743606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-05-29T15:23:09.743614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-05-29T15:23:09.743665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-05-29T15:23:09.743699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-05-29T15:23:09.743705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:450:2401], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-05-29T15:23:09.743711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:450:2401], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-05-29T15:23:09.743849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-05-29T15:23:09.743859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-05-29T15:23:09.743877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-05-29T15:23:09.743885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-05-29T15:23:09.743892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 104:0 129 -> 240 2025-05-29T15:23:09.744025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-05-29T15:23:09.744040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-05-29T15:23:09.744045Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-05-29T15:23:09.744050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2025-05-29T15:23:09.744058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-05-29T15:23:09.744248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-05-29T15:23:09.744261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-05-29T15:23:09.744265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-05-29T15:23:09.744270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:23:09.744275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-05-29T15:23:09.744287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-05-29T15:23:09.744888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-05-29T15:23:09.744901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-05-29T15:23:09.744990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-05-29T15:23:09.745036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1 2025-05-29T15:23:09.745041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:23:09.745047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1 2025-05-29T15:23:09.745053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:23:09.745058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-05-29T15:23:09.745071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:551:2489] message: TxId: 104 2025-05-29T15:23:09.745077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:23:09.745082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:0 2025-05-29T15:23:09.745086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:0 2025-05-29T15:23:09.745105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-05-29T15:23:09.745180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-05-29T15:23:09.745186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-05-29T15:23:09.745468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-05-29T15:23:09.745806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-05-29T15:23:09.746046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-05-29T15:23:09.746057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:450:2401], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-05-29T15:23:09.746074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-29T15:23:09.746080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:742:2657] 2025-05-29T15:23:09.746234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-05-29T15:23:09.746437Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-05-29T15:23:09.746472Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 45us result status StatusSuccess 2025-05-29T15:23:09.746572Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 
SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-05-29T15:23:09.699681Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:09.700397Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:09.700445Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928037] doesn't have tx info 2025-05-29T15:23:09.700451Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:09.700454Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-05-29T15:23:09.700458Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:09.700464Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.700471Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928037] doesn't have tx writes info 2025-05-29T15:23:09.700595Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [2:262:2253], now have 1 active actors on pipe 2025-05-29T15:23:09.700608Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:09.702376Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.702988Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.703019Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.703165Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928037] Config applied version 1 actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.703192Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:09.703256Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:09.703328Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:270:2259] 2025-05-29T15:23:09.703942Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-05-29T15:23:09.703952Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:270:2259] 2025-05-29T15:23:09.703958Z node 2 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:09.703964Z node 2 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:09.704084Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [2:273:2261], now have 1 active actors on pipe 2025-05-29T15:23:09.712535Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:09.713051Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:09.713095Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928138] doesn't have tx info 2025-05-29T15:23:09.713101Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:09.713104Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-05-29T15:23:09.713108Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:09.713114Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.713120Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928138] doesn't have tx writes info 2025-05-29T15:23:09.713204Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [2:408:2362], now have 1 active actors on pipe 2025-05-29T15:23:09.713217Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:09.713258Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928138] Config update version 2(current 0) received from actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:09.713582Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:09.713602Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.713703Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928138] Config applied version 2 actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 
MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:09.713720Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:09.713760Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:09.713782Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:416:2368] 2025-05-29T15:23:09.714163Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-05-29T15:23:09.714171Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:416:2368] 2025-05-29T15:23:09.714176Z node 2 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:09.714180Z node 2 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:09.714285Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [2:419:2370], now have 1 active actors on pipe 2025-05-29T15:23:09.716654Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:09.717242Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:09.717284Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928139] doesn't have tx info 2025-05-29T15:23:09.717289Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:09.717292Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-05-29T15:23:09.717297Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:09.717303Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.717310Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928139] doesn't have tx writes info 2025-05-29T15:23:09.717390Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [2:468:2407], now have 1 active actors on pipe 2025-05-29T15:23:09.717402Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:09.717437Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928139] Config update version 3(current 0) received from actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:09.717767Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:09.717787Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.717912Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928139] Config applied version 3 actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:09.717927Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:09.717967Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:09.717998Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:476:2413] 2025-05-29T15:23:09.718316Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-05-29T15:23:09.718324Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:476:2413] 2025-05-29T15:23:09.718333Z node 2 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:09.718339Z node 2 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:09.718469Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [2:479:2415], now have 1 active actors on pipe REQUEST MetaRequest { CmdGetReadSessionsInfo { ClientId: "client_id" Topic: "rt3.dc1--topic1" Topic: "rt3.dc1--topic2" } } Ticket: "client_id@builtin" 2025-05-29T15:23:09.719688Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [2:486:2418], now have 1 active actors on pipe 2025-05-29T15:23:09.719733Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [2:489:2419], now have 1 active actors on pipe 2025-05-29T15:23:09.719778Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [2:490:2419], now have 1 active actors on pipe 2025-05-29T15:23:09.719875Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928037] server disconnected, pipe [2:486:2418] destroyed 2025-05-29T15:23:09.719924Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928138] server disconnected, pipe [2:489:2419] destroyed 2025-05-29T15:23:09.719932Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928139] server disconnected, pipe [2:490:2419] destroyed RESULT Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } PartitionResult { Partition: 2 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> 
TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetReadSessionsInfo request" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-05-29T15:23:09.907342Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:09.908109Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:09.908154Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928037] doesn't have tx info 2025-05-29T15:23:09.908160Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:09.908163Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-05-29T15:23:09.908167Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:09.908172Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.908178Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928037] doesn't have tx writes info 2025-05-29T15:23:09.908270Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [2:263:2254], now have 1 active actors on pipe 2025-05-29T15:23:09.908282Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:09.909818Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.910262Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.910281Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.910368Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928037] Config applied version 1 actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.910383Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:09.910420Z node 2 :PERSQUEUE DEBUG: 
partition_init.cpp:75: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:09.910456Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:271:2260] 2025-05-29T15:23:09.910839Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-05-29T15:23:09.910846Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:271:2260] 2025-05-29T15:23:09.910851Z node 2 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:09.910857Z node 2 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:09.910941Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [2:274:2262], now have 1 active actors on pipe 2025-05-29T15:23:09.919388Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:09.920202Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:09.920260Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928137] doesn't have tx info 2025-05-29T15:23:09.920267Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:09.920272Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-05-29T15:23:09.920277Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:09.920283Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.920290Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928137] doesn't have tx writes info 2025-05-29T15:23:09.920382Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928137] server connected, pipe [2:409:2363], now have 1 active actors on pipe 2025-05-29T15:23:09.920398Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:09.920432Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.920844Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 
2025-05-29T15:23:09.920866Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.920959Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928137] Config applied version 2 actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.920978Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:09.921031Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:09.921053Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [2:417:2369] 2025-05-29T15:23:09.921529Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-05-29T15:23:09.921553Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:417:2369] 2025-05-29T15:23:09.921559Z node 2 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:09.921565Z node 2 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:09.921679Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928137] server connected, pipe [2:420:2371], now have 1 active actors on pipe 2025-05-29T15:23:09.924767Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:09.925620Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:09.925676Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928138] doesn't have tx info 2025-05-29T15:23:09.925682Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:09.925687Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-05-29T15:23:09.925692Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:09.925697Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.925704Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928138] doesn't have tx writes info 2025-05-29T15:23:09.925797Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [2:469:2408], now have 1 active actors on pipe 2025-05-29T15:23:09.925814Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:09.925843Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:09.926265Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:09.926286Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.926382Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928138] Config applied version 3 actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId ... 
ermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:10.159613Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:10.159631Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.159706Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928139] Config applied version 8 actor [3:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:10.159724Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:10.159778Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:10.159810Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:531:2453] 2025-05-29T15:23:10.160374Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-05-29T15:23:10.160389Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:531:2453] 2025-05-29T15:23:10.160396Z node 3 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:10.160402Z node 3 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:23:10.160510Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [3:534:2455], now have 1 active actors on pipe 2025-05-29T15:23:10.160713Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [3:540:2458], now have 1 active actors on pipe 2025-05-29T15:23:10.160730Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [3:541:2459], now have 1 active actors on pipe 2025-05-29T15:23:10.160766Z node 3 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:10.160798Z node 3 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:10.160804Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [3:542:2459], now have 1 active actors on pipe 2025-05-29T15:23:10.160823Z node 3 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:10.171166Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [3:550:2466], now have 1 active actors on pipe 2025-05-29T15:23:10.177166Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:10.177701Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:10.177765Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928139] doesn't have tx info 2025-05-29T15:23:10.177777Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:10.177815Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:10.177925Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.177932Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928139] doesn't have tx writes info 2025-05-29T15:23:10.177952Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:10.178003Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:10.178032Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:607:2511] 2025-05-29T15:23:10.178550Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-05-29T15:23:10.178806Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-05-29T15:23:10.178844Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-05-29T15:23:10.178885Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-05-29T15:23:10.178915Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-05-29T15:23:10.178920Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:23:10.178926Z node 3 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:10.178930Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-05-29T15:23:10.178938Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:607:2511] 2025-05-29T15:23:10.178947Z node 3 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:10.178954Z node 3 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:23:10.179159Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928138] server disconnected, pipe [3:541:2459] destroyed 2025-05-29T15:23:10.179175Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928037] server disconnected, pipe [3:540:2458] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 38 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 38 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 77 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 77 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_UNKNOWN } ErrorCode: OK } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-05-29T15:23:09.672212Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:09.672756Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:09.672797Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928037] doesn't have tx info 2025-05-29T15:23:09.672802Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:09.672804Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-05-29T15:23:09.672808Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:09.672812Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.672818Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928037] doesn't have tx writes info 2025-05-29T15:23:09.672888Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [2:263:2254], now have 1 active actors on pipe 2025-05-29T15:23:09.672897Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:09.674413Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.674784Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.674808Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.674912Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928037] Config applied version 1 actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.674933Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:09.674985Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:09.675052Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:271:2260] 2025-05-29T15:23:09.675591Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-05-29T15:23:09.675601Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:271:2260] 2025-05-29T15:23:09.675608Z node 2 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:09.675615Z node 2 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:09.675730Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [2:274:2262], now have 1 active actors on pipe 2025-05-29T15:23:09.684161Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:09.685446Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:09.685509Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928137] doesn't have tx info 2025-05-29T15:23:09.685516Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:09.685520Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-05-29T15:23:09.685525Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:09.685541Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.685549Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928137] doesn't have tx writes info 2025-05-29T15:23:09.685673Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928137] server connected, pipe [2:409:2363], now have 1 active actors on pipe 2025-05-29T15:23:09.685690Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:09.685726Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.686199Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.686218Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.686320Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928137] Config applied version 2 actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 
MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.686339Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:09.686389Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:09.686415Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [2:417:2369] 2025-05-29T15:23:09.687014Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-05-29T15:23:09.687025Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:417:2369] 2025-05-29T15:23:09.687032Z node 2 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:09.687037Z node 2 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:09.687165Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928137] server connected, pipe [2:420:2371], now have 1 active actors on pipe 2025-05-29T15:23:09.689989Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:09.690755Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:09.690808Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928138] doesn't have tx info 2025-05-29T15:23:09.690814Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:09.690819Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-05-29T15:23:09.690824Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:09.690830Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.690837Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928138] doesn't have tx writes info 2025-05-29T15:23:09.690933Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [2:469:2408], now have 1 active actors on pipe 2025-05-29T15:23:09.690949Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:09.690982Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:09.691388Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:09.691407Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.691503Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928138] Config applied version 3 actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId ... 
riteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:10.165183Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:10.165195Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.165251Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928138] Config applied version 11 actor [4:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:10.165273Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:10.165307Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:10.165321Z node 4 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [4:477:2414] 2025-05-29T15:23:10.165593Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-05-29T15:23:10.165598Z node 4 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:477:2414] 2025-05-29T15:23:10.165601Z node 4 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:10.165604Z node 4 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:10.165674Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [4:480:2416], now have 1 active actors on pipe 2025-05-29T15:23:10.167859Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:10.168416Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:10.168453Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928139] doesn't have tx info 2025-05-29T15:23:10.168457Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:10.168460Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-05-29T15:23:10.168463Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:10.168467Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.168472Z node 4 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928139] doesn't have tx writes info 2025-05-29T15:23:10.168537Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [4:529:2453], now have 1 active actors on pipe 2025-05-29T15:23:10.168547Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:10.168569Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928139] Config update version 12(current 0) received from actor [4:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:10.168889Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:10.168905Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.168967Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928139] Config applied version 12 actor [4:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:10.168978Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:10.169011Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:10.169027Z node 4 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:537:2459] 2025-05-29T15:23:10.169288Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
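Each `[topic:partition:Initializer] Start initializing step ...` line names one stage of a sequential bootstrap pipeline: a fresh partition runs only TInitConfigStep and TInitInternalFieldsStep, while a partition with persisted state (visible further down in this trace) also replays disk, meta, and data-range steps before `Initializing completed.` As a rough model of such a step chain, assuming nothing about the real partition_init.cpp internals:

```cpp
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// A toy initializer: named steps run strictly in order, and the whole
// sequence ends with "Initializing completed." as in the log.
// Step names are taken from the trace; the chaining mechanism is invented.
struct TStep {
    std::string Name;
    std::function<bool()> Run;  // false would abort initialization
};

void RunInitializer(const std::string& partition,
                    const std::vector<TStep>& steps) {
    for (const auto& step : steps) {
        std::cout << "[" << partition << ":Initializer] Start initializing step "
                  << step.Name << "\n";
        if (!step.Run())
            return;
    }
    std::cout << "[" << partition << ":Initializer] Initializing completed.\n";
}

int main() {
    auto ok = [] { return true; };
    // A fresh partition runs only the short chain...
    RunInitializer("rt3.dc1--topic2:2",
                   {{"TInitConfigStep", ok}, {"TInitInternalFieldsStep", ok}});
    // ...while a restarted one with persisted data also replays disk steps.
    RunInitializer("rt3.dc1--topic2:2",
                   {{"TInitConfigStep", ok}, {"TInitInternalFieldsStep", ok},
                    {"TInitDiskStatusStep", ok}, {"TInitMetaStep", ok},
                    {"TInitDataRangeStep", ok}});
}
```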
2025-05-29T15:23:10.169292Z node 4 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:537:2459] 2025-05-29T15:23:10.169296Z node 4 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:10.169298Z node 4 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:10.169366Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [4:540:2461], now have 1 active actors on pipe 2025-05-29T15:23:10.169483Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [4:546:2464], now have 1 active actors on pipe 2025-05-29T15:23:10.169498Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [4:547:2465], now have 1 active actors on pipe 2025-05-29T15:23:10.169521Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [4:548:2465], now have 1 active actors on pipe 2025-05-29T15:23:10.179820Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [4:556:2472], now have 1 active actors on pipe 2025-05-29T15:23:10.184067Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:10.184521Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:10.184565Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928139] doesn't have tx info 2025-05-29T15:23:10.184569Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:10.184590Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:10.184653Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.184657Z node 4 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928139] doesn't have tx writes info 2025-05-29T15:23:10.184669Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:10.184702Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:10.184722Z node 4 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:613:2517] 2025-05-29T15:23:10.185002Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-05-29T15:23:10.185137Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-05-29T15:23:10.185158Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-05-29T15:23:10.185183Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-05-29T15:23:10.185202Z node 4 :PERSQUEUE DEBUG: 
partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-05-29T15:23:10.185204Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:23:10.185207Z node 4 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:10.185210Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-05-29T15:23:10.185214Z node 4 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:613:2517] 2025-05-29T15:23:10.185218Z node 4 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:10.185222Z node 4 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:10.185337Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928138] server disconnected, pipe [4:547:2465] destroyed 2025-05-29T15:23:10.185345Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928037] server disconnected, pipe [4:546:2464] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionOffsetsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "partition is not ready yet" } ErrorCode: OK } } } |63.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |63.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |63.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetTopicMetadata request" ErrorCode: BAD_REQUEST } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:05.822018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single 
parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:05.822045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:05.822051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:05.822057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:05.822069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:05.822074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:05.822084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:05.822103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:05.822215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:05.822280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:05.836872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:05.836897Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:05.839623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:05.839746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:05.839799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:05.841309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:05.841502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:05.841631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.841698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:05.842153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.842188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:05.842447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:05.842457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:05.842481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:05.842489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:05.842496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:05.842529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.843910Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:05.865008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:05.865083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.865156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:05.865210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:05.865222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.865826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.865852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:05.865888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.865898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:05.865904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:05.865910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change 
state for txid 1:0 2 -> 3 2025-05-29T15:23:05.866263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:05.866580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:05.866593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.866600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:05.867324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:05.867773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:05.867824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:05.868015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:05.868047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:05.868055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.868121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:05.868130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:05.868187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-05-29T15:23:05.868203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:05.868702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:05.868712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:05.868745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T1 ... pp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:23:10.020492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:23:10.021028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:23:10.021411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:23:10.021464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:10.021472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:10.021525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:23:10.021574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:10.021580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-05-29T15:23:10.021587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-05-29T15:23:10.021703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:23:10.021728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-05-29T15:23:10.021745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:23:10.021754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, 
operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-05-29T15:23:10.021760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 103:0 129 -> 240 2025-05-29T15:23:10.021914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:23:10.021931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:23:10.021937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:23:10.021943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-05-29T15:23:10.021950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:10.022119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:23:10.022133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:23:10.022137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:23:10.022141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:23:10.022145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:23:10.022155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-05-29T15:23:10.022812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:23:10.022825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:10.022916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:23:10.022957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:23:10.022963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:10.022969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:23:10.022975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:10.022980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-05-29T15:23:10.022993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:410:2375] message: TxId: 103 2025-05-29T15:23:10.022999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:10.023004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:23:10.023008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:23:10.023026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:23:10.023182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:10.023192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:10.023330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:23:10.023639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:23:10.023933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:10.023943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-05-29T15:23:10.023957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:23:10.023963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:662:2593] 2025-05-29T15:23:10.024117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-05-29T15:23:10.024320Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
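The `Part operation is done id#103:0 progress is 1/1` and `TOperation IsReadyToDone ... ready parts: 1/1` lines show the schemeshard declaring an operation complete only once every sub-operation part has reported in, after which waiters get TEvNotifyTxCompletionResult. A minimal sketch of that bookkeeping, with invented names and no claim about the real schemeshard__operation.cpp structure:

```cpp
#include <cstddef>
#include <iostream>

// Toy model: an operation with N parts is ready to notify waiters only
// when all parts are done, echoing "ready parts: 1/1" from the trace.
class TOperation {
public:
    explicit TOperation(size_t parts) : Total(parts) {}

    void PartDone() {
        if (Done < Total)
            ++Done;
        std::cout << "progress is " << Done << "/" << Total << "\n";
    }

    bool IsReadyToNotify() const { return Done == Total; }

private:
    size_t Total;
    size_t Done = 0;
};

int main() {
    TOperation op(1);   // txId 103 has a single part in the log above
    op.PartDone();      // the datashard reports its schema changes applied
    if (op.IsReadyToNotify())
        std::cout << "send TEvNotifyTxCompletionResult\n";
}
```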
2025-05-29T15:23:10.024356Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 46us result status StatusSuccess 2025-05-29T15:23:10.024456Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-05-29T15:23:09.669504Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:09.670305Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:09.670373Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928037] doesn't have tx info 2025-05-29T15:23:09.670383Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:09.670388Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-05-29T15:23:09.670393Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:09.670401Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.670410Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928037] doesn't have tx writes info 2025-05-29T15:23:09.670520Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [2:263:2254], now have 1 active actors on pipe 2025-05-29T15:23:09.670537Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:09.673253Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.673809Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.673838Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.673991Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928037] Config applied version 1 actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.674020Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:09.674082Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:09.674154Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:271:2260] 2025-05-29T15:23:09.674709Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
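The `Assert failed: Check response: { Status: 128 ... }` blocks earlier in this output record the responses these tests receive while probing error paths; each shows a status and ErrorCode (UNKNOWN_TOPIC, BAD_REQUEST, ERROR) being compared against what the scenario should produce. A hedged sketch of such a check, with made-up types standing in for the real protobuf messages:

```cpp
#include <iostream>
#include <string>

// Stand-ins for the real response protobuf; field names follow the log.
struct TResponse {
    int Status = 0;          // 128 appears as the error status in these traces
    std::string ErrorCode;   // e.g. "UNKNOWN_TOPIC", "BAD_REQUEST"
    std::string ErrorReason;
};

// Emulates a test helper: print the familiar diagnostic when the
// response does not match the expectation.
bool CheckResponse(const TResponse& resp, const std::string& expectedCode) {
    if (resp.ErrorCode != expectedCode) {
        std::cout << "Assert failed: Check response: { Status: " << resp.Status
                  << " ErrorReason: \"" << resp.ErrorReason
                  << "\" ErrorCode: " << resp.ErrorCode << " }\n";
        return false;
    }
    return true;
}

int main() {
    TResponse resp{128, "UNKNOWN_TOPIC",
                   "path 'Root/PQ' has unknown/invalid root prefix 'Root', Marker# PQ14"};
    CheckResponse(resp, "BAD_REQUEST");  // mismatch -> prints the assert line
}
```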
2025-05-29T15:23:09.674723Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:271:2260] 2025-05-29T15:23:09.674731Z node 2 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:09.674757Z node 2 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:09.674908Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [2:274:2262], now have 1 active actors on pipe 2025-05-29T15:23:09.683562Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:09.684807Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:09.684886Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928139] doesn't have tx info 2025-05-29T15:23:09.684895Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:09.684899Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-05-29T15:23:09.684905Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:09.684912Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.684921Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928139] doesn't have tx writes info 2025-05-29T15:23:09.685072Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [2:408:2362], now have 1 active actors on pipe 2025-05-29T15:23:09.685082Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:09.685128Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:09.685778Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:09.685807Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.685933Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928139] Config applied version 2 actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 
MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:09.685956Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:09.686026Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:09.686054Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:416:2368] 2025-05-29T15:23:09.686632Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-05-29T15:23:09.686647Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:416:2368] 2025-05-29T15:23:09.686655Z node 2 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:09.686661Z node 2 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:09.686839Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [2:419:2370], now have 1 active actors on pipe 2025-05-29T15:23:09.687081Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [2:425:2373], now have 1 active actors on pipe 2025-05-29T15:23:09.687119Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [2:427:2374], now have 1 active actors on pipe 2025-05-29T15:23:09.687160Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928037] server disconnected, pipe [2:425:2373] destroyed 2025-05-29T15:23:09.687218Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928139] server disconnected, pipe [2:427:2374] destroyed 2025-05-29T15:23:09.903499Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:09.904632Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:09.904717Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928037] doesn't have tx info 2025-05-29T15:23:09.904728Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:09.904733Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-05-29T15:23:09.904741Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:09.904750Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.904763Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928037] doesn't have tx writes info 2025-05-29T15:23:09.904896Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [3:261:2252], now have 1 active actors on pipe 2025-05-29T15:23:09.904916Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:09.904974Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928037] Config update version 3(current 0) received from actor [3:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.905404Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2 ... 
icName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:09.920055Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:09.920070Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.920175Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928138] Config applied version 5 actor [3:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:09.920190Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:09.920227Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:09.920251Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:473:2410] 2025-05-29T15:23:09.920547Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-05-29T15:23:09.920552Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:473:2410] 2025-05-29T15:23:09.920556Z node 3 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:09.920559Z node 3 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:09.920639Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [3:476:2412], now have 1 active actors on pipe 2025-05-29T15:23:09.923146Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:09.923912Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:09.923960Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928139] doesn't have tx info 2025-05-29T15:23:09.923966Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:09.923970Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-05-29T15:23:09.923975Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:09.923980Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.923987Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928139] doesn't have tx writes info 2025-05-29T15:23:09.924080Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [3:525:2449], now have 1 active actors on pipe 2025-05-29T15:23:09.924095Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:09.924124Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928139] Config update version 6(current 0) received from actor [3:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:09.924530Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:09.924553Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.924719Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928139] Config applied version 6 actor [3:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:09.924737Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:09.924798Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:09.924830Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:533:2455] 2025-05-29T15:23:09.925262Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-05-29T15:23:09.925274Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:533:2455] 2025-05-29T15:23:09.925281Z node 3 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:09.925287Z node 3 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:09.925425Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [3:536:2457], now have 1 active actors on pipe 2025-05-29T15:23:09.925642Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [3:543:2461], now have 1 active actors on pipe 2025-05-29T15:23:09.925657Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [3:542:2460], now have 1 active actors on pipe 2025-05-29T15:23:09.925671Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [3:544:2461], now have 1 active actors on pipe 2025-05-29T15:23:09.935922Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [3:549:2465], now have 1 active actors on pipe 2025-05-29T15:23:09.939749Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:09.940208Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:09.940262Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928139] doesn't have tx info 2025-05-29T15:23:09.940268Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:09.940285Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:09.940336Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.940340Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928139] doesn't have tx writes info 2025-05-29T15:23:09.940350Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:09.940382Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:09.940398Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:606:2510] 2025-05-29T15:23:09.940681Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-05-29T15:23:09.940887Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-05-29T15:23:09.940916Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-05-29T15:23:09.940941Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-05-29T15:23:09.940959Z node 3 :PERSQUEUE DEBUG: 
partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-05-29T15:23:09.940962Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:23:09.940965Z node 3 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:09.940968Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-05-29T15:23:09.940972Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:606:2510] 2025-05-29T15:23:09.940976Z node 3 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:09.940980Z node 3 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:09.941083Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928138] server disconnected, pipe [3:543:2461] destroyed 2025-05-29T15:23:09.941104Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928037] server disconnected, pipe [3:542:2460] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 3 ErrorCode: OK } PartitionLocation { Partition: 2 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-05-29T15:23:09.822498Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:09.823037Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:09.823084Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928037] doesn't have tx info 2025-05-29T15:23:09.823090Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:09.823093Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-05-29T15:23:09.823096Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:09.823101Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.823107Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928037] doesn't have tx writes info 2025-05-29T15:23:09.823186Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [2:262:2253], now have 1 active actors on pipe 2025-05-29T15:23:09.823197Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:09.824737Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.825138Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.825158Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.825245Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928037] Config applied version 1 actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:09.825261Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:09.825299Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:09.825336Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:270:2259] 2025-05-29T15:23:09.825721Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-05-29T15:23:09.825728Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:270:2259] 2025-05-29T15:23:09.825735Z node 2 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:09.825743Z node 2 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:09.825850Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [2:273:2261], now have 1 active actors on pipe 2025-05-29T15:23:09.833020Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:09.833665Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:09.833729Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928139] doesn't have tx info 2025-05-29T15:23:09.833736Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:09.833740Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-05-29T15:23:09.833743Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:09.833748Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.833754Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928139] doesn't have tx writes info 2025-05-29T15:23:09.833833Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [2:407:2361], now have 1 active actors on pipe 2025-05-29T15:23:09.833846Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:09.833877Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:09.834203Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:09.834223Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:09.834303Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928139] Config applied version 2 actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 
MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:09.834318Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:09.834368Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:09.834390Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:415:2367] 2025-05-29T15:23:09.834699Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-05-29T15:23:09.834706Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:415:2367] 2025-05-29T15:23:09.834710Z node 2 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:09.834714Z node 2 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:09.834823Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [2:418:2369], now have 1 active actors on pipe 2025-05-29T15:23:09.834971Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [2:424:2372], now have 1 active actors on pipe 2025-05-29T15:23:09.835014Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [2:426:2373], now have 1 active actors on pipe 2025-05-29T15:23:09.835045Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928037] server disconnected, pipe [2:424:2372] destroyed 2025-05-29T15:23:09.835085Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928139] server disconnected, pipe [2:426:2373] destroyed Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> TPQTest::TestSourceIdDropByUserWrites [GOOD] >> TPQTest::TestSourceIdDropBySourceIdCount >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> 
TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "No clientId specified in CmdGetReadSessionsInfo" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TFlatTest::CrossRW >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: 2025-05-29T15:23:10.251571Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:10.252171Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:10.252216Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928037] doesn't have tx info 2025-05-29T15:23:10.252221Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:10.252224Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-05-29T15:23:10.252228Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:10.252234Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.252240Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928037] doesn't have tx writes info 2025-05-29T15:23:10.252325Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [1:261:2252], now have 1 active actors on pipe 2025-05-29T15:23:10.252337Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:10.253958Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [1:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:10.254356Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:10.254375Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.254484Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928037] Config applied version 1 actor [1:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:10.254503Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:10.254541Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:10.254584Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [1:269:2258] 2025-05-29T15:23:10.254987Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-05-29T15:23:10.254994Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [1:269:2258] 2025-05-29T15:23:10.254999Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:10.255003Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:10.255108Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [1:272:2260], now have 1 active actors on pipe 2025-05-29T15:23:10.261356Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:10.262003Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:10.262052Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928137] doesn't have tx info 2025-05-29T15:23:10.262071Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:10.262074Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-05-29T15:23:10.262077Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:10.262082Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.262088Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928137] doesn't have tx writes info 2025-05-29T15:23:10.262165Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928137] server connected, pipe [1:406:2360], now have 1 active actors on pipe 2025-05-29T15:23:10.262171Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:10.262202Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [1:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:10.262523Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:10.262540Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.262634Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928137] Config applied version 2 actor [1:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 
MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:10.262649Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:10.262685Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:10.262703Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [1:414:2366] 2025-05-29T15:23:10.263076Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-05-29T15:23:10.263085Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [1:414:2366] 2025-05-29T15:23:10.263090Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:10.263093Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:10.263192Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928137] server connected, pipe [1:417:2368], now have 1 active actors on pipe 2025-05-29T15:23:10.265412Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:10.266067Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:10.266109Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928138] doesn't have tx info 2025-05-29T15:23:10.266113Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:10.266115Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-05-29T15:23:10.266119Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:10.266124Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.266129Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928138] doesn't have tx writes info 2025-05-29T15:23:10.266201Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [1:466:2405], now have 1 active actors on pipe 2025-05-29T15:23:10.266213Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:10.266241Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [1:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:10.266530Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:10.266546Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.266632Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928138] Config applied version 3 actor [1:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:10.266647Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:1:Initia ... 
UEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928138] Config applied version 11 actor [3:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:10.820205Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:10.820269Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:10.820309Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:472:2409] 2025-05-29T15:23:10.820845Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-05-29T15:23:10.820857Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:472:2409] 2025-05-29T15:23:10.820866Z node 3 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:10.820874Z node 3 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:10.821026Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [3:475:2411], now have 1 active actors on pipe 2025-05-29T15:23:10.824942Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:10.826730Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:10.826843Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928139] doesn't have tx info 2025-05-29T15:23:10.826855Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:10.826862Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-05-29T15:23:10.826868Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:10.826876Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.826887Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928139] doesn't have tx writes info 2025-05-29T15:23:10.827061Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [3:524:2448], now have 1 active actors on pipe 2025-05-29T15:23:10.827084Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:10.827141Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928139] Config update version 12(current 0) received from actor [3:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:10.827792Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:10.827840Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.827970Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928139] Config applied version 12 actor [3:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:10.828002Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:10.828073Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:10.828112Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:532:2454] 2025-05-29T15:23:10.828660Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-05-29T15:23:10.828668Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:532:2454] 2025-05-29T15:23:10.828678Z node 3 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:10.828685Z node 3 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:10.828861Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [3:535:2456], now have 1 active actors on pipe 2025-05-29T15:23:10.829119Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [3:542:2459], now have 1 active actors on pipe 2025-05-29T15:23:10.829222Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928137] server connected, pipe [3:544:2460], now have 1 active actors on pipe 2025-05-29T15:23:10.829235Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [3:545:2460], now have 1 active actors on pipe 2025-05-29T15:23:10.829272Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [3:546:2460], now have 1 active actors on pipe 2025-05-29T15:23:10.829369Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [3:559:2471], now have 1 active actors on pipe 2025-05-29T15:23:10.834452Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:10.835104Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:10.835181Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928139] doesn't have tx info 2025-05-29T15:23:10.835190Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:10.835231Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:10.835324Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.835335Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928139] doesn't have tx writes info 2025-05-29T15:23:10.835357Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:10.835406Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:10.835438Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:616:2516] 2025-05-29T15:23:10.836038Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-05-29T15:23:10.836269Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-05-29T15:23:10.836306Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-05-29T15:23:10.836344Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-05-29T15:23:10.836371Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-05-29T15:23:10.836376Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:23:10.836381Z node 3 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:10.836385Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-05-29T15:23:10.836393Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:616:2516] 2025-05-29T15:23:10.836401Z node 3 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:10.836408Z node 3 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:23:10.836670Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928138] server disconnected, pipe [3:545:2460] destroyed 2025-05-29T15:23:10.836683Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928037] server disconnected, pipe [3:542:2459] destroyed 2025-05-29T15:23:10.836690Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928137] server disconnected, pipe [3:544:2460] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } ErrorCode: OK } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:10.630597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:10.630626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:10.630633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:10.630639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:10.630651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:10.630655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:10.630666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:10.630681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:10.630819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources 
configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:10.630896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:10.645221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:10.645250Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:10.648056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:10.648190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:10.648245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:10.649982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:10.650190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:10.650302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:10.650384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:10.650871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:10.650916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:10.651197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:10.651211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:10.651236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:10.651245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:10.651251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:10.651288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:10.652805Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:10.666397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:10.666473Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:10.666543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:10.666584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:10.666593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:10.667455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:10.667481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:10.667534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:10.667542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:10.667546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:10.667551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:10.667951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:10.667962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:10.667969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:10.668306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:10.668317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:10.668323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:10.668329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:10.668797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-05-29T15:23:10.669118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:10.669150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:10.669291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:10.669314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:10.669320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:10.669356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:10.669360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:10.669385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:10.669393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:10.669721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:10.669727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:10.669764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
5:23:10.910300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:10.910310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:10.910315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:10.910330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:10.910333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:10.910484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-05-29T15:23:10.910769Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 2025-05-29T15:23:10.911083Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:23:10.911153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-05-29T15:23:10.911240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:23:10.911319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:10.911346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409550 2025-05-29T15:23:10.911658Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409546 2025-05-29T15:23:10.911804Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-05-29T15:23:10.912009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:10.912063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:23:10.912225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-05-29T15:23:10.912349Z 
node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409548 2025-05-29T15:23:10.912794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:10.912844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-05-29T15:23:10.913004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 Forgetting tablet 72075186233409549 2025-05-29T15:23:10.913122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:23:10.913148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:23:10.913306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:10.913313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:23:10.913326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:10.913384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:10.913390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:10.913412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:10.913942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:5 2025-05-29T15:23:10.913954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-05-29T15:23:10.913971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:23:10.913975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:23:10.914047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:23:10.914052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 
72057594046678944:3 tabletId 72075186233409548 2025-05-29T15:23:10.914426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:23:10.914434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:23:10.914447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:23:10.914453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-29T15:23:10.914527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:10.914548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:10.914553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:10.914579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:10.914884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:10.915184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-05-29T15:23:10.915263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-05-29T15:23:10.915270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-05-29T15:23:10.915347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-05-29T15:23:10.915367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-05-29T15:23:10.915372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:915:2812] TestWaitNotification: OK eventTxId 106 2025-05-29T15:23:10.915466Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:10.915514Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 60us result status StatusSuccess 2025-05-29T15:23:10.915609Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-05-29T15:23:10.723690Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:10.724294Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:10.724338Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928037] doesn't have tx info 2025-05-29T15:23:10.724345Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:10.724348Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-05-29T15:23:10.724353Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:10.724359Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.724367Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928037] doesn't have tx writes info 2025-05-29T15:23:10.724455Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [3:260:2251], now have 1 active actors on pipe 2025-05-29T15:23:10.724468Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:10.726056Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [3:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:10.726431Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:10.726452Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.726550Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928037] Config applied version 1 actor [3:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:10.726565Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:10.726611Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:10.726651Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [3:268:2257] 2025-05-29T15:23:10.727046Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
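In the PERSQUEUE bootstrap above, each config change is logged twice: once when TEvUpdateConfig arrives ("Config update version N(current M) received") and once when it is persisted ("Config applied version N"). A quick consistency check over such a log, sketched below under the assumption that the message shapes match these lines exactly (the helper is editorial, not part of the test harness):

```python
import re

# Editorial sketch: pair every "Config update version N(current M)" a
# tablet receives with the matching "Config applied version N" line.
UPDATE = re.compile(r"\[PQ: (\d+)\] Config update version (\d+)\(current \d+\)")
APPLIED = re.compile(r"\[PQ: (\d+)\] Config applied version (\d+)")

def unapplied_updates(log_text):
    """Return (tablet_id, version) pairs updated but never applied."""
    updated = {m.groups() for m in UPDATE.finditer(log_text)}
    applied = {m.groups() for m in APPLIED.finditer(log_text)}
    return sorted(updated - applied)

# For the healthy excerpt above this returns [] -- every received update
# version is followed by its "Config applied" confirmation.
```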
2025-05-29T15:23:10.727053Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [3:268:2257] 2025-05-29T15:23:10.727060Z node 3 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:10.727064Z node 3 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:10.727163Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [3:271:2259], now have 1 active actors on pipe 2025-05-29T15:23:10.733438Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:10.734327Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:10.734387Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928137] doesn't have tx info 2025-05-29T15:23:10.734396Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:10.734400Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-05-29T15:23:10.734407Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:10.734414Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.734423Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928137] doesn't have tx writes info 2025-05-29T15:23:10.734528Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928137] server connected, pipe [3:406:2360], now have 1 active actors on pipe 2025-05-29T15:23:10.734537Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:10.734582Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [3:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:10.735163Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:10.735202Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.735318Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928137] Config applied version 2 actor [3:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 
MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:10.735344Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:10.735416Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:10.735456Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [3:414:2366] 2025-05-29T15:23:10.735949Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-05-29T15:23:10.735963Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [3:414:2366] 2025-05-29T15:23:10.735994Z node 3 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:10.736001Z node 3 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:10.736164Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928137] server connected, pipe [3:417:2368], now have 1 active actors on pipe 2025-05-29T15:23:10.740077Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:10.741193Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:10.741282Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928138] doesn't have tx info 2025-05-29T15:23:10.741294Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:10.741299Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-05-29T15:23:10.741306Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:10.741315Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.741327Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928138] doesn't have tx writes info 2025-05-29T15:23:10.741496Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [3:466:2405], now have 1 active actors on pipe 2025-05-29T15:23:10.741510Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:10.741583Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [3:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:10.742199Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:10.742231Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.742345Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928138] Config applied version 3 actor [3:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: ... 
conds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:10.981305Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:10.981325Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.981408Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928138] Config applied version 7 actor [4:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-05-29T15:23:10.981424Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:10.981467Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:10.981491Z node 4 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [4:477:2414] 2025-05-29T15:23:10.981811Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-05-29T15:23:10.981817Z node 4 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:477:2414] 2025-05-29T15:23:10.981823Z node 4 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:10.981828Z node 4 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:10.981913Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [4:480:2416], now have 1 active actors on pipe 2025-05-29T15:23:10.984900Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:10.985699Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:10.985766Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928139] doesn't have tx info 2025-05-29T15:23:10.985775Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:10.985780Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-05-29T15:23:10.985787Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:10.985794Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.985804Z node 4 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928139] doesn't have tx writes info 2025-05-29T15:23:10.985941Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [4:529:2453], now have 1 active actors on pipe 2025-05-29T15:23:10.985962Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:10.986014Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928139] Config update version 8(current 0) received from actor [4:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:10.986514Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:10.986542Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.986634Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928139] Config applied version 8 actor [4:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:10.986657Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:10.986700Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:10.986729Z node 4 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:537:2459] 2025-05-29T15:23:10.987279Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
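The "init complete ... generation N" lines record which incarnation of a partition actor came up, so a tablet restart shows as a generation bump: further down, partition 2 of tablet 72057594037928139 re-initializes as generation 3 after first coming up as generation 2. A sketch for tracking those bumps across a log like this one, again assuming the exact message format shown (regex and helper are editorial):

```python
import re

# Editorial sketch: record the generation each (tablet, partition) pair
# reports at "init complete", assuming the exact format in the log above.
INIT_DONE = re.compile(
    r"\[PQ: (\d+), Partition: (\d+), State: StateInit\] init complete "
    r"for topic '[^']+' partition \d+ generation (\d+)"
)

def generations(log_text):
    """Map (tablet_id, partition_id) -> list of generations in log order."""
    out = {}
    for m in INIT_DONE.finditer(log_text):
        out.setdefault((m.group(1), m.group(2)), []).append(int(m.group(3)))
    return out

# A value like [2, 3] for one key means that partition actor was
# re-initialized (e.g. after a tablet restart) between the two entries.
```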
2025-05-29T15:23:10.987292Z node 4 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:537:2459] 2025-05-29T15:23:10.987301Z node 4 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:10.987309Z node 4 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:10.987468Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [4:540:2461], now have 1 active actors on pipe 2025-05-29T15:23:10.987711Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [4:546:2464], now have 1 active actors on pipe 2025-05-29T15:23:10.987729Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [4:547:2465], now have 1 active actors on pipe 2025-05-29T15:23:10.987748Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [4:548:2465], now have 1 active actors on pipe 2025-05-29T15:23:10.998064Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [4:553:2469], now have 1 active actors on pipe 2025-05-29T15:23:11.002782Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:11.003334Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:11.003401Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928139] doesn't have tx info 2025-05-29T15:23:11.003411Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:11.003453Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:11.003553Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:11.003561Z node 4 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928139] doesn't have tx writes info 2025-05-29T15:23:11.003587Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:11.003645Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:11.003669Z node 4 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:610:2514] 2025-05-29T15:23:11.003994Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-05-29T15:23:11.004199Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-05-29T15:23:11.004240Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-05-29T15:23:11.004282Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-05-29T15:23:11.004311Z node 4 :PERSQUEUE DEBUG: 
partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-05-29T15:23:11.004316Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:23:11.004322Z node 4 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:11.004330Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-05-29T15:23:11.004336Z node 4 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:610:2514] 2025-05-29T15:23:11.004342Z node 4 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:11.004349Z node 4 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:11.004535Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928138] server disconnected, pipe [4:547:2465] destroyed 2025-05-29T15:23:11.004548Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928037] server disconnected, pipe [4:546:2464] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 4 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 4 ErrorCode: OK } PartitionLocation { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "Tablet for that partition is not running" } ErrorCode: OK } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice >> TSchemeShardSubDomainTest::Create >> TSchemeShardSubDomainTest::SchemeQuotas >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] >> TFlatTest::CrossRW [GOOD] >> TFlatTest::GetTabletCounters >> TSchemeShardSubDomainTest::Redefine >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic 
\'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:11.143443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:11.143472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.143476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:11.143480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:11.143488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:11.143491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:11.143498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.143508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:11.143596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:11.143651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:11.153160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:11.153181Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:11.155188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:11.155306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:11.155353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:11.156925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:11.157054Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:11.157135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.157186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:11.157526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.157564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:11.157753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.157760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.157774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:11.157779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.157783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:11.157806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.158712Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:11.174475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:11.174549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.174626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:11.174682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:11.174694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.175534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.175564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:11.175615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.175626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:11.175633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:11.175638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:11.176090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.176100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:11.176106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:11.176479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.176489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.176495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.176503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:11.177151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:11.177556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:11.177599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:11.177802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.177826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 
AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:11.177833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.177888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:11.177896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.177927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:11.177939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:11.178307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.178313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.178348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... pId# 101:0 ProgressState 2025-05-29T15:23:11.191877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:11.191881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:11.191886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:11.191889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:11.191894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-05-29T15:23:11.191899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:11.191904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:23:11.191908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:23:11.191919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:23:11.191925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-05-29T15:23:11.191930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, 
LocalPathId: 2], 5 2025-05-29T15:23:11.191933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-05-29T15:23:11.192043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:11.192055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:11.192060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:11.192067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-05-29T15:23:11.192072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:11.192168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:11.192180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:11.192184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:11.192188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-05-29T15:23:11.192192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:23:11.192202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-05-29T15:23:11.193040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:11.193113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-05-29T15:23:11.193167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:23:11.193175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 
2025-05-29T15:23:11.193242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:23:11.193261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:23:11.193266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:338:2328] TestWaitNotification: OK eventTxId 101 2025-05-29T15:23:11.193346Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:11.193375Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 41us result status StatusSuccess 2025-05-29T15:23:11.193526Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:11.193638Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:11.193680Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 45us result status StatusSuccess 2025-05-29T15:23:11.193737Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain 
CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:11.193787Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/MyDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:11.193805Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/MyDir" took 20us result status StatusSuccess 2025-05-29T15:23:11.193841Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/MyDir" PathDescription { Self { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD]
Test command err:
Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-05-29T15:23:10.728312Z node 2 :PERSQUEUE DEBUG:
pq_impl.cpp:3098: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:10.728836Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:10.728879Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928037] doesn't have tx info 2025-05-29T15:23:10.728884Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:10.728887Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-05-29T15:23:10.728890Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:10.728895Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.728901Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928037] doesn't have tx writes info 2025-05-29T15:23:10.728976Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [2:263:2254], now have 1 active actors on pipe 2025-05-29T15:23:10.728985Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:10.730522Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:10.730895Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:10.730913Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.731018Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928037] Config applied version 1 actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-05-29T15:23:10.731034Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:10.731071Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:10.731110Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:271:2260] 2025-05-29T15:23:10.731460Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-05-29T15:23:10.731465Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:271:2260] 2025-05-29T15:23:10.731470Z node 2 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:10.731474Z node 2 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:10.731556Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [2:274:2262], now have 1 active actors on pipe 2025-05-29T15:23:10.741521Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:10.742497Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:10.742559Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928139] doesn't have tx info 2025-05-29T15:23:10.742566Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:10.742570Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-05-29T15:23:10.742576Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:10.742584Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.742592Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928139] doesn't have tx writes info 2025-05-29T15:23:10.742710Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [2:408:2362], now have 1 active actors on pipe 2025-05-29T15:23:10.742728Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:10.742799Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:10.743283Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:10.743307Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:10.743412Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037928139] Config applied version 2 actor [2:100:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 
MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-05-29T15:23:10.743436Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:10.743490Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:10.743520Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:416:2368] 2025-05-29T15:23:10.744028Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-05-29T15:23:10.744040Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:416:2368] 2025-05-29T15:23:10.744047Z node 2 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:10.744055Z node 2 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-05-29T15:23:10.744204Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [2:419:2370], now have 1 active actors on pipe 2025-05-29T15:23:10.744431Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [2:425:2373], now have 1 active actors on pipe 2025-05-29T15:23:10.744460Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [2:427:2374], now have 1 active actors on pipe 2025-05-29T15:23:10.744494Z node 2 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:10.744545Z node 2 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:10.744585Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928037] server disconnected, pipe [2:425:2373] destroyed 2025-05-29T15:23:10.744630Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928139] server disconnected, pipe [2:427:2374] destroyed 2025-05-29T15:23:11.018892Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:11.019846Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:11.019903Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928037] doesn't have tx info 2025-05-29T15:23:11.019908Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:11.019911Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-05-29T15:23:11.019915Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:11.019920Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:11.019927Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928037] doesn't have tx writes info 2025-05-29T15:23:11.020032Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [3:260:2251], now have 1 active actors on pipe 2025-05-29T15:23:11.020052Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:23:11.020088Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037928037] Config update version 3(current 0) received from actor [3:100:2134] txId 12345 config: CacheS ... 2, State: StateInit] bootstrapping 2 [3:534:2456] 2025-05-29T15:23:11.045587Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-05-29T15:23:11.045593Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:534:2456] 2025-05-29T15:23:11.045598Z node 3 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:11.045601Z node 3 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:23:11.045699Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [3:537:2458], now have 1 active actors on pipe 2025-05-29T15:23:11.045851Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928037] server connected, pipe [3:543:2461], now have 1 active actors on pipe 2025-05-29T15:23:11.045865Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928138] server connected, pipe [3:544:2462], now have 1 active actors on pipe 2025-05-29T15:23:11.045908Z node 3 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:11.045933Z node 3 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:11.045939Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [3:545:2462], now have 1 active actors on pipe 2025-05-29T15:23:11.045962Z node 3 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:11.056281Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037928139] server connected, pipe [3:553:2469], now have 1 active actors on pipe 2025-05-29T15:23:11.061692Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:23:11.062387Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:23:11.062478Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037928139] doesn't have tx info 2025-05-29T15:23:11.062490Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:23:11.062519Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:23:11.062582Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:11.062586Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037928139] doesn't have tx writes info 2025-05-29T15:23:11.062602Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:23:11.062640Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:23:11.062657Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:610:2514] 2025-05-29T15:23:11.062964Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-05-29T15:23:11.063134Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-05-29T15:23:11.063160Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-05-29T15:23:11.063187Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-05-29T15:23:11.063203Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-05-29T15:23:11.063206Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-05-29T15:23:11.063209Z node 3 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:11.063212Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-05-29T15:23:11.063216Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:610:2514] 2025-05-29T15:23:11.063220Z node 3 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:23:11.063224Z node 3 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:23:11.063364Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928138] server disconnected, pipe [3:544:2462] destroyed 2025-05-29T15:23:11.063372Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2907: [PQ: 72057594037928037] server disconnected, pipe [3:543:2461] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 38 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 38 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 78 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 78 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 92 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 92 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 
Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } }
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2
>> TSchemeShardSubDomainTest::SetSchemeLimits
>> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD]
>> TSchemeShardSubDomainTest::Create [GOOD]
>> TSchemeShardSubDomainTest::CreateAlterNbsChannels
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25
>> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD]
>> YdbIndexTable::OnlineBuild
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:11.444794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:11.444819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.444826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:11.444831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:11.444844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:11.444849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:11.444859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.444870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:11.444957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:11.445006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:11.456813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to
console configs 2025-05-29T15:23:11.456835Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:11.459274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:11.459373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:11.459426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:11.461267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:11.461435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:11.461574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.461647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:11.462206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.462256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:11.462522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.462533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.462556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:11.462565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.462571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:11.462607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.464076Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:11.484475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:11.484536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.484599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:11.484635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:11.484643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.485277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.485299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:11.485332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.485339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:11.485342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:11.485346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:11.485723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.485733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:11.485736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:11.486104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.486117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.486124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.486131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:11.486585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:11.487132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:11.487182Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:11.487385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.487415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:11.487423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.487480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:11.487487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.487522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:11.487535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:11.488023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.488032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.488066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
e 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.503475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-05-29T15:23:11.503481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-05-29T15:23:11.503553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.503562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:416: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-05-29T15:23:11.503573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:11.503577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:11.503582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:11.503586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:11.503592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-05-29T15:23:11.503596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:11.503601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:23:11.503605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:23:11.503618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:11.503624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-05-29T15:23:11.503629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-05-29T15:23:11.503632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-05-29T15:23:11.503735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:11.503747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:11.503750Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:11.503754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:23:11.503756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:11.503829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:11.503836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:11.503839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:11.503842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:23:11.503844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:11.503851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-05-29T15:23:11.503893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:11.503898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:11.503912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:11.504104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:11.504110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:11.504119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:11.504377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:11.504614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:11.504628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:11.504636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-05-29T15:23:11.504668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:23:11.504672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-05-29T15:23:11.504726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:23:11.504742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:23:11.504746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:342:2332] TestWaitNotification: OK eventTxId 101 2025-05-29T15:23:11.504807Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:11.504830Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 33us result status StatusPathDoesNotExist 2025-05-29T15:23:11.504869Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:23:11.504919Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:11.504935Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 16us result status StatusSuccess 2025-05-29T15:23:11.504987Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::Redefine [GOOD]
>> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex
>> TFlatTest::GetTabletCounters [GOOD]
>> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD]
>> YdbIndexTable::MultiShardTableOneIndex
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD]
>> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:11.664823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:11.664848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.664854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:11.664860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:11.664871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:11.664875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:11.664885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s,
InflightLimit# 10 2025-05-29T15:23:11.664900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:11.665022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:11.665084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:11.676299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:11.676316Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:11.677971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:11.678052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:11.678092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:11.679188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:11.679308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:11.679378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.679427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:11.679732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.679757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:11.679941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.679948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.679964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:11.679970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.679974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:11.679997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.680870Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 
2025-05-29T15:23:11.693664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:11.693747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.693821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:11.693870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:11.693883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.694571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.694601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:11.694641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.694652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:11.694657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:11.694663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:11.695092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.695104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:11.695110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:11.695482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.695493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.695500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.695507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:11.696189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:11.696567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:11.696600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:11.696776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.696800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:11.696808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.696867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:11.696874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.696905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:11.696916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:11.697284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.697293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.697345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
ons { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:11.704581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-05-29T15:23:11.704632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 101:0 128 -> 240 2025-05-29T15:23:11.704638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-05-29T15:23:11.704661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:11.704669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:11.704677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-05-29T15:23:11.705057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.705065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.705094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:11.705108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.705113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-05-29T15:23:11.705120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-05-29T15:23:11.705168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.705175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState 2025-05-29T15:23:11.705187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:11.705191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:11.705196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:11.705200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:11.705204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-05-29T15:23:11.705209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:11.705214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:23:11.705218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:23:11.705228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:11.705233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-05-29T15:23:11.705238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-05-29T15:23:11.705241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-05-29T15:23:11.705352Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:11.705363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:11.705368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:11.705372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:23:11.705376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:11.705461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:11.705471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:11.705475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:11.705479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 
72057594046678944, LocalPathId: 2], version: 3 2025-05-29T15:23:11.705483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:11.705491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-05-29T15:23:11.706058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:11.706081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-05-29T15:23:11.706845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:11.706883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: /MyRoot/SomeDatabase, opId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.706924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, at schemeshard: 72057594046678944 2025-05-29T15:23:11.707343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:11.707374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. 
Existing storage kinds are: pool-kind-1, pool-kind-2, operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-05-29T15:23:11.707428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:23:11.707443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-05-29T15:23:11.707458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:23:11.707461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:23:11.707522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:23:11.707542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:23:11.707548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:314:2304] 2025-05-29T15:23:11.707574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:23:11.707594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:23:11.707598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:314:2304] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 >> YdbIndexTable::MultiShardTableOneUniqIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:11.779217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:11.779239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.779244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:11.779247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:11.779262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:11.779265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:11.779271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.779282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:11.779374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:11.779427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:11.788044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:11.788063Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:11.790007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:11.790100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:11.790140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:11.791526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:11.791713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:11.791817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.791890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:11.792250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.792278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:11.792478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.792485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.792500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:11.792505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.792509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:11.792532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.793642Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:11.806271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:11.806333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.806394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:11.806431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:11.806440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.807213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.807241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:11.807292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.807302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:11.807309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:11.807314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:11.807707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.807717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:11.807722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:11.808153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.808174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
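The records that follow repeat the cycle every operation in these logs goes through: TTxOperationPropose completes with StatusAccepted, DoPropose hands the transaction to the (here fake) coordinator, the coordinator plans it at a step ("FAKE_COORDINATOR: Add transaction: 1 at step: 5000001"), TEvOperationPlan drives the state to 240, and TTxPublishToSchemeBoard publishes the affected paths until every TEvUpdateAck arrives. A condensed per-transaction timeline can be recovered with literal matches on those record prefixes; a sketch under the same caveat as above (assumed helper, not project tooling):

import re

# Literal record prefixes taken from the log above; each regex captures the
# txId so milestones can be grouped per transaction.
MILESTONES = [
    ("proposed",  re.compile(r"TTxOperationPropose Complete, txId: (\d+)")),
    ("planned",   re.compile(r"FAKE_COORDINATOR: Add transaction: (\d+) at step: \d+")),
    ("published", re.compile(r"TTxPublishToSchemeBoard DescribePath, at schemeshard: \d+, txId: (\d+)")),
    ("acked",     re.compile(r"AckPublish, at schemeshard: \d+, txId: (\d+)")),
]

def timeline(text):
    """Return [(offset, milestone, txid)] sorted by position in the log."""
    events = []
    for name, rx in MILESTONES:
        events += [(m.start(), name, int(m.group(1))) for m in rx.finditer(text)]
    return sorted(events)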
2025-05-29T15:23:11.808180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.808188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:11.808797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:11.809248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:11.809289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:11.809474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.809499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:11.809507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.809575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:11.809581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.809614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:11.809625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:11.810154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.810169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.810232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
46678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:23:11.810693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-05-29T15:23:11.810699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-05-29T15:23:11.810702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:11.810714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-05-29T15:23:11.811482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-05-29T15:23:11.811607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-05-29T15:23:11.811774Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [1:272:2262] Bootstrap 2025-05-29T15:23:11.814056Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [1:272:2262] Become StateWork (SchemeCache [1:277:2267]) 2025-05-29T15:23:11.814794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Name: "USER_1" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:11.814878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_1, opId: 100:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.814899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, at schemeshard: 72057594046678944 2025-05-29T15:23:11.815097Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:272:2262] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:23:11.816415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with coordinators, but no mediators" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:11.816461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, operation: CREATE DATABASE, path: /MyRoot/USER_1 2025-05-29T15:23:11.816600Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 
72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 2025-05-29T15:23:11.817262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Mediators: 1 Name: "USER_2" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:11.817329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_2, opId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.817347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, at schemeshard: 72057594046678944 2025-05-29T15:23:11.817886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with mediators, but no coordinators" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:11.817917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, operation: CREATE DATABASE, path: /MyRoot/USER_2 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-05-29T15:23:11.817978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-05-29T15:23:11.817999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-05-29T15:23:11.818016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:23:11.818020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-05-29T15:23:11.818100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-05-29T15:23:11.818123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-05-29T15:23:11.818129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:291:2281] 2025-05-29T15:23:11.818161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:23:11.818183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:23:11.818187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:291:2281] TestWaitNotification: OK eventTxId 100 
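Both rejected transactions in this test fail validation before any state change: IgniteOperation sets StatusInvalidParameter and the only durable trace is the one-line audit record ("AUDIT: txId: ... status: ... reason: ..."). Those records are the quickest way to see why txIds 100 and 101 were refused; a last illustrative extractor, again an assumed helper rather than anything shipped with YDB:

import re

# Shape observed in the log: "AUDIT: txId: <n>, ..., status: <Status>,
# reason: <text>, operation: <OP>, ..." (reason is absent for accepted
# operations, so that group is optional).
AUDIT = re.compile(
    r"AUDIT: txId: (\d+),.*?status: (\w+)(?:, reason: (.*?))?, operation: "
)

def audit_records(text):
    for tx, status, reason in AUDIT.findall(text):
        yield int(tx), status, reason or None

Run over the output above, it yields (100, StatusInvalidParameter, "Malformed subdomain request: cant create subdomain with coordinators, but no mediators") and the mirror-image rejection for txId 101.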
TestWaitNotification: OK eventTxId 101 2025-05-29T15:23:11.818257Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:11.818290Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 39us result status StatusPathDoesNotExist 2025-05-29T15:23:11.818337Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:23:11.818420Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:11.818436Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 18us result status StatusPathDoesNotExist 2025-05-29T15:23:11.818454Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:23:11.818513Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:11.818541Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 29us result status StatusSuccess 2025-05-29T15:23:11.818629Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:11.872393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:11.872416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.872420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:11.872424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:11.872433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:11.872435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:11.872442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.872452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:11.872539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:11.872590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:11.887160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:11.887199Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:11.890340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:11.890452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:11.890498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:11.892614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:11.892816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:11.892933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.892995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:11.893635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.893686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:11.893983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.893999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.894024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:11.894034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.894041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:11.894077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.895686Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:11.919497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:11.919573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.919655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:11.919705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:11.919717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.920485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.920514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:11.920560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.920570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:11.920575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:11.920580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:11.921029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.921042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:11.921048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:11.921548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.921562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.921569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.921576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:11.922290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet 
{ TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:11.922757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:11.922817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:11.923018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.923047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:11.923055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.923112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:11.923120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.923151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:11.923171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:11.923885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.923896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.923941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
lish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:23:12.004845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-05-29T15:23:12.005259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:12.005270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:12.005311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:12.005331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:12.005337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:339:2314], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-05-29T15:23:12.005343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:339:2314], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-05-29T15:23:12.005420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-05-29T15:23:12.005430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 100:0 ProgressState 2025-05-29T15:23:12.005444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#100:0 progress is 1/1 2025-05-29T15:23:12.005448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:12.005454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#100:0 progress is 1/1 2025-05-29T15:23:12.005457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:12.005463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-05-29T15:23:12.005468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:12.005474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 100:0 2025-05-29T15:23:12.005478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 100:0 2025-05-29T15:23:12.005507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:12.005514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 100, publications: 2, 
subscribers: 0 2025-05-29T15:23:12.005518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-05-29T15:23:12.005522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-05-29T15:23:12.005686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:12.005700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:12.005706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-05-29T15:23:12.005711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:23:12.005716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:12.005823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:12.005835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:12.005842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-05-29T15:23:12.005847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-29T15:23:12.005851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:23:12.005861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-05-29T15:23:12.006508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-05-29T15:23:12.006720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-05-29T15:23:12.006804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 100: send 
EvNotifyTxCompletion 2025-05-29T15:23:12.006821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-05-29T15:23:12.006883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-05-29T15:23:12.006904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-05-29T15:23:12.006911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:486:2433] TestWaitNotification: OK eventTxId 100 2025-05-29T15:23:12.007003Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:12.007038Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 46us result status StatusSuccess 2025-05-29T15:23:12.007132Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 3 ShardsInside: 2 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 DatabaseQuotas { data_stream_shards_quota: 3 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:12.007196Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:12.007213Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 19us result status StatusSuccess 2025-05-29T15:23:12.007268Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 3 ShardsInside: 0 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:11.672855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:11.672876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.672882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:11.672887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:11.672897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:11.672901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:11.672910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.672924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-05-29T15:23:11.673035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:11.673103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:11.681684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:11.681698Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:11.683426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:11.683505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:11.683540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:11.685130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:11.685275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:11.685375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.685437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:11.685878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.685911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:11.686143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.686152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.686175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:11.686183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.686189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:11.686219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.687491Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:11.708091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:11.708150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.708207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:11.708251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:11.708263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.708889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.708907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:11.708939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.708948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:11.708954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:11.708959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:11.709311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.709319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:11.709325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:11.709623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.709632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.709638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.709645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:11.710312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:11.710645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:11.710677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:11.710847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.710869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:11.710876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.710926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:11.710933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.710957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:11.710967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:11.711349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.711357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.711386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 1 2025-05-29T15:23:11.722799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:279:2269] 2025-05-29T15:23:11.723170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:11.723181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:11.723186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:11.723190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:11.723195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:11.723199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:11.724222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:23:11.724677Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 2025-05-29T15:23:11.740188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-05-29T15:23:11.740330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-05-29T15:23:11.740442Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-05-29T15:23:11.740509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.740543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-05-29T15:23:11.740619Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2025-05-29T15:23:11.740683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:11.740708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:11.740789Z node 1 :HIVE INFO: 
tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 2025-05-29T15:23:11.740830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-05-29T15:23:11.740855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:23:11.740892Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2025-05-29T15:23:11.740946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:11.740967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:11.741045Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2025-05-29T15:23:11.741084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:23:11.741106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:11.741141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:11.741149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:11.741176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:11.741621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:23:11.741668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:11.741677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:11.741693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:11.741724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:23:11.741729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- 
TTxNotificationSubscriber for txId 102: satisfy waiter [1:280:2270] 2025-05-29T15:23:11.741896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:5 2025-05-29T15:23:11.742674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:23:11.742696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:23:11.742710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:6 2025-05-29T15:23:11.742719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:23:11.742730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:23:11.742796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:11.742810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-05-29T15:23:11.742928Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:11.742974Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 55us result status StatusPathDoesNotExist 2025-05-29T15:23:11.743030Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:23:11.743097Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:11.743127Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 32us result status StatusSuccess 2025-05-29T15:23:11.743220Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Redefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:11.827444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:11.827473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.827479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:11.827485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:11.827496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:11.827501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:11.827510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.827526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-05-29T15:23:11.827648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:11.827718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:11.839607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:11.839630Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:11.842282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:11.842378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:11.842422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:11.843693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:11.843803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:11.843897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.843972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:11.844369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.844405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:11.844634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.844642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.844660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:11.844666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.844670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:11.844697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.845706Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:11.858614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:11.858680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.858767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:11.858808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:11.858818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.859513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.859538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:11.859578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.859586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:11.859589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:11.859593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:11.859946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.859955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:11.859958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:11.860256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.860266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.860270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.860275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:11.860728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:11.861031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:11.861062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:11.861200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.861217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:11.861222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.861258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:11.861263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.861285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:11.861293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:11.861668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.861677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.861718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
heme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-05-29T15:23:11.926921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:23:11.926926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:11.927037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:23:11.927047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:23:11.927050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-05-29T15:23:11.927053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:23:11.927057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:11.927065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-05-29T15:23:11.927648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:11.927660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:11.927664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:11.927927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-05-29T15:23:11.928039Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:23:11.928078Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-05-29T15:23:11.928279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.928324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 
4 Forgetting tablet 72075186233409546 2025-05-29T15:23:11.928447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:11.928463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:11.928512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 Forgetting tablet 72075186233409548 2025-05-29T15:23:11.928540Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2025-05-29T15:23:11.928650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:11.928668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:11.928769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:11.928777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:11.928799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:11.928840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:11.928845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:11.928851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:11.929153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:23:11.929167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:23:11.929243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:23:11.929249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-29T15:23:11.929591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:23:11.929602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe 
to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:23:11.929630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:11.929638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-05-29T15:23:11.929685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-05-29T15:23:11.929691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-05-29T15:23:11.929740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-05-29T15:23:11.929753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-29T15:23:11.929756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:576:2529] TestWaitNotification: OK eventTxId 104 2025-05-29T15:23:11.929829Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:11.929861Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 41us result status StatusPathDoesNotExist 2025-05-29T15:23:11.929900Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:23:11.929962Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:11.929985Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 25us result status StatusSuccess 2025-05-29T15:23:11.930047Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:11.632290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:11.632312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.632317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:11.632321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:11.632329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:11.632332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:11.632339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.632350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:11.632449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:11.632512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:11.640635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:11.640651Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:11.642324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:11.642402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:11.642444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:11.644145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:11.644324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:11.644445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.644526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:11.645062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.645116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:11.645343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.645351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.645367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:11.645373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.645377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:11.645404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.646424Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:11.662283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain 
{ Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:11.662367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.662441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:11.662483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:11.662493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.663256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.663288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:11.663343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.663355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:11.663361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:11.663367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:11.663896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.663911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:11.663915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:11.664480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.664499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.664507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.664516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:11.665189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:11.665625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:11.665660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:11.665804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.665826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:11.665834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.665886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:11.665891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.665919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:11.665930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:11.666374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.666384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.666428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
12.119467Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-05-29T15:23:12.119537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:12.119543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:12.119547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:12.119551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:12.119868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:23:12.120008Z node 2 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:23:12.120057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:12.120132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:23:12.120580Z node 2 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409546 2025-05-29T15:23:12.184050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:12.184594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:23:12.184831Z node 2 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-05-29T15:23:12.185048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:12.185103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:12.185340Z node 2 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-05-29T15:23:12.185411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 
72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:23:12.185454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2025-05-29T15:23:12.185804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:12.185814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:23:12.185831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-05-29T15:23:12.185968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:12.185975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:12.186004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:12.186174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:23:12.186214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:23:12.187795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:23:12.187812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:23:12.187840Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:23:12.187844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-29T15:23:12.188471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:23:12.188539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:23:12.188555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:23:12.188562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-29T15:23:12.188589Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:12.188620Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:12.188632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:12.188637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:12.188659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:12.189039Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-05-29T15:23:12.189112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-05-29T15:23:12.189121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-05-29T15:23:12.189209Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-05-29T15:23:12.189228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-29T15:23:12.189235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:655:2605] TestWaitNotification: OK eventTxId 105 2025-05-29T15:23:12.189324Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:12.189370Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 62us result status StatusPathDoesNotExist 2025-05-29T15:23:12.189422Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:23:12.189500Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:12.189515Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 17us result status StatusPathDoesNotExist 2025-05-29T15:23:12.189550Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |63.2%| [TA] $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::GetTabletCounters [GOOD] Test command err: 2025-05-29T15:23:11.199850Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888577639245447:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:11.200114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027d7/r3tmp/tmp44OVi4/pdisk_1.dat 2025-05-29T15:23:11.272686Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:11.273317Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888577639245427:2079] 1748532191199635 != 1748532191199638 TClient is connected to server localhost:15696 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:11.303703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:11.327240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:11.346536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:11.346557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:11.347699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:11.698032Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888576817523090:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:11.698055Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027d7/r3tmp/tmpnOeJuw/pdisk_1.dat 2025-05-29T15:23:11.715446Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:11.715680Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888576817523066:2079] 1748532191697915 != 1748532191697918 TClient is connected to server localhost:6371 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:11.801721Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:11.801741Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:11.802042Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:11.803305Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:11.805845Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532191868 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... (TRUNCATED) >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 >> TSchemeShardSubDomainTest::LS >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] >> TSchemeShardSubDomainTest::LS [GOOD] >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable >> TSchemeShardSubDomainTest::RmDir >> TSchemeShardSubDomainTest::CopyRejects ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:11.629827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2025-05-29T15:23:11.629857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.629863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:11.629868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:11.629881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:11.629886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:11.629895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.629910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:11.630039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:11.630117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:11.644617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:11.644642Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:11.647535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:11.647664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:11.647716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:11.650599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:11.650905Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:11.651031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.651107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:11.651684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.651726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:11.652001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.652014Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.652038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:11.652046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.652053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:11.652090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.653443Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:11.674710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:11.674807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.674879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:11.674926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:11.674938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.675774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.675805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:11.675853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.675864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:11.675870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:11.675876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:11.676399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.676414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:11.676420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:11.676883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.676895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.676902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.676909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:11.677571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:11.678012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:11.678053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:11.678234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.678263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:11.678270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.678326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:11.678333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.678365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:11.678377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:11.678850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.678861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.678905Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... CHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 137:0, at schemeshard: 72057594046678944 2025-05-29T15:23:12.824143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 11 2025-05-29T15:23:12.824154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 3 2025-05-29T15:23:12.824639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 137, response: Status: StatusAccepted TxId: 137 SchemeshardId: 72057594046678944 PathId: 10, at schemeshard: 72057594046678944 2025-05-29T15:23:12.824675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 137, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2025-05-29T15:23:12.824724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:12.824731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:12.824776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2025-05-29T15:23:12.824796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:12.824802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:1026:2885], at schemeshard: 72057594046678944, txId: 137, path id: 2 2025-05-29T15:23:12.824809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:1026:2885], at schemeshard: 72057594046678944, txId: 137, path id: 10 2025-05-29T15:23:12.824820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2025-05-29T15:23:12.824829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 137:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046678944 2025-05-29T15:23:12.824873Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:357: TCreateParts opId# 137:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-05-29T15:23:12.825133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2025-05-29T15:23:12.825151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2025-05-29T15:23:12.825156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2025-05-29T15:23:12.825162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18 2025-05-29T15:23:12.825169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 2025-05-29T15:23:12.825395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2025-05-29T15:23:12.825410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2025-05-29T15:23:12.825415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2025-05-29T15:23:12.825421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 10], version: 1 2025-05-29T15:23:12.825426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2025-05-29T15:23:12.825441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 137, ready parts: 0/1, is published: true 2025-05-29T15:23:12.826174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:10 msg type: 268697601 2025-05-29T15:23:12.826208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72057594037968897 2025-05-29T15:23:12.826214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1779: TOperation RegisterRelationByShardIdx, TxId: 137, 
shardIdx: 72057594046678944:10, partId: 0 2025-05-29T15:23:12.826351Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-05-29T15:23:12.826410Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 10, type DataShard, boot OK, tablet id 72075186233409555 2025-05-29T15:23:12.826429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5867: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-05-29T15:23:12.826435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1793: TOperation FindRelatedPartByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2025-05-29T15:23:12.826452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 137:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-05-29T15:23:12.826459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:175: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-05-29T15:23:12.826467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:178: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-05-29T15:23:12.826487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 137:0 2 -> 3 2025-05-29T15:23:12.826660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2025-05-29T15:23:12.828355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2025-05-29T15:23:12.829055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 137:0, at schemeshard: 72057594046678944 2025-05-29T15:23:12.829208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2025-05-29T15:23:12.829217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_table.cpp:200: TCreateTable TConfigureParts operationId# 137:0 ProgressState at tabletId# 72057594046678944 2025-05-29T15:23:12.829228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:220: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 seqNo: 4:5 2025-05-29T15:23:12.829299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:236: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 975 RawX2: 4294970141 } TxBody: 
"\n\236\004\n\007Table11\020\n\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020\n:\004\010\004\020\005" TxId: 137 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } SubDomainPathId: 2 2025-05-29T15:23:12.829986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:10 msg type: 269549568 2025-05-29T15:23:12.830024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72075186233409555 TestModificationResult got TxId: 137, wait until txId: 137 >> TSchemeShardSubDomainTest::SimultaneousCreateDelete >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::LS [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:12.939267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:12.939294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:12.939301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:12.939306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using 
default configuration 2025-05-29T15:23:12.939320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:12.939324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:12.939334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:12.939347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:12.939457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:12.939522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:12.953623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:12.953643Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:12.956218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:12.956335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:12.956381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:12.958056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:12.958222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:12.958324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:12.958393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:12.958873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:12.958911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:12.959168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:12.959179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:12.959201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:12.959209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-05-29T15:23:12.959216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:12.959250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:12.960541Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:12.987973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:12.988048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:12.988118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:12.988166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:12.988179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:12.988998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:12.989025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:12.989073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:12.989083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:12.989089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:12.989096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:12.989578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:12.989592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:12.989597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:12.990013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 
1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:12.990023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:12.990030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:12.990037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:12.990786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:12.991259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:12.991306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:12.991499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:12.991525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:12.991532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:12.991591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:12.991599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:12.991630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:12.991644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:12.992123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:12.992134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-05-29T15:23:12.992186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... h for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:13.012504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:13.012884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.012896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.012935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:13.012953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.012959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-05-29T15:23:13.012965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2025-05-29T15:23:13.013037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.013047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 100:0 ProgressState 2025-05-29T15:23:13.013059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#100:0 progress is 1/1 2025-05-29T15:23:13.013064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:13.013070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#100:0 progress is 1/1 2025-05-29T15:23:13.013074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:13.013079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-05-29T15:23:13.013085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:13.013090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 100:0 2025-05-29T15:23:13.013095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 100:0 2025-05-29T15:23:13.013127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-05-29T15:23:13.013134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-05-29T15:23:13.013139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-05-29T15:23:13.013144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-05-29T15:23:13.013268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:13.013280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:13.013286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-05-29T15:23:13.013291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:23:13.013296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:13.013395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:13.013405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:13.013410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-05-29T15:23:13.013416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-29T15:23:13.013421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:13.013431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-05-29T15:23:13.014054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-05-29T15:23:13.014077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 
TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-05-29T15:23:13.014125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-05-29T15:23:13.014140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-05-29T15:23:13.014199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-05-29T15:23:13.014218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-05-29T15:23:13.014223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:458:2411] TestWaitNotification: OK eventTxId 100 2025-05-29T15:23:13.014307Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:13.014338Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 41us result status StatusSuccess 2025-05-29T15:23:13.014440Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:13.014532Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:13.014549Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 20us result status StatusSuccess 2025-05-29T15:23:13.014605Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 >> TSchemeShardSubDomainTest::RmDir [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas >> TSchemeShardSubDomainTest::DeleteAdd >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 >> TObjectStorageListingTest::MaxKeysAndSharding [GOOD] >> TObjectStorageListingTest::SchemaChecks >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 >> TSchemeShardSubDomainTest::CopyRejects [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] 
recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:13.351247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:13.351271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:13.351277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:13.351282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:13.351295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:13.351299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:13.351315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:13.351325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:13.351433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:13.351497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:13.362172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:13.362192Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:13.364151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:13.364233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:13.364272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:13.365695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:13.365886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:13.365978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.366036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:13.366470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, 
at schemeshard: 72057594046678944 2025-05-29T15:23:13.366498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:13.366692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.366700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.366726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:13.366732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.366753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:13.366786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.367955Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:13.384581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:13.384645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.384703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:13.384741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:13.384748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.385456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.385485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:13.385528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.385552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 
72057594046678944 2025-05-29T15:23:13.385558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:13.385564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:13.385971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.385982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:13.385987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:13.386289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.386300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.386306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.386313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:13.386818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:13.387192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:13.387229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:13.387412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.387439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:13.387447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.387505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:13.387512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.387542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:13.387554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:13.387961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.387967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.387998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... rationId 100:0, at tablet# 72057594046678944 2025-05-29T15:23:13.474568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:13.474578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-05-29T15:23:13.474587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-05-29T15:23:13.475024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.475033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.475064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:13.475082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.475088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-05-29T15:23:13.475095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-05-29T15:23:13.475166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.475173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 100:0 ProgressState 2025-05-29T15:23:13.475185Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#100:0 progress is 1/1 2025-05-29T15:23:13.475190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:13.475195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#100:0 progress is 1/1 2025-05-29T15:23:13.475199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:13.475204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-05-29T15:23:13.475209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:13.475215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 100:0 2025-05-29T15:23:13.475219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 100:0 2025-05-29T15:23:13.475257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-05-29T15:23:13.475263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-05-29T15:23:13.475269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-05-29T15:23:13.475273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-05-29T15:23:13.475390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:13.475403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:13.475408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-05-29T15:23:13.475413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:23:13.475418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:13.475514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:13.475525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:13.475529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-05-29T15:23:13.475533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-29T15:23:13.475538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-05-29T15:23:13.475546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-05-29T15:23:13.475551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:575:2484] 2025-05-29T15:23:13.476411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-05-29T15:23:13.476439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-05-29T15:23:13.476452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-05-29T15:23:13.476458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:576:2485] TestWaitNotification: OK eventTxId 100 2025-05-29T15:23:13.476544Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:13.476575Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 37us result status StatusSuccess 2025-05-29T15:23:13.476681Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-05-29T15:23:13.477304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "USER_0" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:13.477331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:29: TRmDir Propose, path: /MyRoot/USER_0, pathId: 0, opId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.477353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 101:1, propose status:StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37, at schemeshard: 72057594046678944 2025-05-29T15:23:13.477844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathIsNotDirectory Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:13.477873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37, operation: DROP DIRECTORY, path: /MyRoot/USER_0 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:13.282608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:13.282630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:13.282634Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:13.282638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:13.282646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:13.282649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:13.282655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:13.282669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:13.282770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:13.282830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:13.291429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:13.291448Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:13.293307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:13.293391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:13.293432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:13.295046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:13.295233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:13.295311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.295368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:13.295749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.295779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:13.295973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.295981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.295998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2025-05-29T15:23:13.296003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.296007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:13.296030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.297014Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:13.309441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:13.309506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.309583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:13.309624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:13.309632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.310329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.310352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:13.310393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.310400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:13.310404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:13.310407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:13.310865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.310879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at 
schemeshard: 72057594046678944 2025-05-29T15:23:13.310884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:13.311311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.311325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.311332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.311339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:13.311982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:13.312360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:13.312397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:13.312539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.312556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:13.312561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.312608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:13.312613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.312635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:13.312644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:13.313252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.313273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.313329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... : 72057594046678944, cookie: 101 2025-05-29T15:23:13.325024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:13.325028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:13.325032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:23:13.325040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:23:13.325054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-05-29T15:23:13.325178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-05-29T15:23:13.325204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-05-29T15:23:13.325456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.325477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:13.325485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-05-29T15:23:13.325508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 101:0 128 -> 240 2025-05-29T15:23:13.325545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:13.325569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 
2025-05-29T15:23:13.325727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:13.326015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-05-29T15:23:13.326324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.326331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:13.326354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:23:13.326366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.326371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-05-29T15:23:13.326375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-05-29T15:23:13.326388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.326394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState 2025-05-29T15:23:13.326406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:13.326411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:13.326415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:13.326418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:13.326423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-05-29T15:23:13.326429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:13.326433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:23:13.326437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:23:13.326447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:23:13.326453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-05-29T15:23:13.326457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-05-29T15:23:13.326461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-05-29T15:23:13.326603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:13.326614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:13.326619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:13.326623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-05-29T15:23:13.326627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:13.326716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:13.326724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:13.326732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:13.326751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-05-29T15:23:13.326756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:23:13.326766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-05-29T15:23:13.327523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:13.327562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-05-29T15:23:13.328298Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0/dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:13.328357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:426: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.328370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:433: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, schema: Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId", at schemeshard: 72057594046678944 2025-05-29T15:23:13.328388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, at schemeshard: 72057594046678944 2025-05-29T15:23:13.328849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Inclusive subDomain do not support shared transactions" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:13.328880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot/USER_0, subject: , status: StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, operation: CREATE TABLE, path: /MyRoot/USER_0/dir/table_0 TestModificationResult got TxId: 102, wait until txId: 102
>> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:13.453990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:13.454009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:13.454012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:13.454016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:13.454025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing
config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:13.454028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:13.454033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:13.454043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:13.454141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:13.454200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:13.464811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:13.464828Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:13.466905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:13.467058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:13.467126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:13.469714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:13.469934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:13.470029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.470080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:13.470511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.470538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:13.470715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.470724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.470758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:13.470767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.470773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: 
TTxServerlessStorageBilling.Complete 2025-05-29T15:23:13.470797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.471951Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:13.489550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:13.489622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.489683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:13.489728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:13.489738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.490339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.490365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:13.490401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.490410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:13.490417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:13.490422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:13.490786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.490798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:13.490803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:13.491121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.491133Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.491139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.491146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:13.491713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:13.492059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:13.492085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:13.492241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.492265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:13.492272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.492318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:13.492323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.492347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:13.492355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:13.492722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.492731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.492766Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:13.582767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-05-29T15:23:13.582802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-05-29T15:23:13.582889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.582911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:13.582919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-05-29T15:23:13.583013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 101:0 128 -> 240 2025-05-29T15:23:13.583025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-05-29T15:23:13.583065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:13.583078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-05-29T15:23:13.583088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-05-29T15:23:13.584941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.584955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.585002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:13.585024Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.585029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-05-29T15:23:13.585035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-05-29T15:23:13.585097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.585106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState 2025-05-29T15:23:13.585121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:13.585124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:13.585129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:13.585131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:13.585135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-05-29T15:23:13.585139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:13.585144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:23:13.585147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:23:13.585180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-05-29T15:23:13.585184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2025-05-29T15:23:13.585187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-05-29T15:23:13.585190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-05-29T15:23:13.585335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:13.585351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:13.585356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, 
at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:13.585361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:23:13.585366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:13.585500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:13.585511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:13.585514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:13.585519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-29T15:23:13.585524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-05-29T15:23:13.585547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-05-29T15:23:13.585553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:577:2486] 2025-05-29T15:23:13.586526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:13.586597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:13.586616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:23:13.586622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:578:2487] TestWaitNotification: OK eventTxId 101 2025-05-29T15:23:13.586760Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:13.586804Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 73us result status StatusSuccess 2025-05-29T15:23:13.586926Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { 
Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:13.358885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:13.358912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:13.358918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:13.358923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:13.358934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:13.358938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:13.358947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:13.358961Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:13.359072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:13.359143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:13.368910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:13.368929Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:13.370677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:13.370776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:13.370819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:13.372363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:13.372530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:13.372613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.372674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:13.373047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.373081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:13.373287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.373294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.373310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:13.373315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.373320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:13.373343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.374457Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:13.386856Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:13.386921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.386983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:13.387023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:13.387030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.387641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.387661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:13.387697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.387703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:13.387707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:13.387710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:13.387984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.387992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:13.387995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:13.388260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.388269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.388273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.388278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready 
parts: 1/1 2025-05-29T15:23:13.388757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:13.389086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:13.389116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:13.389282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.389304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:13.389311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.389360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:13.389367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.389393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:13.389402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:13.389901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.389914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.389962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
calIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.557101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-05-29T15:23:13.557213Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 Forgetting tablet 72075186233409546 2025-05-29T15:23:13.557887Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-05-29T15:23:13.558153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:13.558192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:13.558282Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-05-29T15:23:13.558314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-05-29T15:23:13.558335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 2025-05-29T15:23:13.558589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-05-29T15:23:13.558755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:13.558786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:13.558852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:23:13.558921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:23:13.558944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:13.558996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:23:13.559021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: 
TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:13.559026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:13.559052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:13.559757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:5 2025-05-29T15:23:13.559770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-05-29T15:23:13.559784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:7 2025-05-29T15:23:13.559788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-05-29T15:23:13.559892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:13.560054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:13.560062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:13.560075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:13.560102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:23:13.560107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:23:13.560122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:23:13.560126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-29T15:23:13.560144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:6 2025-05-29T15:23:13.560150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-05-29T15:23:13.560236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:23:13.560241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:23:13.560251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:23:13.560256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: 
Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-29T15:23:13.560293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:13.560574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-05-29T15:23:13.560635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:23:13.560643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:23:13.560706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:23:13.560722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:23:13.560727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:789:2675] TestWaitNotification: OK eventTxId 103 2025-05-29T15:23:13.560803Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:13.560838Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 47us result status StatusPathDoesNotExist 2025-05-29T15:23:13.560878Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:23:13.560926Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:13.560950Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 25us result status StatusSuccess 2025-05-29T15:23:13.561018Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:11.104080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:11.104107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.104113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:11.104118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:11.104130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:11.104134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:11.104144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:11.104156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:11.104267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:11.104331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:11.118217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:11.118242Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:11.120967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:11.121085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:11.121136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:11.122837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:11.123047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:11.123148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.123217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:11.123694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.123730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:11.123981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.123992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:11.124013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:11.124021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.124027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:11.124061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.125334Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:11.146410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:11.146481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.146546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:11.146592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:11.146603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.147316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.147343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:11.147388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.147399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:11.147405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:11.147410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:11.148086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.148099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:11.148105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:11.148472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.148484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:11.148490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.148498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:11.149160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:11.149527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:11.149580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:11.149751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:11.149776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:11.149783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.149839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:11.149846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:11.149875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:11.149887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:11.150270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:11.150280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:11.150320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T1 ... 
: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:23:13.843794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:23:13.844221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.844450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.844483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.844488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:13.844528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:23:13.844552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.844555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-05-29T15:23:13.844559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-05-29T15:23:13.844566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.844571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-05-29T15:23:13.844583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.844590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-05-29T15:23:13.844593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 103:0 129 -> 240 2025-05-29T15:23:13.844797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:23:13.844806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:23:13.844809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 
2025-05-29T15:23:13.844813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-05-29T15:23:13.844816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:13.844917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:23:13.844926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:23:13.844929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:23:13.844932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:23:13.844934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:23:13.844942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-05-29T15:23:13.845697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.845718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:13.845840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:23:13.845896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:23:13.845904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:13.845913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:23:13.845925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:13.845933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-05-29T15:23:13.845949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:410:2375] message: TxId: 103 2025-05-29T15:23:13.845955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:13.845960Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:23:13.845965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:23:13.845987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:23:13.846096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.846105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:13.846497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:23:13.846799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:23:13.847175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.847185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-05-29T15:23:13.847197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:23:13.847202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:732:2664] 2025-05-29T15:23:13.847326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-05-29T15:23:13.847591Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:13.847621Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 36us result status StatusSuccess 2025-05-29T15:23:13.847698Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TObjectStorageListingTest::SchemaChecks [GOOD] >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 |63.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |63.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:13.834993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:13.835020Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:13.835026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:13.835032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:13.835045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:13.835049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:13.835058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:13.835072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:13.835191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:13.835260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:13.845904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:13.845925Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:13.848030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:13.848119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:13.848165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:13.849641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:13.849825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:13.849908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.849963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:13.850365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.850392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:13.850608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.850616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.850632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:13.850639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.850643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:13.850667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.851782Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:13.867841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:13.867906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.867964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:13.868001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:13.868009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.868699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.868721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:13.868765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.868773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:13.868777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:13.868781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:13.869123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-05-29T15:23:13.869131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:13.869135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:13.869498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.869508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.869514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.869521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:13.869986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:13.870367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:13.870398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:13.870531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.870554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:13.870561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.870604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:13.870609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.870632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:13.870640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:13.871049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.871055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.871088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... ode 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-05-29T15:23:13.998841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.998850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.998890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:23:13.998908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.998915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-05-29T15:23:13.998921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-05-29T15:23:13.998993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.999001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-29T15:23:13.999014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:23:13.999019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:23:13.999029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:23:13.999035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:23:13.999040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-05-29T15:23:13.999046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:23:13.999052Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:23:13.999056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:23:13.999093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2025-05-29T15:23:13.999098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-05-29T15:23:13.999101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-05-29T15:23:13.999104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-05-29T15:23:13.999221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:23:13.999233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:23:13.999236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:23:13.999240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:23:13.999243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:13.999322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:23:13.999329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:23:13.999332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:23:13.999334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-05-29T15:23:13.999337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-05-29T15:23:13.999343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 
2025-05-29T15:23:14.000078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:23:14.000314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-05-29T15:23:14.000374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:23:14.000381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:23:14.000451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:23:14.000469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:23:14.000474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:986:2809] TestWaitNotification: OK eventTxId 102 2025-05-29T15:23:14.000545Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:14.000585Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 52us result status StatusSuccess 2025-05-29T15:23:14.000670Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:14.000743Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: 
Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:14.000758Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 17us result status StatusSuccess 2025-05-29T15:23:14.000804Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |63.2%| [TA] {RESULT} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |63.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:13.421187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:13.421205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:13.421211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:13.421216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:13.421228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:13.421232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:13.421242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:13.421253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:13.421341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:13.421392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:13.430820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:13.430836Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:13.432952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:13.433044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:13.433096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:13.434694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:13.434857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 
2025-05-29T15:23:13.434958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.435036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:13.435520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.435555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:13.435776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.435787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.435805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:13.435811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.435816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:13.435838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.436991Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:13.457800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:13.457867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.457934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:13.457979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:13.457990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.458731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.458774Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:23:13.458817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:13.458827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:23:13.458833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:23:13.458838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:23:13.459353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:13.459368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:23:13.459374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:23:13.459829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:13.459845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:13.459851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:13.459859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:23:13.460504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:23:13.460959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:23:13.461011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:23:13.461192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:13.461218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:13.461225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:13.461281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:23:13.461288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:13.461319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:23:13.461350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:23:13.461792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:13.461802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:13.461843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... rd: 72057594046678944
2025-05-29T15:23:14.154400Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 106:0 129 -> 240
2025-05-29T15:23:14.154945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.155081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.155093Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_copy_table.cpp:306: TCopyTable TCopyTableBarrier operationId: 106:0ProgressState, operation type TxCopyTable
2025-05-29T15:23:14.155103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1059: Set barrier, OperationId: 106:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1
2025-05-29T15:23:14.155108Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1103: All parts have reached barrier, tx: 106, done: 0, blocked: 1
2025-05-29T15:23:14.155122Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_copy_table.cpp:289: TCopyTable TCopyTableBarrier operationId: 106:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 106 Name: CopyTableBarrier }, at tablet# 72057594046678944
2025-05-29T15:23:14.155129Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 106:0 240 -> 240
2025-05-29T15:23:14.155681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.155696Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 106:0 ProgressState
2025-05-29T15:23:14.155713Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#106:0 progress is 1/1
2025-05-29T15:23:14.155719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1
2025-05-29T15:23:14.155724Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#106:0 progress is 1/1
2025-05-29T15:23:14.155727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1
2025-05-29T15:23:14.155733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true
2025-05-29T15:23:14.155755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:643:2564] message: TxId: 106
2025-05-29T15:23:14.155762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1
2025-05-29T15:23:14.155768Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 106:0
2025-05-29T15:23:14.155773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 106:0
2025-05-29T15:23:14.155812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3
2025-05-29T15:23:14.155817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-05-29T15:23:14.156269Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult
2025-05-29T15:23:14.156279Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:820:2716]
TestWaitNotification: OK eventTxId 106
2025-05-29T15:23:14.156407Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:14.156461Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table" took 62us result status StatusSuccess
2025-05-29T15:23:14.156559Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table" PathDescription { Self { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:14.156624Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:14.156636Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dst" took 13us result status StatusSuccess
2025-05-29T15:23:14.156685Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dst" PathDescription { Self { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "dst" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:14.156730Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:14.156745Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 16us result status StatusSuccess
2025-05-29T15:23:14.156792Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6
>> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD]
>> TSchemeShardSubDomainTest::RedefineErrors
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46
>> TSchemeShardSubDomainTest::CreateForceDropSolomon
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SchemaChecks [GOOD]
Test command err:
2025-05-29T15:23:02.552751Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888540525938805:2062];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:23:02.552782Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002851/r3tmp/tmp4ACKpK/pdisk_1.dat
2025-05-29T15:23:02.612581Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:02.612792Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888540525938785:2079] 1748532182552619 != 1748532182552622
TServer::EnableGrpc on GrpcPort 22672, node 1
2025-05-29T15:23:02.624834Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:23:02.624845Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:23:02.624846Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:23:02.624880Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:27566
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-05-29T15:23:02.655349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:23:02.655375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:23:02.656463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:23:02.686941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-05-29T15:23:02.700513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:23:07.552958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7509888540525938805:2062];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:23:07.552995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout;
2025-05-29T15:23:13.732200Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888585475458724:2060];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:23:13.732248Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002851/r3tmp/tmpn0dWiu/pdisk_1.dat
2025-05-29T15:23:13.748888Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:13.749242Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888585475458706:2079] 1748532193732103 != 1748532193732106
TServer::EnableGrpc on GrpcPort 4089, node 2
2025-05-29T15:23:13.761726Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:23:13.761739Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:23:13.761742Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:23:13.761795Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:14628
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-05-29T15:23:13.838559Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:23:13.838592Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:23:13.839077Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:23:13.839566Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
waiting...
2025-05-29T15:23:13.843355Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:23:14.385647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:23:14.385672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:14.385676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:23:14.385680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:23:14.385690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:23:14.385693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:23:14.385700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:14.385710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:14.385814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:23:14.385873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:23:14.396949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:23:14.396979Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:14.399784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:23:14.399914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:23:14.399968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:23:14.401653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:23:14.401901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:23:14.402047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:14.402137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:23:14.402811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:14.402863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:23:14.403199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:14.403216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:14.403240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:23:14.403252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:14.403258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:23:14.403295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.404705Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:23:14.430998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:23:14.431078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.431146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:23:14.431187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:23:14.431196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.434209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:14.434266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:23:14.434356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.434370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:23:14.434377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:23:14.434385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:23:14.439478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.439517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:23:14.439527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:23:14.440667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.440693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.440700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:14.440709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:23:14.441581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:23:14.446037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:23:14.446115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:14.446371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:14.446417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:14.446427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:14.446510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:14.446519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:14.446560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:14.446574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:14.447218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:14.447228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:14.447282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
meBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:14.454605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-05-29T15:23:14.454844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:14.454852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:14.454881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:14.454895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:14.454899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-05-29T15:23:14.454904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-05-29T15:23:14.454952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:14.454962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState 2025-05-29T15:23:14.454975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:14.454980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:14.454986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:14.454989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:14.454994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-05-29T15:23:14.454999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:14.455005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:23:14.455009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:23:14.455020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:14.455027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-05-29T15:23:14.455031Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-05-29T15:23:14.455035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-05-29T15:23:14.455148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:14.455159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:14.455164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:14.455168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:23:14.455173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:14.455274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:14.455283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:14.455287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:14.455294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-29T15:23:14.455298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:14.455307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-05-29T15:23:14.455880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:14.455946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-05-29T15:23:14.456765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:14.456802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1102: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } 2025-05-29T15:23:14.456808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1108: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, path /MyRoot/SomeDatabase 2025-05-29T15:23:14.456845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 102:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2025-05-29T15:23:14.456852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2025-05-29T15:23:14.457286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: " TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:14.457314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. 
Existing storage kinds are: , operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-05-29T15:23:14.457367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:23:14.457383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-05-29T15:23:14.457397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:23:14.457401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:23:14.457469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:23:14.457494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:23:14.457499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:310:2300] 2025-05-29T15:23:14.457545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:23:14.457571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:23:14.457575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:310:2300] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 >> TSchemeShardSubDomainTest::CreateAndWait ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:14.583590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:14.583613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:14.583617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:14.583621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:14.583633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:14.583636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:14.583642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:14.583653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:14.583740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:14.583795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:14.593150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:14.593172Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:14.596662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:14.596767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:14.596810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:14.606358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:14.606624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:14.606770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:14.606857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:14.608540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:14.608579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:14.608809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:14.608816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:14.608834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:14.608840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:14.608844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:23:14.608874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.610010Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:23:14.624646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:23:14.624731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.624808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:23:14.624854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:23:14.624864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.625773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:14.625805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:23:14.625860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.625870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:23:14.625876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:23:14.625881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:23:14.626287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.626300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:23:14.626306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:23:14.626617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.626629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.626635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:14.626642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:23:14.627205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:23:14.627574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:23:14.627617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:23:14.627799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:14.627821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:14.627828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:14.627882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:23:14.627888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:14.627919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:23:14.627930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:23:14.628312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:14.628320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:14.628366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... enant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 100
2025-05-29T15:23:14.636975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:14.636984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:14.637014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-05-29T15:23:14.637028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:14.637033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 100, path id: 1
2025-05-29T15:23:14.637040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 100, path id: 2
2025-05-29T15:23:14.637094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.637102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 100:0 ProgressState
2025-05-29T15:23:14.637115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#100:0 progress is 1/1
2025-05-29T15:23:14.637119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-05-29T15:23:14.637125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#100:0 progress is 1/1
2025-05-29T15:23:14.637129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-05-29T15:23:14.637134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false
2025-05-29T15:23:14.637142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-05-29T15:23:14.637147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 100:0
2025-05-29T15:23:14.637151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 100:0
2025-05-29T15:23:14.637162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-05-29T15:23:14.637169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 100, publications: 2, subscribers: 0
2025-05-29T15:23:14.637173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5
2025-05-29T15:23:14.637177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3
2025-05-29T15:23:14.637280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100
2025-05-29T15:23:14.637290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100
2025-05-29T15:23:14.637295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100
2025-05-29T15:23:14.637300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5
2025-05-29T15:23:14.637304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-05-29T15:23:14.637431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100
2025-05-29T15:23:14.637444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100
2025-05-29T15:23:14.637448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100
2025-05-29T15:23:14.637452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3
2025-05-29T15:23:14.637456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-05-29T15:23:14.637467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0
2025-05-29T15:23:14.638163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100
2025-05-29T15:23:14.638242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100
TestModificationResult got TxId: 100, wait until txId: 100
TestModificationResults wait txId: 101
TestModificationResult got TxId: 101, wait until txId: 101
TestWaitNotification wait txId: 100
2025-05-29T15:23:14.638301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion
2025-05-29T15:23:14.638317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100
TestWaitNotification wait txId: 101
2025-05-29T15:23:14.638332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion
2025-05-29T15:23:14.638336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101
2025-05-29T15:23:14.638401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944
2025-05-29T15:23:14.638420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult
2025-05-29T15:23:14.638425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:316:2306]
2025-05-29T15:23:14.638470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-05-29T15:23:14.638479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-05-29T15:23:14.638483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:316:2306]
TestWaitNotification: OK eventTxId 100
TestWaitNotification: OK eventTxId 101
2025-05-29T15:23:14.638544Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:14.638571Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 34us result status StatusSuccess
2025-05-29T15:23:14.638672Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:14.638813Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:14.638837Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 27us result status StatusPathDoesNotExist
2025-05-29T15:23:14.638864Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/USER_0\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/USER_0" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12
>> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28
>> TSchemeShardSubDomainTest::CreateAndWait [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RedefineErrors [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:23:14.942918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:23:14.942946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:14.942951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:23:14.942957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:23:14.942968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:23:14.942972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:23:14.942981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:14.942995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:14.943114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:23:14.943181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:23:14.959115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:23:14.959143Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:14.962298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:23:14.962428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:23:14.962482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:23:14.965564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:23:14.965719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:23:14.965823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:14.965898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:23:14.966353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:14.966393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:23:14.966666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:14.966676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:14.966700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:23:14.966708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:14.966715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:23:14.966765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.968249Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:23:14.991934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:23:14.992013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.992097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:23:14.992146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:23:14.992158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.997068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:14.997110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:23:14.997174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.997187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:23:14.997194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:23:14.997201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:23:14.998107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.998118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:23:14.998124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:23:14.998530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.998541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:14.998548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:14.998556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:23:14.999314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:23:14.999694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:23:14.999741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:23:14.999943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:14.999968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:14.999977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:15.000034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:23:15.000041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:15.000077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:23:15.000089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:23:15.000541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:15.000554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:15.000608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... ration.cpp:485: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.045573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 108:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:23:15.045577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 108:0 ProgressState no shards to create, do next state
2025-05-29T15:23:15.045581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 108:0 2 -> 3
2025-05-29T15:23:15.045916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.045929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 108:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:23:15.045933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 108:0 3 -> 128
2025-05-29T15:23:15.046252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.046263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 108:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.046269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 108:0, at tablet# 72057594046678944
2025-05-29T15:23:15.046275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1
2025-05-29T15:23:15.046299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:23:15.046600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816
2025-05-29T15:23:15.046624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 108 at step: 5000007
FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000007
2025-05-29T15:23:15.046687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:15.046704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:15.046711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944
2025-05-29T15:23:15.046774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 108:0 128 -> 240
2025-05-29T15:23:15.046781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944
2025-05-29T15:23:15.046803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5
2025-05-29T15:23:15.046813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 108
2025-05-29T15:23:15.047171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:15.047179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-05-29T15:23:15.047210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:15.047215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 108, path id: 2
2025-05-29T15:23:15.047268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.047274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 108:0 ProgressState
2025-05-29T15:23:15.047286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#108:0 progress is 1/1
2025-05-29T15:23:15.047290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 108 ready parts: 1/1
2025-05-29T15:23:15.047294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#108:0 progress is 1/1
2025-05-29T15:23:15.047297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 108 ready parts: 1/1
2025-05-29T15:23:15.047302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false
2025-05-29T15:23:15.047307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 108 ready parts: 1/1
2025-05-29T15:23:15.047311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 108:0
2025-05-29T15:23:15.047315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 108:0
2025-05-29T15:23:15.047325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6
2025-05-29T15:23:15.047329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 108, publications: 1, subscribers: 0
2025-05-29T15:23:15.047333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 2], 8
2025-05-29T15:23:15.047408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108
2025-05-29T15:23:15.047419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108
2025-05-29T15:23:15.047423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108
2025-05-29T15:23:15.047428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8
2025-05-29T15:23:15.047432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5
2025-05-29T15:23:15.047443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0
2025-05-29T15:23:15.047974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108
TestModificationResult got TxId: 108, wait until txId: 108
TestWaitNotification wait txId: 108
2025-05-29T15:23:15.048029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion
2025-05-29T15:23:15.048036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108
2025-05-29T15:23:15.048101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944
2025-05-29T15:23:15.048118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult
2025-05-29T15:23:15.048122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:599:2552]
TestWaitNotification: OK eventTxId 108
2025-05-29T15:23:15.048200Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:15.048225Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 31us result status StatusSuccess
2025-05-29T15:23:15.048290Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 6 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 6 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40
>> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD]
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop
>> TSchemeShardSubDomainTest::CreateWithNoEqualName
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:23:15.012886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:23:15.012914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:15.012919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:23:15.012925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:23:15.012935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:23:15.012939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:23:15.012947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:15.012959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:15.013065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:23:15.013123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:23:15.025050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:23:15.025068Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:15.027513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:23:15.027627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:23:15.027675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:23:15.029235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:23:15.029401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:23:15.029502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:15.029577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:23:15.030007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:15.030050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:23:15.030271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:15.030281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:15.030298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:23:15.030305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:15.030310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:23:15.030335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.031590Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:23:15.048386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:23:15.048438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.048488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:23:15.048528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:23:15.048537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.049082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:15.049099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:23:15.049127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.049135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:23:15.049139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:23:15.049144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:23:15.049496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.049504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:23:15.049509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:23:15.049840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.049850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.049856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:15.049862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:23:15.050405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:23:15.050721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:23:15.050767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:23:15.050925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:15.050947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:15.050953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:15.050996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:23:15.051002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:15.051024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:23:15.051033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:23:15.051390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:15.051398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:15.051423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:8
2025-05-29T15:23:15.301101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:8 tabletId 72075186233409553
2025-05-29T15:23:15.301174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:12
2025-05-29T15:23:15.301178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:12 tabletId 72075186233409557
2025-05-29T15:23:15.301187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:7
2025-05-29T15:23:15.301190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552
2025-05-29T15:23:15.301428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:16
2025-05-29T15:23:15.301431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409561
2025-05-29T15:23:15.301444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:25
2025-05-29T15:23:15.301448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:25 tabletId 72075186233409570
2025-05-29T15:23:15.301460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:20
2025-05-29T15:23:15.301462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:20 tabletId 72075186233409565
2025-05-29T15:23:15.301471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:29
2025-05-29T15:23:15.301473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:29 tabletId 72075186233409574
2025-05-29T15:23:15.301702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:33
2025-05-29T15:23:15.301706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:33 tabletId 72075186233409578
2025-05-29T15:23:15.301715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2
2025-05-29T15:23:15.301718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547
2025-05-29T15:23:15.301783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:37
2025-05-29T15:23:15.301787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:37 tabletId 72075186233409582
2025-05-29T15:23:15.302019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:42
2025-05-29T15:23:15.302023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587
2025-05-29T15:23:15.302035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:6
2025-05-29T15:23:15.302038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551
2025-05-29T15:23:15.302050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:11
2025-05-29T15:23:15.302052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556
2025-05-29T15:23:15.302063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:15
2025-05-29T15:23:15.302066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560
2025-05-29T15:23:15.302075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:19
2025-05-29T15:23:15.302078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564
2025-05-29T15:23:15.302216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:24
2025-05-29T15:23:15.302219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569
2025-05-29T15:23:15.302228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:23
2025-05-29T15:23:15.302230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568
2025-05-29T15:23:15.302240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:28
2025-05-29T15:23:15.302242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573
2025-05-29T15:23:15.302251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:32
2025-05-29T15:23:15.302254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577
2025-05-29T15:23:15.302263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1
2025-05-29T15:23:15.302266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546
2025-05-29T15:23:15.302274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:36
2025-05-29T15:23:15.302278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581
2025-05-29T15:23:15.302296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944
2025-05-29T15:23:15.302316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-05-29T15:23:15.302338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-05-29T15:23:15.302342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-05-29T15:23:15.302358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:23:15.302955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 103, wait until txId: 103
TestWaitNotification wait txId: 103
2025-05-29T15:23:15.303010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion
2025-05-29T15:23:15.303015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103
2025-05-29T15:23:15.303073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944
2025-05-29T15:23:15.303085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult
2025-05-29T15:23:15.303091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:2062:3661]
TestWaitNotification: OK eventTxId 103
2025-05-29T15:23:15.303155Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:15.303186Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 45us result status StatusPathDoesNotExist
2025-05-29T15:23:15.303227Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-05-29T15:23:15.303275Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:15.303284Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 10us result status StatusPathDoesNotExist
2025-05-29T15:23:15.303295Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::Delete
>> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAndWait [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:23:15.359631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:23:15.359656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:15.359660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:23:15.359664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:23:15.359673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:23:15.359676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:23:15.359683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:15.359696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:15.359811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:23:15.359913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:23:15.370687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:23:15.370706Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:15.375977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:23:15.376123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:23:15.376175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:23:15.379274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:23:15.379465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:23:15.379612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:15.379703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:23:15.380210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:15.380255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:23:15.380543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:15.380557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:15.380582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:23:15.380592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:15.380599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:15.380637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.382126Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:15.403776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:15.403862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.403944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:15.404001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:15.404013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.404849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:15.404880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:15.404935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.404946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:15.404952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:15.404958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:15.405385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.405397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:15.405403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:15.405772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:23:15.405786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.405792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:15.405800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:15.406436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:15.406888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:15.406936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:15.407113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:15.407136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:15.407142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:15.407193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:15.407198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:15.407227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:15.407236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:15.407731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:15.407745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-05-29T15:23:15.407798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... h_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:23:15.420763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:15.420769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-05-29T15:23:15.420774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-05-29T15:23:15.420814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.420821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState 2025-05-29T15:23:15.420832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:15.420837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:15.420842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:15.420845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:15.420849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-05-29T15:23:15.420855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:15.420859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:23:15.420863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:23:15.420874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:23:15.420879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-05-29T15:23:15.420883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-05-29T15:23:15.420887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-05-29T15:23:15.421009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 
2025-05-29T15:23:15.421021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:15.421025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:15.421030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-05-29T15:23:15.421038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:15.421157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:15.421169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:15.421174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:15.421178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-05-29T15:23:15.421183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:23:15.421193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-05-29T15:23:15.421836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:15.422051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 100, wait until txId: 101 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-05-29T15:23:15.422118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-05-29T15:23:15.422140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-05-29T15:23:15.422156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:23:15.422160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-05-29T15:23:15.422228Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-05-29T15:23:15.422250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-05-29T15:23:15.422255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:334:2324] 2025-05-29T15:23:15.422283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:23:15.422301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:23:15.422305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:334:2324] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-05-29T15:23:15.422368Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:15.422396Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/USER_0" took 36us result status StatusSuccess 2025-05-29T15:23:15.422500Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:15.422572Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:15.422589Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe 
path "/MyRoot/dir" took 20us result status StatusSuccess 2025-05-29T15:23:15.422637Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:15.251472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:15.251499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:15.251505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:15.251510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:15.251521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:15.251525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:15.251534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:15.251549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: 
[RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:15.251683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:15.251765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:15.264001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:15.264027Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:15.266430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:15.266542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:15.266590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:15.267904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:15.268029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:15.268137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:15.268235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:15.268630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:15.268668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:15.268933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:15.268944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:15.268966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:15.268974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:15.268980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:15.269013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.270377Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:15.289726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:15.289815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.289893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:15.289942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:15.289953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.293305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:15.293340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:15.293393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.293405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:15.293411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:15.293417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:15.293935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.293947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:15.293952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:15.294288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.294298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.294304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:15.294311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:15.294933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:15.295344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:15.295385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:15.295571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:15.295594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:15.295602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:15.295672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:15.295679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:15.295709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:15.295728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:15.296150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:15.296158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:15.296215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
hard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:15.460358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-05-29T15:23:15.460449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:23:15.460473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:15.460689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:15.460696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:15.460715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:15.461282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:5 2025-05-29T15:23:15.461295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-05-29T15:23:15.461313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:7 2025-05-29T15:23:15.461317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-05-29T15:23:15.461357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5739: Failed to connect, to tablet: 72075186233409552, at schemeshard: 72057594046678944 2025-05-29T15:23:15.461376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:23:15.461380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:23:15.461834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:15.461858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:15.461865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:15.461879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:15.461906Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:23:15.461912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-29T15:23:15.461924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:6 2025-05-29T15:23:15.461928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-05-29T15:23:15.461936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:23:15.461940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:23:15.461956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:23:15.461962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-29T15:23:15.462217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:15.462447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-05-29T15:23:15.462492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:23:15.462499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-05-29T15:23:15.462509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:23:15.462511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:23:15.462560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:23:15.462575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:23:15.462581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:728:2616] 2025-05-29T15:23:15.462605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:23:15.462616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:23:15.462619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:728:2616] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 
2025-05-29T15:23:15.462710Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:15.462762Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 80us result status StatusPathDoesNotExist 2025-05-29T15:23:15.462813Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:23:15.462868Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:15.462883Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 18us result status StatusPathDoesNotExist 2025-05-29T15:23:15.462900Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:23:15.462937Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:15.462962Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 27us result status StatusSuccess 2025-05-29T15:23:15.463053Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 >> TSchemeShardSubDomainTest::Delete [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 >> YdbIndexTable::OnlineBuildWithDataColumn >> YdbIndexTable::MultiShardTableOneIndexDataColumn >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:15.770703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:15.770732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 
0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:15.770774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:23:15.770780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:23:15.770792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:23:15.770796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:23:15.770806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:15.770819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:15.770938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:23:15.771002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:23:15.784876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:23:15.784905Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:15.787640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:23:15.787771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:23:15.787821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:23:15.789345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:23:15.789491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:23:15.789614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:15.789681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:23:15.790139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:15.790174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:23:15.790417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:15.790427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:15.790448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:23:15.790455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:15.790462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:23:15.790494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.791811Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:23:15.810899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:23:15.810966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.811033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:23:15.811082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:23:15.811092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.811763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:15.811805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:23:15.811857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.811867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:23:15.811873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:23:15.811879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:23:15.812329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.812341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:23:15.812347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:23:15.812698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.812711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.812717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:15.812724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:23:15.813446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:23:15.813814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:23:15.813851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:23:15.814024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:15.814049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:15.814056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:15.814112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:23:15.814119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:15.814163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:23:15.814175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:23:15.814567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:15.814576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:15.814616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... e 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:15.829544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1
2025-05-29T15:23:15.829549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2
FAKE_COORDINATOR: Erasing txId 101
2025-05-29T15:23:15.829588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.829592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:416: [72057594046678944] TDeleteParts opId# 101:0 ProgressState
2025-05-29T15:23:15.829598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1
2025-05-29T15:23:15.829601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-05-29T15:23:15.829604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1
2025-05-29T15:23:15.829606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-05-29T15:23:15.829610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false
2025-05-29T15:23:15.829613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-05-29T15:23:15.829618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0
2025-05-29T15:23:15.829621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0
2025-05-29T15:23:15.829628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-05-29T15:23:15.829632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 2, subscribers: 0
2025-05-29T15:23:15.829635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7
2025-05-29T15:23:15.829637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615
2025-05-29T15:23:15.829704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101
2025-05-29T15:23:15.829710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101
2025-05-29T15:23:15.829713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101
2025-05-29T15:23:15.829716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7
2025-05-29T15:23:15.829719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-05-29T15:23:15.829779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101
2025-05-29T15:23:15.829785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101
2025-05-29T15:23:15.829788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101
2025-05-29T15:23:15.829791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615
2025-05-29T15:23:15.829793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-05-29T15:23:15.829800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0
2025-05-29T15:23:15.829838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-05-29T15:23:15.829843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-05-29T15:23:15.829858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-05-29T15:23:15.829977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-05-29T15:23:15.829982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-05-29T15:23:15.829990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:23:15.830236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-05-29T15:23:15.830471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-05-29T15:23:15.830483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-05-29T15:23:15.830489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 101, wait until txId: 101
TestWaitNotification wait txId: 101
2025-05-29T15:23:15.830517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion
2025-05-29T15:23:15.830522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101
2025-05-29T15:23:15.830569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-05-29T15:23:15.830580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-05-29T15:23:15.830583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:342:2332]
TestWaitNotification: OK eventTxId 101
2025-05-29T15:23:15.830630Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:15.830647Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 24us result status StatusPathDoesNotExist
2025-05-29T15:23:15.830684Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-05-29T15:23:15.830734Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:15.830769Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 34us result status StatusSuccess
2025-05-29T15:23:15.830820Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29
>> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41
>> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7
>> YdbIndexTable::MultiShardTableTwoIndexes
>> YdbIndexTable::MultiShardTableOneIndexPkOverlap
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:23:15.836345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:23:15.836371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:15.836375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:23:15.836379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:23:15.836388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:23:15.836391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:23:15.836398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:15.836407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:15.836493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:23:15.836550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:23:15.846432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:23:15.846459Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:15.848797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:23:15.848909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:23:15.848961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:23:15.850421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:23:15.850574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:23:15.850676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:15.850755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:23:15.851247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:15.851281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:23:15.851523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:15.851533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:15.851555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:23:15.851563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:15.851569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:23:15.851603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.852753Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:23:15.870601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:23:15.870668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.870728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:23:15.870792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:23:15.870803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.871935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:15.871959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:23:15.871999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.872007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:23:15.872010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:23:15.872015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:23:15.878094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.878121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:23:15.878130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:23:15.879845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.879865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:15.879871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:15.879878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:23:15.880582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:23:15.881327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:23:15.881372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:23:15.881559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:15.881583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:15.881589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:15.881662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:23:15.881670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:15.881701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:23:15.881711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:23:15.885666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:15.885686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:15.885756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... G: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-05-29T15:23:16.028687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true
2025-05-29T15:23:16.028697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:632:2564] message: TxId: 102
2025-05-29T15:23:16.028702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-05-29T15:23:16.028706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0
2025-05-29T15:23:16.028712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0
2025-05-29T15:23:16.028732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-05-29T15:23:16.029075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-05-29T15:23:16.029083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:633:2565]
TestWaitNotification: OK eventTxId 102
TestModificationResults wait txId: 108
2025-05-29T15:23:16.029775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "USER_3" } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:23:16.029820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/USER_3, operationId: 108:0, at schemeshard: 72057594046678944
2025-05-29T15:23:16.029840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 108:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155, at schemeshard: 72057594046678944
2025-05-29T15:23:16.030212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 108, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/USER_3\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155" TxId: 108 SchemeshardId: 72057594046678944 PathId: 5 PathCreateTxId: 106, at schemeshard: 72057594046678944
2025-05-29T15:23:16.030236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155, operation: CREATE DIRECTORY, path: /MyRoot/USER_3
TestModificationResult got TxId: 108, wait until txId: 108
2025-05-29T15:23:16.030332Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:16.030358Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 30us result status StatusSuccess
2025-05-29T15:23:16.030414Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:16.030460Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:16.030476Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 17us result status StatusSuccess
2025-05-29T15:23:16.030538Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "USER_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:16.030585Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:16.030592Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 9us result status StatusSuccess
2025-05-29T15:23:16.030612Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_2" PathDescription { Self { Name: "USER_2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 104 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:16.030643Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:16.030653Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_3" took 10us result status StatusSuccess
2025-05-29T15:23:16.030676Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_3" PathDescription { Self { Name: "USER_3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 106 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 } DomainKey { SchemeShard: 72057594046678944 PathId: 5 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 5 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Delete [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:23:15.986888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:23:15.986914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:15.986919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:23:15.986924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:23:15.986937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:23:15.986942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:23:15.986951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:15.986970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:15.987083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:23:15.987149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:23:15.996979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:23:15.997003Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:15.999152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:23:15.999274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:23:15.999329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:23:16.001018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:23:16.001176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:23:16.001269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:16.001320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:23:16.001760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:16.001789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:23:16.002008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:16.002016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:16.002031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:23:16.002037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:16.002041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:23:16.002065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:23:16.003102Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:23:16.019809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:23:16.019874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:16.019936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:23:16.019973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:23:16.019982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:16.020634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:16.020656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:23:16.020692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:16.020699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:23:16.020703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:23:16.020707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:23:16.021049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:16.021056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:23:16.021060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:23:16.021308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:16.021316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:16.021321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:16.021326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:23:16.021873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:23:16.022238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:23:16.022269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:23:16.022394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:16.022411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:16.022416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:16.022458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:23:16.022463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:16.022497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:23:16.022505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:23:16.022874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:16.022883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:16.022925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... ublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-05-29T15:23:16.050597Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546
2025-05-29T15:23:16.050818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:16.050899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
Forgetting tablet 72075186233409546
2025-05-29T15:23:16.051112Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548
2025-05-29T15:23:16.051198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944
2025-05-29T15:23:16.051228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-05-29T15:23:16.051334Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547
Forgetting tablet 72075186233409548
2025-05-29T15:23:16.051478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944
2025-05-29T15:23:16.051512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
Forgetting tablet 72075186233409547
2025-05-29T15:23:16.051632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-05-29T15:23:16.051643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-05-29T15:23:16.051665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-05-29T15:23:16.051774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-05-29T15:23:16.051966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-05-29T15:23:16.051974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-05-29T15:23:16.051985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:23:16.052081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1
2025-05-29T15:23:16.052089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546
2025-05-29T15:23:16.052627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3
2025-05-29T15:23:16.052645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548
2025-05-29T15:23:16.052675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2
2025-05-29T15:23:16.052682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547
2025-05-29T15:23:16.052748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-05-29T15:23:16.053025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 101, wait until txId: 101
TestWaitNotification wait txId: 101
2025-05-29T15:23:16.053084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion
2025-05-29T15:23:16.053092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101
2025-05-29T15:23:16.053154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-05-29T15:23:16.053172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-05-29T15:23:16.053178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:500:2453]
TestWaitNotification: OK eventTxId 101
2025-05-29T15:23:16.053253Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:16.053286Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 44us result status StatusPathDoesNotExist
2025-05-29T15:23:16.053328Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-05-29T15:23:16.053416Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:16.053438Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 23us result status StatusSuccess
2025-05-29T15:23:16.053506Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
wait until 72075186233409546 is deleted
wait until 72075186233409547 is deleted
wait until 72075186233409548 is deleted
2025-05-29T15:23:16.053601Z node 1 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546
2025-05-29T15:23:16.053626Z node 1 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547
2025-05-29T15:23:16.053635Z node 1 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548
Deleted tabletId 72075186233409546
Deleted tabletId 72075186233409547
Deleted tabletId 72075186233409548
2025-05-29T15:23:16.053707Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:16.053729Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 25us result status StatusSuccess
2025-05-29T15:23:16.053778Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD]
>> TSchemeShardSubDomainTest::SimultaneousDeclare
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:23:16.004001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:23:16.004024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:16.004031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:23:16.004037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:23:16.004049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:23:16.004054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:23:16.004064Z node 1 :FLAT_TX_SCHEMESHARD
NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:16.004077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:16.004195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:16.004252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:16.019755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:16.019778Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:16.022592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:16.022704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:16.022764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:16.024804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:16.025003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:16.025112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:16.025179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:16.025659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:16.025694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:16.025971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:16.025982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:16.026020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:16.026029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:16.026036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:16.026078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.027637Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] 
started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:16.043185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:16.043257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.043321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:16.043371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:16.043382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.044063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:16.044097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:16.044158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.044171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:16.044187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:16.044192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:16.044706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.044721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:16.044727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:16.045159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.045177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.045184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:16.045191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:16.045971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:16.046432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:16.046471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:16.046652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:16.046681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:16.046689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:16.046765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:16.046774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:16.046804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:16.046817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:16.047384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:16.047396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:16.047429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
1/1 2025-05-29T15:23:16.238368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:16.238374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:23:16.238378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:16.238384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-05-29T15:23:16.238397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:491:2445] message: TxId: 103 2025-05-29T15:23:16.238405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:16.238412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:23:16.238417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:23:16.238439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:23:16.238851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:23:16.238864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:492:2446] TestWaitNotification: OK eventTxId 103 2025-05-29T15:23:16.238963Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:16.239005Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 48us result status StatusSuccess 2025-05-29T15:23:16.239096Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 
1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:16.239170Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:16.239191Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 22us result status StatusSuccess 2025-05-29T15:23:16.239246Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 
72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:16.239287Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:16.239297Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 10us result status StatusSuccess 2025-05-29T15:23:16.239322Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:16.239364Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:16.239381Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 18us result status StatusSuccess 2025-05-29T15:23:16.239432Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: 
false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:16.286109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:16.286138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:16.286144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:16.286149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:16.286161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:16.286165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:16.286175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:16.286190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:16.286304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:16.286384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:16.299107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:16.299139Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:16.302081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:16.302227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:16.302292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:16.303726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:16.303903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:16.304019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:16.304106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:16.304601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:16.304650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:16.304946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:16.304958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:16.304983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:16.304992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:16.304999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:16.305037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.306481Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:16.328037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:16.328139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.328235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:16.328293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:16.328306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.329393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:16.329432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:16.329495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.329508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:16.329515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:16.329521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:16.330034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.330047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:16.330053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:16.330429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.330440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.330447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:16.330455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:16.331111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:16.331550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:16.331601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:16.331820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:16.331849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:16.331857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:16.331927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:16.331935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:16.331973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:16.331987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:16.332476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:16.332486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:16.332542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
552 CpuTimeUsec: 377 } } CommitVersion { Step: 150 TxId: 101 } 2025-05-29T15:23:16.484120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2025-05-29T15:23:16.484144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 150 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 377 } } CommitVersion { Step: 150 TxId: 101 } 2025-05-29T15:23:16.484161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 150 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 377 } } CommitVersion { Step: 150 TxId: 101 } 2025-05-29T15:23:16.484311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 630 RawX2: 4294969834 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-05-29T15:23:16.484332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2025-05-29T15:23:16.484349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 630 RawX2: 4294969834 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-05-29T15:23:16.484356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:23:16.484368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 630 RawX2: 4294969834 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-05-29T15:23:16.484383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:7, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:16.484388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.484393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 101:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-05-29T15:23:16.484401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 101:0 129 
-> 240 2025-05-29T15:23:16.485424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:16.485450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:16.485465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.485486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.485557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.485568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState 2025-05-29T15:23:16.485584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:16.485590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:16.485595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:16.485598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:16.485604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-05-29T15:23:16.485634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:278:2268] message: TxId: 101 2025-05-29T15:23:16.485642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:16.485649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:23:16.485655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:23:16.485693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:23:16.486106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:23:16.486130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:279:2269] TestWaitNotification: OK eventTxId 101 2025-05-29T15:23:16.486254Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:16.486303Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 59us result 
status StatusSuccess 2025-05-29T15:23:16.486434Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:16.486550Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:16.486576Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 28us result status StatusSuccess 2025-05-29T15:23:16.486670Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |63.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:16.899771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:16.899802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:16.899807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:16.899813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:16.899824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:16.899828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:16.899837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:16.899852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:16.899980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:16.900058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:16.913156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:16.913186Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:16.916235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:16.916368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:16.916420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:16.918675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:16.918960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:16.919096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:16.919185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:16.919810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:16.919860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:16.920196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:16.920208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-05-29T15:23:16.920231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:16.920239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:16.920246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:16.920285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.921802Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:16.944412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:16.944513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.944602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:16.944658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:16.944671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.945630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:16.945658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:16.945715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.945726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:16.945732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:16.945737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:16.948933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.948964Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:16.948974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:16.949732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.949748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.949756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:16.949764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:16.950577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:16.951168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:16.951231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:16.951416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:16.951451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:16.951462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:16.951522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:16.951529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:16.951564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:16.951572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:16.952048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:16.952056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:16.952091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 5:23:16.959785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-05-29T15:23:16.959809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:16.960095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-05-29T15:23:16.960118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-05-29T15:23:16.960183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:16.960196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:16.960202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-05-29T15:23:16.960239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 100:0 128 -> 240 2025-05-29T15:23:16.960243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-05-29T15:23:16.960267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:16.960272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:16.960279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 
2025-05-29T15:23:16.960617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:16.960624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:16.960649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:16.960659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:16.960662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-05-29T15:23:16.960666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2025-05-29T15:23:16.960715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.960720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 100:0 ProgressState 2025-05-29T15:23:16.960728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#100:0 progress is 1/1 2025-05-29T15:23:16.960731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:16.960735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#100:0 progress is 1/1 2025-05-29T15:23:16.960739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:16.960742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-05-29T15:23:16.960746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:16.960749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 100:0 2025-05-29T15:23:16.960752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 100:0 2025-05-29T15:23:16.960760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:16.960764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-05-29T15:23:16.960767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-05-29T15:23:16.960769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: 
Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-05-29T15:23:16.960901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:16.960910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:16.960913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-05-29T15:23:16.960917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:23:16.960919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:16.961128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:16.961136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:16.961139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-05-29T15:23:16.961142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-29T15:23:16.961145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:16.961151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-05-29T15:23:16.961157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:278:2268] 2025-05-29T15:23:16.961729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-05-29T15:23:16.961790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-05-29T15:23:16.961800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-05-29T15:23:16.961804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter 
[1:279:2269] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 2025-05-29T15:23:16.961877Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:16.961907Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 33us result status StatusSuccess 2025-05-29T15:23:16.961993Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 |63.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:17.044742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:17.044763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:17.044767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:17.044771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: 
OperationsProcessing config: using default configuration 2025-05-29T15:23:17.044780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:17.044783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:17.044789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:17.044798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:17.044888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:17.044936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:17.054912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:17.054935Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:17.057014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:17.057097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:17.057142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:17.058581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:17.058775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:17.058879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:17.058945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:17.059330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:17.059358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:17.059543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:17.059553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:17.059573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:17.059582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-05-29T15:23:17.059588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:17.059617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.060719Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:17.074876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:17.074935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.074997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:17.075047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:17.075058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.075690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:17.075710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:17.075750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.075760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:17.075766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:17.075771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:17.076252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.076271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:17.076277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:17.076705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.076720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.076727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:17.076734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:17.077327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:17.077721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:17.077759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:17.077937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:17.077959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:17.077964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:17.078008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:17.078012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:17.078036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:17.078044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:17.078420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:17.078427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, 
txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:17.078459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... ationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:17.085667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:17.085673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-05-29T15:23:17.085725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 100:0 128 -> 240 2025-05-29T15:23:17.085731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-05-29T15:23:17.085755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:17.085762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:17.085771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-05-29T15:23:17.086200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:17.086207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:17.086234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:17.086248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:17.086253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-05-29T15:23:17.086258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-05-29T15:23:17.086310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.086317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 100:0 ProgressState 
2025-05-29T15:23:17.086327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#100:0 progress is 1/1 2025-05-29T15:23:17.086332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:17.086335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#100:0 progress is 1/1 2025-05-29T15:23:17.086338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:17.086341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-05-29T15:23:17.086345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:23:17.086348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 100:0 2025-05-29T15:23:17.086351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 100:0 2025-05-29T15:23:17.086358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:17.086363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-05-29T15:23:17.086365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-05-29T15:23:17.086368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-05-29T15:23:17.086439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:17.086446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:17.086449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-05-29T15:23:17.086452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:23:17.086455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:17.086540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:17.086547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:23:17.086550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-05-29T15:23:17.086552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-29T15:23:17.086557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:17.086566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-05-29T15:23:17.087372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-05-29T15:23:17.087440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-05-29T15:23:17.087494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-05-29T15:23:17.087508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-05-29T15:23:17.087523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:23:17.087527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-05-29T15:23:17.087591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-05-29T15:23:17.087609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-05-29T15:23:17.087614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:316:2306] 2025-05-29T15:23:17.087659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:23:17.087667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:23:17.087670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:316:2306] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-05-29T15:23:17.087716Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: 
false }, at schemeshard: 72057594046678944 2025-05-29T15:23:17.087738Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 28us result status StatusSuccess 2025-05-29T15:23:17.087825Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |63.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 |63.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest |63.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest |63.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:06.407897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:06.407918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:06.407922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:06.407926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: 
OperationsProcessing config: using default configuration 2025-05-29T15:23:06.407935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:06.407938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:06.407945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:06.407955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:06.408044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:06.408096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:06.417267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:06.417285Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:06.419178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:06.419262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:06.419301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:06.420784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:06.420941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:06.421033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:06.421081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:06.421592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:06.421627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:06.421834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:06.421843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:06.421864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:06.421872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-05-29T15:23:06.421878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:06.421903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.423015Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:06.438068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:06.438138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.438201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:06.438242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:06.438250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.439147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:06.439175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:06.439219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.439227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:06.439231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:06.439235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:06.439657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.439666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:06.439671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:06.439998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.440012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:06.440018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:06.440025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:06.440561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:06.440909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:06.440938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:06.441082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:06.441100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:06.441105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:06.441150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:06.441154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:06.441178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:06.441186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:06.441597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:06.441604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, 
txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:06.441636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T1 ... 885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:17.419890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:17.419897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:17.419901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:17.419911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:17.419924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:17.420032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:17.420101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:17.421306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:17.421654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:17.421684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:17.421699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:17.421703Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:17.421749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:17.421806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:17.421819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:17.421823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table2, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:23:17.421830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1457: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.421836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:1483: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.421879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1785: TTxInit for Tables, read records: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:17.421903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-05-29T15:23:17.421909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-05-29T15:23:17.421916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_data_erasure_manager.cpp:452: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-05-29T15:23:17.421939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2033: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944 2025-05-29T15:23:17.421954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.421964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2151: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:17.421967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:17.421970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:23:17.421976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:23:17.421988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2237: TTxInit for TablePartitions, read records: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2303: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2453: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2832: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2911: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3409: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3445: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3659: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3804: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3821: 
TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3981: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3997: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4282: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4587: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4645: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4740: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4767: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.422844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4794: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-05-29T15:23:17.425727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:17.426470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:17.426484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:17.426588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:17.426597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:17.426604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:17.427204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:759:2674] sender: [1:815:2058] recipient: [1:15:2062] 2025-05-29T15:23:17.462876Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:17.462950Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 79us result status StatusSuccess 2025-05-29T15:23:17.463064Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37
|63.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43
|63.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest
>> TSchemeShardServerLessReboots::TestServerlessComputeResourcesModeWithReboots
|63.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2
>> TSchemeShardSubDomainTest::CreateDropSolomon
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49
>> TSchemeShardSubDomainTest::DeclareAndDelete
>> TSchemeShardSubDomainTest::CreateDropNbs
>> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26
>> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD]
>> TSchemeShardSubDomainTest::CreateDropNbs [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33
>> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50
>> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38
>> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44
>> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD]
>> TSchemeShardSubDomainTest::ForceDropTwice
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:18.870519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:18.870547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:18.870553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:18.870558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:18.870570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:18.870575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:18.870584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:18.870597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:18.870714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:18.870800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:18.884293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:18.884313Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:18.886564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:18.886652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:18.886700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:18.888479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:18.888632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:18.888717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:18.888771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:18.889153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:18.889187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:18.889437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:18.889449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:18.889471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:18.889478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:18.889483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:18.889510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.890651Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:18.905142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:18.905196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.905244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:18.905285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:18.905296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.905852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:18.905875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:18.905903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.905910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:18.905913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:18.905917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:18.906225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.906232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:18.906236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:18.906522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.906536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:23:18.906541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:18.906547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:18.907051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:18.907404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:18.907433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:18.907545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:18.907563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:18.907568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:18.907604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:18.907608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:18.907628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:18.907636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:18.908000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:18.908006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:18.908032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
h id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:18.916957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5234: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:18.916972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 101:0 128 -> 130 2025-05-29T15:23:18.916984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:18.916989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:18.917104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:18.917281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-05-29T15:23:18.917455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:18.917459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:18.917474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:18.917491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:18.917494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-05-29T15:23:18.917497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-05-29T15:23:18.917503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.917507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:416: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-05-29T15:23:18.917511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:18.917514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:18.917516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:23:18.917519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 
2025-05-29T15:23:18.917521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-05-29T15:23:18.917524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:23:18.917526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:23:18.917542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:23:18.917550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:18.917553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-05-29T15:23:18.917556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-05-29T15:23:18.917558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-05-29T15:23:18.917653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:18.917661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:18.917664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:18.917667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:23:18.917671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:18.917731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:18.917738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:23:18.917740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:23:18.917743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], 
version: 18446744073709551615 2025-05-29T15:23:18.917745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:18.917752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-05-29T15:23:18.917764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:18.917768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:18.917777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:18.917813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:18.917817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:18.917822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:18.918172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:18.918424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:18.918443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:18.918451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-05-29T15:23:18.918476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:23:18.918481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-05-29T15:23:18.918518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:23:18.918527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:23:18.918530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:341:2331] TestWaitNotification: OK eventTxId 101 2025-05-29T15:23:18.918570Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:18.918586Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 21us result status StatusPathDoesNotExist 2025-05-29T15:23:18.918608Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:18.865798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:18.865822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:18.865828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:18.865834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:18.865846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:18.865850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:18.865858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:18.865868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:18.865957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:18.866010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:18.875501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:18.875518Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:18.877487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:18.877581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:18.877621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:18.878752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:18.878937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:18.879057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:18.879130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:18.879586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:18.879624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:18.879892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:18.879904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:18.879927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:18.879935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:18.879941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:18.879976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.881172Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:18.894668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:18.894732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.894797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:18.894833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:18.894841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.895341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:18.895361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:18.895391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.895398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:18.895402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:18.895406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:18.895757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.895772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:18.895780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:18.896104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.896114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.896119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:18.896124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:18.896552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:18.896827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:18.896854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:18.896971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:18.896987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:18.896992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:18.897028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:18.897032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:18.897053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:18.897061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:18.897354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:18.897359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:18.897390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
18.966160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-05-29T15:23:18.966359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:18.966366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:18.966368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:18.966380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:18.966405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:23:18.966600Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:23:18.966637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:18.966673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-05-29T15:23:18.966928Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-05-29T15:23:18.966951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:18.966974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:23:18.967023Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-05-29T15:23:18.967043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:18.967056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 2025-05-29T15:23:18.967235Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-05-29T15:23:18.967263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 
72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:23:18.967277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-05-29T15:23:18.967645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:23:18.967705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:23:18.967737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:18.967747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:23:18.967761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:18.967847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:18.967853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:18.967877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:18.967921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:23:18.967929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:23:18.968337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:23:18.968348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-29T15:23:18.968362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:23:18.968367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:23:18.968737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:23:18.968747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-29T15:23:18.968784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:18.968799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:18.968809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:18.968815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:18.968827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:18.969126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-05-29T15:23:18.969177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:23:18.969185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:23:18.969249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:23:18.969264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:23:18.969269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:542:2495] TestWaitNotification: OK eventTxId 102 2025-05-29T15:23:18.970905Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:18.970939Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 47us result status StatusPathDoesNotExist 2025-05-29T15:23:18.970978Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:23:18.971051Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:18.971064Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 16us result status StatusPathDoesNotExist 2025-05-29T15:23:18.971081Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:18.741622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:18.741647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:18.741652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:18.741655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:18.741665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:18.741668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:18.741674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:18.741686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:18.741785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:18.741848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:18.752737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:18.752766Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:18.755755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:18.755895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:18.755951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:18.757728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:18.757899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:18.758010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:18.758074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:18.758541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:18.758576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:18.758868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:18.758882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:18.758904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:18.758913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:18.758920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:18.758959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.760435Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:18.778385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:18.778454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.778513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:18.778552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:18.778562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.779324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:18.779347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:18.779384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.779391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:18.779394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:18.779399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:18.779765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.779774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:18.779777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:18.780062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.780071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.780076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:18.780081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:18.780532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:18.780844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:18.780872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:18.781017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:18.781035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:18.781040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:18.781080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:18.781084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:18.781108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:18.781116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:18.781480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:18.781486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:18.781516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:0 2025-05-29T15:23:19.044097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:0 2025-05-29T15:23:19.044121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:19.044126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-05-29T15:23:19.044128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-05-29T15:23:19.044131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-05-29T15:23:19.044212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:23:19.044223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:23:19.044228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-05-29T15:23:19.044232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:23:19.044236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:23:19.044349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:23:19.044358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:23:19.044360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-05-29T15:23:19.044363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:23:19.044366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:23:19.044372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, 
notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-05-29T15:23:19.044620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:19.044628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:23:19.044872Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:23:19.045104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.045153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:23:19.045188Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409547 2025-05-29T15:23:19.045400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:19.045439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:19.045513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-05-29T15:23:19.045563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-05-29T15:23:19.045611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:19.045616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:19.045632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:19.045703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:19.045707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:19.045714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:19.046014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted 
shardIdx 72057594046678944:1 2025-05-29T15:23:19.046023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:23:19.046310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:23:19.046319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:23:19.046343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:19.046569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-05-29T15:23:19.046647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-05-29T15:23:19.046655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-05-29T15:23:19.046755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-05-29T15:23:19.046773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-29T15:23:19.046779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:2100:3702] TestWaitNotification: OK eventTxId 104 2025-05-29T15:23:19.047998Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:19.048038Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 55us result status StatusPathDoesNotExist 2025-05-29T15:23:19.048086Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:23:19.048176Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:19.048189Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 15us result status StatusPathDoesNotExist 2025-05-29T15:23:19.048203Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3
>> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50
>> TSchemeShardSubDomainTest::ForceDropTwice [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:19.126177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:19.126202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:19.126206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:19.126210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:19.126219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:19.126222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:19.126228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:19.126238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:19.126346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:19.126428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:19.138587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:19.138615Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:19.140820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:19.140914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:19.140957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:19.142318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:19.142492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:19.142599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.142673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:19.143164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:19.143204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:19.143456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:19.143467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:19.143488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:19.143497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:19.143503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:19.143539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.144679Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 
72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:19.164673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:19.164757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.164841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:19.164887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:19.164897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.165714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.165742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:19.165809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.165821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:19.165827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:19.165832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:19.166282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.166294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:19.166300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:19.166660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.166673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.166679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-05-29T15:23:19.166687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:19.167260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:19.167617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:19.167654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:19.167826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.167854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:19.167862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:19.167928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:19.167935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:19.167972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:19.167986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:19.168441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:19.168453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:19.168506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
n.cpp:1005: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:23:19.269176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 509 RawX2: 4294969756 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-05-29T15:23:19.269189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.269194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.269199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 102:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-05-29T15:23:19.269206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:0 129 -> 240 2025-05-29T15:23:19.269394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:23:19.269409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:23:19.269414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:23:19.269420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-05-29T15:23:19.269426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-05-29T15:23:19.269493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:23:19.269504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:23:19.269509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:23:19.269513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-05-29T15:23:19.269517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:23:19.269526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-05-29T15:23:19.270592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.270620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.270670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:23:19.270682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:23:19.270695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.270703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-29T15:23:19.270718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:23:19.270723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:23:19.270729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:23:19.270733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:23:19.270758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-29T15:23:19.270772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:309:2299] message: TxId: 102 2025-05-29T15:23:19.270780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:23:19.270785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:23:19.270789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:23:19.270814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:23:19.271302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:23:19.271315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:535:2478] TestWaitNotification: OK eventTxId 102 2025-05-29T15:23:19.271423Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:19.271458Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 42us result status StatusSuccess 2025-05-29T15:23:19.271570Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 140 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:19.271693Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:19.271722Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 31us result status StatusSuccess 2025-05-29T15:23:19.271817Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 140 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: 
false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:19.178686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:19.178714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:19.178721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:19.178727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:19.178757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:19.178761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:19.178771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:19.178786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:19.178920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:19.179004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:19.193103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:19.193133Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:19.199813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:19.199904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:19.199949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:19.202053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:19.202231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:19.202342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.202405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:19.202960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:19.202999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:19.203279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:19.203289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:19.203311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:19.203319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:19.203326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:19.203358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.204675Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:19.221349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:19.221435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.221509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:19.221575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:19.221590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.222382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.222420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:19.222471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.222480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:19.222484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:19.222489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:19.223217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.223246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:19.223256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:19.223873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:23:19.223889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.223895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:19.223903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:19.224801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:19.225340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:19.225382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:19.225582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.225611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:19.225619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:19.225685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:19.225694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:19.225727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:19.225741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:19.226250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:19.226261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-05-29T15:23:19.226305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:23:19.421895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:19.421899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:23:19.421901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:19.421904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-05-29T15:23:19.421914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:491:2445] message: TxId: 103 2025-05-29T15:23:19.421918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:19.421922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:23:19.421925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:23:19.421947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:23:19.422270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:23:19.422283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:492:2446] TestWaitNotification: OK eventTxId 103 2025-05-29T15:23:19.422399Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:19.422444Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 54us result status StatusSuccess 2025-05-29T15:23:19.422572Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { 
Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:19.422690Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:19.422720Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 33us result status StatusSuccess 2025-05-29T15:23:19.422833Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 
0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:19.422922Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:19.422943Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 23us result status StatusSuccess 2025-05-29T15:23:19.422987Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:19.423036Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:19.423054Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 19us result status StatusSuccess 2025-05-29T15:23:19.423118Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { 
Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:19.458111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:19.458137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:19.458144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:19.458149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:19.458161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:19.458165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:19.458173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:19.458186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:19.458296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:19.458355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:19.472840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:19.472858Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:19.475604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:19.475722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:19.475769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:19.477345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:19.477516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:19.477637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.477696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:19.478148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:19.478181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:19.478414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:19.478424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:19.478443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:19.478451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:19.478456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:19.478487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.479635Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:19.500471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: 
ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:19.500544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.500612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:19.500659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:19.500671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.501424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.501449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:19.501496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.501505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:19.501511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:19.501517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:19.501886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.501895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:19.501900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:19.502229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.502240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.502245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:19.502252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:19.502972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation 
DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:19.503404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:19.503438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:19.503605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.503627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:19.503635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:19.503689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:19.503697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:19.503731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:19.503743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:19.504120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:19.504127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:19.504165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.603244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-05-29T15:23:19.603387Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409546 2025-05-29T15:23:19.603709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:19.603739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:19.603796Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2025-05-29T15:23:19.603947Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-05-29T15:23:19.603983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-05-29T15:23:19.604007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409548 2025-05-29T15:23:19.604214Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-05-29T15:23:19.604351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:19.604379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 2025-05-29T15:23:19.604797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:23:19.604831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-05-29T15:23:19.605050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:19.605058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 
72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:19.605083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:19.605255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:19.605263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:19.605274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:19.605486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:5 2025-05-29T15:23:19.605519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-05-29T15:23:19.605716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:23:19.605723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:23:19.605742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:23:19.605747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-29T15:23:19.606216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:6 2025-05-29T15:23:19.606224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-05-29T15:23:19.606236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:23:19.606241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:23:19.606291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:23:19.606297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-29T15:23:19.606341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:19.606541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2025-05-29T15:23:19.606598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:23:19.606605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-05-29T15:23:19.606619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:23:19.606624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:23:19.606700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:23:19.606723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:23:19.606731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:668:2571] 2025-05-29T15:23:19.606759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:23:19.606777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:23:19.606781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:668:2571] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-05-29T15:23:19.606860Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:19.606893Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 45us result status StatusPathDoesNotExist 2025-05-29T15:23:19.606939Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:23:19.606987Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:19.607010Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 25us result status StatusSuccess 2025-05-29T15:23:19.607081Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39
>> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:19.800819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:19.800841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:19.800845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:19.800849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:19.800857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:19.800860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type
TxSplitTablePartition, limit 10000 2025-05-29T15:23:19.800866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:19.800878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:19.800984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:19.801045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:19.810373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:19.810395Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:19.812299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:19.812385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:19.812429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:19.813571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:19.813685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:19.813773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.813832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:19.814154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:19.814182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:19.814388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:19.814394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:19.814410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:19.814416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:19.814420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:19.814446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-05-29T15:23:19.815421Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:19.828494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:19.828564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.828631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:19.828682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:19.828693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.829243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.829266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:19.829304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.829312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:19.829315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:19.829319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:19.829572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.829580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:19.829583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:19.829811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.829820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.829824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:19.829829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:19.830236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:19.830498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:19.830528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:19.830667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.830683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:19.830688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:19.830731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:19.830752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:19.830782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:19.830796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:19.831248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:19.831261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:19.831303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
6 2025-05-29T15:23:19.858708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:19.858722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:19.858946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:23:19.859007Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 Forgetting tablet 72075186233409548 2025-05-29T15:23:19.859140Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-05-29T15:23:19.859196Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-05-29T15:23:19.859283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-05-29T15:23:19.859309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 2025-05-29T15:23:19.859437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:23:19.859454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-05-29T15:23:19.859666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:23:19.859687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-05-29T15:23:19.859838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:19.859844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:19.859869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:19.859912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, 
at schemeshard: 72057594046678944 2025-05-29T15:23:19.859916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:19.859923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:19.860022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:5 2025-05-29T15:23:19.860029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-05-29T15:23:19.860188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5739: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2025-05-29T15:23:19.860274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:23:19.860279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:23:19.860293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:23:19.860295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-29T15:23:19.860596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:6 2025-05-29T15:23:19.860602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-05-29T15:23:19.860617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5739: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2025-05-29T15:23:19.860644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:23:19.860647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:23:19.860653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:23:19.860656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-29T15:23:19.860667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:19.860807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-05-29T15:23:19.860856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-05-29T15:23:19.860871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: 
tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-05-29T15:23:19.860882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:23:19.860884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-05-29T15:23:19.860924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-05-29T15:23:19.860941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-05-29T15:23:19.860944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:627:2530] 2025-05-29T15:23:19.860961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:23:19.860977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:23:19.860979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:627:2530] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-05-29T15:23:19.861029Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:19.861073Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 54us result status StatusPathDoesNotExist 2025-05-29T15:23:19.861116Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:23:19.861157Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:19.861175Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 19us result status StatusSuccess 2025-05-29T15:23:19.861233Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 >> TObjectStorageListingTest::Listing >> TLocksTest::Range_IncorrectNullDot1 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::OnlineBuildWithDataColumn Test command err: Trying to start YDB, gRPC: 61096, MsgBus: 8826 2025-05-29T15:23:11.975886Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888576460738530:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:11.975921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00104d/r3tmp/tmpM3o5NK/pdisk_1.dat 2025-05-29T15:23:12.036102Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:12.038910Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888576460738509:2079] 1748532191975726 != 1748532191975729 TServer::EnableGrpc on GrpcPort 61096, node 1 2025-05-29T15:23:12.050482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:12.050495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:12.050497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:12.050546Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8826 2025-05-29T15:23:12.078856Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:12.078887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:12.079845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8826 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:23:12.120386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.186997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.251808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.274133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.285643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.335704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888580755707440:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.335726Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.373305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.381553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.394869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.449584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.457419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.471367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.485135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.501436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888580755708093:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.501453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888580755708098:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.501474Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.502253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:23:12.505698Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888580755708100:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:23:12.593922Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888580755708151:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:12.682397Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888580755708167:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:12.682490Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2JiYThiZi01NTk4OWI1ZC04NWU2YmZmOC1jNWZmMWM2YQ==, ActorId: [1:7509888580755707422:2401], ActorState: ExecuteState, TraceId: 01jwea7s7n5d5rez17rjpbhwah, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:23:12.686057Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
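Note on the failure mode recorded here (it repeats verbatim in every failing YdbIndexTable run below): the VERIFY is the test framework's panic path, not the bug itself. The compile error "yql/essentials/ast/yql_expr.h:1874: index out of range" makes AssertSuccessResult fire inside CreateSampleTables, which runs on a thread-pool thread — see the util/thread/pool and library/cpp/threading/future frames in the stack trace that follows. A unittest framework can convert a failed check into an ordinary test failure only on the thread it controls; "requirement UnittestThread failed" records that this check fired elsewhere, so the whole test binary aborts instead. A minimal self-contained model of that policy — illustrative names only, not the actual library/cpp/testing/unittest internals:

    #include <cstdio>
    #include <cstdlib>
    #include <stdexcept>
    #include <thread>

    // Hypothetical stand-in for the framework's thread marker.
    static thread_local bool IsUnittestThread = false;

    void AssertSuccess(bool ok, const char* what) {
        if (ok) {
            return;
        }
        if (IsUnittestThread) {
            // On the framework's own thread a failed check is caught and
            // reported as a normal test failure.
            throw std::runtime_error(what);
        }
        // Anywhere else no handler can mark the test failed, so the only
        // safe option is to panic -- the "VERIFY failed ... assertion failed
        // in non-unittest thread" path seen in this log.
        std::fprintf(stderr,
                     "VERIFY failed: assertion failed in non-unittest thread: %s\n",
                     what);
        std::abort();
    }

    int main() {
        IsUnittestThread = true;  // main() stands in for the unittest thread
        std::thread worker([] {
            // IsUnittestThread is false on this thread.
            AssertSuccess(false, "(result.IsSuccess())");
        });
        worker.join();  // never returns: the worker aborts the process first
    }

Running this model reproduces the shape of the log: the worker's failed check prints a VERIFY-style message and kills the process before the test thread can report anything, which is why each failure below ends in a raw stack trace rather than a test-framework verdict.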
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AA6355 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A9D356 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C3F0E6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260E1942 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260E1242 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26102EDC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26102EDC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26102EDC 8. /-S/util/thread/pool.h:71: Process @ 0x26102EDC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AADCD9 10. /-S/util/thread/factory.h:15: Execute @ 0x13AAC6C9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AAC6C9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA7B3C 13. ??:0: ?? @ 0x7F062D7A7AC2 14. ??:0: ?? @ 0x7F062D83984F Trying to start YDB, gRPC: 61885, MsgBus: 20748 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00104d/r3tmp/tmpplmO43/pdisk_1.dat 2025-05-29T15:23:16.409376Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888600115859972:2134];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:16.409512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:23:16.486911Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888600115859878:2079] 1748532196404760 != 1748532196404763 2025-05-29T15:23:16.487007Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61885, node 1 2025-05-29T15:23:16.501143Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:16.501155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:16.501157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:16.501191Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20748 TClient is connected to server localhost:20748 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:23:16.557331Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:16.557356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:16.558482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:16.563378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:16.567265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:23:16.570430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:16.592069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:16.612995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:16.672072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:23:16.796639Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888600115861541:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.796666Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.848406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:23:16.855506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:16.867362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:23:16.881124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:23:16.895597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:23:16.909689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:23:16.923073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:23:16.939513Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888600115862192:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.939537Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888600115862197:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.939541Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.940258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:23:16.943078Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888600115862199:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:23:17.019472Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888604410829546:3401] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:17.096910Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888604410829562:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:17.097015Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTdlOTQwZTctYTM3MThjOWQtYzA4Nzc2MDYtODYwMDE4NDU=, ActorId: [1:7509888600115861522:2401], ActorState: ExecuteState, TraceId: 01jwea7xjb3qv62kbpqe24d8jx, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:23:17.097717Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AA6355 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A9D356 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C3F0E6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260E1942 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260E1242 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26102EDC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26102EDC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26102EDC 8. /-S/util/thread/pool.h:71: Process @ 0x26102EDC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AADCD9 10. /-S/util/thread/factory.h:15: Execute @ 0x13AAC6C9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AAC6C9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA7B3C 13. ??:0: ?? @ 0x7FB8189DCAC2 14. ??:0: ?? @ 0x7FB818A6E84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableTwoIndexes Test command err: Trying to start YDB, gRPC: 28394, MsgBus: 15045 2025-05-29T15:23:12.068949Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888584865520146:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:12.068986Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00100c/r3tmp/tmpXVe341/pdisk_1.dat 2025-05-29T15:23:12.122832Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888584865520128:2079] 1748532192068827 != 1748532192068830 2025-05-29T15:23:12.123006Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28394, node 1 2025-05-29T15:23:12.134546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:12.134563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:12.134565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:12.134608Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15045 TClient is connected to server localhost:15045 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:23:12.188994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.198418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:12.198451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:12.198888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.199604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:12.262978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.282000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.292571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.443814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888584865521759:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.443847Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.484129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.490947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.498574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.505817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.512975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.519950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.526978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.542913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888584865522411:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.542936Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.542966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888584865522416:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.543607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:23:12.547287Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888584865522418:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:23:12.642124Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888584865522469:3395] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:12.744960Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888584865522485:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:12.745059Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjVhYTYzYjYtMWFhNDlhZGQtMjFiNWYyZGQtMjY0NjhiNTY=, ActorId: [1:7509888584865521741:2401], ActorState: ExecuteState, TraceId: 01jwea7s8ydmeg0bns90b2p88n, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:23:12.745706Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AA6355 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A9D356 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C3F0E6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260E1942 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260E1242 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26102EDC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26102EDC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26102EDC 8. /-S/util/thread/pool.h:71: Process @ 0x26102EDC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AADCD9 10. /-S/util/thread/factory.h:15: Execute @ 0x13AAC6C9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AAC6C9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA7B3C 13. ??:0: ?? @ 0x7F287766DAC2 14. ??:0: ?? @ 0x7F28776FF84F Trying to start YDB, gRPC: 22014, MsgBus: 10636 2025-05-29T15:23:16.560866Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888599993352730:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:16.560898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00100c/r3tmp/tmpgSl5tT/pdisk_1.dat 2025-05-29T15:23:16.670652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:16.670684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:16.675578Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:16.676852Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888599993352709:2079] 1748532196560717 != 1748532196560720 2025-05-29T15:23:16.678795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22014, node 1 2025-05-29T15:23:16.690858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:16.690873Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:16.690875Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:16.690929Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10636 TClient is connected to server localhost:10636 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:23:16.754511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:16.760656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:16.820816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:16.840771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:16.851747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:16.934037Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888599993354349:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.934060Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.983499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:23:17.038688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:17.048850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:23:17.055873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:23:17.063288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:23:17.077817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:23:17.091457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:23:17.107789Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888604288322300:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:17.107821Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:17.107831Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888604288322305:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:17.108358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:23:17.111010Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888604288322307:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:23:17.189997Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888604288322358:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:17.295746Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888604288322374:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:17.296922Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjViMjg2NjgtNjFhYzM1MGQtZWM4ODMyZGEtOGVjNzQ3YTI=, ActorId: [1:7509888599993354345:2401], ActorState: ExecuteState, TraceId: 01jwea7xqk2y8y05e0jnpc128b, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:23:17.298878Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AA6355 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A9D356 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C3F0E6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260E1942 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260E1242 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26102EDC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26102EDC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26102EDC 8. /-S/util/thread/pool.h:71: Process @ 0x26102EDC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AADCD9 10. /-S/util/thread/factory.h:15: Execute @ 0x13AAC6C9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AAC6C9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA7B3C 13. ??:0: ?? @ 0x7FF72A635AC2 14. ??:0: ?? @ 0x7FF72A6C784F >> TFlatTest::CopyTableAndCompareColumnsSchema >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 >> TPQTest::TestComactifiedWithRetention [GOOD] >> TPQTest::TestChangeConfig ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn Test command err: Trying to start YDB, gRPC: 63662, MsgBus: 30793 2025-05-29T15:23:12.266245Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888582073325287:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:12.266275Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001007/r3tmp/tmpVm2VcU/pdisk_1.dat 2025-05-29T15:23:12.322349Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:12.322450Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888582073325266:2079] 1748532192266105 != 1748532192266108 TServer::EnableGrpc on GrpcPort 63662, node 1 2025-05-29T15:23:12.331502Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:12.331516Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:12.331518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:12.331557Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is 
connected to server localhost:30793 2025-05-29T15:23:12.368078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:12.368114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:12.369156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30793 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:23:12.398558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.408334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.427803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.447379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.458849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.573791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888582073326919:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.573817Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.607683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.615794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.625631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.639170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.646110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.661422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.675488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.691613Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888582073327572:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.691646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888582073327577:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.691649Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.692496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:23:12.694573Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888582073327579:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:23:12.790363Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888582073327630:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:12.916990Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888582073327639:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:12.917103Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzRjOTJmZWItY2M1NDU4YzUtMTEzZTY3NC04OTFjMjJjYQ==, ActorId: [1:7509888582073326901:2401], ActorState: ExecuteState, TraceId: 01jwea7sdkfy0hc2zs84k5c8ad, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:23:12.917931Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AA6355 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A9D356 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C3F0E6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260E1942 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260E1242 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26102EDC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26102EDC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26102EDC 8. /-S/util/thread/pool.h:71: Process @ 0x26102EDC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AADCD9 10. /-S/util/thread/factory.h:15: Execute @ 0x13AAC6C9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AAC6C9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA7B3C 13. ??:0: ?? @ 0x7EFD74519AC2 14. ??:0: ?? @ 0x7EFD745AB84F Trying to start YDB, gRPC: 3192, MsgBus: 8173 2025-05-29T15:23:16.508653Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888598429608795:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:16.508682Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001007/r3tmp/tmpkreibr/pdisk_1.dat 2025-05-29T15:23:16.578141Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:16.578220Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888598429608768:2079] 1748532196508397 != 1748532196508400 TServer::EnableGrpc on GrpcPort 3192, node 1 2025-05-29T15:23:16.586634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:16.586646Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:16.586648Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:16.586691Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8173 TClient is connected to server localhost:8173 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:23:16.653055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:16.653091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:23:16.653993Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:16.654909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:16.662179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:16.684618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:16.706891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:16.719572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:16.869486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888598429610403:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.869525Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.909970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:23:16.916885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:16.930123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:23:16.937296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:23:16.944236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:23:16.958514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:23:16.972539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:23:16.988367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888598429611055:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.988393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888598429611060:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.988393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.989129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:23:16.992239Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888598429611062:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:23:17.072649Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888602724578409:3400] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:17.150122Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888602724578425:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:17.150289Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzE0ZmYzMzQtNWJiODFkNjQtM2Y2ZjY3MjktZjIyOTY1MTY=, ActorId: [1:7509888598429610385:2401], ActorState: ExecuteState, TraceId: 01jwea7xkv0664ew5vs6zq20et, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:23:17.150973Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AA6355 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A9D356 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C3F0E6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260E1942 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260E1242 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26102EDC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26102EDC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26102EDC 8. /-S/util/thread/pool.h:71: Process @ 0x26102EDC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AADCD9 10. /-S/util/thread/factory.h:15: Execute @ 0x13AAC6C9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AAC6C9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA7B3C 13. ??:0: ?? @ 0x7F51442BAAC2 14. ??:0: ?? @ 0x7F514434C84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexPkOverlap Test command err: Trying to start YDB, gRPC: 27236, MsgBus: 4556 2025-05-29T15:23:12.391037Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888584685805511:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:12.391060Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000ebd/r3tmp/tmp6Bga3f/pdisk_1.dat 2025-05-29T15:23:12.442630Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:12.443017Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888584685805492:2079] 1748532192390925 != 1748532192390928 TServer::EnableGrpc on GrpcPort 27236, node 1 2025-05-29T15:23:12.454140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:12.454153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:12.454154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:12.454185Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4556 TClient is connected to server localhost:4556 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:23:12.519449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:12.519482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:12.520516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.520573Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:12.528853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.590706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.607465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.619468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.693916Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888584685807125:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.693972Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.723737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.732672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.788037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.843564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.857253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.871387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.890165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.904152Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888584685807781:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.904183Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.904211Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888584685807786:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.905054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:23:12.912136Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888584685807788:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:23:12.999963Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888584685807839:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:13.119491Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888588980775151:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:13.119613Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzBlMDRiYzMtY2I5YTc5NzYtMjhkOTU4MjAtNTBjZjkxMmI=, ActorId: [1:7509888584685807122:2401], ActorState: ExecuteState, TraceId: 01jwea7sm725kcxryr54w11jsv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:23:13.120244Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AA6355 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A9D356 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C3F0E6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260E1942 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260E1242 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26102EDC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26102EDC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26102EDC 8. /-S/util/thread/pool.h:71: Process @ 0x26102EDC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AADCD9 10. /-S/util/thread/factory.h:15: Execute @ 0x13AAC6C9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AAC6C9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA7B3C 13. ??:0: ?? @ 0x7EFFE2D3CAC2 14. ??:0: ?? @ 0x7EFFE2DCE84F Trying to start YDB, gRPC: 15112, MsgBus: 27113 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000ebd/r3tmp/tmpIcry1P/pdisk_1.dat 2025-05-29T15:23:16.707708Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:23:16.725507Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:16.728909Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888598256840454:2079] 1748532196655971 != 1748532196655974 TServer::EnableGrpc on GrpcPort 15112, node 1 2025-05-29T15:23:16.740989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:16.741000Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:16.741002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:16.741038Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27113 TClient is connected to server localhost:27113 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:23:16.803750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:16.803781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:16.804655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:23:16.810395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:16.819808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:16.838513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:16.856805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:16.868502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:23:17.018143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888602551809383:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:17.018165Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:17.050138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:23:17.055980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:17.062479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:23:17.070089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:23:17.077527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:23:17.091663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:23:17.105424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:23:17.121219Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888602551810036:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:17.121254Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:17.121268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888602551810041:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:17.121836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:23:17.124795Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888602551810043:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:23:17.220959Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888602551810094:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:17.316053Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888602551810110:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 VERIFY failed (2025-05-29T15:23:17.317228Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:23:17.316184Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTQ4MzlmZWMtZjg3MGQ5YWYtMmUxOWQ4ZWItODIwYTdjYTc=, ActorId: [1:7509888602551809380:2401], ActorState: ExecuteState, TraceId: 01jwea7xr09vjp4f75xtac7r9w, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AA6355 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A9D356 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C3F0E6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260E1942 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260E1242 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26102EDC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26102EDC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26102EDC 8. /-S/util/thread/pool.h:71: Process @ 0x26102EDC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AADCD9 10. /-S/util/thread/factory.h:15: Execute @ 0x13AAC6C9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AAC6C9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA7B3C 13. ??:0: ?? @ 0x7F4CBC4B7AC2 14. ??:0: ?? @ 0x7F4CBC54984F >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 >> TFlatTest::ShardUnfreezeNonFrozen |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |63.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |63.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 >> TFlatTest::LargeDatashardReplyDistributed >> TFlatTest::Init >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 >> TFlatTest::ShardUnfreezeNonFrozen [GOOD] >> TFlatTest::ShardFreezeUnfreezeRejectScheme >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 >> TPQTest::TestChangeConfig [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 >> TFlatTest::WriteSplitByPartialKeyAndRead >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 >> TFlatTest::Init [GOOD] >> TFlatTest::LargeDatashardReply >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 >> 
TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestChangeConfig [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:109:2057] recipient: [1:102:2135] 2025-05-29T15:22:15.855835Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:22:15.856519Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:22:15.856590Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037927937] doesn't have tx info 2025-05-29T15:22:15.856598Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:22:15.856603Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-05-29T15:22:15.856609Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:22:15.856616Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.856626Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927938 is [1:154:2174] sender: [1:155:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:181:2057] recipient: [1:14:2061] 2025-05-29T15:22:15.859237Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:180:2194], now have 1 active actors on pipe 2025-05-29T15:22:15.859254Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:22:15.860945Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-05-29T15:22:15.861430Z node 1 
:PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-05-29T15:22:15.861462Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.861575Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-05-29T15:22:15.861598Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:22:15.861654Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:15.861697Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:187:2199] 2025-05-29T15:22:15.862187Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2025-05-29T15:22:15.862195Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:187:2199] 2025-05-29T15:22:15.862200Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:22:15.862525Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:22:15.862534Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-05-29T15:22:15.862538Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-05-29T15:22:15.862542Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2025-05-29T15:22:15.862545Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2025-05-29T15:22:15.862562Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:15.862567Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:15.862570Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:15.862573Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:15.862575Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-05-29T15:22:15.862577Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-05-29T15:22:15.862579Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser1 2025-05-29T15:22:15.862582Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser1 2025-05-29T15:22:15.862584Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:15.862586Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:22:15.862600Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:15.862603Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:15.862630Z node 1 :PERSQUEUE DEBUG: read.h:262: CacheProxy. 
Passthrough write request to KV 2025-05-29T15:22:15.863147Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:15.863217Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:194:2204], now have 1 active actors on pipe 2025-05-29T15:22:15.866053Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:197:2206], now have 1 active actors on pipe 2025-05-29T15:22:15.866071Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-05-29T15:22:15.866075Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-05-29T15:22:15.866120Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 0 messageNo: 0 size 102400 offset: 0 2025-05-29T15:22:15.866124Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 2 partNo : 0 messageNo: 0 size 102400 offset: 1 2025-05-29T15:22:15.866127Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 3 partNo : 0 messageNo: 0 size 102400 offset: 2 2025-05-29T15:22:15.866130Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 4 partNo : 0 messageNo: 0 size 102400 offset: 3 2025-05-29T15:22:15.866134Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 5 partNo : 0 messageNo: 0 size 102400 offset: 4 2025-05-29T15:22:15.866137Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 6 partNo : 0 messageNo: 0 size 102400 offset: 5 2025-05-29T15:22:15.866140Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 7 partNo : 0 messageNo: 0 size 102400 offset: 6 2025-05-29T15:22:15.866143Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 8 partNo : 0 messageNo: 0 size 102400 offset: 7 2025-05-29T15:22:15.866146Z node 1 :PER ... 
nit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 7 generation 2 [84:236:2235] 2025-05-29T15:23:21.206603Z node 84 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 8, State: StateInit] bootstrapping 8 [84:237:2236] 2025-05-29T15:23:21.207115Z node 84 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 8 generation 2 [84:237:2236] 2025-05-29T15:23:21.207829Z node 84 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 9, State: StateInit] bootstrapping 9 [84:238:2237] 2025-05-29T15:23:21.208310Z node 84 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 9 generation 2 [84:238:2237] 2025-05-29T15:23:21.216068Z node 84 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 139 actor [84:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 5 MaxSizeInPartition: 1048576 LifetimeSeconds: 86400 ImportantClientId: "bbb" ImportantClientId: "ccc" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 PartitionIds: 5 PartitionIds: 6 PartitionIds: 7 PartitionIds: 8 PartitionIds: 9 TopicName: "rt3.dc1--asdfgs--topic" Version: 139 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } Partitions { PartitionId: 5 } Partitions { PartitionId: 6 } Partitions { PartitionId: 7 } Partitions { PartitionId: 8 } Partitions { PartitionId: 9 } ReadRuleGenerations: 138 ReadRuleGenerations: 139 ReadRuleGenerations: 139 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 138 Important: false } Consumers { Name: "bbb" Generation: 139 Important: true } Consumers { Name: "ccc" Generation: 139 Important: true } 2025-05-29T15:23:21.216383Z node 84 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|70080e3c-ccc7a3a7-3d6c8449-bd5667ce_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:23:21.217019Z node 84 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|199e84cd-6620645e-209477b7-e7b6d49f_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:23:21.217690Z node 84 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|27ef107f-1625116f-4fda2104-2c28a263_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [85:104:2057] recipient: [85:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [85:104:2057] recipient: [85:102:2135] Leader for TabletID 72057594037927937 is [85:108:2139] sender: [85:109:2057] recipient: [85:102:2135] 2025-05-29T15:23:21.424617Z node 85 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in 
BillingMeteringConfig 2025-05-29T15:23:21.424645Z node 85 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [85:150:2057] recipient: [85:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [85:150:2057] recipient: [85:148:2170] Leader for TabletID 72057594037927938 is [85:154:2174] sender: [85:155:2057] recipient: [85:148:2170] Leader for TabletID 72057594037927937 is [85:108:2139] sender: [85:180:2057] recipient: [85:14:2061] 2025-05-29T15:23:21.429130Z node 85 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:21.429471Z node 85 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 140 actor [85:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 140 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 140 ReadRuleGenerations: 140 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 140 Important: false } Consumers { Name: "aaa" Generation: 140 Important: true } 2025-05-29T15:23:21.429671Z node 85 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [85:186:2198] 2025-05-29T15:23:21.430393Z node 85 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [85:186:2198] 2025-05-29T15:23:21.431079Z node 85 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [85:187:2199] 2025-05-29T15:23:21.431626Z node 85 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [85:187:2199] 2025-05-29T15:23:21.432182Z node 85 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [85:188:2200] 2025-05-29T15:23:21.432733Z node 85 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [85:188:2200] 2025-05-29T15:23:21.433261Z node 85 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [85:189:2201] 2025-05-29T15:23:21.433796Z node 85 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [85:189:2201] 2025-05-29T15:23:21.434310Z node 85 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [85:190:2202] 
2025-05-29T15:23:21.434858Z node 85 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [85:190:2202] 2025-05-29T15:23:21.437462Z node 85 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|816ec3a3-9d0170cf-1ecd9fe8-8bfd3010_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:23:21.439373Z node 85 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:21.440153Z node 85 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 5, State: StateInit] bootstrapping 5 [85:234:2233] 2025-05-29T15:23:21.440706Z node 85 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 5 generation 2 [85:234:2233] 2025-05-29T15:23:21.441763Z node 85 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 6, State: StateInit] bootstrapping 6 [85:235:2234] 2025-05-29T15:23:21.442332Z node 85 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 6 generation 2 [85:235:2234] 2025-05-29T15:23:21.443166Z node 85 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 7, State: StateInit] bootstrapping 7 [85:236:2235] 2025-05-29T15:23:21.443678Z node 85 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 7 generation 2 [85:236:2235] 2025-05-29T15:23:21.444660Z node 85 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 8, State: StateInit] bootstrapping 8 [85:237:2236] 2025-05-29T15:23:21.445172Z node 85 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 8 generation 2 [85:237:2236] 2025-05-29T15:23:21.445938Z node 85 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 9, State: StateInit] bootstrapping 9 [85:238:2237] 2025-05-29T15:23:21.446440Z node 85 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 9 generation 2 [85:238:2237] 2025-05-29T15:23:21.452788Z node 85 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 141 actor [85:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 5 MaxSizeInPartition: 1048576 LifetimeSeconds: 86400 ImportantClientId: "bbb" ImportantClientId: "ccc" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 PartitionIds: 5 PartitionIds: 6 PartitionIds: 7 PartitionIds: 8 PartitionIds: 9 TopicName: "rt3.dc1--asdfgs--topic" Version: 141 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } Partitions { PartitionId: 5 } Partitions { PartitionId: 6 } Partitions { PartitionId: 7 } Partitions { PartitionId: 8 } Partitions { PartitionId: 9 } ReadRuleGenerations: 140 ReadRuleGenerations: 141 ReadRuleGenerations: 141 
MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 140 Important: false } Consumers { Name: "bbb" Generation: 141 Important: true } Consumers { Name: "ccc" Generation: 141 Important: true } 2025-05-29T15:23:21.453932Z node 85 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1586c7b5-339c3b65-f8c440e3-e5c20571_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:23:21.454839Z node 85 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|70266594-85985151-fe4b03cf-df1ab286_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:23:21.455712Z node 85 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5485ce22-a73504d3-9282ddeb-20363f6a_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 >> TFlatTest::RejectByPerShardReadSize >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 >> TFlatTest::AutoSplitBySize >> TFlatTest::SelectRangeReverse >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 >> TFlatTest::WriteSplitByPartialKeyAndRead [GOOD] >> TFlatTest::WriteSplitAndReadFromFollower |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |63.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |63.5%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] >> TObjectStorageListingTest::CornerCases >> TPQTest::TestPartitionWriteQuota [GOOD] >> TPQTest::TestPartitionedBlobFails >> TObjectStorageListingTest::Listing [GOOD] >> TObjectStorageListingTest::ManyDeletes >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] Test command err: 2025-05-29T15:23:21.067336Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888621001254461:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:21.067354Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027be/r3tmp/tmpjfN4c6/pdisk_1.dat 2025-05-29T15:23:21.127694Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:21.128289Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888621001254439:2079] 1748532201067182 != 1748532201067185 TClient is connected to server localhost:22359 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:21.169648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:21.169676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:21.170668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:21.197431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:21.200807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:21.263710Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888621001255140:2358] txid# 281474976715659, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-05-29T15:23:21.527486Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888619733935522:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:21.527529Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027be/r3tmp/tmpRGwhxZ/pdisk_1.dat 2025-05-29T15:23:21.537282Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:21.537473Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888619733935502:2079] 1748532201527370 != 1748532201527373 TClient is connected to server localhost:13092 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:21.630949Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:21.630979Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:21.631265Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:21.631975Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:21.632153Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:23:21.633299Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:21.642905Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:21.648183Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888619733936238:2389] txid# 281474976715660, issues: { message: "Table is frozen. Only unfreeze alter is allowed" severity: 1 } Error 128: Table is frozen. Only unfreeze alter is allowed 2025-05-29T15:23:21.648665Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:21.655261Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:19.295902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:19.295939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:19.295944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:19.295950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:19.295961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:19.295965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:19.295975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:19.295990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:19.296106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:19.296181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:19.314955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:19.314985Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 
2025-05-29T15:23:19.317852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:19.317985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:19.318041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:19.319883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:19.320051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:19.320172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.320239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:19.320874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:19.320915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:19.321176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:19.321185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:19.321204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:19.321210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:19.321215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:19.321241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.322355Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:19.343827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:19.343920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.344006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:19.344066Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:19.344078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.344936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.344967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:19.345022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.345033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:19.345040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:19.345056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:19.345511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.345523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:19.345540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:19.345965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.345977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:19.345984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:19.345991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:19.346703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:19.347153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:19.347200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, 
tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:19.347402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:19.347428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:19.347436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:19.347496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:19.347504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:19.347535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:19.347549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:19.347963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:19.347973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:19.348020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T1 ... 
eason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-05-29T15:23:22.063140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-05-29T15:23:22.063605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-05-29T15:23:22.063877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-05-29T15:23:22.063917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-05-29T15:23:22.063923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-05-29T15:23:22.063961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-05-29T15:23:22.063988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-05-29T15:23:22.063994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:450:2401], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-05-29T15:23:22.064000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:450:2401], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-05-29T15:23:22.064061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-05-29T15:23:22.064067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-05-29T15:23:22.064084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-05-29T15:23:22.064091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-05-29T15:23:22.064097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 104:0 129 -> 240 2025-05-29T15:23:22.064253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-05-29T15:23:22.064265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-05-29T15:23:22.064270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 
72075186233409546, txId: 104 2025-05-29T15:23:22.064275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2025-05-29T15:23:22.064280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-05-29T15:23:22.064410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-05-29T15:23:22.064421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-05-29T15:23:22.064425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-05-29T15:23:22.064430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:23:22.064434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-05-29T15:23:22.064443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-05-29T15:23:22.064976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-05-29T15:23:22.064986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-05-29T15:23:22.065070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-05-29T15:23:22.065108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1 2025-05-29T15:23:22.065113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:23:22.065118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1 2025-05-29T15:23:22.065125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:23:22.065129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-05-29T15:23:22.065141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:551:2489] message: TxId: 104 2025-05-29T15:23:22.065147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 
2025-05-29T15:23:22.065152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:0 2025-05-29T15:23:22.065157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:0 2025-05-29T15:23:22.065175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-05-29T15:23:22.065237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-05-29T15:23:22.065243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-05-29T15:23:22.065647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-05-29T15:23:22.065800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-05-29T15:23:22.065878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-05-29T15:23:22.065884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:450:2401], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-05-29T15:23:22.065899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-29T15:23:22.065904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:814:2730] 2025-05-29T15:23:22.066052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-05-29T15:23:22.066293Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-05-29T15:23:22.066318Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 31us result status StatusSuccess 2025-05-29T15:23:22.066384Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 >> TFlatTest::SelectRangeReverse [GOOD] >> TFlatTest::SelectRangeReverseExcludeKeys >> TObjectStorageListingTest::CornerCases [GOOD] >> TObjectStorageListingTest::Decimal >> TLocksTest::Range_IncorrectNullDot1 [GOOD] >> TLocksTest::Range_IncorrectNullDot2 >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 >> TObjectStorageListingTest::Decimal [GOOD] |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |63.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |63.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 >> TFlatTest::WriteMergeAndRead >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] Test command err: 2025-05-29T15:23:21.764792Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888619794262138:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:21.764830Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027aa/r3tmp/tmpA16yIy/pdisk_1.dat 2025-05-29T15:23:21.829028Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888619794262117:2079] 1748532201764664 != 1748532201764667 2025-05-29T15:23:21.830619Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:22260 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:21.866814Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:21.866844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:21.867998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:21.904687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:21.907900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:21.945325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532202046 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Key2" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Va... (TRUNCATED) 2025-05-29T15:23:22.027311Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:23:22.027571Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:23:22.027578Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:23:22.037687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } } } TxId: 281474976715668 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:23:22.037737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:798: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976715668:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } 2025-05-29T15:23:22.037816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-29T15:23:22.037823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-29T15:23:22.037859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-29T15:23:22.037897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1077: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976715668:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" ShardIdx: 2 } 
DestinationRanges { KeyRangeBegin: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } 2025-05-29T15:23:22.037906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715668:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:23:22.038105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976715668, response: Status: StatusAccepted TxId: 281474976715668 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-05-29T15:23:22.038120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715668, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-05-29T15:23:22.038144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:23:22.038152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 281474976715668:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-05-29T15:23:22.038216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:357: TCreateParts opId# 281474976715668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-29T15:23:22.038234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:357: TCreateParts opId# 281474976715668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-29T15:23:22.038302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976715668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:2 msg type: 268697601 2025-05-29T15:23:22.038332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976715668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-05-29T15:23:22.038346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976715668, partId: 0, tablet: 72057594037968897 waiting... 
2025-05-29T15:23:22.038352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1779: TOperation RegisterRelationByShardIdx, TxId: 281474976715668, shardIdx: 72057594046644480:2, partId: 0 2025-05-29T15:23:22.038353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1779: TOperation RegisterRelationByShardIdx, TxId: 281474976715668, shardIdx: 72057594046644480:3, partId: 0 2025-05-29T15:23:22.038574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976715668, at schemeshard: 72057594046644480 2025-05-29T15:23:22.038581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 0/1, is published: true 2025-05-29T15:23:22.038583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976715668, at schemeshard: 72057594046644480 2025-05-29T15:23:22.039303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5867: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-05-29T15:23:22.039314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1793: TOperation FindRelatedPartByShardIdx, TxId: 281474976715668, shardIdx: 72057594046644480:2, partId: 0 2025-05-29T15:23:22.039335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976715668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-05-29T15:23:22.039339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:175: TCreateParts opId# 281474976715668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-05-29T15:23:22.039345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_c ... 
tashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:22.622651Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-05-29T15:23:22.622661Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-05-29T15:23:22.622681Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037889 2025-05-29T15:23:22.622714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-29T15:23:22.622730Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-29T15:23:22.622756Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-29T15:23:22.622814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-05-29T15:23:22.622877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-29T15:23:22.622902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-29T15:23:22.622933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:22.622951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:22.623014Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-29T15:23:22.623017Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-29T15:23:22.623039Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037891 2025-05-29T15:23:22.623265Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-05-29T15:23:22.623269Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-05-29T15:23:22.623351Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-05-29T15:23:22.623364Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-05-29T15:23:22.623538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:22.623571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:22.623583Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:4 2025-05-29T15:23:22.623590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-29T15:23:22.623598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:2 2025-05-29T15:23:22.623599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-29T15:23:22.623640Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037889 2025-05-29T15:23:22.623701Z node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037889 2025-05-29T15:23:22.623468Z node 3 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037889 2025-05-29T15:23:22.623745Z node 3 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037891 2025-05-29T15:23:22.624161Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037891 2025-05-29T15:23:22.624180Z node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037891 2025-05-29T15:23:22.624305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-29T15:23:22.624378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-29T15:23:22.624427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-29T15:23:22.624449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-05-29T15:23:22.624471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-29T15:23:22.624490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-29T15:23:22.624512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-29T15:23:22.624533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:23:22.624539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-29T15:23:22.624550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 
2025-05-29T15:23:22.624641Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037890
2025-05-29T15:23:22.624690Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found
2025-05-29T15:23:22.624699Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found
2025-05-29T15:23:22.624701Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found
2025-05-29T15:23:22.624705Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found
2025-05-29T15:23:22.624708Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found
2025-05-29T15:23:22.624711Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found
2025-05-29T15:23:22.624961Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037888
2025-05-29T15:23:22.624546Z node 3 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037888
2025-05-29T15:23:22.624779Z node 3 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037892
2025-05-29T15:23:22.625050Z node 3 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037892
2025-05-29T15:23:22.625382Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037892 state Offline
2025-05-29T15:23:22.625390Z node 3 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037892 reason = ReasonStop
2025-05-29T15:23:22.625397Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037890 state Offline
2025-05-29T15:23:22.625400Z node 3 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037890 reason = ReasonStop
2025-05-29T15:23:22.625410Z node 3 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037888 reason = ReasonStop
2025-05-29T15:23:22.625372Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037890
2025-05-29T15:23:22.625813Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found
2025-05-29T15:23:22.625817Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found
2025-05-29T15:23:22.625819Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found
2025-05-29T15:23:22.625864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:1
2025-05-29T15:23:22.625869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888
2025-05-29T15:23:22.625880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:1
2025-05-29T15:23:22.625885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:5
2025-05-29T15:23:22.625886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892
2025-05-29T15:23:22.625891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:3
2025-05-29T15:23:22.625894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890
2025-05-29T15:23:22.625903Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480
2025-05-29T15:23:22.625864Z node 3 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037892
2025-05-29T15:23:22.625886Z node 3 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037892
2025-05-29T15:23:22.626273Z node 3 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037890
2025-05-29T15:23:22.626287Z node 3 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037890
2025-05-29T15:23:22.626731Z node 3 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037888
2025-05-29T15:23:22.626755Z node 3 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037888
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD]
Test command err:
2025-05-29T15:23:22.168464Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888625923446288:2061];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:23:22.168492Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00279a/r3tmp/tmpsqp6Jd/pdisk_1.dat
2025-05-29T15:23:22.239864Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:22.240064Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888625923446269:2079] 1748532202168322 != 1748532202168325
TClient is connected to server localhost:29195
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-05-29T15:23:22.313463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:23:22.313492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:23:22.313979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-05-29T15:23:22.314579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
waiting...
waiting...
2025-05-29T15:23:22.320278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00279a/r3tmp/tmpQ6pJ1S/pdisk_1.dat
2025-05-29T15:23:22.721050Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-05-29T15:23:22.723104Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:22.723339Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888624482373034:2079] 1748532202704905 != 1748532202704908
TClient is connected to server localhost:63838
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-05-29T15:23:22.809698Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:23:22.809725Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
waiting...
2025-05-29T15:23:22.810006Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-05-29T15:23:22.810645Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:23:22.811969Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:23:22.817547Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
|63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56
>> TFlatTest::WriteMergeAndRead [GOOD]
>> TFlatTest::WriteSplitAndRead
|63.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks
|63.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Decimal [GOOD]
Test command err:
2025-05-29T15:23:22.368852Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888624726721449:2063];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:23:22.369162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00278e/r3tmp/tmpMYCokI/pdisk_1.dat
2025-05-29T15:23:22.437790Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888624726721428:2079] 1748532202368574 != 1748532202368577
2025-05-29T15:23:22.440087Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 10756, node 1
2025-05-29T15:23:22.452193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:23:22.452206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:23:22.452208Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:23:22.452252Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:9154
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-05-29T15:23:22.511217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:23:22.511249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:23:22.512328Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:23:22.512880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:23:22.515326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:23:22.523190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:23:22.528637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
2025-05-29T15:23:22.847987Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888625438241315:2061];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:23:22.848019Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00278e/r3tmp/tmp15OmZo/pdisk_1.dat
2025-05-29T15:23:22.869214Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:22.869604Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888625438241296:2079] 1748532202847878 != 1748532202847881
TServer::EnableGrpc on GrpcPort 18503, node 2
2025-05-29T15:23:22.882866Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:23:22.882883Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:23:22.882885Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:23:22.882934Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:29741
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-05-29T15:23:22.908961Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-05-29T15:23:22.914980Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:23:22.956038Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:23:22.956075Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:23:22.957172Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43
>> TLocksTest::Range_BrokenLock2
>> TFlatTest::ShardFreezeRejectBadProtobuf
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31
>> TFlatTest::LargeDatashardReplyDistributed [GOOD]
>> TFlatTest::LargeDatashardReplyRW
>> TFlatTest::WriteSplitAndRead [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55
>> TFlatTest::ShardFreezeRejectBadProtobuf [GOOD]
>> TFlatTest::SelectRangeSkipNullKeys
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2
>> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD]
>> TCancelTx::CrossShardReadOnly
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndRead [GOOD]
Test command err:
2025-05-29T15:23:23.525149Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888630516464998:2200];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:23:23.525997Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002784/r3tmp/tmps8d86v/pdisk_1.dat
2025-05-29T15:23:23.580587Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888630516464837:2079] 1748532203524082 != 1748532203524085
2025-05-29T15:23:23.584105Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TClient is connected to server localhost:7425
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-05-29T15:23:23.655689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:23:23.655728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:23:23.656802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:23:23.657298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:23:23.660629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:23:23.664235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:23:23.693787Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000
2025-05-29T15:23:23.694544Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000
2025-05-29T15:23:23.699852Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000
2025-05-29T15:23:23.701055Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000
2025-05-29T15:23:23.707459Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888
2025-05-29T15:23:23.707725Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888
2025-05-29T15:23:23.707746Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-05-29T15:23:23.708340Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888
2025-05-29T15:23:23.708570Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 8r (max 9), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000
2025-05-29T15:23:23.708719Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z
2025-05-29T15:23:23.708732Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0
2025-05-29T15:23:23.708765Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888
2025-05-29T15:23:23.708785Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-05-29T15:23:23.709321Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889
2025-05-29T15:23:23.709756Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889
2025-05-29T15:23:23.709769Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889
2025-05-29T15:23:23.710043Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.21, eph 3} end=Done, 4 blobs 9r (max 9), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (4073 2983 5183)b }, ecr=1.000
2025-05-29T15:23:23.710259Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z
2025-05-29T15:23:23.710266Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0
2025-05-29T15:23:23.710789Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889
2025-05-29T15:23:23.711870Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889
2025-05-29T15:23:23.711886Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889
TClient::Ls request: /dc-1/Dir/TableOld
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532203726 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... (TRUNCATED)
2025-05-29T15:23:23.716029Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888
2025-05-29T15:23:23.716541Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715680 released its data
2025-05-29T15:23:23.716601Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889
2025-05-29T15:23:23.716824Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715680 released its data
2025-05-29T15:23:23.717052Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888
2025-05-29T15:23:23.717135Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715680 at 72075186224037888 restored its data
2025-05-29T15:23:23.717286Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715680 released its data
2025-05-29T15:23:23.717316Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889
2025-05-29T15:23:23.717373Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715680 at 72075186224037889 restored its data
2025-05-29T15:23:23.717495Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715680 released its data
2025-05-29T15:23:23.717518Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888
2025-05-29T15:23:23.717584Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715680 at 72075186224037888 restored its data
2025-05-29T15:23:23.717707Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715680 released its data
2025-05-29T15:23:23.717731Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889
2025-05-29T15:23:23.717782Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715680 at 72075186224037889 restored its data
2025-05-29T15:23:23.717902Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715680 released its data
2025-05-29T15:23:23.717922Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888
2025-05-29T15:23:23.717972Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715680 at 72075186224037888 restored its data
2025-05-29T15:23:23.718116Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715680 released its data
2025-05-29T15:23:23.718144Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889
2025-05-29T15:23:23.718197Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715680 at 72075186224037889 restored its data
2025-05-29T15:23:23.718323Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715680 released its data
2025-05-29T15:23:23.718345Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888
2025-05-29T15:23:23.718391Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715680 at 72075186224037888 restored its data
2025-05-29T15:23:23.718504Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715680 released its data
2025-05-29T15:23:23.718525Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889
2025-05-29T15:23:23.718577Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 28147497 ... .cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888632311451419 RawX2: 4503608217307458 } TabletId: 72075186224037890 State: 4
2025-05-29T15:23:24.189079Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480
2025-05-29T15:23:24.189100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888632311451418 RawX2: 4503608217307457 } TabletId: 72075186224037892 State: 4
2025-05-29T15:23:24.189103Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480
2025-05-29T15:23:24.189114Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888632311451418 RawX2: 4503608217307457 } TabletId: 72075186224037892 State: 4
2025-05-29T15:23:24.189117Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480
TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found"
2025-05-29T15:23:24.189197Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037888
2025-05-29T15:23:24.189229Z node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037888
2025-05-29T15:23:24.189246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480
2025-05-29T15:23:24.189260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480
2025-05-29T15:23:24.189268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480
2025-05-29T15:23:24.189275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480
2025-05-29T15:23:24.189664Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from:
2025-05-29T15:23:24.189680Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state
2025-05-29T15:23:24.190025Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037891 state Offline
2025-05-29T15:23:24.190029Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037890 state Offline
2025-05-29T15:23:24.190032Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037892 state Offline
2025-05-29T15:23:24.190035Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037892 state Offline
2025-05-29T15:23:24.190070Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3306: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480
2025-05-29T15:23:24.190113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888632311451071 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4
2025-05-29T15:23:24.190119Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480
2025-05-29T15:23:24.190148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480
2025-05-29T15:23:24.190189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4
2025-05-29T15:23:24.190214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480
2025-05-29T15:23:24.190232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3
2025-05-29T15:23:24.190247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480
2025-05-29T15:23:24.190261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2
2025-05-29T15:23:24.190293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480
2025-05-29T15:23:24.190324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480
2025-05-29T15:23:24.190421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:4
2025-05-29T15:23:24.190425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891
2025-05-29T15:23:24.190432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:3
2025-05-29T15:23:24.190434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890
2025-05-29T15:23:24.190467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:5
2025-05-29T15:23:24.190469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892
2025-05-29T15:23:24.190473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:5
2025-05-29T15:23:24.190520Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037891 reason = ReasonStop
2025-05-29T15:23:24.190534Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7509888632311451550:2612], serverId# [2:7509888632311451553:2615], sessionId# [0:0:0]
2025-05-29T15:23:24.190537Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037890 reason = ReasonStop
2025-05-29T15:23:24.190546Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7509888632311451549:2611], serverId# [2:7509888632311451552:2614], sessionId# [0:0:0]
2025-05-29T15:23:24.190549Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037892 reason = ReasonStop
2025-05-29T15:23:24.190553Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037892, clientId# [2:7509888632311451551:2613], serverId# [2:7509888632311451554:2616], sessionId# [0:0:0]
2025-05-29T15:23:24.190558Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037889 state Offline
2025-05-29T15:23:24.190601Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037891
2025-05-29T15:23:24.190618Z node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037891
2025-05-29T15:23:24.190638Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found
2025-05-29T15:23:24.190641Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found
2025-05-29T15:23:24.190644Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found
2025-05-29T15:23:24.190972Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037890
2025-05-29T15:23:24.190982Z node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037890
2025-05-29T15:23:24.191288Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037892
2025-05-29T15:23:24.191295Z node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037892
2025-05-29T15:23:24.191466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480
2025-05-29T15:23:24.191519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1
2025-05-29T15:23:24.191561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480
2025-05-29T15:23:24.191563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480
2025-05-29T15:23:24.191575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1
2025-05-29T15:23:24.191646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:2
2025-05-29T15:23:24.191650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889
2025-05-29T15:23:24.191662Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480
2025-05-29T15:23:24.191764Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037889 reason = ReasonStop
2025-05-29T15:23:24.191776Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7509888632311451197:2391], serverId# [2:7509888632311451198:2392], sessionId# [0:0:0]
2025-05-29T15:23:24.191833Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found
2025-05-29T15:23:24.191834Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037889
2025-05-29T15:23:24.191846Z node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037889
|63.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3
|63.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3
|63.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3
>> TFlatTest::SelectRangeSkipNullKeys [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16
>> TFlatTest::MiniKQLRanges
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32
>> TFlatTest::LargeDatashardReply [GOOD]
>> TLocksTest::GoodLock
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:23:12.772366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:23:12.772390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:12.772395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:23:12.772400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:23:12.772409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:23:12.772412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:23:12.772419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:12.772430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:12.772528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:23:12.772584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:23:12.783583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:23:12.783610Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:12.786220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:23:12.786349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:23:12.786403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:23:12.788493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:23:12.788706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:23:12.788834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:12.788902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:23:12.790693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:12.790763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:23:12.791052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:12.791062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:12.791086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:23:12.791094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:12.791101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:23:12.791138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:23:12.793056Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:23:12.822283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:23:12.822353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:12.822421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:23:12.822473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:23:12.822484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:12.825892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:12.825922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:23:12.825963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:12.825974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:23:12.825979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:23:12.825985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:23:12.826824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:12.826835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:23:12.826842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:23:12.828403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:12.828419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:23:12.828425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:12.828432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:23:12.829281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:23:12.829949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:23:12.829986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:23:12.830182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:23:12.830209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:23:12.830218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:12.830284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:23:12.830292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:23:12.830324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:23:12.830337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:23:12.830831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:12.830839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:23:12.830879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T1 ... entPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
2025-05-29T15:23:24.974976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
2025-05-29T15:23:24.975358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944
2025-05-29T15:23:24.975411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944
2025-05-29T15:23:24.975712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:23:24.975720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-05-29T15:23:24.975756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 4]
2025-05-29T15:23:24.975778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:23:24.975782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 107, path id: 2
2025-05-29T15:23:24.975786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 107, path id: 4
2025-05-29T15:23:24.975794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944
2025-05-29T15:23:24.975799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944
2025-05-29T15:23:24.975815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944
2025-05-29T15:23:24.975819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 107:0, datashard: 72075186233409549, at schemeshard: 72057594046678944
2025-05-29T15:23:24.975823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 107:0 129 -> 240
2025-05-29T15:23:24.976016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107
2025-05-29T15:23:24.976026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107
2025-05-29T15:23:24.976030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107
2025-05-29T15:23:24.976036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 14
2025-05-29T15:23:24.976041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5
2025-05-29T15:23:24.976143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107
2025-05-29T15:23:24.976152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107
2025-05-29T15:23:24.976155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107
2025-05-29T15:23:24.976158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615
2025-05-29T15:23:24.976161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4
2025-05-29T15:23:24.976168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true
2025-05-29T15:23:24.976796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944
2025-05-29T15:23:24.976806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:23:24.976868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
2025-05-29T15:23:24.976899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#107:0 progress is 1/1
2025-05-29T15:23:24.976902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 107 ready parts: 1/1
2025-05-29T15:23:24.976906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#107:0 progress is 1/1
2025-05-29T15:23:24.976908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 107 ready parts: 1/1
2025-05-29T15:23:24.976911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true
2025-05-29T15:23:24.976914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 107 ready parts: 1/1
2025-05-29T15:23:24.976918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 107:0
2025-05-29T15:23:24.976921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 107:0 2025-05-29T15:23:24.976934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:23:24.977006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:24.977016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:24.977064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-05-29T15:23:24.977268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-05-29T15:23:24.977615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:24.977633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-05-29T15:23:24.977780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 15 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification wait txId: 107 2025-05-29T15:23:24.977923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-05-29T15:23:24.977932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-05-29T15:23:24.978016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-05-29T15:23:24.978033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-05-29T15:23:24.978039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:987:2911] TestWaitNotification: OK eventTxId 107 2025-05-29T15:23:24.978138Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:24.978171Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 43us result status StatusSuccess 2025-05-29T15:23:24.978262Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap Test command err: Trying to start YDB, gRPC: 62270, MsgBus: 21459 2025-05-29T15:23:12.094214Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888581857034239:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:12.094496Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001018/r3tmp/tmpFoq2St/pdisk_1.dat 2025-05-29T15:23:12.145141Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888581857034219:2079] 1748532192093938 != 1748532192093941 2025-05-29T15:23:12.146835Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62270, node 1 2025-05-29T15:23:12.157376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:12.157388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:12.157390Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:12.157445Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21459 TClient is connected to server localhost:21459 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:23:12.197989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:12.198021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:12.198732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:23:12.227551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.236346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.253616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.269851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.283148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:12.429558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888581857035852:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.429588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.476174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.483269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.491916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.499293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.513351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.527280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.541918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.557206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888581857036504:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.557220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888581857036509:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.557229Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:12.558010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:23:12.561430Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888581857036511:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:23:12.662029Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888581857036562:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:12.781717Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888581857036578:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:12.781861Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NGEyYTliMjAtODQ4YWUxOTAtNTQ3ZjQzYzQtYTljNDlmY2M=, ActorId: [1:7509888581857035834:2401], ActorState: ExecuteState, TraceId: 01jwea7s9c384rafh89vh2gmzd, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:23:12.782485Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AA6355 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A9D356 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C3F0E6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260E1942 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260E1242 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26102EDC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26102EDC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26102EDC 8. /-S/util/thread/pool.h:71: Process @ 0x26102EDC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AADCD9 10. /-S/util/thread/factory.h:15: Execute @ 0x13AAC6C9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AAC6C9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA7B3C 13. ??:0: ?? @ 0x7F4B8AC9DAC2 14. ??:0: ?? @ 0x7F4B8AD2F84F Trying to start YDB, gRPC: 5517, MsgBus: 24348 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001018/r3tmp/tmpkYjTQs/pdisk_1.dat 2025-05-29T15:23:16.380409Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888602090591055:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:16.380428Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:23:16.452925Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:16.454858Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888602090591031:2079] 1748532196380165 != 1748532196380168 TServer::EnableGrpc on GrpcPort 5517, node 1 2025-05-29T1 ... x13AA6355 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A9D356 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C3F0E6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260E1942 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260E1242 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26102EDC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26102EDC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26102EDC 8. /-S/util/thread/pool.h:71: Process @ 0x26102EDC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AADCD9 10. /-S/util/thread/factory.h:15: Execute @ 0x13AAC6C9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AAC6C9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA7B3C 13. ??:0: ?? @ 0x7FEF2D4AEAC2 14. ??:0: ?? 
@ 0x7FEF2D54084F Trying to start YDB, gRPC: 1663, MsgBus: 25515 2025-05-29T15:23:20.738250Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888616767413501:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:20.738273Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001018/r3tmp/tmpuIC8Ao/pdisk_1.dat 2025-05-29T15:23:20.807573Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:20.807653Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888616767413480:2079] 1748532200738114 != 1748532200738117 TServer::EnableGrpc on GrpcPort 1663, node 1 2025-05-29T15:23:20.819230Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:20.819244Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:20.819245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:20.819279Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25515 2025-05-29T15:23:20.840457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:20.840487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:20.841590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25515 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:23:20.882454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:20.896227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:20.915322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:20.934646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:20.946977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:21.151171Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888621062382412:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:21.151200Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:21.190234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:23:21.196775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:21.207753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:23:21.221261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:23:21.235753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:23:21.249932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:23:21.263835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:23:21.279565Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888621062383064:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:21.279606Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:21.279612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888621062383069:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:21.280493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:23:21.283271Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888621062383071:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:23:21.363343Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888621062383122:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:21.459762Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888621062383138:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:21.459874Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmEwNGUwMTAtNzhiNzJhY2ItOTVhNGRhODgtOTgyNDNjN2Y=, ActorId: [1:7509888621062382394:2401], ActorState: ExecuteState, TraceId: 01jwea81sz3dxzh5rew8xccevw, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:23:21.462998Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AA6355 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A9D356 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C3F0E6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260E1942 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260E1242 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26102EDC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26102EDC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26102EDC 8. /-S/util/thread/pool.h:71: Process @ 0x26102EDC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AADCD9 10. /-S/util/thread/factory.h:15: Execute @ 0x13AAC6C9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AAC6C9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA7B3C 13. ??:0: ?? @ 0x7F796200CAC2 14. ??:0: ?? @ 0x7F796209E84F >> TLocksTest::Range_IncorrectNullDot2 [GOOD] >> TFlatTest::CopyTableAndCompareColumnsSchema [GOOD] >> TFlatTest::CopyTableAndDropCopy >> TCancelTx::CrossShardReadOnly [GOOD] >> TCancelTx::CrossShardReadOnlyWithReadSets >> TFlatTest::MiniKQLRanges [GOOD] >> TFlatTest::MergeEmptyAndWrite >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 |63.6%| [TA] $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeSkipNullKeys [GOOD] Test command err: 2025-05-29T15:23:24.207488Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888633255739107:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:24.207750Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00277d/r3tmp/tmpxasvDX/pdisk_1.dat 2025-05-29T15:23:24.280838Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888633255738942:2079] 1748532204205696 != 1748532204205699 2025-05-29T15:23:24.283362Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:6544 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-05-29T15:23:24.308453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:24.308486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:24.309467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:24.355641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:24.358494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:23:24.360737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:24.427284Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888633255739642:2359] txid# 281474976715659, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-05-29T15:23:24.428357Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888633255739655:2365] txid# 281474976715660, issues: { message: "Unexpected freeze state" severity: 1 } Error 128: Unexpected freeze state 2025-05-29T15:23:24.429086Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888633255739661:2370] txid# 281474976715661, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-05-29T15:23:24.429719Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888633255739667:2375] txid# 281474976715662, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-05-29T15:23:24.698897Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888636122042989:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:24.698923Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00277d/r3tmp/tmpKgSVxi/pdisk_1.dat 2025-05-29T15:23:24.717161Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:24.717494Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888636122042959:2079] 1748532204698662 != 1748532204698665 TClient is connected to server localhost:17060 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:24.803351Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:24.803381Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:24.803644Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:24.804486Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:24.812308Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> TFlatTest::CopyTableAndDropCopy [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectNullDot2 [GOOD] Test command err: 2025-05-29T15:23:20.560638Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888619051286165:2134];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:20.560770Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027c9/r3tmp/tmpRqxrQq/pdisk_1.dat 2025-05-29T15:23:20.640863Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888619051286061:2079] 1748532200560049 != 1748532200560052 2025-05-29T15:23:20.641570Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:20.662823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:20.662856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:20.663879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20313 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:20.712167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:20.718025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:20.740903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:20.760724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:23:21.007138Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888621428826158:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:21.007158Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027c9/r3tmp/tmp2PGF2m/pdisk_1.dat 2025-05-29T15:23:21.020164Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:21.020437Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888621428826137:2079] 1748532201006968 != 1748532201006971 TClient is connected to server localhost:29431 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:21.111253Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:21.111286Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:21.111576Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:21.112248Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:21.115805Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:21.130635Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:21.145087Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:21.413534Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888620338518905:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:21.413561Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027c9/r3tmp/tmpy5sETC/pdisk_1.dat 2025-05-29T15:23:21.427031Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888620338518882:2079] 1748532201413414 != 1748532201413417 2025-05-29T15:23:21.428702Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:9741 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:21.517792Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:21.517814Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:21.518052Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:21.518722Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:21.521319Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:21.536976Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:21.550514Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:21.896278Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888619323619053:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:21.896392Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027c9/r3tmp/tmpM01M9D/pdisk_1.dat 2025-05-29T15:23:21.915299Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:21.915670Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888619323618878:2079] 1748532201894573 != 1748532201894576 TClient is connected to server localhost:31219 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: " ... :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:23.545253Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:23.560028Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:23.573894Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:23.920462Z node 8 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7509888627993321188:2059];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:23.920480Z node 8 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027c9/r3tmp/tmpb1aozx/pdisk_1.dat 2025-05-29T15:23:23.935643Z node 8 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:23.935887Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [8:7509888627993321170:2079] 1748532203920349 != 1748532203920352 TClient is connected to server localhost:8953 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:24.024529Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:24.024570Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:24.024918Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:24.025574Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:24.026499Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:24.035632Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:24.050220Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:24.063971Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:24.380136Z node 9 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7509888634131548259:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:24.380162Z node 9 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027c9/r3tmp/tmpxIDNQd/pdisk_1.dat 2025-05-29T15:23:24.396185Z node 9 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:24.396412Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [9:7509888634131548240:2079] 1748532204380016 != 1748532204380019 TClient is connected to server localhost:18340 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-29T15:23:24.485293Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:24.485326Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:24.485609Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:24.487941Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:24.488242Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:24.501034Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:24.557609Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:24.568235Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:24.919692Z node 10 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7509888634171084108:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:24.919706Z node 10 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027c9/r3tmp/tmpYJqG6C/pdisk_1.dat 2025-05-29T15:23:24.936646Z node 10 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:24.936839Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [10:7509888634171084085:2079] 1748532204919549 != 1748532204919552 TClient is connected to server localhost:30971 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:25.024279Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:25.024404Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:25.024423Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-29T15:23:25.025450Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:25.028687Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:25.043195Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:25.058374Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:15.204737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:15.204759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:15.204764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:15.204767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:15.204778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:15.204780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:15.204786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:15.204797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:15.204887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:15.204953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:15.216375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:15.216398Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:15.218490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:15.218571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:15.218612Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:15.221642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:15.221807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:15.221918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:15.221984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:15.222469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:15.222509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:15.222801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:15.222816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:15.222839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:15.222848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:15.222854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:15.222890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.223936Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:15.249155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:15.249236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.249308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:15.249361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:15.249374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.252220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:15.252258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:15.252314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.252326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:15.252332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:15.252338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:15.253021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.253035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:15.253043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:15.253581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.253596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:15.253604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:15.253611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:15.254393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:15.257120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:15.257168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:15.257320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:15.257354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:15.257361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:15.257419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:15.257426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:15.257455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:15.257465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:15.258078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:15.258093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:15.258141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T1 ... 
594046678944, LocalPathId: 2] was 4 2025-05-29T15:23:25.771900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:23:25.772074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:23:25.772414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:23:25.772713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:25.772724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:25.772770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:23:25.772800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:25.772805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-05-29T15:23:25.772810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-05-29T15:23:25.772919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:23:25.772927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-05-29T15:23:25.772943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:23:25.772951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-05-29T15:23:25.772957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 103:0 129 -> 240 2025-05-29T15:23:25.773103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:23:25.773115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:23:25.773120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:23:25.773125Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10 2025-05-29T15:23:25.773131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:25.773243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:23:25.773254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:23:25.773259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:23:25.773263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:23:25.773268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:23:25.773277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-05-29T15:23:25.773972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:23:25.773984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:25.774075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:23:25.774115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:23:25.774119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:25.774125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:23:25.774131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:25.774136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-05-29T15:23:25.774148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:410:2375] message: TxId: 103 2025-05-29T15:23:25.774154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:25.774160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:23:25.774164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:23:25.774181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:23:25.774268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:25.774274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:25.774448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:23:25.774485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:23:25.774793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:25.774805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-05-29T15:23:25.774866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:23:25.774872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1339:3262] 2025-05-29T15:23:25.775003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 11 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-05-29T15:23:25.775597Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:25.775636Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 49us result status StatusSuccess 2025-05-29T15:23:25.775751Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReply [GOOD] Test command err: 2025-05-29T15:23:21.311377Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888623039210731:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:21.311403Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027af/r3tmp/tmpYxh8fK/pdisk_1.dat 2025-05-29T15:23:21.362073Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:21.362914Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888623039210712:2079] 1748532201311252 != 1748532201311255 TClient is connected to server localhost:64451 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-05-29T15:23:21.379485Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509888623039210978:2102] Handle TEvNavigate describe path dc-1 2025-05-29T15:23:21.380766Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509888623039211250:2250] HANDLE EvNavigateScheme dc-1 2025-05-29T15:23:21.381021Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:7509888623039211250:2250] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:21.386122Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:7509888623039211250:2250] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-05-29T15:23:21.387661Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:7509888623039211250:2250] Handle TEvDescribeSchemeResult Forward to# [1:7509888623039211249:2249] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:21.390711Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888623039210978:2102] Handle TEvProposeTransaction 2025-05-29T15:23:21.390725Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:327: actor# [1:7509888623039210978:2102] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-05-29T15:23:21.441570Z node 1 :HIVE DEBUG: hive_impl.cpp:141: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:7509888623039210963:2099] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-05-29T15:23:21.441607Z node 1 :HIVE DEBUG: tx__register_node.cpp:21: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-05-29T15:23:21.441635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:21.441638Z node 1 :HIVE DEBUG: hive_impl.cpp:361: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-05-29T15:23:21.441640Z node 1 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-29T15:23:21.441645Z node 1 :HIVE DEBUG: hive_impl.cpp:361: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-05-29T15:23:21.441646Z node 1 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-29T15:23:21.441660Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:21.441705Z node 1 :HIVE DEBUG: hive_impl.cpp:798: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-05-29T15:23:21.441716Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-05-29T15:23:21.441726Z node 1 :HIVE DEBUG: hive_impl.cpp:222: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-05-29T15:23:21.441729Z node 1 :HIVE DEBUG: hive_impl.cpp:225: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-05-29T15:23:21.441733Z node 1 :HIVE DEBUG: hive_impl.cpp:302: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-05-29T15:23:21.442073Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:7509888623039210978:2102] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:23:21.442152Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:23:21.442162Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888623039210978:2102] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:23:21.442197Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# 
[1:7509888623039210978:2102] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7509888623039211269:2260] 2025-05-29T15:23:21.442575Z node 1 :HIVE DEBUG: tx__register_node.cpp:88: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-05-29T15:23:21.442597Z node 1 :HIVE DEBUG: node_info.cpp:367: HIVE#72057594037968897 Node(1) Ping([1:7509888623039210963:2099]) 2025-05-29T15:23:21.442613Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:26: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-05-29T15:23:21.442792Z node 1 :HIVE DEBUG: hive_impl.cpp:727: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-05-29T15:23:21.442817Z node 1 :HIVE DEBUG: tx__sync_tablets.cpp:41: HIVE#72057594037968897 THive::TTxSyncTablets([1:7509888623039210963:2099])::Execute 2025-05-29T15:23:21.442824Z node 1 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-29T15:23:21.442837Z node 1 :HIVE DEBUG: tx__sync_tablets.cpp:130: HIVE#72057594037968897 THive::TTxSyncTablets([1:7509888623039210963:2099])::Complete 2025-05-29T15:23:21.442868Z node 1 :HIVE DEBUG: hive_impl.cpp:721: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 1748532201312432 ResourceMaximum { Memory: 202797641728 } 2025-05-29T15:23:21.442891Z node 1 :HIVE DEBUG: tx__status.cpp:22: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-05-29T15:23:21.442899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:21.442924Z node 1 :HIVE DEBUG: hive_impl.cpp:2781: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-05-29T15:23:21.442946Z node 1 :HIVE DEBUG: hive_impl.cpp:361: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-05-29T15:23:21.442952Z node 1 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-29T15:23:21.442968Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-05-29T15:23:21.442978Z node 1 :HIVE DEBUG: hive_impl.cpp:222: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-05-29T15:23:21.442980Z node 1 :HIVE DEBUG: hive_impl.cpp:225: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-05-29T15:23:21.442983Z node 1 :HIVE DEBUG: hive_impl.cpp:302: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-05-29T15:23:21.443595Z node 1 :HIVE DEBUG: tx__status.cpp:65: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-05-29T15:23:21.443608Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:26: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-05-29T15:23:21.454387Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888623039211269:2260] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-05-29T15:23:21.454426Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888623039211269:2260] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:21.454441Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888623039211269:2260] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:21.454560Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888623039211269:2260] txid# 281474976715657 HANDLE 
EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:21.454596Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888623039211269:2260] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-05-29T15:23:21.454604Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888623039211269:2260] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72 ... 76715674:0 progress is 1/1 2025-05-29T15:23:21.661548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715674 ready parts: 1/1 2025-05-29T15:23:21.661552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715674:0 progress is 1/1 2025-05-29T15:23:21.661554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715674 ready parts: 1/1 2025-05-29T15:23:21.661556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715674, ready parts: 1/1, is published: true 2025-05-29T15:23:21.661563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7509888623039212272:2375] message: TxId: 281474976715674 2025-05-29T15:23:21.661566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715674 ready parts: 1/1 2025-05-29T15:23:21.661569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715674:0 2025-05-29T15:23:21.661572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715674:0 2025-05-29T15:23:21.661588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 2 2025-05-29T15:23:21.662273Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037899 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-29T15:23:21.662296Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037899 Initiating switch from PreOffline to Offline state 2025-05-29T15:23:21.662591Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3306: 72075186224037899 Reporting state Offline to schemeshard 72057594046644480 2025-05-29T15:23:21.662671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888623039211857 RawX2: 4503603922340126 } TabletId: 72075186224037899 State: 4 2025-05-29T15:23:21.662688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037899, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:21.662791Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037899 state Offline 
2025-05-29T15:23:21.662795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:12 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:21.662839Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 TxId_Deprecated: 12 TabletID: 72075186224037899 2025-05-29T15:23:21.662851Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037899 2025-05-29T15:23:21.662866Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037899.Leader.1) VolatileState: Running -> Stopped (Node 1) 2025-05-29T15:23:21.662890Z node 1 :HIVE DEBUG: tablet_info.cpp:523: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037899.Leader.1 gen 1) to node 1 2025-05-29T15:23:21.662911Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 2025-05-29T15:23:21.663797Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:136: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [1:7509888623039210963:2099] NKikimrLocal.TEvStopTablet TabletId: 72075186224037899 FollowerId: 0 Generation: 1,0x10040206 [1:7509888623039211161:2193] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 Actions: NKikimr::TTabletReqBlockBlobStorage} 2025-05-29T15:23:21.663822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12, at schemeshard: 72057594046644480 2025-05-29T15:23:21.663854Z node 1 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037899 reason = ReasonStop 2025-05-29T15:23:21.663872Z node 1 :HIVE DEBUG: tx__block_storage_result.cpp:23: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037899 OK) 2025-05-29T15:23:21.663874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 1 2025-05-29T15:23:21.663887Z node 1 :HIVE DEBUG: tx__block_storage_result.cpp:65: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037899 OK) 2025-05-29T15:23:21.663894Z node 1 :HIVE DEBUG: hive_impl.cpp:882: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037899 2025-05-29T15:23:21.663900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:23:21.663902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 6], at schemeshard: 72057594046644480 2025-05-29T15:23:21.663908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-29T15:23:21.663964Z node 1 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037899 OK) 2025-05-29T15:23:21.663976Z node 1 
:TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037899 2025-05-29T15:23:21.664003Z node 1 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037899 2025-05-29T15:23:21.664029Z node 1 :HIVE DEBUG: hive_impl.cpp:480: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037899 2025-05-29T15:23:21.664037Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-05-29T15:23:21.664075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:12 2025-05-29T15:23:21.664085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:12 tabletId 72075186224037899 2025-05-29T15:23:21.664101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-29T15:23:21.664803Z node 1 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037899)::Complete SideEffects {} 2025-05-29T15:23:21.768710Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888621895353051:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:21.768733Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027af/r3tmp/tmpcXPJY7/pdisk_1.dat 2025-05-29T15:23:21.781587Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:21.781777Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888621895353031:2079] 1748532201768594 != 1748532201768597 TClient is connected to server localhost:29273 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:21.876712Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:21.876740Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:21.877166Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:21.878491Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:21.882044Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:21.885747Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:24.961148Z node 2 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:512: Shard %72075186224037888, txid %281474976716360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-05-29T15:23:24.968958Z node 2 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:268: Datashard execution error for [0:281474976716360] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-05-29T15:23:24.969799Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976716360 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2025-05-29T15:23:24.978451Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7509888634780261305:5880] txid# 281474976716360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. 
(71580986 > 50331648) proxy error code: ExecResultUnavailable >> TFlatTest::MergeEmptyAndWrite [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 |63.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |63.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |63.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} |63.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace >> TFlatTest::SelectRangeReverseItemsLimit >> TLocksTest::Range_BrokenLock2 [GOOD] >> TLocksTest::Range_BrokenLock3 >> TCancelTx::CrossShardReadOnlyWithReadSets [GOOD] >> TCancelTx::ImmediateReadOnly >> TxUsage::WriteToTopic_Demo_1_Table >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropCopy [GOOD] Test command err: 2025-05-29T15:23:20.751174Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888617224298353:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:20.751215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027c4/r3tmp/tmp6SoAAT/pdisk_1.dat 2025-05-29T15:23:20.811915Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888617224298333:2079] 1748532200750986 != 1748532200750989 2025-05-29T15:23:20.813519Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:22736 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:20.854152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:20.854177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:20.855291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:20.892395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:20.894992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:20.899478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532201003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_1_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1_Copy" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1748532201017 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot... (TRUNCATED) 2025-05-29T15:23:20.975576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715661 CreateStep: 1748532201073 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" ... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_2_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2_Copy" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715662 CreateStep: 1748532201087 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: ... (TRUNCATED) 2025-05-29T15:23:21.053407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 waiting... waiting... 
TClient::Ls request: /dc-1/Dir/Table_3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715663 CreateStep: 1748532201108 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_3_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3_Copy" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715664 CreateStep: 1748532201122 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { ... (TRUNCATED) 2025-05-29T15:23:21.104794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_4 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715665 CreateStep: 1748532201206 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_4" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_4_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4_Copy" PathId: 10 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715666 CreateStep: 1748532201220 ParentPathId: 2 PathState: EPathStateNoChange ... 
athId [OwnerId: 72057594046644480, LocalPathId: 6] was 4 2025-05-29T15:23:25.864702Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715686 datashard 72075186224037895 state Ready 2025-05-29T15:23:25.864718Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037895 Got TEvSchemaChangedResult from SS at 72075186224037895 2025-05-29T15:23:25.864753Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715686 datashard 72075186224037894 state Ready 2025-05-29T15:23:25.864766Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-05-29T15:23:25.865848Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037894, clientId# [2:7509888639330468404:2998], serverId# [2:7509888639330468405:2999], sessionId# [0:0:0] 2025-05-29T15:23:25.865889Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-29T15:23:25.866314Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-29T15:23:25.866339Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-29T15:23:25.867216Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037895, clientId# [2:7509888639330468416:3007], serverId# [2:7509888639330468417:3008], sessionId# [0:0:0] 2025-05-29T15:23:25.867262Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-29T15:23:25.867641Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-29T15:23:25.867656Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-29T15:23:25.868485Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-29T15:23:25.868824Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-29T15:23:25.868847Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-29T15:23:25.869482Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-29T15:23:25.870643Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-29T15:23:25.870676Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-29T15:23:25.871423Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-29T15:23:25.871725Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-29T15:23:25.871735Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-29T15:23:25.872044Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put 
Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1297 647 2154)b }, ecr=1.000 2025-05-29T15:23:25.872265Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-29T15:23:25.872277Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-05-29T15:23:25.872744Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-29T15:23:25.873006Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-29T15:23:25.873015Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-29T15:23:25.873477Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1171 521 2626)b }, ecr=1.000 2025-05-29T15:23:25.873616Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-29T15:23:25.873626Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-05-29T15:23:25.873699Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-29T15:23:25.874086Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-29T15:23:25.874111Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-29T15:23:25.874697Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-29T15:23:25.874950Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-29T15:23:25.874964Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-29T15:23:25.875518Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-29T15:23:25.875721Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-29T15:23:25.875732Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-29T15:23:25.876255Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-29T15:23:25.876448Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-29T15:23:25.876464Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-29T15:23:25.876900Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-29T15:23:25.877189Z node 2 :TX_DATASHARD DEBUG: 
datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-29T15:23:25.877202Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-29T15:23:25.877730Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1653 647 6413)b }, ecr=1.000 2025-05-29T15:23:25.877885Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-29T15:23:25.877897Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-05-29T15:23:25.877951Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-29T15:23:25.878166Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-29T15:23:25.878178Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-29T15:23:25.878659Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2406 1432 5183)b }, ecr=1.000 2025-05-29T15:23:25.878770Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-29T15:23:25.878779Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-05-29T15:23:25.878891Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-29T15:23:25.879825Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-29T15:23:25.879846Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-29T15:23:25.880413Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-29T15:23:25.881325Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-29T15:23:25.881342Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-29T15:23:25.881870Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-29T15:23:25.884411Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-29T15:23:25.884442Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-29T15:23:25.885152Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-29T15:23:25.885955Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 
2025-05-29T15:23:25.885974Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 Check that tablet 72075186224037892 was deleted 2025-05-29T15:23:25.886197Z node 2 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2025-05-29T15:23:25.886394Z node 2 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2025-05-29T15:23:25.886485Z node 2 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-05-29T15:23:25.886577Z node 2 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-05-29T15:23:25.886679Z node 2 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-05-29T15:23:25.886971Z node 2 :HIVE WARN: hive_impl.cpp:1945: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::MergeEmptyAndWrite [GOOD] Test command err: 2025-05-29T15:23:25.182478Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888638975974464:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:25.182529Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00276e/r3tmp/tmpsb96br/pdisk_1.dat 2025-05-29T15:23:25.236085Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888638975974442:2079] 1748532205182357 != 1748532205182360 2025-05-29T15:23:25.236527Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:7400 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:25.285409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:25.285437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:25.286588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:25.315804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:25.322755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:25.330440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:23:25.705420Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888638149271324:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:25.705437Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00276e/r3tmp/tmpnLuRuz/pdisk_1.dat 2025-05-29T15:23:25.719587Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:25.719823Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888638149271304:2079] 1748532205705304 != 1748532205705307 TClient is connected to server localhost:6835 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:25.805744Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:25.805775Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:25.805996Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:25.806774Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:25.806875Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:25.812767Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:23:25.834318Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1281 647 2154)b }, ecr=1.000 2025-05-29T15:23:25.835935Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1155 521 2626)b }, ecr=1.000 2025-05-29T15:23:25.843723Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1637 647 6413)b }, ecr=1.000 2025-05-29T15:23:25.843815Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2390 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532205875 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... 
(TRUNCATED) 2025-05-29T15:23:25.860355Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-29T15:23:25.860876Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715676 released its data 2025-05-29T15:23:25.860936Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:23:25.861128Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715676 released its data 2025-05-29T15:23:25.861160Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-29T15:23:25.861211Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715676 at 72075186224037889 restored its data 2025-05-29T15:23:25.861318Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715676 released its data 2025-05-29T15:23:25.861339Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:23:25.861378Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715676 at 72075186224037888 restored its data 2025-05-29T15:23:25.861456Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715676 released its data 2025-05-29T15:23:25.862254Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-29T15:23:25.862304Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715676 at 72075186224037889 restored its data 2025-05-29T15:23:25.862403Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715676 released its data 2025-05-29T15:23:25.862419Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:23:25.862456Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715676 at 72075186224037888 restored its data 2025-05-29T15:23:25.862532Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715676 released its data 2025-05-29T15:23:25.862546Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-29T15:23:25.862579Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715676 at 72075186224037889 restored its data 2025-05-29T15:23:25.862664Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715676 released its data 2025-05-29T15:23:25.862677Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:23:25.862710Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715676 at 72075186224037888 restored its data 2025-05-29T15:23:25.862830Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976715676 released its data 2025-05-29T15:23:25.862851Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-29T15:23:25.862908Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976715676 at 72075186224037889 restored its data 2025-05-29T15:23:25.863030Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 
281474976715676 released its data 2025-05-29T15:23:25.863051Z node 2 :TX_DA ... Transaction::Complete at 72075186224037890 2025-05-29T15:23:25.932605Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-05-29T15:23:25.932619Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1748532205980 : 281474976715687] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7509888638149271675:2151], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:23:25.932628Z node 2 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715687 state PreOffline TxInFly 0 2025-05-29T15:23:25.932637Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-05-29T15:23:25.932800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6285: Handle TEvProposeTransactionResult, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1748532205980 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 216 } } CommitVersion { Step: 1748532205980 TxId: 281474976715687 } 2025-05-29T15:23:25.932807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2025-05-29T15:23:25.932832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1748532205980 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 216 } } CommitVersion { Step: 1748532205980 TxId: 281474976715687 } 2025-05-29T15:23:25.932843Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1748532205980 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 216 } } CommitVersion { Step: 1748532205980 TxId: 281474976715687 } 2025-05-29T15:23:25.933009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-05-29T15:23:25.933211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888638149272235 RawX2: 4503608217307444 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-05-29T15:23:25.933221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2025-05-29T15:23:25.933241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: 
Source { RawX1: 7509888638149272235 RawX2: 4503608217307444 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-05-29T15:23:25.933250Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-05-29T15:23:25.933257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7509888638149272235 RawX2: 4503608217307444 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-05-29T15:23:25.933265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715687:0, shardIdx: 72057594046644480:3, datashard: 72075186224037890, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-05-29T15:23:25.933270Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-05-29T15:23:25.933273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 281474976715687:0, datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-05-29T15:23:25.933278Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976715687:0 129 -> 240 2025-05-29T15:23:25.933352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-05-29T15:23:25.933383Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-05-29T15:23:25.933387Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 281474976715687:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:23:25.933476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-29T15:23:25.933508Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715687:0 progress is 1/1 2025-05-29T15:23:25.933511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-05-29T15:23:25.933514Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715687:0 progress is 1/1 2025-05-29T15:23:25.933515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-05-29T15:23:25.933518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715687, ready parts: 1/1, is published: true 2025-05-29T15:23:25.933558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7509888638149272459:2393] message: TxId: 281474976715687 
2025-05-29T15:23:25.933562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-05-29T15:23:25.933565Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715687:0 2025-05-29T15:23:25.933567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715687:0 2025-05-29T15:23:25.933586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-29T15:23:25.933871Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715687 datashard 72075186224037890 state PreOffline 2025-05-29T15:23:25.933888Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 TClient::Ls request: /dc-1/Dir/TableOld 2025-05-29T15:23:25.934947Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-29T15:23:25.934969Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-05-29T15:23:25.935256Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3306: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-05-29T15:23:25.935303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888638149272235 RawX2: 4503608217307444 } TabletId: 72075186224037890 State: 4 2025-05-29T15:23:25.935314Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:25.935373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:25.935380Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-29T15:23:25.936107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-29T15:23:25.936166Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-29T15:23:25.936201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:23:25.936203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-29T15:23:25.936211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-29T15:23:25.936252Z 
node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-29T15:23:25.936310Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037890 2025-05-29T15:23:25.936312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:3 2025-05-29T15:23:25.936317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-29T15:23:25.936326Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-29T15:23:25.936337Z node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037890 2025-05-29T15:23:25.936370Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" >> TLocksFatTest::PointSetNotBreak >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 >> TFlatTest::SelectRangeReverseItemsLimit [GOOD] >> TFlatTest::SelectRangeReverseIncludeKeys >> TCancelTx::ImmediateReadOnly [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] >> TxUsage::WriteToTopic_Demo_2_Query >> TxUsage::WriteToTopic_Demo_2_Table >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 >> TxUsage::TwoSessionOneConsumer_Table >> TTxDataShardUploadRows::TestUploadShadowRows >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 >> TxUsage::WriteToTopic_Demo_1_Table [FAIL] >> TxUsage::WriteToTopic_Demo_1_Query >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 >> TFlatTest::RejectByPerShardReadSize [GOOD] >> TFlatTest::RejectByPerRequestSize >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:16.607302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:16.607330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:16.607338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:16.607343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:16.607363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:16.607368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:16.607378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:16.607393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:16.607509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:16.607576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:16.622506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:16.622531Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:16.625163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:16.625286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:16.625331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:16.626898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:16.627069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:16.627170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2025-05-29T15:23:16.627238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:16.627723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:16.627761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:16.628036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:16.628046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:16.628069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:16.628078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:16.628084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:16.628119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.629366Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:16.651629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:16.651715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.651790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:16.651860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:16.651873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.652685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:16.652710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-05-29T15:23:16.652759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.652769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:16.652776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:16.652782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:16.653186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.653197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:16.653203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:16.653583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.653595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:16.653602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:16.653610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:16.654350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:16.654752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:16.654794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:16.654986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:16.655014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:16.655021Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:16.655078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:16.655086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:16.655119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:16.655131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:16.655543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:16.655551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:16.655597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T1 ... n publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-05-29T15:23:27.091490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-05-29T15:23:27.092288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-05-29T15:23:27.092606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-05-29T15:23:27.092669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-05-29T15:23:27.092678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-05-29T15:23:27.092734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-05-29T15:23:27.092768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-05-29T15:23:27.092775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:450:2401], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-05-29T15:23:27.092782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:450:2401], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-05-29T15:23:27.092938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-05-29T15:23:27.092949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-05-29T15:23:27.092973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-05-29T15:23:27.092984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-05-29T15:23:27.092991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 104:0 129 -> 240 2025-05-29T15:23:27.093175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2025-05-29T15:23:27.093190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2025-05-29T15:23:27.093195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-05-29T15:23:27.093201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 11 2025-05-29T15:23:27.093208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-05-29T15:23:27.093446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-05-29T15:23:27.093460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-05-29T15:23:27.093464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-05-29T15:23:27.093469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:23:27.093474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-05-29T15:23:27.093489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-05-29T15:23:27.094100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at 
schemeshard: 72075186233409546 2025-05-29T15:23:27.094113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-05-29T15:23:27.094226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-05-29T15:23:27.094275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1 2025-05-29T15:23:27.094280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:23:27.094286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1 2025-05-29T15:23:27.094294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:23:27.094300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-05-29T15:23:27.094313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:551:2489] message: TxId: 104 2025-05-29T15:23:27.094318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:23:27.094324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:0 2025-05-29T15:23:27.094328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:0 2025-05-29T15:23:27.094350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-05-29T15:23:27.094601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-05-29T15:23:27.094610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-05-29T15:23:27.094823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-05-29T15:23:27.095110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-05-29T15:23:27.095373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-05-29T15:23:27.095383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:450:2401], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-05-29T15:23:27.095492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-29T15:23:27.095500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter 
[1:1421:3328] 2025-05-29T15:23:27.095625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-05-29T15:23:27.096415Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-05-29T15:23:27.096463Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 62us result status StatusSuccess 2025-05-29T15:23:27.096588Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TCancelTx::ImmediateReadOnly [GOOD] Test command err: 2025-05-29T15:23:25.134009Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888640479212384:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:25.134133Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002768/r3tmp/tmpdl1r10/pdisk_1.dat 2025-05-29T15:23:25.209989Z node 1 
:IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:27408 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:25.280634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:25.280660Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:25.281490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:25.281651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:25.287818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:23:25.291839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:27408 2025-05-29T15:23:25.388082Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7509888640479212959:2378] txid# 281474976715660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-29T15:23:25.388105Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7509888640479212959:2378] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2025-05-29T15:23:25.392660Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7509888640479212972:2388] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-29T15:23:25.392689Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7509888640479212972:2388] txid# 281474976715661 RESPONSE Status# ExecCancelled marker# P13c 2025-05-29T15:23:25.399332Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7509888640479212985:2398] txid# 281474976715662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-29T15:23:25.399362Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7509888640479212985:2398] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c 2025-05-29T15:23:25.414652Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7509888640479213011:2418] txid# 281474976715664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-29T15:23:25.414683Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7509888640479213011:2418] txid# 281474976715664 RESPONSE Status# ExecCancelled marker# P13c 2025-05-29T15:23:25.420819Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7509888640479213024:2428] txid# 281474976715665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-29T15:23:25.420852Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7509888640479213024:2428] txid# 281474976715665 RESPONSE Status# ExecCancelled marker# P13c 2025-05-29T15:23:25.427501Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7509888640479213037:2438] txid# 281474976715666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-29T15:23:25.427532Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7509888640479213037:2438] txid# 281474976715666 RESPONSE Status# ExecCancelled marker# P13c test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002768/r3tmp/tmpeMqV82/pdisk_1.dat 2025-05-29T15:23:25.637033Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:23:25.661828Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:25.662894Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888637531481582:2079] 1748532205614546 != 1748532205614549 TClient is connected to server localhost:7514 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:25.725550Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:25.725592Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:25.726548Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:25.727140Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:25.728611Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:23:25.729502Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:7514 2025-05-29T15:23:26.126043Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888641290009880:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:26.126123Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002768/r3tmp/tmpBDZ01S/pdisk_1.dat 2025-05-29T15:23:26.147072Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:26.147406Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888641290009705:2079] 1748532206124774 != 1748532206124777 TClient is connected to server localhost:24581 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:26.228775Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:26.228803Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:26.229365Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:26.229929Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:26.234785Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:23:26.236042Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:24581 2025-05-29T15:23:26.278398Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7509888641290010446:2378] txid# 281474976715660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-29T15:23:26.278425Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7509888641290010446:2378] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2025-05-29T15:23:26.289330Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7509888641290010461:2390] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-29T15:23:26.289371Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7509888641290010461:2390] txid# 281474976715661 RESPONSE Status# ExecCancelled marker# P13c 2025-05-29T15:23:26.295884Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7509888641290010475:2401] txid# 281474976715662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-29T15:23:26.295921Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7509888641290010475:2401] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c 2025-05-29T15:23:26.310187Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7509888641290010504:2424] txid# 281474976715664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-29T15:23:26.310209Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7509888641290010504:2424] txid# 281474976715664 RESPONSE Status# ExecCancelled marker# P13c 2025-05-29T15:23:26.320978Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7509888641290010517:2434] txid# 281474976715665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-29T15:23:26.321010Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7509888641290010517:2434] txid# 281474976715665 RESPONSE Status# ExecCancelled marker# P13c 2025-05-29T15:23:26.332824Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7509888641290010531:2445] txid# 281474976715666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-29T15:23:26.332853Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7509888641290010531:2445] txid# 281474976715666 RESPONSE Status# ExecCancelled marker# P13c test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002768/r3tmp/tmpxbpEz6/pdisk_1.dat 2025-05-29T15:23:26.679975Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888642412457723:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:26.682770Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:23:26.695390Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:26.695595Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888642412457691:2079] 1748532206679295 != 1748532206679298 TClient is connected to server localhost:16581 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:26.784740Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:26.784772Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:26.785158Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:26.786040Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:26.787178Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:23:26.788132Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:16581 2025-05-29T15:23:26.838366Z node 4 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976715660 at tablet 72075186224037888 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-05-29T15:23:26.838817Z node 4 :TX_PROXY ERROR: datareq.cpp:883: Actor# [4:7509888642412458431:2379] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2025-05-29T15:23:26.840813Z node 4 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976715662 at tablet 72075186224037889 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-05-29T15:23:26.840893Z node 4 :TX_PROXY ERROR: datareq.cpp:883: Actor# [4:7509888642412458445:2387] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c |63.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |63.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |63.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp >> TLocksTest::GoodLock [GOOD] >> TLocksTest::GoodNullLock >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 >> TFlatTest::LargeDatashardReplyRW [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 >> TDataShardLocksTest::Points_OneTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_RemoveAll >> TFlatTest::AutoSplitBySize [GOOD] >> TFlatTest::AutoMergeBySize >> TDataShardLocksTest::Points_ManyTx_RemoveAll [GOOD] >> TDataShardLocksTest::UseLocksCache >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 >> TxUsage::WriteToTopic_Demo_2_Table [FAIL] >> TxUsage::WriteToTopic_Demo_30_Table >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 >> TxUsage::WriteToTopic_Demo_2_Query [FAIL] >> TxUsage::WriteToTopic_Demo_22_RestartNo_Table >> TxUsage::WriteToTopic_Demo_1_Query [FAIL] >> TxUsage::WriteToTopic_Demo_19_RestartNo_Table >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 [GOOD] >> TxUsage::TwoSessionOneConsumer_Table [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 >> TxUsage::TwoSessionOneConsumer_Query >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] Test command err: 2025-05-29T15:23:26.696595Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888643685394481:2078];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:26.696617Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/ciyv/00275c/r3tmp/tmp8LOurc/pdisk_1.dat 2025-05-29T15:23:26.772050Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:9071 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-05-29T15:23:26.798527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:26.798557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:26.800581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-29T15:23:26.810881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:26.813551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:26.822499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:27.177087Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888648409960373:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:27.177106Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00275c/r3tmp/tmpv608dK/pdisk_1.dat 2025-05-29T15:23:27.194008Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:27.194250Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888648409960354:2079] 1748532207176995 != 1748532207176998 TClient is connected to server localhost:23943 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:27.283455Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:27.283482Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:27.283853Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:27.284432Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:27.289971Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReplyRW [GOOD] Test command err: 2025-05-29T15:23:21.336730Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888621989474377:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:21.336753Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027b5/r3tmp/tmpV9z2eb/pdisk_1.dat 2025-05-29T15:23:21.397223Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:21.397869Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888621989474354:2079] 1748532201336559 != 1748532201336562 TClient is connected to server localhost:1862 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:21.431320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:21.470889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:21.470917Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:21.471959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:21.477878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:24.255082Z node 1 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:512: Shard %72075186224037889, txid %281474976711360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2025-05-29T15:23:24.262037Z node 1 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:268: Datashard execution error for [1748532203964:281474976711360] at 72075186224037889: Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2025-05-29T15:23:24.263884Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7509888630579415366:5904] txid# 281474976711360 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# RESULT_UNAVAILABLE shard id 72075186224037889 marker# P12 2025-05-29T15:23:24.263923Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7509888630579415366:5904] txid# 281474976711360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037889: reply size limit exceeded. 
(61442990 > 50331648) proxy error code: ExecResultUnavailable 2025-05-29T15:23:24.483927Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888635202824823:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:24.483948Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027b5/r3tmp/tmplchHKu/pdisk_1.dat 2025-05-29T15:23:24.506210Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:24.506444Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888635202824800:2079] 1748532204483647 != 1748532204483650 TClient is connected to server localhost:5039 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:24.592622Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:24.592652Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:24.593335Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:24.593625Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:23:24.603405Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:27.788701Z node 2 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:512: Shard %72075186224037888, txid %281474976716361, engine error: Error executing transaction (read-only: 0): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-05-29T15:23:27.790807Z node 2 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:268: Datashard execution error for [0:281474976716361] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. 
(71580986 > 50331648) 2025-05-29T15:23:27.791830Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976716361 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2025-05-29T15:23:27.791878Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7509888648087733084:5886] txid# 281474976716361 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 >> TFlatTest::RejectByPerRequestSize [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRows [FAIL] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry >> TDataShardLocksTest::MvccTestOooTxDoesntBreakPrecedingReadersLocks [GOOD] >> TDataShardLocksTest::MvccTestOutdatedLocksRemove [GOOD] >> TDataShardLocksTest::MvccTestBreakEdge [GOOD] >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> KqpOlapStats::AddRowsSomeTablesInTableStore [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 >> TLocksTest::Range_BrokenLock3 [GOOD] >> TLocksFatTest::PointSetNotBreak [GOOD] >> TLocksFatTest::PointSetRemove >> TFlatTest::SplitEmptyToMany [GOOD] >> TFlatTest::SplitEmptyTwice |63.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> TxUsage::WriteToTopic_Demo_30_Table [FAIL] >> TxUsage::WriteToTopic_Demo_30_Query >> TxUsage::WriteToTopic_Demo_22_RestartNo_Table [FAIL] >> TxUsage::WriteToTopic_Demo_22_RestartNo_Query >> TxUsage::WriteToTopic_Demo_19_RestartNo_Table [FAIL] >> TxUsage::WriteToTopic_Demo_19_RestartNo_Query >> TxUsage::TwoSessionOneConsumer_Query [FAIL] >> TxUsage::WriteToTopic_Demo_10_Table >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleTable [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::RejectByPerRequestSize [GOOD] Test command err: 2025-05-29T15:23:22.156229Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888623811823424:2200];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:22.156333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00279f/r3tmp/tmpEstGID/pdisk_1.dat 2025-05-29T15:23:22.227545Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888623811823262:2079] 1748532202153955 != 1748532202153958 2025-05-29T15:23:22.229511Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:2019 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:22.304475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:22.304503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:22.305172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:22.305322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:22.313410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:24.680438Z node 1 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:133: Transaction read size 51002229 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760 2025-05-29T15:23:24.680518Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976715760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002229 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760) | 2025-05-29T15:23:24.680557Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7509888632401759479:2905] txid# 281474976715760 RESPONSE Status# WrongRequest marker# P13c 2025-05-29T15:23:24.935436Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888633446743501:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:24.935470Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00279f/r3tmp/tmpk8iWmc/pdisk_1.dat 2025-05-29T15:23:24.952423Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:24.952585Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888633446743483:2079] 1748532204935341 != 1748532204935344 TClient is connected to server localhost:25756 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:25.041756Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:25.041784Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:25.042034Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:25.042766Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:25.050246Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:27.518304Z node 2 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:133: Transaction read size 51002149 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760 2025-05-29T15:23:27.518340Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976715760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002149 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760) | 2025-05-29T15:23:27.518591Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7509888646331646996:2905] txid# 281474976715760 RESPONSE Status# WrongRequest marker# P13c 2025-05-29T15:23:27.672961Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888649075454103:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:27.672998Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00279f/r3tmp/tmpgZ9Rjq/pdisk_1.dat 2025-05-29T15:23:27.702996Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:27.706861Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888649075454084:2079] 1748532207672852 != 1748532207672855 TClient is connected to server localhost:10540 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-29T15:23:27.788747Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:27.788782Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:27.789179Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:27.790088Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:27.797049Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-29T15:23:27.809385Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:23:28.798106Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [3:7509888649075454311:2086] Handle TEvProposeTransaction 2025-05-29T15:23:28.798125Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [3:7509888649075454311:2086] TxId# 281474976715700 ProcessProposeTransaction 2025-05-29T15:23:28.798134Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:273: actor# [3:7509888649075454311:2086] Cookie# 0 userReqId# "" txid# 281474976715700 SEND to# [3:7509888653370422510:2598] DataReq marker# P0 2025-05-29T15:23:28.798152Z node 3 :TX_PROXY DEBUG: datareq.cpp:1330: Actor# [3:7509888653370422510:2598] Cookie# 0 txid# 281474976715700 HANDLE TDataReq marker# P1 2025-05-29T15:23:28.798241Z node 3 :TX_PROXY DEBUG: datareq.cpp:1245: Actor [3:7509888653370422510:2598] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2025-05-29T15:23:28.798244Z node 3 :TX_PROXY DEBUG: datareq.cpp:1245: Actor [3:7509888653370422510:2598] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2025-05-29T15:23:28.798248Z node 3 :TX_PROXY DEBUG: datareq.cpp:1453: Actor# [3:7509888653370422510:2598] txid# 281474976715700 SEND to# [3:7509888649075454401:2126] TSchemeCache with 2 scheme entries. DataReq marker# P2 2025-05-29T15:23:28.798288Z node 3 :TX_PROXY DEBUG: datareq.cpp:1620: Actor# [3:7509888653370422510:2598] txid# 281474976715700 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-05-29T15:23:28.798528Z node 3 :TX_PROXY DEBUG: datareq.cpp:1115: Actor# [3:7509888653370422510:2598] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037888 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-05-29T15:23:28.798574Z node 3 :TX_PROXY DEBUG: datareq.cpp:1115: Actor# [3:7509888653370422510:2598] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037889 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-05-29T15:23:28.798690Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:23:28.798946Z node 3 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037888 2025-05-29T15:23:28.799037Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-29T15:23:28.799227Z node 3 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037889 2025-05-29T15:23:28.799794Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:23:28.799850Z node 3 :TX_PROXY DEBUG: datareq.cpp:1873: Actor# [3:7509888653370422510:2598] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037888 read size 17000887 out readset size 0 marker# P6 2025-05-29T15:23:28.800254Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-29T15:23:28.800295Z node 3 :TX_PROXY DEBUG: datareq.cpp:1873: Actor# [3:7509888653370422510:2598] txid# 
281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037889 read size 9000479 out readset size 0 marker# P6 2025-05-29T15:23:28.800303Z node 3 :TX_PROXY ERROR: datareq.cpp:2829: Actor# [3:7509888653370422510:2598] txid# 281474976715700 FailProposedRequest: Transaction total read size 26001366 exceeded limit 10000 Status# ExecError 2025-05-29T15:23:28.800316Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7509888653370422510:2598] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c 2025-05-29T15:23:28.800498Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037888 txId 281474976715700 2025-05-29T15:23:28.800507Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037888 txId 281474976715700 2025-05-29T15:23:28.800610Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037889 txId 281474976715700 2025-05-29T15:23:28.800613Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037889 txId 281474976715700 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock3 [GOOD] Test command err: 2025-05-29T15:23:24.183001Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888636416950068:2264];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:24.183060Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002776/r3tmp/tmpFCEtpe/pdisk_1.dat 2025-05-29T15:23:24.248885Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888636416949843:2079] 1748532204182096 != 1748532204182099 2025-05-29T15:23:24.250848Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:14266 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:24.321769Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:24.321821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:24.322796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:24.323857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:24.326881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:24.336562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:24.399814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:24.408420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:24.682273Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888635025121209:2136];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:24.685077Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002776/r3tmp/tmptmN62U/pdisk_1.dat 2025-05-29T15:23:24.693648Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888635025121111:2079] 1748532204681391 != 1748532204681394 2025-05-29T15:23:24.695121Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:20399 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:24.786118Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:24.786147Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:24.786395Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:24.787123Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:24.790830Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:24.805657Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:24.819505Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:25.163648Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888640022038375:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:25.163681Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002776/r3tmp/tmpvGdCV2/pdisk_1.dat 2025-05-29T15:23:25.179183Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:25.179448Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888640022038355:2079] 1748532205163534 != 1748532205163537 TClient is connected to server localhost:19979 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:25.268422Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:25.268460Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:25.268839Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:25.269433Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:25.275050Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:25.282862Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:25.299581Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:25.309573Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:25.693458Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888637390513356:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:25.693474Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002776/r3tmp/tmpIqBJ24/pdisk_1.dat 2025-05-29T15:23:25.713569Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:25.714345Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888637390513330:2079] 1748532205693273 != 1748532205693276 TClient is connected to server localhost:10596 WaitRootIsUp 'dc-1'... TClient::L ... 0 waiting... 2025-05-29T15:23:27.326908Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:27.393488Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:27.712383Z node 8 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7509888646992813266:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:27.712577Z node 8 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002776/r3tmp/tmpZJuiHV/pdisk_1.dat 2025-05-29T15:23:27.725274Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [8:7509888646992813241:2079] 1748532207712222 != 1748532207712225 2025-05-29T15:23:27.726641Z node 8 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:65099 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:27.815734Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:27.815767Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:27.816219Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:27.816693Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:27.817927Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:27.834221Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:27.855858Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:23:27.867724Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002776/r3tmp/tmpTYzKcE/pdisk_1.dat 2025-05-29T15:23:28.284596Z node 9 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7509888649893815160:2219];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:28.309522Z node 9 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:23:28.313322Z node 9 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:28.313701Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [9:7509888649893814952:2079] 1748532208281213 != 1748532208281216 TClient is connected to server localhost:2561 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-29T15:23:28.381375Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:28.381397Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:28.381827Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:28.383040Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:28.386901Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:28.396818Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:23:28.398018Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:28.413884Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:28.427134Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:28.829922Z node 10 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7509888652741680479:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:28.829964Z node 10 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002776/r3tmp/tmpwxHhTy/pdisk_1.dat 2025-05-29T15:23:28.848278Z node 10 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:28.848735Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [10:7509888652741680457:2079] 1748532208829811 != 1748532208829814 TClient is connected to server localhost:23428 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:28.935192Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:28.935227Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:28.935486Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:28.937157Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:23:28.939167Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:28.942140Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:28.961759Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:28.976115Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 >> TFlatTest::SplitEmptyTwice [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 |63.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleTable [GOOD] >> TxUsage::WriteToTopic_Demo_30_Query [FAIL] >> TxUsage::WriteToTopic_Demo_31_Table >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [FAIL] >> TxUsage::WriteToTopic_Demo_10_Table [FAIL] >> TxUsage::WriteToTopic_Demo_10_Query >> TxUsage::WriteToTopic_Demo_22_RestartNo_Query [FAIL] >> TxUsage::WriteToTopic_Demo_23_RestartNo_Table >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry [GOOD] >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleTable [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTableWithChangefeed >> TLocksTest::GoodNullLock [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartNo_Query [FAIL] >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Table >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleView >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 >> TxUsage::WriteToTopic_Demo_23_RestartNo_Table [FAIL] >> TxUsage::WriteToTopic_Demo_23_RestartNo_Query >> TLocksFatTest::PointSetRemove [GOOD] >> TxUsage::WriteToTopic_Demo_31_Table [FAIL] >> TxUsage::WriteToTopic_Demo_31_Query ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SplitEmptyTwice [GOOD] Test command err: 2025-05-29T15:23:03.985966Z node 1 
:METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888544971398580:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:03.985987Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002808/r3tmp/tmp3iG0vV/pdisk_1.dat 2025-05-29T15:23:04.040419Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:04.040819Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888544971398560:2079] 1748532183985845 != 1748532183985848 TClient is connected to server localhost:8808 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:04.088707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:04.088728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:04.089867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:04.120981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:04.127708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:04.156817Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-29T15:23:04.157293Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-29T15:23:04.164425Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-29T15:23:04.165071Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532184189 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... (TRUNCATED) TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532184189 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... (TRUNCATED) waiting... 
2025-05-29T15:23:04.514236Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.32, eph 1} end=Done, 2 blobs 200r (max 200), put Spent{time=0.002s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (58053 0 0)b }, ecr=1.000 2025-05-29T15:23:04.522298Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.42, eph 1} end=Done, 2 blobs 849r (max 849), put Spent{time=0.002s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (55717 0 0)b }, ecr=1.000 2025-05-29T15:23:04.543030Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.62, eph 2} end=Done, 2 blobs 455r (max 456), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (131688 0 0)b }, ecr=1.000 2025-05-29T15:23:04.543187Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.516, eph 1} end=Done, 2 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-05-29T15:23:04.544898Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.69, eph 1} end=Done, 2 blobs 2r (max 2), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-05-29T15:23:04.544951Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.71, eph 1} end=Done, 2 blobs 2r (max 2), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-05-29T15:23:04.544996Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.73, eph 1} end=Done, 2 blobs 502r (max 502), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (32029 0 0)b }, ecr=1.000 2025-05-29T15:23:04.546887Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.74, eph 1} end=Done, 2 blobs 1503r (max 1503), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (103274 0 0)b }, ecr=1.000 2025-05-29T15:23:04.549436Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.83, eph 2} end=Done, 2 blobs 1602r (max 1605), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (105040 0 0)b }, ecr=1.000 2025-05-29T15:23:04.553485Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.544, eph 1} end=Done, 2 blobs 10001r (max 10001), put Spent{time=0.008s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2025-05-29T15:23:04.563890Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.102, eph 3} end=Done, 2 blobs 718r (max 719), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (207664 0 0)b }, ecr=1.000 2025-05-29T15:23:04.566418Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.108, eph 3} end=Done, 2 blobs 2358r (max 2361), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (154558 0 0)b }, ecr=1.000 2025-05-29T15:23:04.580223Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.131, eph 2} end=Done, 2 blobs 2r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-05-29T15:23:04.581281Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.127, eph 4} end=Done, 2 blobs 969r (max 970), put Spent{time=0.003s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (280196 0 0)b }, ecr=1.000 2025-05-29T15:23:04.581329Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1047, eph 2} end=Done, 2 blobs 3r (max 5), put Spent{time=0.001s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-05-29T15:23:04.581367Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.135, eph 2} end=Done, 2 blobs 1015r (max 1015), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 
+0, (64548 0 0)b }, ecr=1.000 2025-05-29T15:23:04.581499Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.132, eph 2} end=Done, 2 blobs 2r (max 3), put Spent{time=0.002s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-05-29T15:23:04.584213Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.133, eph 2} end=Done, 2 blobs 3003r (max 3003), put Spent{time=0.004s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (206156 0 0)b }, ecr=1.000 2025-05-29T15:23:04.584386Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.140, eph 4} end=Done, 2 blobs 3111r (max 3114), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (203881 0 0)b }, ecr=1.000 2025-05-29T15:23:04.590089Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1064, eph 2} end=Done, 2 blobs 10001r (max 10515), put Spent{time=0.008s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2025-05-29T15:23:04.595033Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.161, eph 5} end=Done, 2 blobs 1229r (max 1230), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (355311 0 0)b }, ecr=1.000 2025-05-29T15:23:04.599501Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.167, eph 5} end=Done, 2 blobs 3879r (max 3882), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (254179 0 0)b }, ecr=1.000 2025-05-29T15:23:04.609052Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.189, eph 3} end=Done, 2 blobs 2r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-05-29T15:23:04.609116Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.191, eph 3} end=Done, 2 blobs 2r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-05-29T15:23:04.609222Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.183, eph 6} end=Done, 2 blobs 1480r (max 1481), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (427798 0 0)b }, ecr=1.000 2025-05-29T15:23:04.610852Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.193, eph 3} end=Done, 2 blobs 1516r (max 1516), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (96361 0 0)b }, ecr=1.000 2025-05-29T15:23:04.610912Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480 ... 
} TabletId: 72075186224037891 State: 4 2025-05-29T15:23:29.874225Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:29.874238Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888654578910563 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-05-29T15:23:29.874241Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:29.874252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888654578910563 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-05-29T15:23:29.874255Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:29.874266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888654578910256 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-05-29T15:23:29.874269Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:29.874283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888654578910744 RawX2: 4503608217307466 } TabletId: 72075186224037895 State: 4 2025-05-29T15:23:29.874287Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037895, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:29.874299Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509888654578910738 RawX2: 4503608217307464 } TabletId: 72075186224037893 State: 4 2025-05-29T15:23:29.874302Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:23:29.874402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:29.874420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:29.874427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:29.874433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:3 hive 72057594037968897 at ss 
72057594046644480 2025-05-29T15:23:29.874439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:29.874445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:8 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:29.874452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:23:29.874999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-29T15:23:29.875064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-05-29T15:23:29.875106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-05-29T15:23:29.875122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-05-29T15:23:29.875136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-29T15:23:29.875150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-05-29T15:23:29.875173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-29T15:23:29.875190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-29T15:23:29.875205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-29T15:23:29.875218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-29T15:23:29.875234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-29T15:23:29.875250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2025-05-29T15:23:29.875265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-29T15:23:29.875278Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-05-29T15:23:29.875304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-29T15:23:29.875332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:23:29.875335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-29T15:23:29.875345Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-29T15:23:29.876077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:1 2025-05-29T15:23:29.876082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-29T15:23:29.876091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:7 2025-05-29T15:23:29.876092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-05-29T15:23:29.876096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:4 2025-05-29T15:23:29.876098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-29T15:23:29.876101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:3 2025-05-29T15:23:29.876103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-29T15:23:29.876106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:3 2025-05-29T15:23:29.876110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:2 2025-05-29T15:23:29.876112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-29T15:23:29.876115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:8 2025-05-29T15:23:29.876117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2025-05-29T15:23:29.876120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:6 2025-05-29T15:23:29.876123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-05-29T15:23:29.876130Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-29T15:23:29.876780Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-29T15:23:29.876786Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-29T15:23:29.876789Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-29T15:23:29.876791Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037895 not found 2025-05-29T15:23:29.876793Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2025-05-29T15:23:29.876817Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-05-29T15:23:29.876830Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleView >> TDataShardTrace::TestTraceDistributedUpsert-UseSink |63.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |63.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |63.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::GoodNullLock [GOOD] Test command err: 2025-05-29T15:23:25.418119Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888636695966400:2079];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:25.418335Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00275f/r3tmp/tmpNula6N/pdisk_1.dat 2025-05-29T15:23:25.470842Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:30567 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:25.547178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:25.547243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:25.548112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:25.548314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:25.550373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:25.553785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:23:25.555151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:25.623869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:25.633874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-05-29T15:23:25.639725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:25.889295Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888639643058222:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:25.889336Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00275f/r3tmp/tmpN2buie/pdisk_1.dat 2025-05-29T15:23:25.911394Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:25.911667Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888639643058195:2079] 1748532205889183 != 1748532205889186 TClient is connected to server localhost:32308 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:25.994174Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:25.994206Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:25.994982Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:25.995249Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:23:26.002637Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:26.017052Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:26.031785Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:26.436169Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888644760986664:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:26.436186Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00275f/r3tmp/tmpr9btSu/pdisk_1.dat 2025-05-29T15:23:26.451566Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:13041 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:26.541246Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:26.541282Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:26.541648Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:26.542126Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:26.546911Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:26.557270Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:26.573051Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-29T15:23:26.587873Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:23:26.933571Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888643861125127:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:26.936241Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00275f/r3tmp/tmpN0L5Ru/pdisk_1.dat 2025-05-29T15:23:26.952521Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:26.952765Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888643861124953:2079] 1748532206932791 != 1748532206932794 TClient is connected to server localhost:13887 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls ... LAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:28.742492Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:28.756035Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00275f/r3tmp/tmpZTh2O2/pdisk_1.dat 2025-05-29T15:23:29.116529Z node 8 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:23:29.118134Z node 8 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:29.118433Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [8:7509888655299472539:2079] 1748532209100717 != 1748532209100720 TClient is connected to server localhost:6194 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:29.206179Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:29.206207Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:29.206624Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:29.207998Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:29.222925Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:29.225441Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:23:29.226474Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:29.265381Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-29T15:23:29.287288Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:23:29.696291Z node 9 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7509888656628599673:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:29.696318Z node 9 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00275f/r3tmp/tmpqXbe1T/pdisk_1.dat 2025-05-29T15:23:29.709734Z node 9 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:29.711136Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [9:7509888656628599653:2079] 1748532209696187 != 1748532209696190 TClient is connected to server localhost:25005 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:29.802617Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:29.802647Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:29.802993Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:29.804472Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:29.815106Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:29.818215Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:23:29.819806Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
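The HIVE warnings repeated in each node block record the same volatile-state progression for a newly registered node. A sketch of that progression as a plain state machine; the enumerator names are taken verbatim from the log, and the transition order is inferred only from the sequence Unknown -> Disconnected -> Connecting -> Connected printed above, not from the HIVE sources:

    // Node volatile states as printed by HIVE in the warnings above.
    enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

    // Advance one step along the bootstrap path seen in the log:
    // Unknown -> Disconnected -> Connecting -> Connected.
    EVolatileState NextOnBootstrap(EVolatileState s) {
        switch (s) {
            case EVolatileState::Unknown:      return EVolatileState::Disconnected;
            case EVolatileState::Disconnected: return EVolatileState::Connecting;
            case EVolatileState::Connecting:   return EVolatileState::Connected;
            case EVolatileState::Connected:    return EVolatileState::Connected;
        }
        return s;  // unreachable; keeps compilers satisfied
    }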
2025-05-29T15:23:29.847547Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:29.859580Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:30.210323Z node 10 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7509888662017833073:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:30.210344Z node 10 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00275f/r3tmp/tmpIzAtH6/pdisk_1.dat 2025-05-29T15:23:30.228072Z node 10 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:30.228391Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [10:7509888662017833053:2079] 1748532210210188 != 1748532210210191 TClient is connected to server localhost:9034 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:30.316541Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:30.316581Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:30.316960Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:30.317499Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:30.320785Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:23:30.340128Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:30.350073Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... |63.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base >> TxUsage::WriteToTopic_Demo_10_Query [FAIL] >> TxUsage::WriteToTopic_Demo_11_Table >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 |63.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |63.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleShardTable >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Table [FAIL] >> TxUsage::WriteToTopic_Demo_20_RestartNo_Table |63.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |63.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |63.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::Simple Test command err: 2025-05-29T15:22:57.358453Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:22:57.358497Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:22:57.358514Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0016a9/r3tmp/tmpEYcXTr/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29210, node 1 TClient is connected to server localhost:24139 2025-05-29T15:22:57.485795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:22:57.500117Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:57.500785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:57.500796Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:57.500799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:57.500898Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:57.500950Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532176985616 != 1748532176985620 2025-05-29T15:22:57.542891Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:57.542933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:57.553714Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-05-29T15:23:09.298284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:807:2674], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:09.298354Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:09.299218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-05-29T15:23:09.344311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:925:2752], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:09.344342Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:09.344371Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:930:2757], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:09.345010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-05-29T15:23:09.459786Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:932:2759], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:23:09.558824Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:1026:2824] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:09.649023Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:1058:2848], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:09.649795Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTAwZDNiNDEtNDJhZWJjYWYtOTQ2MWZhYTYtZjBjMmEzZTk=, ActorId: [1:920:2747], ActorState: ExecuteState, TraceId: 01jwea7pe235frqyhsb4wjk4f8, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea7pdm9q4fca971j3v3kv0 2025-05-29T15:23:09.651075Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZTAwZDNiNDEtNDJhZWJjYWYtOTQ2MWZhYTYtZjBjMmEzZTk=" tx_control { tx_id: "01jwea7pdm9q4fca971j3v3kv0" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 10 } } } } } ; 2025-05-29T15:23:09.651184Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:10.792612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:23:10.792637Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: GRpc error: (1): Cancelled on the server side
: Error: Grpc error response on endpoint localhost:29210 ;EXPECTATION=1 VERIFY failed (2025-05-29T15:23:29.449133Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/testlib/common_helper.cpp:157, auto NKikimr::Tests::NCommon::THelper::StartSchemaRequestTableServiceImpl(const TString &, const bool, const bool)::(anonymous class)::operator()(NThreading::TFuture)::(anonymous class)::operator()(NYdb::TAsyncStatus) const: (expectation == f.GetValueSync().IsSuccess()) library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x137DC456) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x137D3457) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+375 (0x13971107) ??+0 (0x1F0319EA) ??+0 (0x1F031FC8) bool NThreading::NImpl::TFutureState::TrySetValue(NYdb::Dev::TStatus&&)+438 (0x15EBEE76) NThreading::TFuture NYdb::Dev::NSessionPool::InjectSessionStatusInterception(std::__y1::shared_ptr, NThreading::TFuture, bool, TDuration, std::__y1::function)::'lambda'(NThreading::TFuture)::operator()(NThreading::TFuture)+286 (0x1E3FC1DE) decltype(std::declval()(std::declval const&>())) std::__y1::__invoke[abi:fe200000] NYdb::Dev::NSessionPool::InjectSessionStatusInterception(std::__y1::shared_ptr, NThreading::TFuture, bool, TDuration, std::__y1::function)::'lambda'(NThreading::TFuture)&, NThreading::TFuture const&>(NYdb::Dev::TStatus&&, NThreading::TFuture const&)+36 (0x1E3FCD64) bool NThreading::NImpl::TFutureState::TrySetValue(NYdb::Dev::TStatus&&)+438 (0x15EBEE76) decltype(std::declval()(std::declval(), std::declval())) std::__y1::__invoke[abi:fe200000] NYdb::Dev::TClientImplCommon::RunSimple(Ydb::Table::ExecuteSchemeQueryRequest&&, NYdbGrpc::Dev::TSimpleRequestProcessor::TAsyncRequest, NYdb::Dev::TRpcRequestSettings const&)::'lambda'(google::protobuf::Any*, NYdb::Dev::TPlainStatus)&, google::protobuf::Any*, NYdb::Dev::TPlainStatus>(Ydb::Table::V1::TableService&&, google::protobuf::Any*&&, NYdb::Dev::TPlainStatus&&)+68 (0x1E4D48A4) void NYdb::Dev::TGRpcConnectionsImpl::RunDeferred(Ydb::Table::ExecuteSchemeQueryRequest&&, std::__y1::function&&, NYdbGrpc::Dev::TSimpleRequestProcessor::TAsyncRequest, std::__y1::shared_ptr, TDuration, NYdb::Dev::TRpcRequestSettings const&, std::__y1::shared_ptr)::'lambda'(Ydb::Operations::Operation*, NYdb::Dev::TPlainStatus)::operator()(Ydb::Operations::Operation*, NYdb::Dev::TPlainStatus)+119 (0x1E4D42F7) decltype(std::declval()(std::declval(), std::declval())) std::__y1::__invoke[abi:fe200000](Ydb::Table::ExecuteSchemeQueryRequest&&, std::__y1::function&&, NYdbGrpc::Dev::TSimpleRequestProcessor::TAsyncRequest, std::__y1::shared_ptr, TDuration, NYdb::Dev::TRpcRequestSettings const&, std::__y1::shared_ptr)::'lambda'(Ydb::Operations::Operation*, NYdb::Dev::TPlainStatus)&, Ydb::Operations::Operation*, NYdb::Dev::TPlainStatus>(Ydb::Table::V1::TableService&&, Ydb::Operations::Operation*&&, NYdb::Dev::TPlainStatus&&)+51 (0x1E4D41E3) void NYdb::Dev::TGRpcConnectionsImpl::RunDeferred(Ydb::Table::ExecuteSchemeQueryRequest&&, std::__y1::function&&, NYdbGrpc::Dev::TSimpleRequestProcessor::TAsyncRequest, std::__y1::shared_ptr, TDuration, NYdb::Dev::TRpcRequestSettings const&, bool, std::__y1::shared_ptr)::'lambda'(Ydb::Table::ExecuteSchemeQueryResponse*, 
NYdb::Dev::TPlainStatus)::operator()(Ydb::Table::ExecuteSchemeQueryResponse*, NYdb::Dev::TPlainStatus)+116 (0x1E4D3C24) decltype(std::declval()(std::declval(), std::declval())) std::__y1::__invoke[abi:fe200000](Ydb::Table::ExecuteSchemeQueryRequest&&, std::__y1::function&&, NYdbGrpc::Dev::TSimpleRequestProcessor::TAsyncRequest, std::__y1::shared_ptr, TDuration, NYdb::Dev::TRpcRequestSettings const&, bool, std::__y1::shared_ptr)::'lambda'(Ydb::Table::ExecuteSchemeQueryResponse*, NYdb::Dev::TPlainStatus)&, Ydb::Table::ExecuteSchemeQueryResponse*, NYdb::Dev::TPlainStatus>(Ydb::Table::V1::TableService&&, Ydb::Table::ExecuteSchemeQueryResponse*&&+51 (0x1E4D3B13) NYdb::Dev::TGRpcErrorResponse::Process(void*)+577 (0x1E4D1711) TAdaptiveThreadPool::TImpl::TThread::DoExecute()+298 (0x13C9079A) ??+0 (0x13C8F14A) ??+0 (0x137DDC3D) ??+0 (0x7FCF39BEEAC3) ??+0 (0x7FCF39C80850) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksFatTest::PointSetRemove [GOOD] Test command err: 2025-05-29T15:23:26.926497Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888644964567150:2134];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:26.927481Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002759/r3tmp/tmpzGE5Td/pdisk_1.dat 2025-05-29T15:23:26.990063Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:26.990214Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888644964567055:2079] 1748532206925819 != 1748532206925822 TClient is connected to server localhost:23774 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
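The Secret::Simple failure above bottoms out in a VERIFY at ydb/core/testlib/common_helper.cpp:157: the helper records up front whether a schema request is expected to succeed and asserts expectation == f.GetValueSync().IsSuccess() on the returned status future. Judging from the struct_type members in the logged request parameters, the garbled "DECLARE $objects AS List>" in the yql_text was most likely "DECLARE $objects AS List<Struct<componentId:Utf8, modificationId:Utf8, instant:Uint32>>" before the angle-bracketed template arguments were stripped in transit, though the original text is not recoverable from this log alone. An illustrative analogue of the failing check, modelling the status future with a plain std::future<bool> instead of the real NThreading::TFuture and NYdb::TStatus types:

    #include <cassert>
    #include <future>

    // Analogue of the check behind the VERIFY above: the test declares the
    // expected outcome of a schema request and asserts that the
    // asynchronous result agrees. get() plays the role of GetValueSync().
    void CheckSchemaRequest(std::future<bool> asyncSuccess, bool expectation) {
        const bool actual = asyncSuccess.get();
        assert(expectation == actual &&
               "schema request outcome differs from stated expectation");
    }

Here EXPECTATION=1 in the log means the CREATE OBJECT request was expected to succeed, so the GRpc "Cancelled on the server side" error is what trips the assertion.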
2025-05-29T15:23:27.029603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:27.029632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:27.030694Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:27.064166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:27.073201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:27.136341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:27.146430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002759/r3tmp/tmpzLYvqi/pdisk_1.dat 2025-05-29T15:23:29.280268Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888657495372201:2217];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:29.286699Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:23:29.305176Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:29.309630Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888657495371999:2079] 1748532209276083 != 1748532209276086 TClient is connected to server localhost:27851 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
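The repeated METADATA_PROVIDER errors in these blocks ("cannot detect path existence ... scheme_cache_undelivered_message") are followed by scheduled retries rather than aborts, which suggests the existence probe simply backs off while the scheme cache is not yet serving requests during node start. A speculative sketch of that probe-and-retry shape; probePath is a stand-in for whatever TTableExistsActor actually calls, and the backoff policy is invented for illustration:

    #include <chrono>
    #include <functional>
    #include <optional>
    #include <string>
    #include <thread>

    // Probe a path until the scheme cache answers or attempts run out.
    // An empty optional from probePath models the undelivered-message case.
    std::optional<bool> ProbeUntilCacheReady(
            const std::function<std::optional<bool>(const std::string&)>& probePath,
            const std::string& path, int maxAttempts) {
        for (int attempt = 0; attempt < maxAttempts; ++attempt) {
            if (auto exists = probePath(path)) {
                return exists;  // cache answered: path exists or it does not
            }
            // scheme_cache_undelivered_message: back off and try again
            std::this_thread::sleep_for(std::chrono::milliseconds(200 * (attempt + 1)));
        }
        return std::nullopt;  // still undelivered after all attempts
    }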
2025-05-29T15:23:29.384127Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:29.384158Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:29.384586Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:29.386801Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:29.386945Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:23:29.397632Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:29.413152Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:29.475803Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:30.380763Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888659529214569:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:30.380795Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002759/r3tmp/tmpOejQdG/pdisk_1.dat 2025-05-29T15:23:30.394237Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:30.394462Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888659529214546:2079] 1748532210380636 != 1748532210380639 TClient is connected to server localhost:23071 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:30.485577Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:30.485615Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:30.486239Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:30.486353Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:30.489141Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:30.507914Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:30.572372Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:30.586629Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 >> TPQTest::TestLowWatermark [GOOD] >> TPQTest::TestGetTimestamps >> TxUsage::WriteToTopic_Demo_31_Query [FAIL] >> TxUsage::WriteToTopic_Demo_32_Table >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 >> TxUsage::WriteToTopic_Demo_23_RestartNo_Query [FAIL] >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Table >> TExportToS3WithRebootsTests::ShouldSucceedAutoDropping >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [FAIL] >> TxUsage::WriteToTopic_Demo_11_Table [FAIL] >> TxUsage::WriteToTopic_Demo_11_Query >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 >> TDataShardTrace::TestTraceDistributedUpsert+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 >> 
TDataShardTrace::TestTraceDistributedSelect >> TPQTest::TestGetTimestamps [GOOD] >> TPQTest::TestMaxTimeLagRewind >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck >> TxUsage::WriteToTopic_Demo_20_RestartNo_Table [FAIL] >> TxUsage::WriteToTopic_Demo_20_RestartNo_Query |63.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |63.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |63.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Table [FAIL] >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Query >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 >> TDataShardTrace::TestTraceWriteImmediateOnShard |63.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |63.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |63.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 >> KqpStreamLookup::ReadTableDuringSplit >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 >> TxUsage::WriteToTopic_Demo_32_Table [FAIL] >> TxUsage::WriteToTopic_Demo_32_Query >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 >> TxUsage::WriteToTopic_Demo_11_Query [FAIL] >> TxUsage::Sinks_Oltp_WriteToTopic_4_Table >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 |63.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |63.9%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut >> TxUsage::WriteToTopic_Demo_20_RestartNo_Query [FAIL] >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Table >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Query [FAIL] >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Table >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 >> TxUsage::Sinks_Oltp_WriteToTopic_4_Table [FAIL] >> TxUsage::WriteToTopic_Demo_32_Query [FAIL] >> TxUsage::WriteToTopic_Demo_33_Table >> TxUsage::Sinks_Oltp_WriteToTopic_4_Query |63.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |63.9%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Table [FAIL] >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Query >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Table [FAIL] >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Query >> KqpOlap::CheckEarlyFilterOnEmptySelect [GOOD] >> KqpOlap::BulkUpsertUpdate >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [FAIL] Test command err: 2025-05-29T15:23:27.839091Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:23:27.839125Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:23:27.839139Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0015f3/r3tmp/tmpnGJ2gL/pdisk_1.dat 2025-05-29T15:23:27.968670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:23:27.983457Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:27.987912Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532207406049 != 1748532207406053 2025-05-29T15:23:28.035180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:28.035232Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:28.046537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:28.130293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:28.167184Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:23:28.167291Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:23:28.179585Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:23:28.179653Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:23:28.179862Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:23:28.179873Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:23:28.179881Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:23:28.179952Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:23:28.179984Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:23:28.180002Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:23:28.190399Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:23:28.196002Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:23:28.196128Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast 
registration request in state WaitScheme: missing processing params 2025-05-29T15:23:28.196167Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:23:28.196175Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:23:28.196181Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:23:28.196187Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:23:28.196407Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:23:28.196437Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:23:28.196455Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:23:28.196464Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:23:28.196475Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:23:28.196481Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:23:28.196626Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:23:28.196657Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:23:28.196725Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:23:28.196746Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:23:28.197169Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:23:28.207585Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:23:28.207648Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:23:28.358280Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:697:2587], serverId# [1:699:2589], sessionId# [0:0:0] 2025-05-29T15:23:28.359431Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:23:28.359459Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:23:28.359518Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 
72075186224037888 2025-05-29T15:23:28.359530Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:23:28.359542Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:23:28.359626Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:23:28.359664Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:23:28.359808Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:23:28.359938Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:23:28.360071Z node 1 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:23:28.360201Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:23:28.360599Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:23:28.360608Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:23:28.360846Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:23:28.360861Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:23:28.361104Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:23:28.361114Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:23:28.361121Z node 1 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:23:28.361141Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:411:2405], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:23:28.361154Z node 1 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:23:28.361192Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:23:28.362509Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:23:28.362900Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:23:28.362918Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 
72075186224037888 2025-05-29T15:23:28.363112Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:23:28.370782Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:28.370820Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:28.370907Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:28.371983Z nod ... d_pipeline.cpp:1827: Trying to execute [1000:281474976715657] at 72075186224037888 on unit CompleteOperation 2025-05-29T15:23:29.811001Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:23:29.811007Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompleteOperation 2025-05-29T15:23:29.811024Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000:281474976715657] at 72075186224037888 to execution unit CompletedOperations 2025-05-29T15:23:29.811031Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000:281474976715657] at 72075186224037888 on unit CompletedOperations 2025-05-29T15:23:29.811038Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-05-29T15:23:29.811042Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompletedOperations 2025-05-29T15:23:29.811048Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [1000:281474976715657] at 72075186224037888 has finished 2025-05-29T15:23:29.811053Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:23:29.811057Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-05-29T15:23:29.811062Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:23:29.811066Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:23:29.811431Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:663:2568]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 0} 2025-05-29T15:23:29.811442Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3167: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-05-29T15:23:29.811449Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:23:29.811455Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:23:29.811650Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:23:29.811665Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:23:29.811800Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:23:29.811809Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CreateTable 2025-05-29T15:23:29.811815Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:23:29.811822Z node 2 :TX_DATASHARD INFO: datashard.cpp:1293: 
Change sender activated: at tablet: 72075186224037888 2025-05-29T15:23:29.811827Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CompleteOperation 2025-05-29T15:23:29.811845Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:23:29.811858Z node 2 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:23:29.811871Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:23:29.812173Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [2:683:2579], Recipient [2:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:23:29.812187Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:23:29.812463Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877760, Sender [2:706:2594], Recipient [2:663:2568]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [2:712:2600] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:23:29.812473Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3162: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:23:29.812694Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269552132, Sender [2:410:2404], Recipient [2:663:2568]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715657 2025-05-29T15:23:29.812703Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3133: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-05-29T15:23:29.812710Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:23:29.812718Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:23:29.812804Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:663:2568]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 1000 ReadStep# 1000 } 2025-05-29T15:23:29.812812Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3168: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-05-29T15:23:29.812817Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:23:29.816437Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:29.816462Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:742:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:29.816472Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:29.817500Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:23:29.822621Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [2:683:2579], Recipient [2:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:23:29.822649Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:23:29.846778Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [2:706:2594], Recipient [2:663:2568]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [2:706:2594] ServerId: [2:712:2600] } 2025-05-29T15:23:29.846809Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:23:29.972464Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [2:683:2579], Recipient [2:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:23:29.972500Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:23:29.973233Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-05-29T15:23:30.005150Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:815:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:23:30.016245Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:825:2669], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:23:30.016901Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=MjI3OTlhMy1hNTY1NGFkYS01M2ZjZjg1LTg1YTUxMjVm, ActorId: [2:729:2611], ActorState: ExecuteState, TraceId: 01jwea8a4r9fv09dadfhfgwc18, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13AD9BEC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C8D789)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x26319804)
NKikimr::NTestSuiteTTxDataShardUploadRows::TTestCaseTestUploadShadowRowsShadowData::Execute_(NUnitTest::TTestContext&)+1118 (0x139AE74E)
NKikimr::NTestSuiteTTxDataShardUploadRows::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139C78B7)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C8F63E)
NKikimr::NTestSuiteTTxDataShardUploadRows::TCurrentTest::Execute()+433 (0x139C7111)
NUnitTest::TTestFactory::Execute()+803 (0x13C8FDB3)
NUnitTest::RunMain(int, char**)+3021 (0x13CA195D)
??+0 (0x7F3FC42B6D90)
__libc_start_main+128 (0x7F3FC42B6E40)
_start+41 (0x12A0C029)
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67
|63.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut
|63.9%| [LD] {RESULT} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut
|63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64
>> TxUsage::WriteToTopic_Demo_33_Table [FAIL]
>> TxUsage::WriteToTopic_Demo_33_Query
>> TxUsage::Sinks_Oltp_WriteToTopic_4_Query [FAIL]
>> TxUsage::Sinks_Oltp_WriteToTopic_5_Table
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is
[0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:23:13.768368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:13.768389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:13.768393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:13.768397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:13.768407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:13.768409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:13.768416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:13.768426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:13.768521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:13.768580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:13.780823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:23:13.780850Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:13.784652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:13.784816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:13.784872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:13.787143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:13.787338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:13.787488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.787581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:13.788152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.788199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:13.788515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.788525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:13.788550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:13.788559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.788565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:13.788605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.790155Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:23:13.811237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:13.811328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.811420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:13.811478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:13.811490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.812358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.812389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:13.812444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.812457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 
ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:13.812464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:13.812471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:13.812898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.812910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:13.812915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:13.813208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.813215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:13.813220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.813226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:13.813722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:13.814016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:13.814048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:13.814191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:13.814208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:13.814214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.814255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:23:13.814259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:13.814285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:23:13.814294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:23:13.814646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:13.814652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:13.814688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T1 ... T15:23:34.121962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:34.122026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:34.122036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:23:34.122066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:34.122072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-05-29T15:23:34.122078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-05-29T15:23:34.122083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-05-29T15:23:34.122267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:23:34.122277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 103:0 ProgressState 2025-05-29T15:23:34.122293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:23:34.122298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:34.122303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part 
operation is done id#103:0 progress is 1/1 2025-05-29T15:23:34.122307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:34.122313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-05-29T15:23:34.122319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:23:34.122325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:23:34.122329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:23:34.122360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:23:34.122367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-05-29T15:23:34.122371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 9 2025-05-29T15:23:34.122375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-05-29T15:23:34.122492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:23:34.122507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:23:34.122512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:23:34.122517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:23:34.122522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:23:34.122586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:34.122592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:23:34.122603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:23:34.122660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:23:34.122669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:23:34.122674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:23:34.122678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 9 2025-05-29T15:23:34.122682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:23:34.122690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-05-29T15:23:34.122768Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2025-05-29T15:23:34.122940Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2025-05-29T15:23:34.122966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:23:34.123043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:23:34.123976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:23:34.124003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:34.124015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:23:34.124410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:23:34.124435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-05-29T15:23:34.124552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:23:34.124560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:23:34.124637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown 
transaction, txId: 103, at schemeshard: 72057594046678944
2025-05-29T15:23:34.124658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult
2025-05-29T15:23:34.124663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:775:2684]
TestWaitNotification: OK eventTxId 103
2025-05-29T15:23:34.611225Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:23:34.611317Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 109us result status StatusSuccess
2025-05-29T15:23:34.611430Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple
Test command err:
2025-05-29T15:23:01.158081Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:307:2350], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:23:01.158169Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:23:01.158204Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/cs_index/external;error=incorrect path status: LookupError; 2025-05-29T15:23:01.158226Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0017ad/r3tmp/tmpE0Dvor/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24244, node 1 TClient is connected to server localhost:64309 2025-05-29T15:23:01.298711Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:392: actor# [1:60:2107] Handle TEvGetProxyServicesRequest 2025-05-29T15:23:01.298862Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:392: actor# [1:60:2107] Handle TEvGetProxyServicesRequest 2025-05-29T15:23:01.299785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:23:01.317753Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:01.318927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:01.318950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:01.318955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:01.319032Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:23:01.319119Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532180816920 != 1748532180816923 2025-05-29T15:23:01.360689Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:60:2107] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:23:01.361036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:01.361058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:01.361089Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:23:01.371711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:01.454835Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:60:2107] Handle TEvProposeTransaction 2025-05-29T15:23:01.454867Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:60:2107] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:23:01.454917Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:60:2107] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:676:2569] 
2025-05-29T15:23:01.468671Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:676:2569] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "olapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Name: "uid" Type: "Utf8" NotNull: true StorageId: "__MEMORY" } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" StorageId: "__MEMORY" } Columns { Name: "json_payload" Type: "JsonDocument" } KeyColumnNames: "timestamp" KeyColumnNames: "uid" } } } } } ExecTimeoutPeriod: 18446744073709551615 2025-05-29T15:23:01.468716Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:676:2569] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:01.468938Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:676:2569] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:23:01.468953Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:676:2569] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:01.469018Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:676:2569] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:01.469074Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:676:2569] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:01.469091Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:676:2569] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:23:01.469939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:01.470149Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:676:2569] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:23:01.470315Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:676:2569] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:23:01.470328Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:676:2569] txid# 281474976715657 SEND to# [1:675:2568] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:23:01.493807Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=72075186224037888;self_id=[1:750:2630];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:23:01.498060Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=72075186224037888;self_id=[1:750:2630];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:23:01.498164Z node 1 :TX_COLUMNSHARD DEBUG: 
ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037888 2025-05-29T15:23:01.498947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:750:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:23:01.499009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:750:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:23:01.499079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:750:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:23:01.499101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:750:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:23:01.499123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:750:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:23:01.499144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:750:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:23:01.499168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:750:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:23:01.499195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:750:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:23:01.499217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:750:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:23:01.499237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:750:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:23:01.499258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:750:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:23:01.499278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:750:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:23:01.505007Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=72075186224037889;self_id=[1:751:2631];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:23:01.508056Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: 
tablet_id=72075186224037889;self_id=[1:751:2631];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:23:01.508144Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037889 2025-05-29T15:23:01.508871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:751:2631];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:23:01.508908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:751:2631];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:23:01.508972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:751:2631];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:23:01.508997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:751:2631];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=no ... .157760Z node 1 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8cc16w485s96d58vmy60, SessionId: CompileActor 2025-05-29 15:23:32.157 INFO ydb-services-ext_index-ut(pid=3608018, tid=0x00007FB45DF7EDC0) [core exec] yql_execution.cpp:594: Node #268 created 0 trackable nodes: 2025-05-29T15:23:32.157765Z node 1 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jwea8cc16w485s96d58vmy60, SessionId: CompileActor 2025-05-29 15:23:32.157 TRACE ydb-services-ext_index-ut(pid=3608018, tid=0x00007FB45DF7EDC0) [core exec] yql_execution.cpp:387: {3}, callable #275 2025-05-29T15:23:32.157770Z node 1 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8cc16w485s96d58vmy60, SessionId: CompileActor 2025-05-29 15:23:32.157 INFO ydb-services-ext_index-ut(pid=3608018, tid=0x00007FB45DF7EDC0) [core exec] yql_execution.cpp:577: Node #275 finished execution 2025-05-29T15:23:32.157778Z node 1 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jwea8cc16w485s96d58vmy60, SessionId: CompileActor 2025-05-29 15:23:32.157 TRACE ydb-services-ext_index-ut(pid=3608018, tid=0x00007FB45DF7EDC0) [core exec] yql_execution.cpp:387: {2}, callable #278 2025-05-29T15:23:32.157811Z node 1 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8cc16w485s96d58vmy60, SessionId: CompileActor 2025-05-29 15:23:32.157 INFO ydb-services-ext_index-ut(pid=3608018, tid=0x00007FB45DF7EDC0) [core exec] yql_execution.cpp:577: Node #278 finished execution 2025-05-29T15:23:32.157818Z node 1 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8cc16w485s96d58vmy60, SessionId: CompileActor 2025-05-29 15:23:32.157 INFO ydb-services-ext_index-ut(pid=3608018, tid=0x00007FB45DF7EDC0) [core exec] yql_execution.cpp:594: Node #278 created 0 trackable nodes: 2025-05-29T15:23:32.157823Z node 1 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jwea8cc16w485s96d58vmy60, SessionId: CompileActor 2025-05-29 15:23:32.157 TRACE ydb-services-ext_index-ut(pid=3608018, tid=0x00007FB45DF7EDC0) [core exec] yql_execution.cpp:387: {1}, callable #279 2025-05-29T15:23:32.157833Z node 1 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8cc16w485s96d58vmy60, SessionId: CompileActor 2025-05-29 15:23:32.157 INFO ydb-services-ext_index-ut(pid=3608018, tid=0x00007FB45DF7EDC0) [core exec] yql_execution.cpp:577: Node #279 finished execution 2025-05-29T15:23:32.157839Z node 1 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8cc16w485s96d58vmy60, SessionId: 
CompileActor 2025-05-29 15:23:32.157 INFO ydb-services-ext_index-ut(pid=3608018, tid=0x00007FB45DF7EDC0) [core exec] yql_execution.cpp:594: Node #279 created 0 trackable nodes: 2025-05-29T15:23:32.157844Z node 1 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jwea8cc16w485s96d58vmy60, SessionId: CompileActor 2025-05-29 15:23:32.157 TRACE ydb-services-ext_index-ut(pid=3608018, tid=0x00007FB45DF7EDC0) [core exec] yql_execution.cpp:387: {0}, callable #280 2025-05-29T15:23:32.157851Z node 1 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8cc16w485s96d58vmy60, SessionId: CompileActor 2025-05-29 15:23:32.157 INFO ydb-services-ext_index-ut(pid=3608018, tid=0x00007FB45DF7EDC0) [core exec] yql_execution.cpp:577: Node #280 finished execution 2025-05-29T15:23:32.157856Z node 1 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8cc16w485s96d58vmy60, SessionId: CompileActor 2025-05-29 15:23:32.157 INFO ydb-services-ext_index-ut(pid=3608018, tid=0x00007FB45DF7EDC0) [core exec] yql_execution.cpp:594: Node #280 created 0 trackable nodes: 2025-05-29T15:23:32.157862Z node 1 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8cc16w485s96d58vmy60, SessionId: CompileActor 2025-05-29 15:23:32.157 INFO ydb-services-ext_index-ut(pid=3608018, tid=0x00007FB45DF7EDC0) [core exec] yql_execution.cpp:87: Finish, output #280, status: Ok 2025-05-29T15:23:32.157867Z node 1 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8cc16w485s96d58vmy60, SessionId: CompileActor 2025-05-29 15:23:32.157 INFO ydb-services-ext_index-ut(pid=3608018, tid=0x00007FB45DF7EDC0) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #280 2025-05-29T15:23:32.169740Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:353: actor# [1:60:2107] Handle TEvExecuteKqpTransaction 2025-05-29T15:23:32.169767Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:342: actor# [1:60:2107] TxId# 281474976715764 ProcessProposeKqpTransaction 2025-05-29T15:23:32.171679Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:353: actor# [1:60:2107] Handle TEvExecuteKqpTransaction 2025-05-29T15:23:32.171700Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:342: actor# [1:60:2107] TxId# 281474976715765 ProcessProposeKqpTransaction REQUEST=CREATE OBJECT `/Root/olapStore/olapTable:ext_index_simple` ( TYPE CS_EXT_INDEX) WITH (extractor = `{"class_name" : "city64", "object" :{"fields" : [{"id":"uid"}, {"id":"level"}, {"id":"json_payload", "path" : "strict $.a.b"}]}}`);RESULT=
: Error: GRpc error: (1): Cancelled on the server side
: Error: Grpc error response on endpoint localhost:24244 ;EXPECTATION=1 VERIFY failed (2025-05-29T15:23:33.299153Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/testlib/common_helper.cpp:157, auto NKikimr::Tests::NCommon::THelper::StartSchemaRequestTableServiceImpl(const TString &, const bool, const bool)::(anonymous class)::operator()(NThreading::TFuture)::(anonymous class)::operator()(NYdb::TAsyncStatus) const: (expectation == f.GetValueSync().IsSuccess()) library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x137D9BB6) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x137D0BB7) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+375 (0x1396E867) ??+0 (0x1F02F1CA) ??+0 (0x1F02F7A8) bool NThreading::NImpl::TFutureState::TrySetValue(NYdb::Dev::TStatus&&)+438 (0x15EBBE76) NThreading::TFuture NYdb::Dev::NSessionPool::InjectSessionStatusInterception(std::__y1::shared_ptr, NThreading::TFuture, bool, TDuration, std::__y1::function)::'lambda'(NThreading::TFuture)::operator()(NThreading::TFuture)+286 (0x1EC856BE) decltype(std::declval()(std::declval const&>())) std::__y1::__invoke[abi:fe200000] NYdb::Dev::NSessionPool::InjectSessionStatusInterception(std::__y1::shared_ptr, NThreading::TFuture, bool, TDuration, std::__y1::function)::'lambda'(NThreading::TFuture)&, NThreading::TFuture const&>(NYdb::Dev::TStatus&&, NThreading::TFuture const&)+36 (0x1EC86244) bool NThreading::NImpl::TFutureState::TrySetValue(NYdb::Dev::TStatus&&)+438 (0x15EBBE76) decltype(std::declval()(std::declval(), std::declval())) std::__y1::__invoke[abi:fe200000] NYdb::Dev::TClientImplCommon::RunSimple(Ydb::Table::ExecuteSchemeQueryRequest&&, NYdbGrpc::Dev::TSimpleRequestProcessor::TAsyncRequest, NYdb::Dev::TRpcRequestSettings const&)::'lambda'(google::protobuf::Any*, NYdb::Dev::TPlainStatus)&, google::protobuf::Any*, NYdb::Dev::TPlainStatus>(Ydb::Table::V1::TableService&&, google::protobuf::Any*&&, NYdb::Dev::TPlainStatus&&)+68 (0x1ED5DD34) void NYdb::Dev::TGRpcConnectionsImpl::RunDeferred(Ydb::Table::ExecuteSchemeQueryRequest&&, std::__y1::function&&, NYdbGrpc::Dev::TSimpleRequestProcessor::TAsyncRequest, std::__y1::shared_ptr, TDuration, NYdb::Dev::TRpcRequestSettings const&, std::__y1::shared_ptr)::'lambda'(Ydb::Operations::Operation*, NYdb::Dev::TPlainStatus)::operator()(Ydb::Operations::Operation*, NYdb::Dev::TPlainStatus)+119 (0x1ED5D787) decltype(std::declval()(std::declval(), std::declval())) std::__y1::__invoke[abi:fe200000](Ydb::Table::ExecuteSchemeQueryRequest&&, std::__y1::function&&, NYdbGrpc::Dev::TSimpleRequestProcessor::TAsyncRequest, std::__y1::shared_ptr, TDuration, NYdb::Dev::TRpcRequestSettings const&, std::__y1::shared_ptr)::'lambda'(Ydb::Operations::Operation*, NYdb::Dev::TPlainStatus)&, Ydb::Operations::Operation*, NYdb::Dev::TPlainStatus>(Ydb::Table::V1::TableService&&, Ydb::Operations::Operation*&&, NYdb::Dev::TPlainStatus&&)+51 (0x1ED5D673) void NYdb::Dev::TGRpcConnectionsImpl::RunDeferred(Ydb::Table::ExecuteSchemeQueryRequest&&, std::__y1::function&&, NYdbGrpc::Dev::TSimpleRequestProcessor::TAsyncRequest, std::__y1::shared_ptr, TDuration, NYdb::Dev::TRpcRequestSettings const&, bool, std::__y1::shared_ptr)::'lambda'(Ydb::Table::ExecuteSchemeQueryResponse*, 
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67
>> TDataShardTrace::TestTraceDistributedUpsert+UseSink [FAIL]
>> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Query [FAIL]
>> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Table
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56
>> TDataShardTrace::TestTraceDistributedSelect [FAIL]
>> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD]
>> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Query [FAIL]
>> TxUsage::WriteToTopic_Demo_24_Table
>> KqpStreamLookup::ReadTableDuringSplit [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51
>> TxUsage::WriteToTopic_Demo_33_Query [FAIL]
>> TxUsage::WriteToTopic_Demo_34_Table
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD]
Test command err:
2025-05-29T15:23:33.882422Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:23:33.882467Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:23:33.882483Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d11/r3tmp/tmp6PxXut/pdisk_1.dat 2025-05-29T15:23:34.000060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:23:34.013075Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:34.017605Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532213431824 != 1748532213431828 2025-05-29T15:23:34.060913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:34.060966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:34.071767Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:34.151587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 >> KqpStreamLookup::ReadTableWithIndexDuringSplit >> TxUsage::Sinks_Oltp_WriteToTopic_5_Table [FAIL] >> TxUsage::Sinks_Oltp_WriteToTopic_5_Query >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Table [FAIL] >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Query >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [FAIL] Test command err: 2025-05-29T15:23:28.386614Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:23:28.386662Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:23:28.386677Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0015d6/r3tmp/tmpYgGnE6/pdisk_1.dat 2025-05-29T15:23:28.509969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:23:28.523572Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:28.527595Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532207944065 != 1748532207944069 2025-05-29T15:23:28.569655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:28.569704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:28.580350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:28.656383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:28.677002Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2577] 2025-05-29T15:23:28.677106Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:23:28.688266Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:23:28.688343Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:23:28.688555Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:23:28.688566Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:23:28.688575Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:23:28.688640Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:23:28.688906Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:23:28.688922Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:704:2577] in generation 1 2025-05-29T15:23:28.689057Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:679:2579] 2025-05-29T15:23:28.689104Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:23:28.690786Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 
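An aside on the entries above: this is the standard datashard boot sequence, not part of the failure. Each tablet executes TxInitSchema, TTxInit, and TTxInitRestored in order, then parks in WaitScheme because no processing params exist yet, which is why it reports "not sending time cast registration request". A toy model of that gate -- state names follow the log, but the interface is invented for illustration:

    #include <cassert>

    // Toy model of the WaitScheme gate visible in the datashard boot log.
    enum class EState { Boot, WaitScheme, Work };

    struct TShardBoot {
        EState State = EState::Boot;
        bool HasProcessingParams = false;

        void FinishInit() {
            // "Switched to work state WaitScheme ... missing processing params"
            State = HasProcessingParams ? EState::Work : EState::WaitScheme;
        }
        void OnSchemaTx() {
            // A schema proposal (e.g. ESchemeOpCreateTable) delivers processing
            // params and unblocks time-cast registration and change senders.
            HasProcessingParams = true;
            State = EState::Work;
        }
    };

    int main() {
        TShardBoot shard;
        shard.FinishInit();
        assert(shard.State == EState::WaitScheme);
        shard.OnSchemaTx();
        assert(shard.State == EState::Work);
    }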
2025-05-29T15:23:28.690824Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:23:28.690981Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-05-29T15:23:28.690990Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037889 2025-05-29T15:23:28.690998Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037889 2025-05-29T15:23:28.691042Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:23:28.691061Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:23:28.691073Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037889 persisting started state actor id [1:710:2579] in generation 1 2025-05-29T15:23:28.701611Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:23:28.706427Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:23:28.706539Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:23:28.706573Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-05-29T15:23:28.706579Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:23:28.706585Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:23:28.706591Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:23:28.706720Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:23:28.706729Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037889 2025-05-29T15:23:28.708634Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:23:28.708723Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-05-29T15:23:28.708730Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037889 2025-05-29T15:23:28.708736Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-05-29T15:23:28.708743Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:23:28.708950Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:23:28.708982Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:23:28.709012Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:23:28.709021Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 0 2025-05-29T15:23:28.709032Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:23:28.709038Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:23:28.709045Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-05-29T15:23:28.709055Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-05-29T15:23:28.709190Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2573], serverId# [1:687:2583], sessionId# [0:0:0] 2025-05-29T15:23:28.709198Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-05-29T15:23:28.709203Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:23:28.709207Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-05-29T15:23:28.709213Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:23:28.709252Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:23:28.709315Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:23:28.709335Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:23:28.709446Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2574], serverId# [1:695:2589], sessionId# [0:0:0] 2025-05-29T15:23:28.709481Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-29T15:23:28.709508Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-05-29T15:23:28.709519Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-05-29T15:23:28.709911Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:23:28.709928Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:23:28.720367Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:23:28.720425Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:23:28.720594Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-29T15:23:28.720605Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:467: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-05-29T15:23:28.872452Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037889, clientId# [1:731:2610], serverId# [1:734:2613], sessionId# [0:0:0] 2025-05-29T15:23:28.872508Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:732:2611], serverId# [1:735:2614], sessionId# [0:0:0] 2025-05-29T15:23:28.873650Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-05-29T15:23:28.873679Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:23:28.873815Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 100 ... 39712Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [3:59:2106] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:23:31.839724Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [3:59:2106] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [3:764:2628] 2025-05-29T15:23:31.840679Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [3:764:2628] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:23:31.840708Z node 3 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [3:764:2628] txid# 281474976715658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:31.840714Z node 3 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [3:764:2628] txid# 281474976715658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:23:31.841323Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [3:764:2628] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:23:31.841344Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [3:764:2628] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:31.841415Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [3:764:2628] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:31.841448Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# 
[3:764:2628] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:31.841466Z node 3 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [3:764:2628] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:23:31.841796Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:23:31.841987Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [3:764:2628] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:23:31.842150Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [3:764:2628] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-05-29T15:23:31.842163Z node 3 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [3:764:2628] txid# 281474976715658 SEND to# [3:763:2627] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-05-29T15:23:31.843413Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [3:684:2580], Recipient [3:665:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:23:31.843432Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:23:31.854838Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [3:724:2600], Recipient [3:665:2569]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [3:724:2600] ServerId: [3:730:2606] } 2025-05-29T15:23:31.854863Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:23:32.006106Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [3:684:2580], Recipient [3:665:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:23:32.006146Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:23:32.007113Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:763:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:23:32.031085Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [3:59:2106] Handle TEvProposeTransaction 2025-05-29T15:23:32.031120Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [3:59:2106] TxId# 281474976715659 ProcessProposeTransaction 2025-05-29T15:23:32.031147Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [3:59:2106] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [3:823:2668] 2025-05-29T15:23:32.032239Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [3:823:2668] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:23:32.032281Z node 3 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [3:823:2668] txid# 281474976715659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:32.032288Z node 3 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [3:823:2668] txid# 281474976715659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:23:32.032607Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [3:823:2668] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:23:32.032625Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [3:823:2668] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:32.032692Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [3:823:2668] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:32.032734Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [3:823:2668] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:32.032756Z node 3 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [3:823:2668] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-05-29T15:23:32.033130Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [3:823:2668] txid# 281474976715659 HANDLE EvClientConnected 2025-05-29T15:23:32.033295Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [3:823:2668] txid# 281474976715659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', 
error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:23:32.033840Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:823:2668] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:32.033860Z node 3 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [3:823:2668] txid# 281474976715659 SEND to# [3:763:2627] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48} 2025-05-29T15:23:32.110405Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:833:2677], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:23:32.111153Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=MWNiOWZjMDktMTAwNjEyNTctOTE1YWNkZTUtNzNhNDZiZTA=, ActorId: [3:747:2617], ActorState: ExecuteState, TraceId: 01jwea8c3zd3qj1229qcsjj561, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS)
Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1
, with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13AD9BEC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C8D789)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x26319804)
NKikimr::NTestSuiteTTxDataShardUploadRows::TTestCaseBulkUpsertDuringAddIndexRaceCorruption::Execute_(NUnitTest::TTestContext&)+1300 (0x139C0BF4)
NKikimr::NTestSuiteTTxDataShardUploadRows::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139C78B7)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C8F63E)
NKikimr::NTestSuiteTTxDataShardUploadRows::TCurrentTest::Execute()+433 (0x139C7111)
NUnitTest::TTestFactory::Execute()+803 (0x13C8FDB3)
NUnitTest::RunMain(int, char**)+3021 (0x13CA195D)
??+0 (0x7F9BCFC96D90)
__libc_start_main+128 (0x7F9BCFC96E40)
_start+41 (0x12A0C029)
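This failure is worth dwelling on, because most FAILs in this run carry the same signature: query compilation dies with `yql/essentials/ast/yql_expr.h:1874: index out of range`, KQP replies INTERNAL_ERROR, and every helper that asserts SUCCESS trips -- here `NKikimr::ExecSQL`, comparing `response.GetYdbStatus()` against the expected code; the unittest string diff renders the mismatch as `(INT|SUCC)E(RNAL_ERROR|SS)`. A simplified sketch of that status check (the real helper lives in ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp; the response type here is a stand-in):

    #include <library/cpp/testing/unittest/registar.h>
    #include <ydb/public/api/protos/ydb_status_codes.pb.h>

    // Sketch of the assertion pattern behind NKikimr::ExecSQL: execute a
    // query, then require the returned status to equal the expected one. Any
    // response type exposing GetYdbStatus() and DebugString() fits this shape.
    template <typename TResponse>
    void AssertYdbStatus(const TResponse& response,
                         Ydb::StatusIds::StatusCode expected) {
        // On mismatch the framework prints both values plus a diff; with
        // INTERNAL_ERROR returned where SUCCESS was expected, that diff is
        // the "(INT|SUCC)E(RNAL_ERROR|SS)" seen in the log.
        UNIT_ASSERT_VALUES_EQUAL_C(response.GetYdbStatus(), expected,
                                   response.DebugString());
    }

Since the identical yql_expr.h assert shows up under TTxDataShardUploadRows, TDataShardTrace, and KqpStreamLookup alike, this reads as one compiler-side regression surfacing through many suites rather than independent test bugs.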
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57
>> TxUsage::WriteToTopic_Demo_24_Table [FAIL]
>> TxUsage::WriteToTopic_Demo_24_Query
>> TDataShardTrace::TestTraceDistributedSelectViaReadActors
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69
>> TxUsage::WriteToTopic_Demo_34_Table [FAIL]
>> TxUsage::WriteToTopic_Demo_34_Query
|63.9%| [TA] $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67
>> TxUsage::Sinks_Oltp_WriteToTopic_5_Query [FAIL]
>> TxUsage::Sinks_Oltp_WriteToTopics_1_Table
>> TExportToS3WithRebootsTests::ShouldSucceedOnSingleTopic
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck
>> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Query [FAIL]
>> TxUsage::WriteToTopic_Demo_21_RestartNo_Table
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58
|63.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader
|63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43
>> TxUsage::WriteToTopic_Demo_24_Query [FAIL]
>> TExportToS3WithRebootsTests::ShouldSucceedOnSingleView
>> TxUsage::WriteToTopic_Demo_25_Table
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19
>> TxUsage::WriteToTopic_Demo_34_Query [FAIL]
>> TxUsage::WriteToTopic_Demo_35_Table
>> TxUsage::Sinks_Oltp_WriteToTopics_1_Table [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [GOOD]
>> TxUsage::Sinks_Oltp_WriteToTopics_1_Query
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53
>> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTable
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59
|63.9%| [TA] {RESULT} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|63.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68
>> TxUsage::WriteToTopic_Demo_21_RestartNo_Table [FAIL]
>> TxUsage::WriteToTopic_Demo_21_RestartNo_Query
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [FAIL]
Test command err:
2025-05-29T15:23:32.206189Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:23:32.206229Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:23:32.206245Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d5d/r3tmp/tmpAGERoV/pdisk_1.dat 2025-05-29T15:23:32.331939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:23:32.349420Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:32.354716Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532211698367 != 1748532211698371 2025-05-29T15:23:32.408030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:32.408077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:32.418851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:32.497387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:34.484792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:927:2769], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:23:34.484829Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:938:2774], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:23:34.484846Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:23:34.485929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-05-29T15:23:34.499411Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found
2025-05-29T15:23:34.658137Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:941:2777], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-05-29T15:23:34.696546Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:1003:2819] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:23:34.753394Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:1013:2828], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:23:34.754393Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWY5ZGE3My0zYjgyMjM5NC1mMTY3NWZkZS04YWZlODU1Mg==, ActorId: [1:925:2767], ActorState: ExecuteState, TraceId: 01jwea8epmcvpcfqtg7hynhdya, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/datashard_ut_trace.cpp:37, void NKikimr::NTestSuiteTDataShardTrace::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, Ydb::StatusIds::StatusCode, NWilson::TTraceId): (ev->Get()->Record.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13AB26EC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C66289)
NKikimr::NTestSuiteTDataShardTrace::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, Ydb::StatusIds_StatusCode, NWilson::TTraceId)+1990 (0x139917E6)
NKikimr::NTestSuiteTDataShardTrace::TTestCaseTestTraceDistributedUpsert::Execute_(NUnitTest::TTestContext&)+1249 (0x139ADC81)
NKikimr::NTestSuiteTDataShardTrace::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139A4707)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C6813E)
NKikimr::NTestSuiteTDataShardTrace::TCurrentTest::Execute()+436 (0x139A3F64)
NUnitTest::TTestFactory::Execute()+803 (0x13C688B3)
NUnitTest::RunMain(int, char**)+3021 (0x13C7A45D)
??+0 (0x7F4370284D90)
__libc_start_main+128 (0x7F4370284E40)
_start+41 (0x129F9029)
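One more recurring pattern above that is noise rather than the failure: the workload manager bootstraps the `default` resource pool on first use. The fetch returns NOT_FOUND, a create is proposed, the creator retries with "Transaction ... completed, doublechecking", and a racing creation attempt receives StatusAlreadyExists with "path exist, request accepts it". A hypothetical sketch of that create-if-missing reconciliation (invented names, not YDB's actual interface):

    #include <functional>

    // Hypothetical status codes mirroring the scheme-shard replies in the log.
    enum class EStatus { Ok, NotFound, AlreadyExists, InProgress };

    // Sketch of the idempotent "ensure resource pool" flow: a NOT_FOUND fetch
    // triggers creation, a racing creator's AlreadyExists is treated as
    // success ("path exist, request accepts it"), and InProgress is
    // doublechecked by re-reading. `fetch` and `create` stand in for the real
    // scheme operations.
    EStatus EnsurePool(const std::function<EStatus()>& fetch,
                       const std::function<EStatus()>& create) {
        if (fetch() == EStatus::Ok) {
            return EStatus::Ok;          // pool already present
        }
        switch (create()) {
            case EStatus::Ok:
            case EStatus::AlreadyExists: // lost the race: still fine
                return EStatus::Ok;
            case EStatus::InProgress:    // "completed, doublechecking"
                return fetch();          // re-read before declaring success
            default:
                return EStatus::NotFound; // caller schedules a retry
        }
    }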
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44
>> TxUsage::WriteToTopic_Demo_25_Table [FAIL]
>> TxUsage::WriteToTopic_Demo_25_Query
>> TPQTest::TestPartitionedBlobFails [GOOD]
>> TPQTest::TestPQSmallRead
|63.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator
|63.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator
|63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator
>> TxUsage::Sinks_Oltp_WriteToTopics_1_Query [FAIL]
>> TxUsage::Sinks_Oltp_WriteToTopics_2_Table
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60
>> TxUsage::WriteToTopic_Demo_21_RestartNo_Query [FAIL]
>> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Table
>> KqpStreamLookup::ReadTableWithIndexDuringSplit [FAIL]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68
>> TxUsage::WriteToTopic_Demo_25_Query [FAIL]
>> TxUsage::WriteToTopic_Demo_26_Table
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21
>> TxUsage::WriteToTopic_Demo_35_Query [FAIL]
>> TxUsage::WriteToTopic_Demo_36_Table
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD]
>> TxUsage::Sinks_Oltp_WriteToTopics_2_Table [FAIL]
>> TxUsage::Sinks_Oltp_WriteToTopics_2_Query
>> TDataShardTrace::TestTraceDistributedSelectViaReadActors [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [FAIL]
Test command err:
2025-05-29T15:23:33.344047Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:23:33.344090Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:23:33.344107Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d3a/r3tmp/tmpm2YSy8/pdisk_1.dat 2025-05-29T15:23:33.468006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:23:33.484925Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:33.489754Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532212889175 != 1748532212889179 2025-05-29T15:23:33.533402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:33.533441Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:33.545906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:33.622338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:35.478265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:927:2769], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:23:35.478308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:938:2774], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:23:35.478331Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:23:35.479517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-05-29T15:23:35.493791Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found
2025-05-29T15:23:35.646927Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:941:2777], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-05-29T15:23:35.687903Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:1003:2819] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:23:35.750302Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:1013:2828], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:35.751478Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmM5NTU2M2ItN2ExMjRlNDgtMThmZjE0OTUtYTI1NzVhODk=, ActorId: [1:925:2767], ActorState: ExecuteState, TraceId: 01jwea8fnn5kastecyxvdw1p58, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/datashard_ut_trace.cpp:37, void NKikimr::NTestSuiteTDataShardTrace::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, Ydb::StatusIds::StatusCode, NWilson::TTraceId): (ev->Get()->Record.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13AB26EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C66289) NKikimr::NTestSuiteTDataShardTrace::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, Ydb::StatusIds_StatusCode, NWilson::TTraceId)+1990 (0x139917E6) NKikimr::NTestSuiteTDataShardTrace::TTestCaseTestTraceDistributedUpsert::Execute_(NUnitTest::TTestContext&)+1245 (0x139A9B2D) NKikimr::NTestSuiteTDataShardTrace::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139A4707) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C6813E) NKikimr::NTestSuiteTDataShardTrace::TCurrentTest::Execute()+436 (0x139A3F64) NUnitTest::TTestFactory::Execute()+803 (0x13C688B3) NUnitTest::RunMain(int, char**)+3021 (0x13C7A45D) ??+0 (0x7FCE2D2C6D90) __libc_start_main+128 (0x7FCE2D2C6E40) _start+41 (0x129F9029) >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Table [FAIL] >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Query >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnMultiShardTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableDuringSplit [FAIL] Test command err: 2025-05-29T15:23:34.066512Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:23:34.066554Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:23:34.066571Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001296/r3tmp/tmpAor4Rc/pdisk_1.dat 2025-05-29T15:23:34.184873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:23:34.199369Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:34.205505Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532213634098 != 1748532213634102 2025-05-29T15:23:34.248198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:34.248246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:34.258926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:34.334434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:34.553825Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:733:2615], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:23:34.553862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:23:34.553947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:23:34.555086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-05-29T15:23:34.717713Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:747:2623], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-05-29T15:23:34.754586Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:818:2663] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:23:36.142730Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:828:2672], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:36.164863Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Yzc3NWJmMmUtMTA1MzJmMmMtNmEwMzEwOGMtZGIyNDEyNWQ=, ActorId: [1:731:2613], ActorState: ExecuteState, TraceId: 01jwea8ersfzft22yhj78sdvcs, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13AAAC1C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C5DDB9) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x262EDC84) NKikimr::NTestSuiteKqpStreamLookup::TTestCaseReadTableDuringSplit::Execute_(NUnitTest::TTestContext&)+1252 (0x139A5B24) NKikimr::NTestSuiteKqpStreamLookup::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139AC807) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C5FC6E) NKikimr::NTestSuiteKqpStreamLookup::TCurrentTest::Execute()+436 (0x139AC084) NUnitTest::TTestFactory::Execute()+803 (0x13C603E3) NUnitTest::RunMain(int, char**)+3021 (0x13C71F8D) ??+0 (0x7FD3814A2D90) __libc_start_main+128 (0x7FD3814A2E40) _start+41 (0x129F7029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [FAIL] Test command err: 2025-05-29T15:23:33.589845Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:23:33.589902Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:23:33.589930Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d2f/r3tmp/tmpupfMzR/pdisk_1.dat 2025-05-29T15:23:33.715235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:23:33.728269Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:33.732972Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532213149791 != 1748532213149795 2025-05-29T15:23:33.776122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:33.776169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:33.786934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:33.860695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:35.752434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:927:2769], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:35.752469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:938:2774], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:35.752485Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:35.753613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:23:35.766953Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-05-29T15:23:35.919223Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:941:2777], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:23:35.959483Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:1003:2819] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:36.036908Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:1013:2828], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:36.038124Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGE4ZTFjODAtODUwY2M3ZjgtMWFiYTg3ZjUtNGFjZmMyZDA=, ActorId: [1:925:2767], ActorState: ExecuteState, TraceId: 01jwea8fy830s1g7t4zcm5m21p, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/datashard_ut_trace.cpp:37, void NKikimr::NTestSuiteTDataShardTrace::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, Ydb::StatusIds::StatusCode, NWilson::TTraceId): (ev->Get()->Record.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13AB26EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C66289) NKikimr::NTestSuiteTDataShardTrace::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, Ydb::StatusIds_StatusCode, NWilson::TTraceId)+1990 (0x139917E6) NKikimr::NTestSuiteTDataShardTrace::TTestCaseTestTraceDistributedSelect::Execute_(NUnitTest::TTestContext&)+1159 (0x13996567) NKikimr::NTestSuiteTDataShardTrace::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139A4707) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C6813E) NKikimr::NTestSuiteTDataShardTrace::TCurrentTest::Execute()+436 (0x139A3F64) NUnitTest::TTestFactory::Execute()+803 (0x13C688B3) NUnitTest::RunMain(int, char**)+3021 (0x13C7A45D) ??+0 (0x7FE43766AD90) __libc_start_main+128 (0x7FE43766AE40) _start+41 (0x129F9029) >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 >> TxUsage::WriteToTopic_Demo_26_Table [FAIL] >> TxUsage::WriteToTopic_Demo_26_Query >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] Test command err: Starting YDB, grpc: 20443, msgbus: 1855 2025-05-29T15:23:11.769148Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888580235541140:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:11.769162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b5c/r3tmp/tmpBse6bt/pdisk_1.dat 2025-05-29T15:23:11.828145Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20443, node 1 2025-05-29T15:23:11.836432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:11.836444Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
(empty maybe) 2025-05-29T15:23:11.836446Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:11.836491Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1855 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-05-29T15:23:11.856831Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509888580235541365:2139] Handle TEvNavigate describe path dc-1 2025-05-29T15:23:11.858653Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509888580235541808:2422] HANDLE EvNavigateScheme dc-1 2025-05-29T15:23:11.858967Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:7509888580235541808:2422] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:11.866138Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:7509888580235541808:2422] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-05-29T15:23:11.868351Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:7509888580235541808:2422] Handle TEvDescribeSchemeResult Forward to# [1:7509888580235541806:2421] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: 2025-05-29T15:23:11.869498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:11.869526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:11.871082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" 
PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:11.871724Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888580235541365:2139] Handle TEvProposeTransaction 2025-05-29T15:23:11.871733Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:327: actor# [1:7509888580235541365:2139] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-05-29T15:23:11.893943Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:7509888580235541365:2139] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:23:11.894644Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:23:11.894656Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888580235541365:2139] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:23:11.894695Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888580235541365:2139] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7509888580235541833:2439] 2025-05-29T15:23:11.909659Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888580235541833:2439] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:11.910320Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888580235541833:2439] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:11.910333Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888580235541833:2439] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:11.910347Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888580235541833:2439] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:11.910455Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888580235541833:2439] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:11.910522Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888580235541833:2439] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-05-29T15:23:11.910542Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888580235541833:2439] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:23:11.910598Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888580235541833:2439] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:23:11.910845Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:11.915324Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888580235541833:2439] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:23:11.915349Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509888580235541833:2439] txid# 281474976715657 SEND to# [1:7509888580235541825:2431] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-05-29T15:23:11.919310Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888580235541365:2139] Handle TEvProposeTransaction 2025-05-29T15:23:11.919321Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888580235541365:2139] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:23:11.919331Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888580235541365:2139] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509888580235541873:2475] 2025-05-29T15:23:11.919995Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888580235541873:2475] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:11.920018Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888580235541873:2475] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:11.920022Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888580235541873:2475] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:11.920036Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888580235541873:2475] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:11.920118Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888580235541873:2475] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:11.920135Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888580235541873:2475] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:11.920144Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888580235541873:2475] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:23:11.920168Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888580235541873:2475] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:23:11.920269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:23:11.920700Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888580235541873:2475] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionRes ... e, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:23:40.291489Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888705174231969:2504] txid# 281474976715660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715660} 2025-05-29T15:23:40.291509Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888705174231969:2504] txid# 281474976715660 SEND to# [59:7509888705174231968:2338] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 53} 2025-05-29T15:23:40.300563Z node 59 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7509888705174231968:2338], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:23:40.372837Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888700879263837:2113] Handle TEvProposeTransaction 2025-05-29T15:23:40.372856Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888700879263837:2113] TxId# 281474976715661 ProcessProposeTransaction 2025-05-29T15:23:40.372876Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888700879263837:2113] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7509888705174232043:2558] 2025-05-29T15:23:40.373702Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888705174232043:2558] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n#\010\000\022\037\010\001\020\377\377\003\032\025cluster_admin@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-05-29T15:23:40.373715Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888705174232043:2558] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:40.373720Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888705174232043:2558] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-05-29T15:23:40.373899Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7509888705174232043:2558] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:23:40.373910Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888705174232043:2558] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:40.373932Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888705174232043:2558] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:40.373962Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888705174232043:2558] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:40.373974Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888705174232043:2558] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-05-29T15:23:40.374018Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# 
[59:7509888705174232043:2558] txid# 281474976715661 HANDLE EvClientConnected 2025-05-29T15:23:40.378824Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888705174232043:2558] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:23:40.378869Z node 59 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [59:7509888705174232043:2558] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:40.378874Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888705174232043:2558] txid# 281474976715661 SEND to# [59:7509888705174231968:2338] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-05-29T15:23:40.382530Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888700879263837:2113] Handle TEvProposeTransaction 2025-05-29T15:23:40.382545Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888700879263837:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-05-29T15:23:40.382555Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888700879263837:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7509888705174232059:2568] 2025-05-29T15:23:40.383355Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888705174232059:2568] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:57190" 2025-05-29T15:23:40.383372Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888705174232059:2568] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:40.383378Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888705174232059:2568] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:40.383392Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888705174232059:2568] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:40.383497Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888705174232059:2568] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:40.383521Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888705174232059:2568] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:40.383533Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888705174232059:2568] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-05-29T15:23:40.383585Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888705174232059:2568] txid# 281474976715662 HANDLE EvClientConnected 2025-05-29T15:23:40.392424Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888705174232059:2568] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-05-29T15:23:40.392444Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888705174232059:2568] txid# 281474976715662 SEND to# [59:7509888705174232057:2330] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-05-29T15:23:40.400232Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888700879263837:2113] Handle TEvProposeTransaction 2025-05-29T15:23:40.400247Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888700879263837:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-05-29T15:23:40.400263Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888700879263837:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7509888705174232098:2583] 2025-05-29T15:23:40.400981Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888705174232098:2583] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\025cluster_admin@builtin\022\030\022\026\n\024all-users@well-known\032\025cluster_admin@builtin\"\007Builtin*\027clus****ltin (2AB0E265)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:57190" 2025-05-29T15:23:40.400995Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888705174232098:2583] txid# 281474976715663 Bootstrap, UserSID: cluster_admin@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:40.401001Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888705174232098:2583] txid# 281474976715663 Bootstrap, UserSID: cluster_admin@builtin IsClusterAdministrator: 1 2025-05-29T15:23:40.401014Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888705174232098:2583] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:40.401112Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888705174232098:2583] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:40.401122Z node 59 :TX_PROXY ERROR: schemereq.cpp:1072: Actor# [59:7509888705174232098:2583] txid# 281474976715663, Access denied for cluster_admin@builtin on path /dc-1, with access AlterSchema 2025-05-29T15:23:40.401146Z node 59 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [59:7509888705174232098:2583] txid# 281474976715663, issues: { message: "Access denied for cluster_admin@builtin on path /dc-1" issue_code: 200000 severity: 1 } 2025-05-29T15:23:40.401150Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888705174232098:2583] txid# 281474976715663 SEND to# [59:7509888705174232097:2347] Source {TEvProposeTransactionStatus Status# 5} 2025-05-29T15:23:40.401353Z node 59 :KQP_SESSION WARN: 
kqp_session_actor.cpp:2583: SessionId: ydb://session/3?node_id=59&id=ODVjMzQ1OWEtODlhNDhhYTktOGZhYmQxMWMtZGVhZjI3MDc=, ActorId: [59:7509888705174232083:2347], ActorState: ExecuteState, TraceId: 01jwea8mfdckf8h17yage2w2tg, Create QueryResponse for error on request, msg: 2025-05-29T15:23:40.401549Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:353: actor# [59:7509888700879263837:2113] Handle TEvExecuteKqpTransaction 2025-05-29T15:23:40.401555Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:342: actor# [59:7509888700879263837:2113] TxId# 281474976715664 ProcessProposeKqpTransaction >> TxUsage::WriteToTopic_Demo_36_Table [FAIL] >> TxUsage::WriteToTopic_Demo_36_Query >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 >> TxUsage::Sinks_Oltp_WriteToTopics_2_Query [FAIL] >> TxUsage::Sinks_Oltp_WriteToTopics_3_Table ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] Test command err: Starting YDB, grpc: 25155, msgbus: 25448 2025-05-29T15:23:11.825194Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888579044509376:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:11.825306Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b50/r3tmp/tmp5iV2cv/pdisk_1.dat 2025-05-29T15:23:11.883524Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25155, node 1 2025-05-29T15:23:11.899055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:11.899066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:11.899068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:11.899113Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25448 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-05-29T15:23:11.915361Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509888579044509458:2139] Handle TEvNavigate describe path dc-1 2025-05-29T15:23:11.917368Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509888579044509904:2427] HANDLE EvNavigateScheme dc-1 2025-05-29T15:23:11.917656Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:7509888579044509904:2427] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:11.924278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:11.924313Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:11.925724Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:7509888579044509904:2427] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-05-29T15:23:11.925799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:11.927685Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:7509888579044509904:2427] Handle TEvDescribeSchemeResult Forward to# [1:7509888579044509903:2426] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:11.931282Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888579044509458:2139] Handle TEvProposeTransaction 2025-05-29T15:23:11.931294Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888579044509458:2139] TxId# 281474976710657 ProcessProposeTransaction 2025-05-29T15:23:11.931333Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888579044509458:2139] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7509888579044509923:2438] 2025-05-29T15:23:11.941934Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888579044509923:2438] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:11.941975Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888579044509923:2438] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:11.941979Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888579044509923:2438] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:11.941990Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888579044509923:2438] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:11.942093Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888579044509923:2438] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:11.942126Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888579044509923:2438] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-05-29T15:23:11.942135Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888579044509923:2438] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-05-29T15:23:11.942192Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888579044509923:2438] txid# 281474976710657 HANDLE EvClientConnected 2025-05-29T15:23:11.942360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:11.943061Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888579044509923:2438] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-05-29T15:23:11.943072Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509888579044509923:2438] txid# 281474976710657 SEND to# [1:7509888579044509922:2437] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-05-29T15:23:11.946338Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888579044509458:2139] Handle TEvProposeTransaction 2025-05-29T15:23:11.946352Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888579044509458:2139] TxId# 281474976710658 ProcessProposeTransaction 2025-05-29T15:23:11.946364Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888579044509458:2139] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7509888579044509961:2472] 2025-05-29T15:23:11.947065Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888579044509961:2472] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:11.947086Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888579044509961:2472] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:11.947089Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888579044509961:2472] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:11.947103Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888579044509961:2472] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:11.947185Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888579044509961:2472] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:11.947208Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888579044509961:2472] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:11.947221Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888579044509961:2472] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-05-29T15:23:11.947252Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888579044509961:2472] txid# 281474976710658 HANDLE EvClientConnected 2025-05-29T15:23:11.947336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:23:11.947781Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888579044509961:2472] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-05-29T15:23:11.947792Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509888579044509961:2472] txid# 281474976710658 SEND to# [1:7509888579044509960:2471] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-05-29T15:23:11.951698Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888579044509458:2139] Handle TEvProposeTransaction 2025-05-29T15:23:11.951713Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888579044509458:2139] 
TxId# 281474976710659 ... 644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:40.659965Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888703551128749:2549] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-05-29T15:23:40.660007Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888703551128749:2549] txid# 281474976715661 HANDLE EvClientConnected 2025-05-29T15:23:40.663149Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888703551128749:2549] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:23:40.663201Z node 59 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [59:7509888703551128749:2549] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:40.663206Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888703551128749:2549] txid# 281474976715661 SEND to# [59:7509888703551128678:2338] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-05-29T15:23:40.666906Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888703551127994:2113] Handle TEvProposeTransaction 2025-05-29T15:23:40.666912Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888703551127994:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-05-29T15:23:40.666920Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888703551127994:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7509888703551128773:2561] 2025-05-29T15:23:40.667645Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888703551128773:2561] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:40390" 2025-05-29T15:23:40.667655Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888703551128773:2561] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:40.667659Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888703551128773:2561] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:40.667667Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888703551128773:2561] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 
2025-05-29T15:23:40.667741Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888703551128773:2561] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:40.667757Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888703551128773:2561] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:40.667767Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888703551128773:2561] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-05-29T15:23:40.667800Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888703551128773:2561] txid# 281474976715662 HANDLE EvClientConnected 2025-05-29T15:23:40.673670Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888703551128773:2561] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-05-29T15:23:40.673691Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888703551128773:2561] txid# 281474976715662 SEND to# [59:7509888703551128772:2331] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-05-29T15:23:40.679819Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888703551127994:2113] Handle TEvProposeTransaction 2025-05-29T15:23:40.679831Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888703551127994:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-05-29T15:23:40.679847Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888703551127994:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7509888703551128786:2570] 2025-05-29T15:23:40.680599Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888703551128786:2570] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "db_admin@builtin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:40390" 2025-05-29T15:23:40.680614Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888703551128786:2570] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:40.680618Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888703551128786:2570] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:40.680634Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888703551128786:2570] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:40.680743Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888703551128786:2570] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:40.680768Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888703551128786:2570] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] 
DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:40.680780Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888703551128786:2570] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-05-29T15:23:40.680822Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888703551128786:2570] txid# 281474976715663 HANDLE EvClientConnected 2025-05-29T15:23:40.680951Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:40.683338Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888703551128786:2570] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-05-29T15:23:40.683352Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888703551128786:2570] txid# 281474976715663 SEND to# [59:7509888703551128785:2343] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-05-29T15:23:40.698816Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888703551127994:2113] Handle TEvProposeTransaction 2025-05-29T15:23:40.698839Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888703551127994:2113] TxId# 281474976715664 ProcessProposeTransaction 2025-05-29T15:23:40.698859Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888703551127994:2113] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7509888703551128817:2584] 2025-05-29T15:23:40.699797Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888703551128817:2584] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\020db_admin@builtin\022\030\022\026\n\024all-users@well-known\032\020db_admin@builtin\"\007Builtin*\027db_a****ltin (DEFA2CD5)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:40390" 2025-05-29T15:23:40.699815Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888703551128817:2584] txid# 281474976715664 Bootstrap, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:40.699821Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888703551128817:2584] txid# 281474976715664 Bootstrap, UserSID: db_admin@builtin IsClusterAdministrator: 0 2025-05-29T15:23:40.699878Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1401: Actor# [59:7509888703551128817:2584] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-05-29T15:23:40.699888Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1436: Actor# [59:7509888703551128817:2584] txid# 281474976715664 HandleResolveDatabase, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 1 DatabaseOwner: db_admin@builtin 2025-05-29T15:23:40.699899Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888703551128817:2584] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:40.699969Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888703551128817:2584] txid# 
281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:40.699994Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888703551128817:2584] HANDLE EvNavigateKeySetResult, txid# 281474976715664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:40.700008Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888703551128817:2584] txid# 281474976715664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715664 TabletId# 72057594046644480} 2025-05-29T15:23:40.700057Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888703551128817:2584] txid# 281474976715664 HANDLE EvClientConnected 2025-05-29T15:23:40.703367Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888703551128817:2584] txid# 281474976715664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715664} 2025-05-29T15:23:40.703387Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888703551128817:2584] txid# 281474976715664 SEND to# [59:7509888703551128816:2348] Source {TEvProposeTransactionStatus txid# 281474976715664 Status# 48} >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Query [FAIL] >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Table >> TFlatTest::AutoMergeBySize [GOOD] >> TFlatTest::AutoSplitMergeQueue >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleView [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnViewsAndTables >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 |64.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TxUsage::WriteToTopic_Demo_26_Query [FAIL] >> TxUsage::WriteToTopic_Demo_27_Table >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 >> TxUsage::WriteToTopic_Demo_36_Query [FAIL] >> TxUsage::WriteToTopic_Demo_37_Table >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 >> TxUsage::Sinks_Oltp_WriteToTopics_3_Table [FAIL] >> TxUsage::Sinks_Oltp_WriteToTopics_3_Query >> KqpRm::Reduce ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 30844, msgbus: 24680 2025-05-29T15:23:11.782850Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888579501752275:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:11.782871Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b47/r3tmp/tmp0x4KZ3/pdisk_1.dat 2025-05-29T15:23:11.844804Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30844, node 1 2025-05-29T15:23:11.855873Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:11.855888Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:11.855890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:11.855947Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24680 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-05-29T15:23:11.877306Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509888579501752501:2139] Handle TEvNavigate describe path dc-1 2025-05-29T15:23:11.878387Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509888579501752954:2433] HANDLE EvNavigateScheme dc-1 2025-05-29T15:23:11.878662Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:7509888579501752954:2433] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:11.883161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:11.883189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:11.884811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:11.888040Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:7509888579501752954:2433] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-05-29T15:23:11.890075Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:7509888579501752954:2433] Handle TEvDescribeSchemeResult Forward to# [1:7509888579501752953:2432] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 
0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:11.892932Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888579501752501:2139] Handle TEvProposeTransaction 2025-05-29T15:23:11.892941Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:327: actor# [1:7509888579501752501:2139] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-05-29T15:23:11.912758Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:7509888579501752501:2139] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:23:11.915098Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:23:11.915118Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888579501752501:2139] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:23:11.915163Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888579501752501:2139] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7509888579501752978:2449] 2025-05-29T15:23:11.928405Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888579501752978:2449] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:11.928454Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888579501752978:2449] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:11.928460Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888579501752978:2449] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:11.928475Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888579501752978:2449] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:11.928611Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888579501752978:2449] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:11.928669Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888579501752978:2449] HANDLE EvNavigateKeySetResult, txid# 281474976715657 
shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-05-29T15:23:11.928684Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888579501752978:2449] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:23:11.928755Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888579501752978:2449] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:23:11.928940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:11.931225Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888579501752978:2449] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:23:11.931244Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509888579501752978:2449] txid# 281474976715657 SEND to# [1:7509888579501752973:2444] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-05-29T15:23:11.934978Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888579501752501:2139] Handle TEvProposeTransaction 2025-05-29T15:23:11.934994Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888579501752501:2139] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:23:11.935007Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888579501752501:2139] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509888579501753018:2485] 2025-05-29T15:23:11.935800Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888579501753018:2485] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:11.935831Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888579501753018:2485] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:11.935835Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888579501753018:2485] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:11.935851Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888579501753018:2485] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:11.935963Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888579501753018:2485] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:11.935992Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888579501753018:2485] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 
RedirectRequired# true 2025-05-29T15:23:11.936008Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888579501753018:2485] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:23:11.936051Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888579501753018:2485] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:23:11.936154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:23:11.936668Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888579501753018:2485] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionR ... e 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888705709467664:2109] Handle TEvProposeTransaction 2025-05-29T15:23:41.737636Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888705709467664:2109] TxId# 281474976715661 ProcessProposeTransaction 2025-05-29T15:23:41.737657Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888705709467664:2109] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7509888705709468597:2565] 2025-05-29T15:23:41.738596Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888705709468597:2565] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-05-29T15:23:41.738625Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888705709468597:2565] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:41.738630Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888705709468597:2565] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-05-29T15:23:41.738688Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1401: Actor# [59:7509888705709468597:2565] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-05-29T15:23:41.738695Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1436: Actor# [59:7509888705709468597:2565] txid# 281474976715661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-05-29T15:23:41.738868Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7509888705709468597:2565] 
txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:23:41.738882Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888705709468597:2565] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:41.738906Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888705709468597:2565] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:41.738941Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888705709468597:2565] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:41.738953Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888705709468597:2565] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-05-29T15:23:41.738998Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888705709468597:2565] txid# 281474976715661 HANDLE EvClientConnected 2025-05-29T15:23:41.743242Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888705709468597:2565] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:23:41.743298Z node 59 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [59:7509888705709468597:2565] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:41.743303Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888705709468597:2565] txid# 281474976715661 SEND to# [59:7509888705709468524:2338] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-05-29T15:23:41.753990Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888705709467664:2109] Handle TEvProposeTransaction 2025-05-29T15:23:41.754005Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888705709467664:2109] TxId# 281474976715662 ProcessProposeTransaction 2025-05-29T15:23:41.754015Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888705709467664:2109] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7509888705709468614:2576] 2025-05-29T15:23:41.754906Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888705709468614:2576] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" 
DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:35802" 2025-05-29T15:23:41.754920Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888705709468614:2576] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:41.754925Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888705709468614:2576] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:41.754939Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888705709468614:2576] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:41.755053Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888705709468614:2576] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:41.755074Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888705709468614:2576] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:41.755086Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888705709468614:2576] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-05-29T15:23:41.755279Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888705709468614:2576] txid# 281474976715662 HANDLE EvClientConnected 2025-05-29T15:23:41.758329Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888705709468614:2576] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-05-29T15:23:41.758343Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888705709468614:2576] txid# 281474976715662 SEND to# [59:7509888705709468612:2329] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-05-29T15:23:41.782806Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888705709467664:2109] Handle TEvProposeTransaction 2025-05-29T15:23:41.782823Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888705709467664:2109] TxId# 281474976715663 ProcessProposeTransaction 2025-05-29T15:23:41.782843Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888705709467664:2109] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7509888705709468653:2591] 2025-05-29T15:23:41.783702Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888705709468653:2591] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:35802" 2025-05-29T15:23:41.783716Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888705709468653:2591] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:41.783722Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888705709468653:2591] txid# 281474976715663 Bootstrap, 
UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-05-29T15:23:41.783772Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1401: Actor# [59:7509888705709468653:2591] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-05-29T15:23:41.783780Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1436: Actor# [59:7509888705709468653:2591] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-05-29T15:23:41.783790Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888705709468653:2591] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:41.783879Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888705709468653:2591] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:41.783885Z node 59 :TX_PROXY ERROR: schemereq.cpp:1079: Actor# [59:7509888705709468653:2591] txid# 281474976715663, Access denied for ordinaryuser@builtin, attempt to manage user 2025-05-29T15:23:41.783903Z node 59 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [59:7509888705709468653:2591] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-05-29T15:23:41.783908Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888705709468653:2591] txid# 281474976715663 SEND to# [59:7509888705709468652:2347] Source {TEvProposeTransactionStatus Status# 5} 2025-05-29T15:23:41.786500Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:2583: SessionId: ydb://session/3?node_id=59&id=YTMwMjI3NzctNzFiZWQwZGUtYTVjMTI2OWEtMTNjNTM2MjM=, ActorId: [59:7509888705709468638:2347], ActorState: ExecuteState, TraceId: 01jwea8ntdbp0ym04gz19g36es, Create QueryResponse for error on request, msg: 2025-05-29T15:23:41.786636Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:353: actor# [59:7509888705709467664:2109] Handle TEvExecuteKqpTransaction 2025-05-29T15:23:41.786653Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:342: actor# [59:7509888705709467664:2109] TxId# 281474976715664 ProcessProposeKqpTransaction
>> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Table [FAIL]
>> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Query
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47
|64.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] Test command err:
Starting YDB, grpc: 27423, msgbus: 61832
2025-05-29T15:23:12.763295Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888582542789943:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:12.763847Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b1c/r3tmp/tmptQDuUH/pdisk_1.dat 2025-05-29T15:23:12.838249Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27423, node 1 2025-05-29T15:23:12.853715Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:12.853729Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:12.853731Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:12.853781Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:23:12.863954Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:12.863978Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:12.865586Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61832 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-05-29T15:23:12.876688Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509888582542790029:2139] Handle TEvNavigate describe path dc-1 2025-05-29T15:23:12.878677Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509888582542790480:2429] HANDLE EvNavigateScheme dc-1 2025-05-29T15:23:12.879005Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:7509888582542790480:2429] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.887967Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:7509888582542790480:2429] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-05-29T15:23:12.890134Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:7509888582542790480:2429] Handle TEvDescribeSchemeResult Forward to# [1:7509888582542790479:2428] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:12.894423Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888582542790029:2139] Handle TEvProposeTransaction 2025-05-29T15:23:12.894438Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:327: actor# [1:7509888582542790029:2139] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-05-29T15:23:12.902243Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:7509888582542790029:2139] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:23:12.903456Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:23:12.903471Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888582542790029:2139] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:23:12.903506Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888582542790029:2139] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7509888582542790498:2440] 2025-05-29T15:23:12.918415Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888582542790498:2440] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:12.918469Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888582542790498:2440] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:12.918474Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888582542790498:2440] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:12.918492Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888582542790498:2440] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:12.918630Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888582542790498:2440] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.918684Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888582542790498:2440] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-05-29T15:23:12.918699Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888582542790498:2440] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:23:12.918769Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888582542790498:2440] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:23:12.919322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.920152Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888582542790498:2440] txid# 281474976715657 Status 
StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:23:12.920170Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509888582542790498:2440] txid# 281474976715657 SEND to# [1:7509888582542790492:2434] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-05-29T15:23:12.926377Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888582542790029:2139] Handle TEvProposeTransaction 2025-05-29T15:23:12.926394Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888582542790029:2139] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:23:12.926406Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888582542790029:2139] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509888582542790538:2476] 2025-05-29T15:23:12.927288Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888582542790538:2476] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:12.927313Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888582542790538:2476] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:12.927318Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888582542790538:2476] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:12.927336Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888582542790538:2476] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:12.927458Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888582542790538:2476] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.927488Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888582542790538:2476] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:12.927508Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888582542790538:2476] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:23:12.927551Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888582542790538:2476] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:23:12.927680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.928502Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888582542790538:2476] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionR ... 
e 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888708790288235:2102] Handle TEvProposeTransaction 2025-05-29T15:23:42.373588Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888708790288235:2102] TxId# 281474976715661 ProcessProposeTransaction 2025-05-29T15:23:42.373612Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888708790288235:2102] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7509888713085256463:2553] 2025-05-29T15:23:42.374475Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888713085256463:2553] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-05-29T15:23:42.374498Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888713085256463:2553] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:42.374504Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888713085256463:2553] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-05-29T15:23:42.374577Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1401: Actor# [59:7509888713085256463:2553] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-05-29T15:23:42.374601Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1436: Actor# [59:7509888713085256463:2553] txid# 281474976715661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-05-29T15:23:42.374813Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7509888713085256463:2553] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:23:42.374840Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888713085256463:2553] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:42.374888Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888713085256463:2553] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:42.374939Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888713085256463:2553] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 
2025-05-29T15:23:42.374956Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888713085256463:2553] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-05-29T15:23:42.375029Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888713085256463:2553] txid# 281474976715661 HANDLE EvClientConnected 2025-05-29T15:23:42.376222Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888713085256463:2553] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:23:42.376268Z node 59 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [59:7509888713085256463:2553] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:42.376274Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888713085256463:2553] txid# 281474976715661 SEND to# [59:7509888713085256389:2338] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-05-29T15:23:42.379379Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888708790288235:2102] Handle TEvProposeTransaction 2025-05-29T15:23:42.379395Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888708790288235:2102] TxId# 281474976715662 ProcessProposeTransaction 2025-05-29T15:23:42.379416Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888708790288235:2102] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7509888713085256487:2565] 2025-05-29T15:23:42.380258Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888713085256487:2565] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37582" 2025-05-29T15:23:42.380283Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888713085256487:2565] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:42.380292Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888713085256487:2565] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:42.380305Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888713085256487:2565] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:42.380469Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888713085256487:2565] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:42.380524Z 
node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888713085256487:2565] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:42.380546Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888713085256487:2565] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-05-29T15:23:42.380618Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888713085256487:2565] txid# 281474976715662 HANDLE EvClientConnected 2025-05-29T15:23:42.383321Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888713085256487:2565] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-05-29T15:23:42.383335Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888713085256487:2565] txid# 281474976715662 SEND to# [59:7509888713085256486:2331] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-05-29T15:23:42.390211Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888708790288235:2102] Handle TEvProposeTransaction 2025-05-29T15:23:42.390223Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888708790288235:2102] TxId# 281474976715663 ProcessProposeTransaction 2025-05-29T15:23:42.390233Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888708790288235:2102] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7509888713085256519:2579] 2025-05-29T15:23:42.390863Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888713085256519:2579] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37582" 2025-05-29T15:23:42.390876Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888713085256519:2579] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:42.390878Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888713085256519:2579] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-05-29T15:23:42.390911Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1401: Actor# [59:7509888713085256519:2579] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-05-29T15:23:42.390925Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1436: Actor# [59:7509888713085256519:2579] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-05-29T15:23:42.390935Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888713085256519:2579] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:42.391004Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888713085256519:2579] txid# 281474976715663 
HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:42.391010Z node 59 :TX_PROXY ERROR: schemereq.cpp:1079: Actor# [59:7509888713085256519:2579] txid# 281474976715663, Access denied for ordinaryuser@builtin, attempt to manage user 2025-05-29T15:23:42.391031Z node 59 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [59:7509888713085256519:2579] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-05-29T15:23:42.391037Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888713085256519:2579] txid# 281474976715663 SEND to# [59:7509888713085256518:2347] Source {TEvProposeTransactionStatus Status# 5} 2025-05-29T15:23:42.391089Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:2583: SessionId: ydb://session/3?node_id=59&id=N2U5NDBhM2YtYzhkNjlkYzEtZjNjNTJkNmEtZjNjMDIyMGM=, ActorId: [59:7509888713085256504:2347], ActorState: ExecuteState, TraceId: 01jwea8pdm2ra2sw65pqewejrh, Create QueryResponse for error on request, msg: 2025-05-29T15:23:42.391156Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:353: actor# [59:7509888708790288235:2102] Handle TEvExecuteKqpTransaction 2025-05-29T15:23:42.391162Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:342: actor# [59:7509888708790288235:2102] TxId# 281474976715664 ProcessProposeKqpTransaction
|64.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/conveyor_composite/ut/unittest
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22
>> CompositeConveyorTests::Test10xDistribution
>> TxUsage::WriteToTopic_Demo_27_Table [FAIL]
>> TxUsage::WriteToTopic_Demo_27_Query
|64.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg
|64.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg
>> KqpRm::Reduce [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [GOOD]
>> TxUsage::WriteToTopic_Demo_37_Table [FAIL]
>> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Query [FAIL]
>> TPQTest::TestMaxTimeLagRewind [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [GOOD]
>> TxUsage::Sinks_Oltp_WriteToTopics_3_Query [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 [GOOD]
>> TxUsage::WriteToTopic_Demo_27_Query [FAIL]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck [GOOD]
>> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleView [GOOD]
>> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink
>> DataShardScan::ScanFollowedByUpdate
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71
>> TxUsage::WriteToTopic_Demo_37_Query [FAIL]
>> TxUsage::Sinks_Oltp_WriteToTopics_4_Table
|64.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation
|64.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation
|64.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg
|64.0%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation
|64.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/conveyor_composite/ut/unittest
|64.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/conveyor_composite/ut/unittest
|64.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest
>> TExportToS3WithRebootsTests::CancelShouldSucceedOnViewsAndTables
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72
>> TPQTest::TestManyConsumers
>> TxUsage::WriteToTopic_Demo_28_Table
>> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Table
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly
>> TExportToS3WithRebootsTests::ShouldSucceedAutoDropping [GOOD]
>> TxUsage::Sinks_Oltp_WriteToTopics_4_Table [FAIL]
>> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 [GOOD]
>> DataShardScan::ScanFollowedByUpdate [GOOD]
>> TExportToS3WithRebootsTests::ShouldSucceedOnSingleTopic [GOOD]
>> TNodeBrokerTest::TestRandomActions [GOOD]
>> TExportToS3WithRebootsTests::ShouldSucceedOnSingleView [GOOD]
>> TPQTest::TestPQSmallRead [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23
>> TPQTest::TestReadAndDeleteConsumer
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapStats::AddRowsSomeTablesInTableStore [GOOD] Test command err:
Trying to start YDB, gRPC: 63795, MsgBus: 62027
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026b7/r3tmp/tmpS5b7uD/pdisk_1.dat 2025-05-29T15:21:34.046152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:21:34.068305Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:34.077195Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888158826436022:2079] 1748532093891649 != 1748532093891652 TServer::EnableGrpc on GrpcPort 63795, node 1 2025-05-29T15:21:34.115046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:34.115076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:34.127097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:34.134957Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:34.134972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:34.134975Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:34.135019Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62027 TClient is connected to server localhost:62027 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.363760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.371326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLESTORE `/Root/TableStoreTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:21:34.543390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888163121403984:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.543431Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.583221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.599830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163121404051:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.599894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163121404051:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.599953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163121404051:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.599973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163121404051:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.599992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163121404051:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.600013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163121404051:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.600036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163121404051:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.600054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163121404051:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.600079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163121404051:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.600095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163121404051:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.600117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163121404051:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.600133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888163121404051:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.605319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:34.605339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:34.605355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:34.605361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:34.605380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:34.605386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:34.605397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:34.605403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:34.605416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:34.605422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:34.605429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:34.605435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:34.605460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:34.605469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:34.605490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:34.605496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:34.605509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:34.605515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSn ... ;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE;
[the preceding pair of FALLBACK_ACTOR_LOGGING records (priority=NOTICE;component=332;fline=columnshard_ut_common.h:502) repeats verbatim for the remainder of this capture, alternating T=N5arrow9Int32TypeE and T=N5arrow10StringTypeE]
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=55496;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=55496;columns=3; 2025-05-29T15:23:04.543180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;parent_id=[1:7509888163121404051:2334];path_id=1002;entity_id=2;size=16544;limit=10240;r_count=1999;fline=column_info.h:130;sizes=8272,8272;s_splitted=8280,8264;r_splitted=999,1000; >> Secret::SimpleQueryService [FAIL] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink >> TxUsage::WriteToTopic_Demo_38_Table >> TExportToS3WithRebootsTests::ShouldDisableAutoDropping >> TxUsage::Sinks_Oltp_WriteToTopics_4_Query >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 >> TPQTest::TestReadAndDeleteConsumer [GOOD] >> TxUsage::WriteToTopic_Demo_28_Table [FAIL] >> TPQTest::TestManyConsumers [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_4_Query [FAIL] >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Table [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [GOOD] >> TExportToS3WithRebootsTests::ShouldDisableAutoDropping [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 [GOOD] >> TxUsage::WriteToTopic_Demo_28_Query >>
TxUsage::WriteToTopic_Demo_28_Query [FAIL] >> TxUsage::WriteToTopic_Demo_29_Table >> TxUsage::WriteToTopic_Demo_29_Table [FAIL] >> TxUsage::WriteToTopic_Demo_29_Query >> TxUsage::WriteToTopic_Demo_38_Table [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck >> TxUsage::Transactions_Conflict_On_SeqNo_Table >> TxUsage::Transactions_Conflict_On_SeqNo_Table [FAIL] >> TxUsage::Transactions_Conflict_On_SeqNo_Query >> TxUsage::Transactions_Conflict_On_SeqNo_Query [FAIL] >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other >> TxUsage::WriteToTopic_Demo_29_Query [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [FAIL] Test command err: 2025-05-29T15:23:38.034116Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:23:38.034158Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:23:38.034174Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d0e/r3tmp/tmpGwuZZe/pdisk_1.dat 2025-05-29T15:23:38.157712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:23:38.170888Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:38.175574Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532217582338 != 1748532217582342 2025-05-29T15:23:38.223340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:38.223378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:38.235419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:38.320517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:40.275471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:927:2769], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:40.275502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:938:2774], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:40.275516Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:40.276538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:23:40.289782Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-05-29T15:23:40.439956Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:941:2777], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:23:40.480717Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:1003:2819] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:40.540862Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:1013:2828], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:40.541843Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTUxODUzNzgtYTkzMjNlODYtNzkwNzAwNzEtMTYyMWU2NmM=, ActorId: [1:925:2767], ActorState: ExecuteState, TraceId: 01jwea8mbkcxbp4r4581691xwy, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/datashard_ut_trace.cpp:37, void NKikimr::NTestSuiteTDataShardTrace::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, Ydb::StatusIds::StatusCode, NWilson::TTraceId): (ev->Get()->Record.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13AB26EC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C66289)
NKikimr::NTestSuiteTDataShardTrace::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, Ydb::StatusIds_StatusCode, NWilson::TTraceId)+1990 (0x139917E6)
NKikimr::NTestSuiteTDataShardTrace::TTestCaseTestTraceDistributedSelectViaReadActors::Execute_(NUnitTest::TTestContext&)+1109 (0x1399BC85)
NKikimr::NTestSuiteTDataShardTrace::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139A4707)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C6813E)
NKikimr::NTestSuiteTDataShardTrace::TCurrentTest::Execute()+436 (0x139A3F64)
NUnitTest::TTestFactory::Execute()+803 (0x13C688B3)
NUnitTest::RunMain(int, char**)+3021 (0x13C7A45D)
??+0 (0x7F507C2C5D90)
__libc_start_main+128 (0x7F507C2C5E40)
_start+41 (0x129F9029)
>> TxUsage::WriteToTopic_Demo_38_Query ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardScan::ScanFollowedByUpdate [GOOD] >> TxUsage::WriteToTopic_Demo_38_Query [FAIL] >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other [FAIL] >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Query Test command err: 2025-05-29T15:23:45.304771Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:110:2140], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot
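An aside on the TDataShardTrace assertion above: the check that fired is a plain status comparison inside the test's ExecSQL helper. A minimal sketch of that pattern follows; the signature and the asserted expression are taken from the failure text, while SendSQL, the response event type, and the header paths are assumptions for illustration, not the real implementation at datashard_ut_trace.cpp:37.

    // Hedged reconstruction of the helper named in the backtrace above.
    // Only the asserted expression is verbatim from the log; the rest is assumed.
    #include <ydb/core/testlib/test_client.h>             // assumed header locations
    #include <library/cpp/testing/unittest/registar.h>

    void ExecSQL(Tests::TServer::TPtr server, TActorId sender, const TString& sql,
                 Ydb::StatusIds::StatusCode code, NWilson::TTraceId traceId) {
        auto& runtime = *server->GetRuntime();
        SendSQL(server, sender, sql, std::move(traceId)); // hypothetical submit helper
        // Grab the query response and compare its status with the expected code.
        // This is the comparison quoted in the assertion message:
        // (ev->Get()->Record.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS)
        auto ev = runtime.GrabEdgeEventRethrow<NKqp::TEvKqp::TEvQueryResponse>(sender);
        UNIT_ASSERT_VALUES_EQUAL(ev->Get()->Record.GetYdbStatus(), code);
    }

Here the test expected SUCCESS but the compile error above surfaced as INTERNAL_ERROR, so the comparison failed.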
2025-05-29T15:23:45.344703Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 9437184 2025-05-29T15:23:45.344787Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:23:45.344801Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:23:45.344815Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 9437184 persisting started state actor id [1:197:2153] in generation 2 2025-05-29T15:23:45.378402Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:23:45.384347Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 9437184 2025-05-29T15:23:45.384447Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:23:45.384480Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 9437184, actorId: [1:215:2212] 2025-05-29T15:23:45.384488Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 9437184 2025-05-29T15:23:45.384493Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-05-29T15:23:45.384498Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-05-29T15:23:45.384566Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:23:45.384573Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:23:45.384658Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-05-29T15:23:45.384685Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-05-29T15:23:45.384691Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-05-29T15:23:45.384698Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:23:45.384704Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-05-29T15:23:45.384709Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-05-29T15:23:45.384712Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-05-29T15:23:45.384717Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-05-29T15:23:45.384720Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-05-29T15:23:45.384729Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:211:2209], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:45.384733Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:45.384739Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 9437184, clientId# 
[1:209:2208], serverId# [1:211:2209], sessionId# [0:0:0] 2025-05-29T15:23:45.385126Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:100:2134], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 100 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-05-29T15:23:45.385138Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:23:45.385151Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:23:45.398861Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-05-29T15:23:45.398931Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-05-29T15:23:45.398945Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 1 at tablet 9437184 2025-05-29T15:23:45.398955Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-05-29T15:23:45.398962Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-05-29T15:23:45.398969Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-05-29T15:23:45.398974Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-05-29T15:23:45.399102Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-05-29T15:23:45.399106Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-05-29T15:23:45.399109Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1] at 9437184 to execution unit FinishPropose 2025-05-29T15:23:45.399112Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-05-29T15:23:45.399126Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1] at 9437184 is DelayComplete 2025-05-29T15:23:45.399128Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-05-29T15:23:45.399131Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-05-29T15:23:45.399134Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-05-29T15:23:45.399138Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1832: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-05-29T15:23:45.410279Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-05-29T15:23:45.410318Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-05-29T15:23:45.410326Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1933: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-05-29T15:23:45.410342Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-05-29T15:23:45.410361Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 9437184 not sending time cast registration request in state WaitScheme 2025-05-29T15:23:45.410519Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:221:2218], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:45.410534Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:45.410544Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 9437184, clientId# [1:220:2217], serverId# [1:221:2218], sessionId# [0:0:0] 2025-05-29T15:23:45.410568Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269287424, Sender [1:100:2134], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-05-29T15:23:45.410573Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3147: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-05-29T15:23:45.410624Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1790: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-05-29T15:23:45.410634Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1805: Execution status for [1000001:1] at 9437184 is Executed 2025-05-29T15:23:45.410639Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-05-29T15:23:45.410645Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-05-29T15:23:45.411561Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 100 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-05-29T15:23:45.411588Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-05-29T15:23:45.411677Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:23:45.411687Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:23:45.411698Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-05-29T15:23:45.411708Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:23:45.411714Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-05-29T15:23:45.411723Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-05-29T15:23:45.411729Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [100 ... 
on::Execute at 9437185 2025-05-29T15:23:46.507888Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:23:46.507894Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437185 for ReadTableScan 2025-05-29T15:23:46.507897Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000006:36] at 9437185 on unit ReadTableScan 2025-05-29T15:23:46.507902Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437185 error: , IsFatalError: 0 2025-05-29T15:23:46.507908Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000006:36] at 9437185 is Executed 2025-05-29T15:23:46.507911Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000006:36] at 9437185 executing on unit ReadTableScan 2025-05-29T15:23:46.507914Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000006:36] at 9437185 to execution unit CompleteOperation 2025-05-29T15:23:46.507918Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000006:36] at 9437185 on unit CompleteOperation 2025-05-29T15:23:46.507971Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000006:36] at 9437185 is DelayComplete 2025-05-29T15:23:46.507974Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000006:36] at 9437185 executing on unit CompleteOperation 2025-05-29T15:23:46.507977Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000006:36] at 9437185 to execution unit CompletedOperations 2025-05-29T15:23:46.507980Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000006:36] at 9437185 on unit CompletedOperations 2025-05-29T15:23:46.507984Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000006:36] at 9437185 is Executed 2025-05-29T15:23:46.507988Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000006:36] at 9437185 executing on unit CompletedOperations 2025-05-29T15:23:46.507991Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [1000006:36] at 9437185 has finished 2025-05-29T15:23:46.507993Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:23:46.507996Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2025-05-29T15:23:46.507999Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2025-05-29T15:23:46.508001Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2025-05-29T15:23:46.508027Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:459:2399], Recipient [1:459:2399]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:23:46.508029Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:23:46.508033Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2025-05-29T15:23:46.508035Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 
immediate 0 planned 1 2025-05-29T15:23:46.508038Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437186 for ReadTableScan 2025-05-29T15:23:46.508040Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000006:36] at 9437186 on unit ReadTableScan 2025-05-29T15:23:46.508042Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437186 error: , IsFatalError: 0 2025-05-29T15:23:46.508045Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000006:36] at 9437186 is Executed 2025-05-29T15:23:46.508047Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000006:36] at 9437186 executing on unit ReadTableScan 2025-05-29T15:23:46.508049Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000006:36] at 9437186 to execution unit CompleteOperation 2025-05-29T15:23:46.508052Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000006:36] at 9437186 on unit CompleteOperation 2025-05-29T15:23:46.508082Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000006:36] at 9437186 is DelayComplete 2025-05-29T15:23:46.508084Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000006:36] at 9437186 executing on unit CompleteOperation 2025-05-29T15:23:46.508086Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000006:36] at 9437186 to execution unit CompletedOperations 2025-05-29T15:23:46.508088Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000006:36] at 9437186 on unit CompletedOperations 2025-05-29T15:23:46.508091Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000006:36] at 9437186 is Executed 2025-05-29T15:23:46.508093Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000006:36] at 9437186 executing on unit CompletedOperations 2025-05-29T15:23:46.508095Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [1000006:36] at 9437186 has finished 2025-05-29T15:23:46.508097Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:23:46.508099Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2025-05-29T15:23:46.508103Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2025-05-29T15:23:46.508105Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2025-05-29T15:23:46.508118Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:235:2227], Recipient [1:235:2227]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:23:46.508121Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:23:46.508124Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-05-29T15:23:46.508127Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:23:46.508129Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437184 for 
ReadTableScan 2025-05-29T15:23:46.508131Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000006:36] at 9437184 on unit ReadTableScan 2025-05-29T15:23:46.508133Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437184 error: , IsFatalError: 0 2025-05-29T15:23:46.508136Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000006:36] at 9437184 is Executed 2025-05-29T15:23:46.508138Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000006:36] at 9437184 executing on unit ReadTableScan 2025-05-29T15:23:46.508140Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000006:36] at 9437184 to execution unit CompleteOperation 2025-05-29T15:23:46.508143Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000006:36] at 9437184 on unit CompleteOperation 2025-05-29T15:23:46.508174Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000006:36] at 9437184 is DelayComplete 2025-05-29T15:23:46.508176Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000006:36] at 9437184 executing on unit CompleteOperation 2025-05-29T15:23:46.508178Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000006:36] at 9437184 to execution unit CompletedOperations 2025-05-29T15:23:46.508180Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000006:36] at 9437184 on unit CompletedOperations 2025-05-29T15:23:46.508184Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000006:36] at 9437184 is Executed 2025-05-29T15:23:46.508186Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000006:36] at 9437184 executing on unit CompletedOperations 2025-05-29T15:23:46.508188Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [1000006:36] at 9437184 has finished 2025-05-29T15:23:46.508190Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:23:46.508192Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-05-29T15:23:46.508194Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-05-29T15:23:46.508196Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2025-05-29T15:23:46.519435Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-05-29T15:23:46.519455Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-05-29T15:23:46.519462Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000006:36] at 9437184 on unit CompleteOperation 2025-05-29T15:23:46.519479Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000006 : 36] from 9437184 at tablet 9437184 send result to client [1:100:2134], exec latency: 2 ms, propose latency: 4 ms 2025-05-29T15:23:46.519487Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-05-29T15:23:46.519561Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2025-05-29T15:23:46.519565Z node 1 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2025-05-29T15:23:46.519568Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000006:36] at 9437185 on unit CompleteOperation 2025-05-29T15:23:46.519574Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000006 : 36] from 9437185 at tablet 9437185 send result to client [1:100:2134], exec latency: 2 ms, propose latency: 4 ms 2025-05-29T15:23:46.519577Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-05-29T15:23:46.519627Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-05-29T15:23:46.519631Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-05-29T15:23:46.519633Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000006:36] at 9437186 on unit CompleteOperation 2025-05-29T15:23:46.519638Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000006 : 36] from 9437186 at tablet 9437186 send result to client [1:100:2134], exec latency: 2 ms, propose latency: 4 ms 2025-05-29T15:23:46.519641Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> TxUsage::WriteToTopic_Demo_39_Table ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbFilterPushdown >> TxUsage::WriteToTopic_Demo_39_Table [FAIL] 2025-05-29 15:23:31,855 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-29 15:23:31,916 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination:
pid rss ref pdirt
3583873 46.0M 45.6M 23.2M test_tool run_ut @/home/runner/.ya/build/build_root/ciyv/00193a/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk8/testing_out_stuff/te
3584581 371M 372M 127M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/ciyv/00193a/ydb/core/kqp/ut/federated_query/generic_ut/test-results/uni
Test command err: Trying to start YDB, gRPC: 10692, MsgBus: 30165 2025-05-29T15:22:32.677393Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888412274830696:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:32.677411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00193a/r3tmp/tmpBYjcW4/pdisk_1.dat 2025-05-29T15:22:32.731727Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:32.735018Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888412274830675:2079] 1748532152677279 != 1748532152677282 TServer::EnableGrpc on GrpcPort 10692, node 1 2025-05-29T15:22:32.746269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:32.746281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:32.746282Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:32.746313Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30165 TClient is connected to server localhost:30165 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
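The compile failures that fill the rest of this section all hit one and the same statement: the metadata initializer's UPSERT into `//Root/.metadata/initialization/migrations`, whose yql_text is quoted in the request dumps below with its parameter type mangled by log escaping (`List>` was originally a `List<Struct<...>>`). A hedged reconstruction, reading the inner struct off the parameters dump in those records (componentId and modificationId as Utf8, instant as Uint32) — the exact original spelling is not recoverable from this log:

    -- Reconstructed sketch, not the literal source; types inferred from the
    -- `parameters` dump below, where the log only shows the mangled `List>`.
    DECLARE $objects AS List<Struct<componentId: Utf8, modificationId: Utf8, instant: Uint32>>;
    UPSERT INTO `//Root/.metadata/initialization/migrations`
    SELECT componentId, modificationId, instant FROM AS_TABLE($objects);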
2025-05-29T15:22:32.808812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:32.808840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:32.809491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:22:32.809801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:22:32.961968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888412274831335:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:32.961996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.679176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-05-29T15:22:33.739569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888416569798762:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.739601Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.739680Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888416569798767:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.740463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-05-29T15:22:33.746538Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888416569798769:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:22:33.800266Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888416569798809:2389] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:33.868604Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888416569798859:2362], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:33.868749Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTJkZjQ1MS04ZGUzYTdmNi0zYjg3MDhiLWExMjk0MmJi, ActorId: [1:7509888416569798756:2338], ActorState: ExecuteState, TraceId: 01jwea6kg3ch2cvwcwbv78r46s, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea6kg09bywjf6sz7jjd7ft 2025-05-29T15:22:33.869971Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NTJkZjQ1MS04ZGUzYTdmNi0zYjg3MDhiLWExMjk0MmJi" tx_control { tx_id: "01jwea6kg09bywjf6sz7jjd7ft" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532153 } } } } } ; 2025-05-29T15:22:33.870032Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:22:34.952654Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888420864766237:2396], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:34.953577Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=N2M5MmUxMDMtZjM1YzUwYi1iODgyNWUzNS1kYzc5MGQxNg==, ActorId: [1:7509888420864766207:2383], ActorState: ExecuteState, TraceId: 01jwea6mht7hhbpbnbrn3g52wk, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea6mhk3wzbxbdqyb0159re 2025-05-29T15:22:34.954364Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=N2M5MmUxMDMtZjM1YzUwYi1iODgyNWUzNS1kYzc5MGQxNg==" tx_control { tx_id: "01jwea6mhk3wzbxbdqyb0159re" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532154 } } } } } ; 2025-05-29T15:22:34.954460Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:22:36.019526Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888429454700910:2430], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:36.019711Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjdmMjliNi0yYTc2YzQxZC04Njg5MGE2OS1hYWRjZjlhOQ==, ActorId: [1:7509888425159733583:2416], ActorState: ... or=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:28.181374Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888652793005720:4562], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:28.182282Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGRjZmYxOTYtNGM2MDY0MmEtNTkxNTJkN2EtNzYzYWY1MDk=, ActorId: [1:7509888652793005690:4549], ActorState: ExecuteState, TraceId: 01jwea88h94ma2xs815jtehnsm, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea88h1famwjnfzsf7m8103 2025-05-29T15:23:28.182974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZGRjZmYxOTYtNGM2MDY0MmEtNTkxNTJkN2EtNzYzYWY1MDk=" tx_control { tx_id: "01jwea88h1famwjnfzsf7m8103" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532208 } } } } } ; 2025-05-29T15:23:28.183228Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:29.245699Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888657087973125:4605], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:29.246551Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTUyNjI2ZTItMTk3ZTZiZmItOWU5NjY2YmEtOGJhZmVhMWY=, ActorId: [1:7509888657087973095:4592], ActorState: ExecuteState, TraceId: 01jwea89jjey8gvvpkx7wn5crb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea89jg9ch8qc58spg8vw1d 2025-05-29T15:23:29.247269Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=YTUyNjI2ZTItMTk3ZTZiZmItOWU5NjY2YmEtOGJhZmVhMWY=" tx_control { tx_id: "01jwea89jg9ch8qc58spg8vw1d" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532209 } } } } } ; 2025-05-29T15:23:29.247380Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:30.307968Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888661382940530:4648], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:30.308192Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWEwYWI3M2YtZjg4NGUyNzMtZDJiNWU1MWUtYzJiYjExZWQ=, ActorId: [1:7509888661382940500:4635], ActorState: ExecuteState, TraceId: 01jwea8akseyggjytq0rk0hs2j, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea8akpb9fkxqkhpekt807q 2025-05-29T15:23:30.308840Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZWEwYWI3M2YtZjg4NGUyNzMtZDJiNWU1MWUtYzJiYjExZWQ=" tx_control { tx_id: "01jwea8akpb9fkxqkhpekt807q" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532210 } } } } } ; 2025-05-29T15:23:30.308890Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:31.372238Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888665677907935:4691], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:31.373154Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjczYzkxZTItNjcyMjNhNjEtMzI1NTcxYTctNjMxYzlhMzM=, ActorId: [1:7509888665677907905:4678], ActorState: ExecuteState, TraceId: 01jwea8bn0ftz0gt1sjate3q6v, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea8bmtdbjt689nkrhfzpg5 2025-05-29T15:23:31.374692Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=MjczYzkxZTItNjcyMjNhNjEtMzI1NTcxYTctNjMxYzlhMzM=" tx_control { tx_id: "01jwea8bmtdbjt689nkrhfzpg5" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532211 } } } } } ; 2025-05-29T15:23:31.374795Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/00193a/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk8/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/00193a/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk8/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) >> TxUsage::WriteToTopic_Demo_39_Query ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveTokenSelectAll 2025-05-29 15:23:32,063 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-29 15:23:32,146 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination: pid rss ref pdirt 3584118 46.0M 45.9M 23.2M test_tool run_ut @/home/runner/.ya/build/build_root/ciyv/00191c/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk6/testing_out_stuff/te 3584726 371M 372M 126M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/ciyv/00191c/ydb/core/kqp/ut/federated_query/generic_ut/test-results/uni Test command err: Trying to start YDB, gRPC: 3150, MsgBus: 5000 2025-05-29T15:22:32.805734Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888409080864135:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:32.805776Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00191c/r3tmp/tmpqm2BXZ/pdisk_1.dat 2025-05-29T15:22:32.859222Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:32.859390Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888409080864114:2079] 1748532152805590 != 1748532152805593 TServer::EnableGrpc on GrpcPort 3150, node 1 2025-05-29T15:22:32.869312Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:32.869326Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:32.869328Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:32.869370Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5000 2025-05-29T15:22:32.908285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:32.908318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:32.909479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5000 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:22:32.938277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:33.150255Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888413375832070:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.150300Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.807756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-05-29T15:22:33.867875Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888413375832200:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.867898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.867925Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888413375832205:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.868722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-05-29T15:22:33.872287Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888413375832207:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:22:33.946455Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888413375832247:2387] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:34.052766Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888417670799595:2362], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:34.053721Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NmY2MDBhMjYtYWEzZjFjYzgtZGFlODExNWItNDUyZGI4Zjg=, ActorId: [1:7509888413375832194:2338], ActorState: ExecuteState, TraceId: 01jwea6knnfefchbjtrpfnz5fe, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea6kng6myczjdvw5c4aapb 2025-05-29T15:22:34.055855Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NmY2MDBhMjYtYWEzZjFjYzgtZGFlODExNWItNDUyZGI4Zjg=" tx_control { tx_id: "01jwea6kng6myczjdvw5c4aapb" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532153 } } } } } ; 2025-05-29T15:22:34.055977Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:22:35.122095Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888421965766972:2396], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:35.122853Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODViYTk0ZWEtNzM5Y2ZmMjUtM2M1Njc1MWEtZWFlOTlkYjc=, ActorId: [1:7509888421965766942:2383], ActorState: ExecuteState, TraceId: 01jwea6mq6a6hqg8aznec630xs, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea6mq4cf7q5f7gxzh8ag84 2025-05-29T15:22:35.123435Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ODViYTk0ZWEtNzM5Y2ZmMjUtM2M1Njc1MWEtZWFlOTlkYjc=" tx_control { tx_id: "01jwea6mq4cf7q5f7gxzh8ag84" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532155 } } } } } ; 2025-05-29T15:22:35.123491Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:22:36.193403Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888426260734349:2430], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:36.193578Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWNlZWU4NzgtOGVlZWFlZjYtYzhlYWM5M2MtNmM1OGRiNjc=, ActorId: [1:7509888426260734317:2416], ActorStat ... or=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:28.537804Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888649599039154:4562], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:28.537965Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDg5MTBlZjMtNjY3YzE4YWMtNjdkODcyNTUtZmNmZTYyZDY=, ActorId: [1:7509888649599039124:4549], ActorState: ExecuteState, TraceId: 01jwea88wgfg0axr2xb63wa5cx, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea88wdeh5m3webcyhqkkqq 2025-05-29T15:23:28.538576Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZDg5MTBlZjMtNjY3YzE4YWMtNjdkODcyNTUtZmNmZTYyZDY=" tx_control { tx_id: "01jwea88wdeh5m3webcyhqkkqq" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532208 } } } } } ; 2025-05-29T15:23:28.538693Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:29.634035Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888653894006558:4605], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:29.635502Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjBhMmU1ZWMtNjRlN2I2OWMtYjBlNGI1ZGYtNGUzMDk5ZWM=, ActorId: [1:7509888653894006528:4592], ActorState: ExecuteState, TraceId: 01jwea89yk3q39htydpm3fz8gy, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea89yedtpddvktfq4n09cd 2025-05-29T15:23:29.636442Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=YjBhMmU1ZWMtNjRlN2I2OWMtYjBlNGI1ZGYtNGUzMDk5ZWM=" tx_control { tx_id: "01jwea89yedtpddvktfq4n09cd" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532209 } } } } } ; 2025-05-29T15:23:29.636891Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:30.823887Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888658188973962:4648], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:30.825391Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDgzMjkxZC04ODBiODA5MC05N2M4ZjVmNC1kMzI4YmVhMg==, ActorId: [1:7509888658188973932:4635], ActorState: ExecuteState, TraceId: 01jwea8b383anmny51hrycbedr, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea8b353yegprpgbj94mcf1 2025-05-29T15:23:30.838729Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZDgzMjkxZC04ODBiODA5MC05N2M4ZjVmNC1kMzI4YmVhMg==" tx_control { tx_id: "01jwea8b353yegprpgbj94mcf1" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532210 } } } } } ; 2025-05-29T15:23:30.838903Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:31.911786Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888662483941368:4691], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:31.912576Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzBlMTg5ZGMtYWI5NzI1ZTktOTI4MmYzYTItYjYwMDNjOTA=, ActorId: [1:7509888662483941337:4678], ActorState: ExecuteState, TraceId: 01jwea8c5w1d4j160b8h3m9fbw, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea8c5s4bxz418c737s2pag 2025-05-29T15:23:31.913153Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NzBlMTg5ZGMtYWI5NzI1ZTktOTI4MmYzYTItYjYwMDNjOTA=" tx_control { tx_id: "01jwea8c5s4bxz418c737s2pag" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532211 } } } } } ; 2025-05-29T15:23:31.913462Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/00191c/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk6/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/00191c/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk6/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopTokenSelectAll 2025-05-29 15:23:31,698 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-29 15:23:31,763 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination: pid rss ref pdirt 3583800 46.0M 45.7M 23.2M test_tool run_ut @/home/runner/.ya/build/build_root/ciyv/00194a/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk3/testing_out_stuff/te 3584403 370M 372M 127M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/ciyv/00194a/ydb/core/kqp/ut/federated_query/generic_ut/test-results/uni Test command err: Trying to start YDB, gRPC: 16177, MsgBus: 23215 2025-05-29T15:22:32.544801Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888411385875777:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:32.544822Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00194a/r3tmp/tmp7xbTtd/pdisk_1.dat 2025-05-29T15:22:32.606695Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888411385875756:2079] 1748532152544667 != 1748532152544670 2025-05-29T15:22:32.608708Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16177, node 1 2025-05-29T15:22:32.642900Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:32.642917Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:32.642920Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:32.642973Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23215 2025-05-29T15:22:32.680069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:32.680099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:32.681191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:22:32.713222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:32.875193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888411385876413:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:32.875220Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.546461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-05-29T15:22:33.607000Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888415680843839:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.607027Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.607041Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888415680843844:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.607636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-05-29T15:22:33.613141Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888415680843847:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-05-29T15:22:33.697419Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888415680843887:2388] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:33.772883Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888415680843938:2362], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:33.772993Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjkxMjlkZDUtNWMzODliOWEtNzgwNTRmYTYtY2E5ZjIxZDc=, ActorId: [1:7509888415680843834:2338], ActorState: ExecuteState, TraceId: 01jwea6kd38xsn2t7a1ef4amrp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea6kcz6cddbv8arpq52w41 2025-05-29T15:22:33.773955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=MjkxMjlkZDUtNWMzODliOWEtNzgwNTRmYTYtY2E5ZjIxZDc=" tx_control { tx_id: "01jwea6kcz6cddbv8arpq52w41" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532153 } } } } } ; 2025-05-29T15:22:33.774000Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:22:34.870689Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888419975811317:2396], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:34.871581Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWEyMTBlYmQtYjc1OGY5NTMtNmI2NGQ4YmEtNTgzZGEwNjc=, ActorId: [1:7509888419975811287:2383], ActorState: ExecuteState, TraceId: 01jwea6mf608stnk2jq0t6tdck, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea6mer0v085xj7r79fp0ff 2025-05-29T15:22:34.872261Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=OWEyMTBlYmQtYjc1OGY5NTMtNmI2NGQ4YmEtNTgzZGEwNjc=" tx_control { tx_id: "01jwea6mer0v085xj7r79fp0ff" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532154 } } } } } ; 2025-05-29T15:22:34.872313Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:22:35.938768Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888424270778694:2430], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:35.939860Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NmQzMTQzZTUtZGVlYjE5ODAtMTUyZTliYzctNTRmOWY2MzM=, ActorId: [1:7509888424270778664:2417], Acto ... or=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:28.170589Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888651904050823:4562], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:28.170772Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGNkOGFlODAtYjI2ZDJjNTEtM2U0Mjc3NTQtNjJjMDRkMzM=, ActorId: [1:7509888651904050793:4549], ActorState: ExecuteState, TraceId: 01jwea88gt5a47fcp720gest8c, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea88gp93ar7hpbm44c8rmc 2025-05-29T15:23:28.173190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZGNkOGFlODAtYjI2ZDJjNTEtM2U0Mjc3NTQtNjJjMDRkMzM=" tx_control { tx_id: "01jwea88gp93ar7hpbm44c8rmc" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532208 } } } } } ; 2025-05-29T15:23:28.173329Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:29.261662Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888656199018229:4605], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:29.262898Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzQ0Y2IwZi1lY2NlODJjNy1lZGM3MDQ1Mi1kNzczNGNiYg==, ActorId: [1:7509888656199018199:4592], ActorState: ExecuteState, TraceId: 01jwea89jz0ykn1y8881sfmwq2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea89jvb8z556z7ypvgsa2n 2025-05-29T15:23:29.263659Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NzQ0Y2IwZi1lY2NlODJjNy1lZGM3MDQ1Mi1kNzczNGNiYg==" tx_control { tx_id: "01jwea89jvb8z556z7ypvgsa2n" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532209 } } } } } ; 2025-05-29T15:23:29.263791Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:30.328895Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888660493985635:4648], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:30.329257Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDU2ZjRjODAtNjI0OWE2NjMtZmRlMmNhODItZWYzNWViZjQ=, ActorId: [1:7509888660493985605:4635], ActorState: ExecuteState, TraceId: 01jwea8amfbs6c2kn99frywz4a, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea8amcfd4mp71hp8mkf8jj 2025-05-29T15:23:30.329720Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NDU2ZjRjODAtNjI0OWE2NjMtZmRlMmNhODItZWYzNWViZjQ=" tx_control { tx_id: "01jwea8amcfd4mp71hp8mkf8jj" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532210 } } } } } ; 2025-05-29T15:23:30.329781Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:31.409094Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888664788953041:4691], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:31.409225Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjBiZThhYzAtNzc5OTU0YjEtOGU5MjIyMjgtNzU5N2ViZmU=, ActorId: [1:7509888664788953011:4678], ActorState: ExecuteState, TraceId: 01jwea8bp4fn3zdaaz8eahkc73, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea8bny354dfyt5zjmwvz7x 2025-05-29T15:23:31.410486Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NjBiZThhYzAtNzc5OTU0YjEtOGU5MjIyMjgtNzU5N2ViZmU=" tx_control { tx_id: "01jwea8bny354dfyt5zjmwvz7x" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532211 } } } } } ; 2025-05-29T15:23:31.410608Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/00194a/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk3/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/00194a/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk3/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopBasicSelectAll 2025-05-29 15:23:31,509 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-29 15:23:31,567 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination: pid rss ref pdirt 3583664 46.1M 46.1M 23.4M test_tool run_ut @/home/runner/.ya/build/build_root/ciyv/001953/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/te 3584253 371M 373M 127M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/ciyv/001953/ydb/core/kqp/ut/federated_query/generic_ut/test-results/uni Test command err: Trying to start YDB, gRPC: 20027, MsgBus: 4010 2025-05-29T15:22:32.312938Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888410431496715:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:32.313112Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001953/r3tmp/tmpHzTUV9/pdisk_1.dat 2025-05-29T15:22:32.366366Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888410431496690:2079] 1748532152312720 != 1748532152312723 2025-05-29T15:22:32.370955Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20027, node 1 2025-05-29T15:22:32.380161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:32.380176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:32.380179Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:32.380221Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4010 2025-05-29T15:22:32.415431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:32.415472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:32.416581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4010 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:22:32.451549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:32.653720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888410431497349:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:32.653758Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.315015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-05-29T15:22:33.377021Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888414726464775:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.377051Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.377175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888414726464781:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.378122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-05-29T15:22:33.382490Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888414726464783:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:22:33.475102Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888414726464823:2388] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:33.562727Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888414726464874:2362], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:33.562925Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2UyYWI1OTgtNWUxMzgxY2QtMmI2ZThmNGMtYWI2YTg1YzQ=, ActorId: [1:7509888414726464770:2338], ActorState: ExecuteState, TraceId: 01jwea6k6c0tttgnbgghenxym5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea6k6783n7yr2mehcj852q 2025-05-29T15:22:33.564040Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=M2UyYWI1OTgtNWUxMzgxY2QtMmI2ZThmNGMtYWI2YTg1YzQ=" tx_control { tx_id: "01jwea6k6783n7yr2mehcj852q" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532153 } } } } } ; 2025-05-29T15:22:33.564108Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:22:34.649289Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888419021432253:2396], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:34.650166Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTYyZDk4MWEtNWE1OGQxNDAtNjk4ODMzMDgtMjMzM2Y4ODI=, ActorId: [1:7509888419021432223:2383], ActorState: ExecuteState, TraceId: 01jwea6m8d5yvahq2t0sdea4zd, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea6m874mxehkcbdr95mnht 2025-05-29T15:22:34.650954Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=YTYyZDk4MWEtNWE1OGQxNDAtNjk4ODMzMDgtMjMzM2Y4ODI=" tx_control { tx_id: "01jwea6m874mxehkcbdr95mnht" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532154 } } } } } ; 2025-05-29T15:22:34.651052Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:22:35.742814Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888423316399631:2430], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:35.742994Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTM5NTRjZS04MDE2OWJmOC1kNWI0OGE5NS0zYWE3MzBiMg==, ActorId: [1:7509888423316399599:2416], ActorSt ... or=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:27.793271Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888646654704460:4562], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:27.793418Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTIxMzM4YWQtOWIzOGNjZDMtNGM5YWYxMTktOTNkZmMyNTg=, ActorId: [1:7509888646654704430:4549], ActorState: ExecuteState, TraceId: 01jwea88568svbxv39qt6nfjmw, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea8853c922b6c8fm3y40r3 2025-05-29T15:23:27.794211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NTIxMzM4YWQtOWIzOGNjZDMtNGM5YWYxMTktOTNkZmMyNTg=" tx_control { tx_id: "01jwea8853c922b6c8fm3y40r3" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532207 } } } } } ; 2025-05-29T15:23:27.794308Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:28.854281Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888650949671865:4605], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:28.855248Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NGVkMzk3MTItNDIyNDdjZDctNjExYmNlOWYtNmJjYzgwN2I=, ActorId: [1:7509888650949671835:4592], ActorState: ExecuteState, TraceId: 01jwea896c5amytn84pbmvgw6z, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea896a049brkbpkvjdsqbb 2025-05-29T15:23:28.855768Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NGVkMzk3MTItNDIyNDdjZDctNjExYmNlOWYtNmJjYzgwN2I=" tx_control { tx_id: "01jwea896a049brkbpkvjdsqbb" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532208 } } } } } ; 2025-05-29T15:23:28.855934Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:29.936969Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888655244639269:4648], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:29.937774Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTcyYzIzYTItOGUyODJlNDUtZmIwZDQ3ZGItM2FlZWQ0Zjg=, ActorId: [1:7509888655244639239:4635], ActorState: ExecuteState, TraceId: 01jwea8a856015m66vxrawm04m, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea8a80592z083j2npdzpdb 2025-05-29T15:23:29.938390Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=YTcyYzIzYTItOGUyODJlNDUtZmIwZDQ3ZGItM2FlZWQ0Zjg=" tx_control { tx_id: "01jwea8a80592z083j2npdzpdb" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532209 } } } } } ; 2025-05-29T15:23:29.938462Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:31.006197Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888659539606674:4691], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:31.007354Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzllNmYxN2UtMmJiZGNmYWMtYTM4ZjQ4MzItYTAzOTlkNjA=, ActorId: [1:7509888659539606644:4678], ActorState: ExecuteState, TraceId: 01jwea8b9k96zcenwjgmwq9jwv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea8b9fatcq3wmtep9mdvda 2025-05-29T15:23:31.008345Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NzllNmYxN2UtMmJiZGNmYWMtYTM4ZjQ4MzItYTAzOTlkNjA=" tx_control { tx_id: "01jwea8b9fatcq3wmtep9mdvda" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532210 } } } } } ; 2025-05-29T15:23:31.008394Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/001953/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/001953/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestManyConsumers [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:109:2057] recipient: [1:102:2135] 2025-05-29T15:22:15.367218Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3098: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-05-29T15:22:15.368458Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3130: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-05-29T15:22:15.368526Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:751: [PQ: 72057594037927937] doesn't have tx info 2025-05-29T15:22:15.368532Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:763: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-05-29T15:22:15.368535Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:975: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-05-29T15:22:15.368540Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:4889: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-05-29T15:22:15.368547Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.368555Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927938 is [1:154:2174] sender: [1:155:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:181:2057] recipient: [1:14:2061] 2025-05-29T15:22:15.371655Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:180:2194], now have 1 active actors on pipe 2025-05-29T15:22:15.371674Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1460: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-05-29T15:22:15.373534Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1646: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-05-29T15:22:15.374065Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:590: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-05-29T15:22:15.374085Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.374183Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] 
Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-05-29T15:22:15.374201Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-05-29T15:22:15.374259Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:75: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-05-29T15:22:15.374321Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:187:2199] 2025-05-29T15:22:15.374730Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:55: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2025-05-29T15:22:15.374756Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:187:2199] 2025-05-29T15:22:15.374765Z node 1 :PERSQUEUE DEBUG: partition.cpp:574: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-05-29T15:22:15.375104Z node 1 :PERSQUEUE DEBUG: partition.cpp:3850: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-05-29T15:22:15.375116Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-05-29T15:22:15.375120Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-05-29T15:22:15.375123Z node 1 :PERSQUEUE DEBUG: partition.cpp:3155: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2025-05-29T15:22:15.375126Z node 1 :PERSQUEUE DEBUG: partition.cpp:3224: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2025-05-29T15:22:15.375143Z node 1 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:22:15.375146Z node 1 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:22:15.375149Z node 1 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:22:15.375151Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:22:15.375153Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-05-29T15:22:15.375155Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-05-29T15:22:15.375157Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser1 2025-05-29T15:22:15.375159Z node 1 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser1 2025-05-29T15:22:15.375163Z node 1 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:22:15.375166Z node 1 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:22:15.375181Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:15.375184Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:779: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-05-29T15:22:15.375209Z node 1 :PERSQUEUE DEBUG: read.h:262: CacheProxy. 
Passthrough write request to KV 2025-05-29T15:22:15.375643Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:22:15.375714Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:194:2204], now have 1 active actors on pipe 2025-05-29T15:22:15.376974Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [1:197:2206], now have 1 active actors on pipe 2025-05-29T15:22:15.376991Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-05-29T15:22:15.376996Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-05-29T15:22:15.377090Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2165: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 0 messageNo: 0 size: 511957 2025-05-29T15:22:15.377141Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2165: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 1 messageNo: 0 size: 511957 2025-05-29T15:22:15.377189Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2165: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 2 messageNo: 0 size: 511957 2025-05-29T15:22:15.377238Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2165: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 3 messageNo: 0 size: 511957 2025-05-29T15:22:15.377249Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2165: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size: 49324 2025-05-29T15:22:15.377253Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size 49324 offset: 0 2025-05-29T15:22:15.377269Z node 1 :PERSQUEUE DEBUG: event_helpers.cpp:40: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2025-05-29T15:22:15.377285Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1424: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-05-29T15:22:15.377289Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:401: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: ... 
UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.220320Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.224720Z node 90 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [90:1003:2998], now have 1 active actors on pipe 2025-05-29T15:23:46.225196Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.227188Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.231556Z node 90 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [90:1006:3001], now have 1 active actors on pipe 2025-05-29T15:23:46.232201Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.234307Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.238189Z node 90 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [90:1009:3004], now have 1 active actors on pipe 2025-05-29T15:23:46.238677Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.241002Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.245200Z node 90 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [90:1012:3007], now have 1 active 
actors on pipe 2025-05-29T15:23:46.245918Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.248277Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.252718Z node 90 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [90:1015:3010], now have 1 active actors on pipe 2025-05-29T15:23:46.253222Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.255390Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.259704Z node 90 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [90:1018:3013], now have 1 active actors on pipe 2025-05-29T15:23:46.260204Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.262378Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.267726Z node 90 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [90:1021:3016], now have 1 active actors on pipe 2025-05-29T15:23:46.268504Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.271391Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.276966Z node 90 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [90:1024:3019], now have 1 active actors on pipe 2025-05-29T15:23:46.277746Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.280686Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.286215Z node 90 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [90:1027:3022], now have 1 active actors on pipe 2025-05-29T15:23:46.286978Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.289877Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.295723Z node 90 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [90:1030:3025], now have 1 active actors on pipe 2025-05-29T15:23:46.296458Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.299307Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.304612Z node 90 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [90:1033:3028], now have 1 active actors on pipe 2025-05-29T15:23:46.305241Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.308043Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 
UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.313501Z node 90 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [90:1036:3031], now have 1 active actors on pipe 2025-05-29T15:23:46.314205Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.316992Z node 90 :PERSQUEUE DEBUG: partition.cpp:858: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-05-29T15:23:46.322488Z node 90 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [90:1039:3034] connected; active server actors: 1 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Query [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] >> TxUsage::WriteToTopic_Demo_39_Query [FAIL] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseManagedSelectAll 2025-05-29 15:23:31,902 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-29 15:23:31,974 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination: pid rss ref pdirt 3583885 46.0M 45.6M 23.2M test_tool run_ut @/home/runner/.ya/build/build_root/ciyv/00192b/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/te 3584694 374M 375M 129M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/ciyv/00192b/ydb/core/kqp/ut/federated_query/generic_ut/test-results/uni Test command err: Trying to start YDB, gRPC: 14901, MsgBus: 10587 2025-05-29T15:22:32.757389Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888411494501069:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:32.757471Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00192b/r3tmp/tmpDEfSK6/pdisk_1.dat 2025-05-29T15:22:32.812429Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888411494501045:2079] 1748532152757144 != 1748532152757147 2025-05-29T15:22:32.814001Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14901, node 1 2025-05-29T15:22:32.824513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:32.824529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:32.824531Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:32.824580Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10587 2025-05-29T15:22:32.860195Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:32.860249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:32.861311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10587 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:22:32.893068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:33.164006Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888415789469001:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.164026Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.760316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-05-29T15:22:33.819171Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888415789469132:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.819205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888415789469137:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.819206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:33.819726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-05-29T15:22:33.823178Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888415789469139:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:22:33.903871Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888415789469179:2388] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:33.971188Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888415789469230:2362], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:33.971959Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDNjOGJlMmYtZWIwNGJjMTAtZTYwNTdjYjMtODgwMmM5ZDg=, ActorId: [1:7509888415789469126:2338], ActorState: ExecuteState, TraceId: 01jwea6kk46pw0kyjnfq9n02gj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea6kk05t76hd48maka876r 2025-05-29T15:22:33.973178Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZDNjOGJlMmYtZWIwNGJjMTAtZTYwNTdjYjMtODgwMmM5ZDg=" tx_control { tx_id: "01jwea6kk05t76hd48maka876r" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532153 } } } } } ; 2025-05-29T15:22:33.973246Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:22:35.044009Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888424379403903:2396], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:35.044196Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzRmZTJmOGUtMWFkZDU2MmItYzk0ZGJlMjQtYWU2NGZiMTE=, ActorId: [1:7509888420084436578:2383], ActorState: ExecuteState, TraceId: 01jwea6mmpde94stc6rnbj3fph, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea6mmg6qm2cew4yygenc8s 2025-05-29T15:22:35.047153Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NzRmZTJmOGUtMWFkZDU2MmItYzk0ZGJlMjQtYWU2NGZiMTE=" tx_control { tx_id: "01jwea6mmg6qm2cew4yygenc8s" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532154 } } } } } ; 2025-05-29T15:22:35.047198Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:22:36.086680Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888428674371279:2429], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:36.086824Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWJiYTM4Y2ItMzQ5ZGJiM2YtMzgyMzFhMDktYmQzNGM1OTU=, ActorId: [1:7509888428674371249:2416], Acto ... or=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:28.411039Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888652012676101:4562], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:28.412004Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmIwMGE2ZWYtNDljZTAwYjEtZDk0ZGM2NGItN2U0OWIwZGU=, ActorId: [1:7509888652012676071:4549], ActorState: ExecuteState, TraceId: 01jwea88rd4g1gh4aws7tf2kym, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea88r91ktepvvnztkj9zzn 2025-05-29T15:23:28.412928Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZmIwMGE2ZWYtNDljZTAwYjEtZDk0ZGM2NGItN2U0OWIwZGU=" tx_control { tx_id: "01jwea88r91ktepvvnztkj9zzn" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532208 } } } } } ; 2025-05-29T15:23:28.413090Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:29.654833Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888656307643506:4605], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:29.656266Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjQxNGY5NTktNzllMDgxZmEtODIyYTY2M2UtM2JlZTY4YWI=, ActorId: [1:7509888656307643476:4592], ActorState: ExecuteState, TraceId: 01jwea89yh8tds2c0hnwasm2fd, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea89y8b5kbcj2dss69hx85 2025-05-29T15:23:29.665777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=MjQxNGY5NTktNzllMDgxZmEtODIyYTY2M2UtM2JlZTY4YWI=" tx_control { tx_id: "01jwea89y8b5kbcj2dss69hx85" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532209 } } } } } ; 2025-05-29T15:23:29.665872Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:30.799262Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888660602610910:4648], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:30.800347Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDlkMWEyNzQtYzY4M2RmMTEtNjE5NTUzZWItNmJjNDhmNWU=, ActorId: [1:7509888660602610880:4635], ActorState: ExecuteState, TraceId: 01jwea8b324vvzfrd837bdwzd2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea8b2ve4e0m1s96c90vand 2025-05-29T15:23:30.800996Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NDlkMWEyNzQtYzY4M2RmMTEtNjE5NTUzZWItNmJjNDhmNWU=" tx_control { tx_id: "01jwea8b2ve4e0m1s96c90vand" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532210 } } } } } ; 2025-05-29T15:23:30.801268Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:31.864933Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888664897578315:4691], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:31.865642Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODVmMjhjODYtZWY0ZTcwNDEtNWE1NGQ3MjgtNjQ2ZmYwNGU=, ActorId: [1:7509888664897578285:4678], ActorState: ExecuteState, TraceId: 01jwea8c4g1d1e8n0hs61ejwah, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea8c4c0hq514qmt7gc617j 2025-05-29T15:23:31.866185Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ODVmMjhjODYtZWY0ZTcwNDEtNWE1NGQ3MjgtNjQ2ZmYwNGU=" tx_control { tx_id: "01jwea8c4c0hq514qmt7gc617j" commit_tx: true } query { yql_text: "DECLARE $objects AS List<Struct<componentId:Utf8,modificationId:Utf8,instant:Uint32>>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532211 } } } } } ; 2025-05-29T15:23:31.866212Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ;
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/00192b/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/00192b/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {})
>> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Table
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [GOOD]
>> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Table [FAIL]
>> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Query
>> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Query [FAIL]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD]
------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD]
Test command err: 2025-05-29T15:23:43.076675Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [2:180:2102] Bootstrap 2025-05-29T15:23:43.110991Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [2:180:2102] Become StateWork (SchemeCache [2:187:2105]) 2025-05-29T15:23:43.111166Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [1:179:2152] Bootstrap 2025-05-29T15:23:43.113184Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [1:179:2152] Become StateWork (SchemeCache [1:190:2158]) 2025-05-29T15:23:43.126862Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:23:43.129015Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:23:43.129081Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:23:43.129492Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete
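Note on the INTERNAL_ERROR run above: every failure comes from the same parameterized statement. The metadata initializer retries its UPSERT into //Root/.metadata/initialization/migrations roughly once per second (the instant parameter is the attempt's UNIX time, hence 1748532209, 1748532210, 1748532211), and each attempt fails at compile time with yql_expr.h:1874: index out of range, until yatest's 60-second budget expires and the chunk is killed with the ExecutionTimeoutError in the traceback. A minimal Python sketch of the statement and its one-row parameter list, reconstructed from the logged yql_text and parameters blocks (variable names are ours; this is illustrative, not YDB's initializer code):

    # Statement and row reconstructed from the METADATA_PROVIDER records above;
    # the Struct member names/types come from the logged parameters type block.
    MIGRATIONS_UPSERT = (
        "DECLARE $objects AS List<Struct<"
        "componentId:Utf8,modificationId:Utf8,instant:Uint32>>;\n"
        "UPSERT INTO `//Root/.metadata/initialization/migrations`\n"
        "SELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n"
    )
    objects = [{
        "componentId": "INITIALIZATION",
        "modificationId": "create",
        "instant": 1748532209,  # bumped to the current UNIX time on each retry
    }]

The timeout itself follows the two-step contract visible in the traceback: process.wait raises TimeoutError, and run_test.py rewraps it as ExecutionTimeoutError. A stdlib-only sketch of that contract (the helper name and shape are assumptions, not yatest's actual API):

    import subprocess

    def wait_or_timeout(cmd: list[str], timeout_s: int = 60) -> int:
        # Wait for the test binary; surface a TimeoutError naming the
        # command once the budget is exhausted, as in the traceback above.
        try:
            return subprocess.run(cmd, check=False, timeout=timeout_s).returncode
        except subprocess.TimeoutExpired as err:
            raise TimeoutError(
                f"{timeout_s} second(s) wait timeout has expired: Command {cmd!r}"
            ) from err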
2025-05-29T15:23:43.129745Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:23:43.129952Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:23:43.129962Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:23:43.129999Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:23:43.133005Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:23:43.133036Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:23:43.133051Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:23:43.133105Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:23:43.133121Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:23:43.133145Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:23:43.169468Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:23:43.169550Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:23:43.181812Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:23:43.181873Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:23:43.181891Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:23:43.181905Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:23:43.181936Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:23:43.181945Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:23:43.181951Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:23:43.181961Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:23:43.195185Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:23:43.195242Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:23:43.206124Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:23:43.206181Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:23:43.206412Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:23:43.206422Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:23:43.208445Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:23:43.208465Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:23:43.208817Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-05-29T15:23:43.209156Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ciyv/002348/r3tmp/tmpn0MqSI/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-05-29T15:23:43.209220Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/ciyv/002348/r3tmp/tmpn0MqSI/pdisk_1.dat 2025-05-29T15:23:43.209227Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/ciyv/002348/r3tmp/tmpn0MqSI/pdisk_1.dat 2025-05-29T15:23:43.209440Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-05-29T15:23:43.209545Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-05-29T15:23:43.209573Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-05-29T15:23:43.209589Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-05-29T15:23:43.209630Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-05-29T15:23:43.209670Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# 
{VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-05-29T15:23:43.210679Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-05-29T15:23:43.210761Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-05-29T15:23:43.223500Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-05-29T15:23:43.223567Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-05-29T15:23:43.225985Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-05-29T15:23:43.226165Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2856} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ciyv/002348/r3tmp/tmpn0MqSI/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-05-29T15:23:43.226338Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ciyv/002348/r3tmp/tmpn0MqSI/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/ciyv/002348/r3tmp/tmpn0MqSI/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15064228221824075048 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0} PDiskId# 1000 2025-05-29T15:23:43.226517Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-05-29T15:23:43.226583Z node 1 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:23:43.226589Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-05-29T15:23:43.226625Z node 2 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:23:43.226638Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: 
TDomainTenantPool(dc-1) Bootstrap 2025-05-29T15:23:43.226666Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:23:43.226689Z node 1 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:23:43.226694Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-05-29T15:23:43.226701Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:23:43.226710Z node 2 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:23:43.227253Z node 1 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:23:43.227275Z node 1 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:23:43.227281Z node 1 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:23:43.227305Z node 1 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:415:2306] 2025-05-29T15:23:43.227369Z node 2 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:23:43.227376Z node 2 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:23:43.227380Z node 2 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:23:43.227394Z node 2 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:417:2115] 2025-05-29T15:23:43.228133Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:23:43.228147Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:404:2302] 2025-05-29T15:23:43.228182Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:23:43.228188Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:405:2111] 2025-05-29T15:23:43.228399Z node 1 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:415:2306]} 2025-05-29T15:23:43.228422Z node 1 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:23:43.228431Z node 1 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:23:43.228434Z node 1 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:23:43.228552Z node 2 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:417:2115]} 2025-05-29T15:23:43.228615Z node 2 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:23:43.228622Z node 2 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:23:43.228625Z node 2 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:23:43.250967Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:796: Failed to deliver subscription request to config dispatcher 2025-05-29T15:23:43.250994Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:411: Failed to deliver subscription request to config dispatcher. 
2025-05-29T15:23:43.251001Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:796: Failed to deliver subscription request to config dispatcher 2025-05-29T15:23:43.251008Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:411: Failed to deliver subscription request to config dispatcher.
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD]
Test command err: Starting YDB, grpc: 4834, msgbus: 5563 2025-05-29T15:23:12.589765Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888584463720624:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:12.589788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b2d/r3tmp/tmp957Dyy/pdisk_1.dat 2025-05-29T15:23:12.655701Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4834, node 1 2025-05-29T15:23:12.670921Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:12.670933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:12.670935Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:12.670975Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5563 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-05-29T15:23:12.687866Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509888584463720855:2139] Handle TEvNavigate describe path dc-1 2025-05-29T15:23:12.689488Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509888584463721302:2427] HANDLE EvNavigateScheme dc-1 2025-05-29T15:23:12.689751Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:7509888584463721302:2427] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.690478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:12.690495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:12.692045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:12.696085Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:7509888584463721302:2427] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-05-29T15:23:12.697339Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:7509888584463721302:2427] Handle TEvDescribeSchemeResult Forward to# [1:7509888584463721301:2426] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:12.699763Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888584463720855:2139] Handle TEvProposeTransaction 2025-05-29T15:23:12.699773Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:327: actor# [1:7509888584463720855:2139] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-05-29T15:23:12.731554Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:7509888584463720855:2139] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:23:12.732368Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:23:12.732401Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888584463720855:2139] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:23:12.732437Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888584463720855:2139] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7509888584463721326:2443] 2025-05-29T15:23:12.741796Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888584463721326:2443] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:12.741832Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888584463721326:2443] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:12.741836Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888584463721326:2443] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:12.741847Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888584463721326:2443] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:12.741963Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888584463721326:2443] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.742005Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888584463721326:2443] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-05-29T15:23:12.742021Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888584463721326:2443] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:23:12.742059Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888584463721326:2443] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:23:12.742277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.742852Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888584463721326:2443] txid# 281474976715657 Status 
StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:23:12.742871Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509888584463721326:2443] txid# 281474976715657 SEND to# [1:7509888584463721320:2437] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-05-29T15:23:12.745906Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888584463720855:2139] Handle TEvProposeTransaction 2025-05-29T15:23:12.745917Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888584463720855:2139] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:23:12.745926Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888584463720855:2139] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509888584463721366:2479] 2025-05-29T15:23:12.746784Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888584463721366:2479] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:12.746806Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888584463721366:2479] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:12.746810Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888584463721366:2479] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:12.746822Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888584463721366:2479] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:12.746894Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888584463721366:2479] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.746919Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888584463721366:2479] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:12.746935Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888584463721366:2479] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:23:12.746982Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888584463721366:2479] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:23:12.747054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.747533Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888584463721366:2479] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResul ... 
:23:45.664159Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888724608004225:2555] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:45.664175Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888724608004225:2555] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-05-29T15:23:45.664227Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888724608004225:2555] txid# 281474976715661 HANDLE EvClientConnected 2025-05-29T15:23:45.667445Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888724608004225:2555] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-05-29T15:23:45.667464Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888724608004225:2555] txid# 281474976715661 SEND to# [59:7509888724608004224:2330] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-05-29T15:23:45.777321Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888724608003343:2136] Handle TEvProposeTransaction 2025-05-29T15:23:45.777335Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888724608003343:2136] TxId# 281474976715662 ProcessProposeTransaction 2025-05-29T15:23:45.777352Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888724608003343:2136] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7509888724608004246:2570] 2025-05-29T15:23:45.778549Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888724608004246:2570] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45320" 2025-05-29T15:23:45.778574Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888724608004246:2570] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:45.778578Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888724608004246:2570] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:45.778595Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888724608004246:2570] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:45.778704Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888724608004246:2570] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:45.778735Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888724608004246:2570] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 
RedirectRequired# true 2025-05-29T15:23:45.778771Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888724608004246:2570] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-05-29T15:23:45.778820Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888724608004246:2570] txid# 281474976715662 HANDLE EvClientConnected 2025-05-29T15:23:45.778964Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:23:45.779738Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888724608004246:2570] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-05-29T15:23:45.779747Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888724608004246:2570] txid# 281474976715662 SEND to# [59:7509888724608004245:2343] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-05-29T15:23:45.784823Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888724608003343:2136] Handle TEvProposeTransaction 2025-05-29T15:23:45.784838Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888724608003343:2136] TxId# 281474976715663 ProcessProposeTransaction 2025-05-29T15:23:45.784855Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888724608003343:2136] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7509888724608004277:2587] 2025-05-29T15:23:45.785844Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888724608004277:2587] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45320" 2025-05-29T15:23:45.785865Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888724608004277:2587] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:45.785869Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888724608004277:2587] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:45.785881Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888724608004277:2587] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:45.785969Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888724608004277:2587] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:45.786008Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888724608004277:2587] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:45.786023Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888724608004277:2587] txid# 281474976715663 SEND to# 
72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-05-29T15:23:45.786072Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888724608004277:2587] txid# 281474976715663 HANDLE EvClientConnected 2025-05-29T15:23:45.788763Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888724608004277:2587] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-05-29T15:23:45.788780Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888724608004277:2587] txid# 281474976715663 SEND to# [59:7509888724608004276:2345] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-05-29T15:23:45.798004Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888724608003343:2136] Handle TEvProposeTransaction 2025-05-29T15:23:45.798023Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888724608003343:2136] TxId# 281474976715664 ProcessProposeTransaction 2025-05-29T15:23:45.798046Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888724608003343:2136] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7509888724608004304:2599] 2025-05-29T15:23:45.798909Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888724608004304:2599] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0ODU3NTQyNSwiaWF0IjoxNzQ4NTMyMjI1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.tD9O4l6m7O86b_EBIif8fpQInnhRb_cQ0H3L-3m5xT3qpqCrEohyxIH8KUD7nacWoByyww15nLdfDcKDGw2gZFSseAYpNHIClpOgj9nZJSYf5wsYgTY9PQyEbPbjeGr9SiLRygRHcYRzDIXDAA1TCaQHcVyekDm6nl_MoUIHpxV-FJgiMQNvkKEYtdEjO9Ni_A7AF_tzxT7T08JF__NrEaArCwagNWlL0MVntQ6xqGJhsouP4eA_wQBmyp4K7pdTr03Gm-4ftzws7v1gYDmUvMRGpW6OoV46Oul6fYJtI0YICzfCm9H0hn1RQWb97mXzgl9T-98nE4_67Iy-8IXoNw\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0ODU3NTQyNSwiaWF0IjoxNzQ4NTMyMjI1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45320" 2025-05-29T15:23:45.798934Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888724608004304:2599] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:45.798938Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888724608004304:2599] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-05-29T15:23:45.799003Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1401: Actor# [59:7509888724608004304:2599] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-05-29T15:23:45.799037Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1436: Actor# [59:7509888724608004304:2599] txid# 281474976715664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-05-29T15:23:45.799055Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888724608004304:2599] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:45.799136Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888724608004304:2599] txid# 
281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:45.799146Z node 59 :TX_PROXY ERROR: schemereq.cpp:1079: Actor# [59:7509888724608004304:2599] txid# 281474976715664, Access denied for ordinaryuser, attempt to manage user 2025-05-29T15:23:45.799166Z node 59 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [59:7509888724608004304:2599] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-05-29T15:23:45.799174Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888724608004304:2599] txid# 281474976715664 SEND to# [59:7509888724608004303:2356] Source {TEvProposeTransactionStatus Status# 5} 2025-05-29T15:23:45.799253Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:2583: SessionId: ydb://session/3?node_id=59&id=ZDc0NDA2YmMtZjYzMDY2OTktY2Q1NWZmZjMtNTBiNGVhODQ=, ActorId: [59:7509888724608004294:2356], ActorState: ExecuteState, TraceId: 01jwea8sr356abg08cp9ft9nbm, Create QueryResponse for error on request, msg: 2025-05-29T15:23:45.799335Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:353: actor# [59:7509888724608003343:2136] Handle TEvExecuteKqpTransaction 2025-05-29T15:23:45.799342Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:342: actor# [59:7509888724608003343:2136] TxId# 281474976715665 ProcessProposeKqpTransaction
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD]
Test command err: Starting YDB, grpc: 19572, msgbus: 64584 2025-05-29T15:23:12.593285Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888583754439767:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:12.593320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b37/r3tmp/tmpyNMbZ3/pdisk_1.dat 2025-05-29T15:23:12.648523Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19572, node 1 2025-05-29T15:23:12.674067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:12.674077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:12.674080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:12.674119Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64584 WaitRootIsUp 'dc-1'...
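Aside on the DropUser-72 trace above: the access-control decision it ends with reduces to a small predicate. schemereq.cpp resolves the database, derives IsClusterAdministrator, IsDatabaseAdministrator and DatabaseOwner for the caller, and rejects AlterLogin operations from anyone matching none of them, which is exactly the "Access denied for ordinaryuser, attempt to manage user" error logged there. A sketch of that decision as read from the logged flags (a simplification of the C++ logic, not a transcription of it):

    def may_manage_users(user_sid: str, database_owner: str,
                         is_cluster_admin: bool, is_database_admin: bool,
                         check_admin: bool) -> bool:
        # With administrator checks disabled, any authenticated user passes.
        if not check_admin:
            return True
        # Otherwise only cluster admins, database admins, or the database
        # owner may run AlterLogin (CreateUser/ModifyUser/RemoveUser).
        return is_cluster_admin or is_database_admin or user_sid == database_owner

    # ordinaryuser in the trace above: CheckAdministrator=1,
    # IsClusterAdministrator=0, IsDatabaseAdministrator=0,
    # DatabaseOwner=root@builtin -> denied.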
TClient::Ls request: dc-1 2025-05-29T15:23:12.685990Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509888583754439992:2139] Handle TEvNavigate describe path dc-1 2025-05-29T15:23:12.687844Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509888583754440438:2425] HANDLE EvNavigateScheme dc-1 2025-05-29T15:23:12.688095Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:7509888583754440438:2425] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.693812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:12.693838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:12.694530Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:7509888583754440438:2425] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-05-29T15:23:12.695418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:12.696804Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:7509888583754440438:2425] Handle TEvDescribeSchemeResult Forward to# [1:7509888583754440437:2424] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:12.700396Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888583754439992:2139] Handle TEvProposeTransaction 2025-05-29T15:23:12.700407Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:327: actor# [1:7509888583754439992:2139] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-05-29T15:23:12.723938Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:7509888583754439992:2139] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:23:12.724641Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:23:12.724653Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888583754439992:2139] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:23:12.724694Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888583754439992:2139] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7509888583754440462:2441] 2025-05-29T15:23:12.736865Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888583754440462:2441] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:12.736908Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888583754440462:2441] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:12.736914Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888583754440462:2441] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:12.736929Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888583754440462:2441] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:12.737047Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888583754440462:2441] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.737093Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888583754440462:2441] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-05-29T15:23:12.737111Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888583754440462:2441] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:23:12.737175Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888583754440462:2441] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:23:12.737392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.738112Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888583754440462:2441] txid# 281474976715657 Status 
StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:23:12.738128Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509888583754440462:2441] txid# 281474976715657 SEND to# [1:7509888583754440456:2435] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-05-29T15:23:12.741921Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888583754439992:2139] Handle TEvProposeTransaction 2025-05-29T15:23:12.741937Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888583754439992:2139] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:23:12.741946Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888583754439992:2139] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509888583754440502:2477] 2025-05-29T15:23:12.742852Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888583754440502:2477] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:12.742876Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888583754440502:2477] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:12.742881Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888583754440502:2477] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:12.742894Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888583754440502:2477] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:12.742982Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888583754440502:2477] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.743013Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888583754440502:2477] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:12.743030Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888583754440502:2477] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:23:12.743064Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888583754440502:2477] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:23:12.743173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.743684Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888583754440502:2477] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionR ... 
_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888718964640174:2557] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:44.913803Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888718964640174:2557] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-05-29T15:23:44.913873Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888718964640174:2557] txid# 281474976715661 HANDLE EvClientConnected 2025-05-29T15:23:44.918105Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888718964640174:2557] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-05-29T15:23:44.918125Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888718964640174:2557] txid# 281474976715661 SEND to# [59:7509888718964640173:2329] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-05-29T15:23:45.011050Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888718964639436:2135] Handle TEvProposeTransaction 2025-05-29T15:23:45.011069Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888718964639436:2135] TxId# 281474976715662 ProcessProposeTransaction 2025-05-29T15:23:45.011089Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888718964639436:2135] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7509888723259607493:2574] 2025-05-29T15:23:45.011906Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888723259607493:2574] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45764" 2025-05-29T15:23:45.011926Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888723259607493:2574] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:45.011931Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888723259607493:2574] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:45.011951Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888723259607493:2574] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:45.012068Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888723259607493:2574] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:45.012095Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888723259607493:2574] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 
2025-05-29T15:23:45.012108Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888723259607493:2574] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-05-29T15:23:45.012156Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888723259607493:2574] txid# 281474976715662 HANDLE EvClientConnected 2025-05-29T15:23:45.012312Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:23:45.019407Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888723259607493:2574] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-05-29T15:23:45.019435Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888723259607493:2574] txid# 281474976715662 SEND to# [59:7509888723259607492:2343] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-05-29T15:23:45.046955Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888718964639436:2135] Handle TEvProposeTransaction 2025-05-29T15:23:45.046976Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888718964639436:2135] TxId# 281474976715663 ProcessProposeTransaction 2025-05-29T15:23:45.046994Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888718964639436:2135] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7509888723259607524:2591] 2025-05-29T15:23:45.047781Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888723259607524:2591] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45764" 2025-05-29T15:23:45.047795Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888723259607524:2591] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:45.047799Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888723259607524:2591] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:45.047811Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888723259607524:2591] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:45.047905Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888723259607524:2591] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:45.047925Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888723259607524:2591] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:45.047938Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888723259607524:2591] txid# 281474976715663 SEND to# 72057594046644480 
shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-05-29T15:23:45.047984Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888723259607524:2591] txid# 281474976715663 HANDLE EvClientConnected 2025-05-29T15:23:45.054822Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888723259607524:2591] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-05-29T15:23:45.054848Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888723259607524:2591] txid# 281474976715663 SEND to# [59:7509888723259607523:2345] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-05-29T15:23:45.070794Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888718964639436:2135] Handle TEvProposeTransaction 2025-05-29T15:23:45.070813Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888718964639436:2135] TxId# 281474976715664 ProcessProposeTransaction 2025-05-29T15:23:45.070841Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888718964639436:2135] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7509888723259607551:2603] 2025-05-29T15:23:45.071719Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888723259607551:2603] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "targetuser" Password: "passwd" IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0ODU3NTQyNCwiaWF0IjoxNzQ4NTMyMjI0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.e8AhtYi_L8JKSVOUnrET2kARgDwlMFuv3rHC0T7SrPptMtjmjxn0KJSuMhGgiUyLeE0tcFrxsx-zy71BrLgqXNJF7GZW17UN7IkO2Lh7kYsqRH4wP27iyvFjzpO1tlVTpW0IQeGkIpE3UITub4ZwTaChKmwLtNS5nmN8QyuQMXGj32v5PJKUqxrC681oXcznmSrnUTLZ6FOIWe1lrcZNmyjLBNQ4N4KU9If3xqfgNoa8XDq6AV2feaRS28hwFLBFNBvwDHabMSWmSRC8828YARuDqkbSyXkz4B9D6muZhGXNMyR-fCgzZp-pDRjkj7F4rF40AAzM_ZH08mW-9PBVJw\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0ODU3NTQyNCwiaWF0IjoxNzQ4NTMyMjI0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45764" 2025-05-29T15:23:45.071752Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888723259607551:2603] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:45.071758Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888723259607551:2603] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-05-29T15:23:45.071825Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1401: Actor# [59:7509888723259607551:2603] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-05-29T15:23:45.071835Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1436: Actor# [59:7509888723259607551:2603] txid# 281474976715664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-05-29T15:23:45.071848Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888723259607551:2603] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:45.071917Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888723259607551:2603] 
txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:45.071922Z node 59 :TX_PROXY ERROR: schemereq.cpp:1079: Actor# [59:7509888723259607551:2603] txid# 281474976715664, Access denied for ordinaryuser, attempt to manage user 2025-05-29T15:23:45.071943Z node 59 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [59:7509888723259607551:2603] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-05-29T15:23:45.071948Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888723259607551:2603] txid# 281474976715664 SEND to# [59:7509888723259607550:2356] Source {TEvProposeTransactionStatus Status# 5} 2025-05-29T15:23:45.072066Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:2583: SessionId: ydb://session/3?node_id=59&id=MjljMTUwMDItNTNmNGEyYzYtMzQ5Mjc4NWUtYWMxYmExNTU=, ActorId: [59:7509888723259607541:2356], ActorState: ExecuteState, TraceId: 01jwea8s1abvqxdc9x19jcb4ct, Create QueryResponse for error on request, msg: 2025-05-29T15:23:45.072196Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:353: actor# [59:7509888718964639436:2135] Handle TEvExecuteKqpTransaction 2025-05-29T15:23:45.072202Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:342: actor# [59:7509888718964639436:2135] TxId# 281474976715665 ProcessProposeKqpTransaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleView [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:23:38.652501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:38.652519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:38.652523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:38.652528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:38.652538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:38.652542Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:38.652550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:38.652564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:38.652641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:38.652698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:38.664187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:23:38.664208Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:38.664305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:23:38.667129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:38.667159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:38.667189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:38.670388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:38.670472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:38.670604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:38.670830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:38.671588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:38.671639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:38.671906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:38.671929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:38.671973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:38.671986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:38.671995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:38.672024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:23:38.673617Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:23:38.695081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:38.695153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:38.695206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:38.695264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:38.695276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:38.696912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:38.696944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:38.696992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:38.697002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:38.697008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:38.697013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:38.697479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:38.697495Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:38.697501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:38.697886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:38.697900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:38.697906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:38.697912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:38.698557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:38.699084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:38.699114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:38.699276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:38.699304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:38.699311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:38.699398Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
d, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-29T15:23:48.550844Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-05-29T15:23:48.550864Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:23:48.550880Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-05-29T15:23:48.551131Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:48.551154Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 158913792108 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:48.551162Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:129: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000005, at schemeshard: 72057594046678944 2025-05-29T15:23:48.551183Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:180: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-05-29T15:23:48.551191Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-29T15:23:48.551196Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-29T15:23:48.551201Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-29T15:23:48.551205Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-29T15:23:48.551213Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:23:48.551222Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:23:48.551227Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-05-29T15:23:48.551234Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-29T15:23:48.551238Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710758:0 2025-05-29T15:23:48.551242Z node 37 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710758:0 2025-05-29T15:23:48.551251Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:23:48.551256Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-05-29T15:23:48.551261Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-05-29T15:23:48.551265Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-29T15:23:48.551393Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-29T15:23:48.551699Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:48.551707Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:48.551733Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:23:48.551756Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:48.551761Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [37:206:2207], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-05-29T15:23:48.551766Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [37:206:2207], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-05-29T15:23:48.551863Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-29T15:23:48.551874Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-29T15:23:48.551881Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-29T15:23:48.551886Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-05-29T15:23:48.551891Z node 37 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:23:48.551948Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-29T15:23:48.551957Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-29T15:23:48.551961Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-29T15:23:48.551965Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:23:48.551970Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:23:48.551979Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-05-29T15:23:48.551984Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [37:126:2151] 2025-05-29T15:23:48.552017Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:48.552022Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:23:48.552031Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:23:48.552424Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-29T15:23:48.552635Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-29T15:23:48.552652Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-05-29T15:23:48.552662Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710758 2025-05-29T15:23:48.552669Z node 37 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute 2025-05-29T15:23:48.552676Z node 37 :EXPORT DEBUG: schemeshard_export__create.cpp:1232: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-05-29T15:23:48.552681Z node 37 :EXPORT DEBUG: 
schemeshard_export__create.cpp:1263: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 1003, itemIdx# 4294967295 2025-05-29T15:23:48.552729Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:48.552964Z node 37 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-29T15:23:48.553003Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:23:48.553010Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:23:48.553065Z node 37 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:23:48.553080Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:23:48.553085Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [37:395:2383] TestWaitNotification: OK eventTxId 1003 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldDisableAutoDropping [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:23:32.639793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:32.639825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:32.639833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:32.639840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:32.639855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:32.639859Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:32.639871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:32.639889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:32.640029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:32.640150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:32.656591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:23:32.656616Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:32.656715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:23:32.662376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:32.662416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:32.662453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:32.666752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:32.666851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:32.666977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:32.667177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:32.668090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:32.668145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:32.668399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:32.668412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:32.668449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:32.668458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:32.668465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:32.668487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:23:32.670761Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:23:32.715990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:32.716073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:32.716140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:32.716191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:32.716203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:32.727143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:32.727193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:32.727255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:32.727270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:32.727277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:32.727284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:32.735112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:32.735147Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:32.735156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:32.739095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:32.739128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:32.739138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:32.739150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:32.739996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:32.741886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:32.741947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:32.742169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:32.742210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:32.742221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:32.742307Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
ablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2025-05-29T15:23:57.074438Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000007 2025-05-29T15:23:57.074498Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:57.074510Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 360777255019 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:57.074514Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:129: TRmDir HandleReply TEvOperationPlan, opId: 1004:0, step: 5000007, at schemeshard: 72057594046678944 2025-05-29T15:23:57.074529Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:180: RmDir is done, opId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:23:57.074533Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:23:57.074536Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:23:57.074540Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:23:57.074542Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:23:57.074547Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:23:57.074553Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:57.074557Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-05-29T15:23:57.074561Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:23:57.074566Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:23:57.074569Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:23:57.074574Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:23:57.074578Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2025-05-29T15:23:57.074581Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2025-05-29T15:23:57.074583Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-05-29T15:23:57.074696Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:23:57.074917Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:23:57.075094Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:57.075099Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:57.075117Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:23:57.075131Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:57.075134Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [84:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-05-29T15:23:57.075138Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [84:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 2 FAKE_COORDINATOR: Erasing txId 1004 2025-05-29T15:23:57.075220Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:23:57.075227Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:23:57.075230Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:23:57.075233Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-05-29T15:23:57.075236Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:23:57.075295Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:23:57.075303Z node 
84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:23:57.075307Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:23:57.075311Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:23:57.075315Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:23:57.075325Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-05-29T15:23:57.075354Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:23:57.075357Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:23:57.075363Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:23:57.075666Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:23:57.075823Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:23:57.075835Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 2025-05-29T15:23:57.075886Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:23:57.075905Z node 84 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 22us result status StatusSuccess 2025-05-29T15:23:57.075956Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 1 
SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "export-1003" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710757 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestRandomActions [GOOD] Test command err: 2025-05-29T15:22:16.397390Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.397458Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.397490Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.397530Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.397595Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.397624Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.406320Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.406482Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.406544Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.406592Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.406652Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.406698Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.406806Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.406842Z node 1 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.407174Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.407206Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.407229Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.407251Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.407277Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.407300Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.407346Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.411746Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.411820Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.411860Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.413094Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.413143Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.413180Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.413297Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.413321Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.413348Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.413376Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.413436Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.413461Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.413488Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.413512Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:547: Handle NActors::TEvInterconnect::TEvListNodes 2025-05-29T15:22:16.413737Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.413771Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.413886Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.414004Z node 2 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.414062Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.414906Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.414938Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.414949Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.414961Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.414977Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.419746Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.420314Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.420391Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.420406Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.420452Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.421693Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.422290Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.422415Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.422540Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.422580Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.422671Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.424057Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.424438Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:576: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-05-29T15:22:16.453496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 
2025-05-29T15:22:16.453526Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ... waiting for nameservers are connected 2025-05-29T15:22:16.459120Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:22:16.459733Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:22:16.459820Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-05-29T15:22:16.460094Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:236: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:22:16.461114Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-05-29T15:22:16.461146Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-05-29T15:22:16.461222Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:961: [DB] Using default config. 2025-05-29T15:22:16.461242Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:998: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:16.461248Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded the first approximate epoch start: #1.1 2025-05-29T15:22:16.461267Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-05-29T15:22:16.461292Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-05-29T15:22:16.461299Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-05-29T15:22:16.461304Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-05-29T15:22:16.461310Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1311: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-05-29T15:22:16.461335Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1330: [DB] Update approx epoch start in database: #1.1 2025-05-29T15:22:16.461342Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1343: [DB] Update main nodes table to: Nodes 2025-05-29T15:22:16.493300Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-05-29T15:22:16.493336Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-05-29T15:22:16.493345Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-05-29T15:22:16.493355Z n ... 
esourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:23:51.803126Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:23:51.803138Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host4" Port: 3 ResolveHost: "host4" Address: "host4" Location { DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:23:51.803151Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:28179:21004], Recipient [1:28114:21004]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:23:51.803156Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:23:51.803163Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:23:51.803167Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host4:3 (not fixed) tenant: dc-1 2025-05-29T15:23:51.803184Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1028.v872 host4:3 to database state=Active resolvehost=host4 address=host4 dc=3 location=DC=3/M=3/R=3/U=3/ lease=1 expire=Fri, 09 Jan 1970 06:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=4 authorizedbycertificate=false 2025-05-29T15:23:51.803229Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1028.v872 host4:3 2025-05-29T15:23:51.803235Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 871 to 872 2025-05-29T15:23:51.803240Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=872 2025-05-29T15:23:51.814277Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:23:51.814298Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1028.v872 host4:3 2025-05-29T15:23:51.814306Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 871 to 872 2025-05-29T15:23:51.814313Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1028.v872 host4:3 to epoch cache 2025-05-29T15:23:51.814331Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1028.v872 to update nodes log 2025-05-29T15:23:51.814361Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1028 Host: "host4" Port: 3 ResolveHost: "host4" Address: "host4" Location { DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } Expire: 712800025000 Name: "slot-4" } 2025-05-29T15:23:51.814786Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:28184:21066], Recipient [1:28114:21004]: NKikimr::TEvTabletPipe::TEvServerConnected 
2025-05-29T15:23:51.814819Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039938, Sender [1:622:2214], Recipient [1:28114:21004]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host7" Port: 6 ResolveHost: "host7" Address: "host7" Location { DataCenter: "6" Module: "6" Rack: "6" Unit: "6" } FixedNodeId: false Path: "dc-1" } 2025-05-29T15:23:51.814824Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:248: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-05-29T15:23:51.814830Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1487: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host7" Port: 6 ResolveHost: "host7" Address: "host7" Location { DataCenter: "6" Module: "6" Rack: "6" Unit: "6" } FixedNodeId: false Path: "dc-1" 2025-05-29T15:23:51.814865Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:23:51.814885Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:28072:20989] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:23:51.814923Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:28186:21067], recipient# [1:28185:21004], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:23:51.814933Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1532: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:23:51.814943Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1558: Finished resolving tenant: request# Host: "host7" Port: 6 ResolveHost: "host7" Address: "host7" Location { DataCenter: "6" Module: "6" 
Rack: "6" Unit: "6" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-05-29T15:23:51.814952Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 2146435073, Sender [1:28185:21004], Recipient [1:28114:21004]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:23:51.814955Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-05-29T15:23:51.814969Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:77: TTxRegisterNode Execute 2025-05-29T15:23:51.814972Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:81: Registration request from host7:6 (not fixed) tenant: dc-1 2025-05-29T15:23:51.814990Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:856: [DB] Adding node #1029.v873 host7:6 to database state=Active resolvehost=host7 address=host7 dc=6 location=DC=6/M=6/R=6/U=6/ lease=1 expire=Fri, 09 Jan 1970 06:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=5 authorizedbycertificate=false 2025-05-29T15:23:51.815029Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1029.v873 host7:6 2025-05-29T15:23:51.815033Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 872 to 873 2025-05-29T15:23:51.815036Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1356: [DB] Update epoch version in database version=873 2025-05-29T15:23:51.825973Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:186: TTxRegisterNode Complete 2025-05-29T15:23:51.825992Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1029.v873 host7:6 2025-05-29T15:23:51.826000Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 872 to 873 2025-05-29T15:23:51.826005Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1029.v873 host7:6 to epoch cache 2025-05-29T15:23:51.826022Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1029.v873 to update nodes log 2025-05-29T15:23:51.826052Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:58: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1029 Host: "host7" Port: 6 ResolveHost: "host7" Address: "host7" Location { DataCenter: "6" Module: "6" Rack: "6" Unit: "6" } Expire: 712800025000 Name: "slot-5" } 2025-05-29T15:23:51.826581Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:28190:21071], Recipient [1:28114:21004]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:51.826610Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039939, Sender [1:622:2214], Recipient [1:28114:21004]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1030 } 2025-05-29T15:23:51.826615Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2025-05-29T15:23:51.826630Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:44: TTxExtendLease Execute node #1030 2025-05-29T15:23:51.826634Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-05-29T15:23:51.826649Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:78: TTxExtendLease Complete 2025-05-29T15:23:51.826659Z node 1 :NODE_BROKER TRACE: node_broker__extend_lease.cpp:82: TTxExtendLease reply with: 
NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: WRONG_REQUEST Reason: "Unknown node" } NodeId: 1030 } 2025-05-29T15:23:51.826976Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 269877761, Sender [1:28192:21073], Recipient [1:28114:21004]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:51.826993Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:242: StateWork, received event# 272039939, Sender [1:622:2214], Recipient [1:28114:21004]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1030 } 2025-05-29T15:23:51.826996Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2025-05-29T15:23:51.827000Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:44: TTxExtendLease Execute node #1030 2025-05-29T15:23:51.827002Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-05-29T15:23:51.827007Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:78: TTxExtendLease Complete 2025-05-29T15:23:51.827013Z node 1 :NODE_BROKER TRACE: node_broker__extend_lease.cpp:82: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: WRONG_REQUEST Reason: "Unknown node" } NodeId: 1030 }
>> DataShardReadIterator::ShouldStopWhenNodeDisconnected
>> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD]
>> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD]
>> AggregateStatistics::ShouldBePings
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite
>> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD]
>> DataShardReadIterator::ShouldReverseReadMultipleKeys
>> DataShardReadIterator::ShouldReadRangeCellVec
>> TFlatTest::AutoSplitMergeQueue [GOOD]
>> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec
>> DataShardReadIterator::ShouldReadKeyCellVec
>> DataShardReadIterator::ShouldHandleReadAck
>> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace
>> TObjectStorageListingTest::ManyDeletes [GOOD]
>> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout
|64.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data
|64.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr
|64.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr
|64.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data
|64.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data
|64.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD]
Test command err:
Starting YDB, grpc: 62224, msgbus: 4764
2025-05-29T15:23:11.899704Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888577279494181:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:11.900007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b44/r3tmp/tmpVZgaxJ/pdisk_1.dat 2025-05-29T15:23:11.976229Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62224, node 1 2025-05-29T15:23:11.994520Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:11.994529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:11.994530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:11.994562Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:23:12.000305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:12.000342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:12.001917Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4764 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-05-29T15:23:12.010315Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509888577279494407:2139] Handle TEvNavigate describe path dc-1 2025-05-29T15:23:12.012435Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509888581574462155:2432] HANDLE EvNavigateScheme dc-1 2025-05-29T15:23:12.012758Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:7509888581574462155:2432] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.021487Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:7509888581574462155:2432] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-05-29T15:23:12.023705Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:7509888581574462155:2432] Handle TEvDescribeSchemeResult Forward to# [1:7509888581574462154:2431] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { 
Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:12.026631Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888577279494407:2139] Handle TEvProposeTransaction 2025-05-29T15:23:12.026646Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888577279494407:2139] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:23:12.026687Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888577279494407:2139] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7509888581574462168:2438] 2025-05-29T15:23:12.035627Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888581574462168:2438] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:12.035672Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888581574462168:2438] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-05-29T15:23:12.035677Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888581574462168:2438] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:12.035692Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888581574462168:2438] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:12.035821Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888581574462168:2438] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.035861Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888581574462168:2438] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-05-29T15:23:12.035877Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888581574462168:2438] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:23:12.035933Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888581574462168:2438] txid# 281474976715657 HANDLE EvClientConnected 
2025-05-29T15:23:12.036198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.036973Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888581574462168:2438] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:23:12.036987Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509888581574462168:2438] txid# 281474976715657 SEND to# [1:7509888581574462167:2437] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-05-29T15:23:12.039653Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888577279494407:2139] Handle TEvProposeTransaction 2025-05-29T15:23:12.039661Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888577279494407:2139] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:23:12.039669Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888577279494407:2139] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509888581574462208:2474] 2025-05-29T15:23:12.040164Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888581574462208:2474] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:12.040179Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888581574462208:2474] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-05-29T15:23:12.040182Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888581574462208:2474] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:12.040197Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888581574462208:2474] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:12.040284Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888581574462208:2474] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.040307Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888581574462208:2474] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:12.040319Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888581574462208:2474] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:23:12.040352Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888581574462208:2474] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:23:12.040421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at 
schemeshard: 72057594046644480 2025-05-29T15:23:12.040864Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888581574462208:2474] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2025-05-29T15:23:12.040877Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509888581574462208:2474] txid# 281474976715658 SEND to# [1:7509888581574462207:2473] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2025-05-29T15:23:12.291367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888581574462269:2333], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resour ... node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpModifyACL ModifyACL { Name: "tenant-db" DiffACL: "\n\022\010\001\022\016\032\014clusteradmin\n\031\010\000\022\025\010\001\020\200\004\032\014clusteradmin \003" } } TxId: 281474976715665 TabletId: 72075186224037891 Owner: "root@builtin" UserToken: "***" PeerName: "ipv6:[::1]:60420" , at schemeshard: 72075186224037891 2025-05-29T15:23:56.078547Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_modify_acl.cpp:33: TModifyACL Propose, path: /dc-1/tenant-db, operationId: 281474976715665:0, at schemeshard: 72075186224037891 2025-05-29T15:23:56.078579Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5218: ExamineTreeVFS visit path id [OwnerId: 72075186224037891, LocalPathId: 1] name: dc-1/tenant-db type: EPathTypeSubDomain state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-05-29T15:23:56.078586Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5234: ExamineTreeVFS run path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-05-29T15:23:56.078614Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72075186224037891 2025-05-29T15:23:56.078623Z node 60 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72075186224037891 2025-05-29T15:23:56.078636Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715665:0 progress is 1/1 2025-05-29T15:23:56.078643Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-05-29T15:23:56.078646Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715665:0 progress is 1/1 2025-05-29T15:23:56.078649Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-05-29T15:23:56.078654Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 4 2025-05-29T15:23:56.078664Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2025-05-29T15:23:56.078673Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72075186224037891, LocalPathId: 1], at schemeshard: 72075186224037891 2025-05-29T15:23:56.078675Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-05-29T15:23:56.078677Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715665:0 2025-05-29T15:23:56.078681Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2025-05-29T15:23:56.078683Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976715665, [OwnerId: 72075186224037891, LocalPathId: 1], 9 2025-05-29T15:23:56.079261Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72075186224037891, at schemeshard: 72075186224037891 2025-05-29T15:23:56.079294Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715665, subject: root@builtin, status: StatusSuccess, operation: MODIFY ACL, path: /dc-1/tenant-db, add access: +(DS):clusteradmin, remove access: -():clusteradmin:- 2025-05-29T15:23:56.079326Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186224037891 2025-05-29T15:23:56.079333Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976715665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-05-29T15:23:56.079355Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888771502156584:2812] txid# 281474976715665 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715665} 2025-05-29T15:23:56.079369Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888771502156584:2812] txid# 281474976715665 SEND to# [59:7509888771502156583:2351] Source {TEvProposeTransactionStatus txid# 281474976715665 Status# 48} 2025-05-29T15:23:56.079365Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976715665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-05-29T15:23:56.079381Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186224037891 2025-05-29T15:23:56.079384Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [60:7509888762652174803:2304], at schemeshard: 72075186224037891, txId: 281474976715665, path id: 1 2025-05-29T15:23:56.079394Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [60:7509888762652174803:2304], at schemeshard: 72075186224037891, txId: 281474976715665, path id: 1 2025-05-29T15:23:56.079553Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976715665 2025-05-29T15:23:56.079565Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976715665 2025-05-29T15:23:56.079567Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72075186224037891, txId: 281474976715665 2025-05-29T15:23:56.079570Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186224037891, txId: 281474976715665, pathId: [OwnerId: 72075186224037891, LocalPathId: 
1], version: 9 2025-05-29T15:23:56.079572Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 5 2025-05-29T15:23:56.079584Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72075186224037891, txId: 281474976715665, subscribers: 0 TEST clusteradmin triggers auth on tenant 2025-05-29T15:23:56.080057Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 TClient is connected to server localhost:14846 TClient::Ls request: /dc-1/tenant-db 2025-05-29T15:23:56.088253Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [59:7509888762912220934:2113] Handle TEvNavigate describe path /dc-1/tenant-db 2025-05-29T15:23:56.089900Z node 59 :TX_PROXY DEBUG: describe.cpp:272: Actor# [59:7509888771502156590:2817] HANDLE EvNavigateScheme /dc-1/tenant-db 2025-05-29T15:23:56.090016Z node 59 :TX_PROXY DEBUG: describe.cpp:356: Actor# [59:7509888771502156590:2817] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:56.090056Z node 59 :TX_PROXY DEBUG: describe.cpp:435: Actor# [59:7509888771502156590:2817] SEND to# 72075186224037891 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/tenant-db" Options { ReturnBoundaries: false ShowPrivateTable: true ReturnRangeKey: false } 2025-05-29T15:23:56.090512Z node 59 :TX_PROXY DEBUG: describe.cpp:448: Actor# [59:7509888771502156590:2817] Handle TEvDescribeSchemeResult Forward to# [59:7509888771502156589:2816] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 0 Record# Status: StatusSuccess Path: "/dc-1/tenant-db" PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "name_tenant-db_kind_tenant-db" Kind: "tenant-db" } StoragePools { Name: "name_tenant-db_kind_test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Sids { Name: "tenantuser" Type: USER } Audience: "/dc-1/tenant-db" } } } PathId: 1 PathOwnerId: 72075186224037891 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 
SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037891 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 184467440737095... (TRUNCATED)
2025-05-29T15:23:56.095741Z node 59 :HIVE WARN: tx__status.cpp:57: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 60 2025-05-29T15:23:56.095855Z node 59 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(60, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-29T15:23:56.096033Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
|64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest
|64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest
>> AggregateStatistics::ShouldBePings [GOOD]
>> DataShardReadIterator::ShouldReverseReadMultipleKeys [FAIL]
>> DataShardReadIterator::ShouldStopWhenNodeDisconnected [FAIL]
>> CompositeConveyorTests::Test10xDistribution [GOOD]
>> DataShardReadIterator::ShouldReadRangeCellVec [FAIL]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite [FAIL]
>> DataShardReadIterator::ShouldReadKeyCellVec [FAIL]
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec [FAIL]
>> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive [FAIL]
>> DataShardReadIterator::ShouldHandleReadAck [FAIL]
>> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace [FAIL]
>> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne
>> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne [FAIL]
>> DataShardReadIterator::ShouldReverseReadMultipleRanges
>> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks
>> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD]
>> DataShardReadIterator::ShouldReadRangeArrow
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite
>> DataShardReadIterator::ShouldReadRangeArrow [FAIL]
>> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite [FAIL]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite
>> DataShardReadIterator::ShouldReadKeyArrow
>> DataShardReadIterator::ShouldReadKeyArrow [FAIL]
>> DataShardReadIterator::ShouldReadKeyOnlyValueColumn
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow [FAIL]
>> DataShardReadIterator::ShouldReadRangeReverse
>> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive
>> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive [FAIL]
>> DataShardReadIterator::ShouldNotReadAfterCancel
>> DataShardReadIterator::ShouldHandleOutOfOrderReadAck
>> DataShardReadIterator::ShouldHandleOutOfOrderReadAck [FAIL]
>> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies
>> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead
>> DataShardReadIterator::ShouldReverseReadMultipleRanges [FAIL]
>> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks [FAIL]
>> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec [FAIL]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite [FAIL]
>> DataShardReadIterator::ShouldReadRangeReverse [FAIL]
>> DataShardReadIterator::ShouldReadKeyOnlyValueColumn [FAIL]
>> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks
>> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead [FAIL]
>> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks
>> DataShardReadIterator::ShouldNotReadAfterCancel [FAIL]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite
>> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies [FAIL]
>> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse
>> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100
>> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow
>> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight
>> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks [FAIL]
>> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture
>> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse [FAIL]
>> DataShardReadIterator::ShouldForbidDuplicatedReadId
>> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks [FAIL]
>> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies
>> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow [FAIL]
>> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight [FAIL]
>> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn [FAIL]
>> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 [FAIL]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite [FAIL]
>> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture [FAIL]
>> DataShardReadIterator::ShouldForbidDuplicatedReadId [FAIL]
>> DataShardReadIterator::ShouldRollbackLocksWhenWrite
>> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks
>> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies [FAIL]
>> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec [FAIL]
>> DataShardReadIterator::ShouldReadMultipleKeys
>> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite
>> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000
>> DataShardReadIterator::ShouldRollbackLocksWhenWrite [FAIL]
>> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks [FAIL]
>> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks
>> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow
>> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite [FAIL]
>> DataShardReadIterator::ShouldReadMultipleKeys [FAIL]
>> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds [FAIL]
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 [FAIL]
>> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites [FAIL]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99
>> DataShardReadIterator::ShouldReadRangeLeftInclusive
>> DataShardReadIterator::ShouldReadMultipleKeysOneByOne
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite
>> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows
|64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest
|64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD]
Test command err:
2025-05-29T15:24:00.505952Z node 1 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-05-29T15:24:00.506050Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-05-29T15:24:00.607500Z node 1 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 1 2025-05-29T15:24:00.607526Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 2 2025-05-29T15:24:00.607533Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 1 2025-05-29T15:24:00.607657Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:16:2056], server id = [0:0:0], tablet id = 1, status = ERROR 2025-05-29T15:24:00.607663Z node 1 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.607672Z node 2 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 2, client id = [2:19:2055], server id = [0:0:0], tablet id = 2, status = ERROR 2025-05-29T15:24:00.607674Z node 2 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.607681Z node 1 :STATISTICS DEBUG: service_impl.cpp:448: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-05-29T15:24:00.607685Z node 1 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 1
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD]
Test command err:
2025-05-29T15:24:00.342301Z node 1 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-05-29T15:24:00.343051Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-05-29T15:24:00.343095Z node 1 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:00.343143Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [1:39:2059], tablet id = 2, status = OK 2025-05-29T15:24:00.343148Z node 1 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [1:39:2059], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:00.343196Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-05-29T15:24:00.343206Z node 1 :STATISTICS DEBUG: service_impl.cpp:317:
Received TEvStatisticsResponse TabletId: 1 2025-05-29T15:24:00.343235Z node 3 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-05-29T15:24:00.343242Z node 2 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 2, client id = [2:44:2057], server id = [2:44:2057], tablet id = 4, status = OK 2025-05-29T15:24:00.343246Z node 2 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [2:44:2057], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:00.343252Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [1:40:2060], tablet id = 3, status = OK 2025-05-29T15:24:00.343255Z node 1 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [1:40:2060], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:00.343265Z node 1 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 2 2025-05-29T15:24:00.343276Z node 4 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-05-29T15:24:00.343284Z node 3 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 3, client id = [3:47:2057], server id = [3:47:2057], tablet id = 5, status = OK 2025-05-29T15:24:00.343287Z node 3 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [3:47:2057], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:00.343291Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 4 2025-05-29T15:24:00.343297Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-05-29T15:24:00.343299Z node 1 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.343301Z node 3 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 5 2025-05-29T15:24:00.343305Z node 3 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 1 2025-05-29T15:24:00.343315Z node 1 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 3 2025-05-29T15:24:00.343325Z node 2 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 2, client id = [2:44:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-05-29T15:24:00.343327Z node 2 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.343330Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [0:0:0], tablet id = 2, status = ERROR 2025-05-29T15:24:00.343332Z node 1 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.343335Z node 4 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [4:49:2057], tablet id = 6, status = OK 2025-05-29T15:24:00.343338Z node 4 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [4:49:2057], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:00.343343Z node 3 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 3, client id = [3:47:2057], server id = [0:0:0], tablet id = 5, status = ERROR 2025-05-29T15:24:00.343345Z node 3 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.343349Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: 
EvClientConnected, node id = 1, client id = [1:40:2060], server id = [0:0:0], tablet id = 3, status = ERROR 2025-05-29T15:24:00.343351Z node 1 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.343355Z node 4 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 6 2025-05-29T15:24:00.343357Z node 4 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-05-29T15:24:00.343381Z node 1 :STATISTICS DEBUG: service_impl.cpp:448: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-05-29T15:24:00.343392Z node 4 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [0:0:0], tablet id = 6, status = ERROR 2025-05-29T15:24:00.343394Z node 4 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.343401Z node 2 :STATISTICS DEBUG: service_impl.cpp:448: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-05-29T15:24:00.343405Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 1 2025-05-29T15:24:00.343415Z node 1 :STATISTICS DEBUG: service_impl.cpp:448: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-05-29T15:24:00.343419Z node 1 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [FAIL] Test command err: 2025-05-29T15:23:37.312268Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:23:37.312314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:23:37.312335Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00123f/r3tmp/tmp0F6xOu/pdisk_1.dat 2025-05-29T15:23:37.440827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:23:37.455035Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:37.459034Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532216895549 != 1748532216895553 2025-05-29T15:23:37.503023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:37.503073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:37.515427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:37.600302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:37.832610Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:37.832641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:792:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:37.832714Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:37.833635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:23:37.999590Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:795:2660], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:23:38.038256Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:866:2700] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:39.765328Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:876:2709], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:39.781142Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NmUyNWM0NDUtMzE1NmQwYTItZGEwYmU1ZS0zN2YzMTQ5Yw==, ActorId: [1:779:2650], ActorState: ExecuteState, TraceId: 01jwea8hz8bshsfc6y8zhkmvbb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13AAAC1C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C5DDB9) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x262EDC84) NKikimr::NTestSuiteKqpStreamLookup::TTestCaseReadTableWithIndexDuringSplit::Execute_(NUnitTest::TTestContext&)+2116 (0x139A8834) NKikimr::NTestSuiteKqpStreamLookup::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139AC807) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C5FC6E) NKikimr::NTestSuiteKqpStreamLookup::TCurrentTest::Execute()+436 (0x139AC084) NUnitTest::TTestFactory::Execute()+803 (0x13C603E3) NUnitTest::RunMain(int, char**)+3021 (0x13C71F8D) ??+0 (0x7F4EED9D3D90) __libc_start_main+128 (0x7F4EED9D3E40) _start+41 (0x129F7029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] Test command err: 2025-05-29T15:24:00.174544Z node 1 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-05-29T15:24:00.175642Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = OK 2025-05-29T15:24:00.175700Z node 1 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [1:8:2055], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:00.175715Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2, status = OK 2025-05-29T15:24:00.175720Z node 1 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [1:9:2056], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:00.175729Z node 1 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 1 2025-05-29T15:24:00.175751Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 3, status = OK 2025-05-29T15:24:00.175756Z node 1 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [1:10:2057], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:00.175764Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = OK 2025-05-29T15:24:00.175768Z node 1 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest 
send, client id = [1:11:2058], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:00.175775Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [0:0:0], tablet id = 1, status = ERROR 2025-05-29T15:24:00.175778Z node 1 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.175783Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5, status = OK 2025-05-29T15:24:00.175788Z node 1 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [1:12:2059], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:00.175799Z node 1 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 3 2025-05-29T15:24:00.175809Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 6, status = OK 2025-05-29T15:24:00.175814Z node 1 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [1:13:2060], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:00.175820Z node 1 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 5 2025-05-29T15:24:00.175826Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-05-29T15:24:00.175829Z node 1 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.175834Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = OK 2025-05-29T15:24:00.175838Z node 1 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [1:14:2061], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:00.175844Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 5, status = ERROR 2025-05-29T15:24:00.175847Z node 1 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.175851Z node 1 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 7 2025-05-29T15:24:00.175857Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [0:0:0], tablet id = 7, status = ERROR 2025-05-29T15:24:00.175860Z node 1 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.186005Z node 1 :STATISTICS DEBUG: service_impl.cpp:1028: Tablet 1 has already been processed 2025-05-29T15:24:00.186033Z node 1 :STATISTICS ERROR: service_impl.cpp:1032: No result was received from the tablet 2 2025-05-29T15:24:00.186038Z node 1 :STATISTICS DEBUG: service_impl.cpp:1063: Tablet 2 is not local. 2025-05-29T15:24:00.186061Z node 1 :STATISTICS DEBUG: service_impl.cpp:1028: Tablet 3 has already been processed 2025-05-29T15:24:00.186066Z node 1 :STATISTICS ERROR: service_impl.cpp:1032: No result was received from the tablet 4 2025-05-29T15:24:00.186069Z node 1 :STATISTICS DEBUG: service_impl.cpp:1063: Tablet 4 is not local. 
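
The timeout sweep above (it continues below for tablets 5 and 6) shows two guards in the statistics service: tablets that already answered are skipped ("Tablet N has already been processed"), and stale timer or client events are dropped when their round no longer matches the current one ("Event round 1 is different from the current 0 ... Skip TEvStatisticsRequestTimeout"). A minimal C++ sketch of that round guard; TRequestState and OnRequestTimeout are hypothetical names, not the actual service_impl.cpp types.

    #include <cstdint>
    #include <map>

    struct TRequestState {
        uint64_t Round = 0;                        // current aggregation round
        std::map<uint64_t, bool> TabletProcessed;  // tabletId -> response received?
    };

    void OnRequestTimeout(TRequestState& state, uint64_t eventRound) {
        if (eventRound != state.Round) {
            return;  // "Event round 1 is different from the current 0" -> skip it
        }
        for (const auto& [tabletId, processed] : state.TabletProcessed) {
            if (processed) {
                continue;  // "Tablet N has already been processed"
            }
            // Otherwise: "No result was received from the tablet N"; a tablet
            // served by another node cannot be retried here, hence the
            // follow-up "Tablet N is not local."
        }
    }
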
2025-05-29T15:24:00.186077Z node 1 :STATISTICS DEBUG: service_impl.cpp:1028: Tablet 5 has already been processed 2025-05-29T15:24:00.186081Z node 1 :STATISTICS ERROR: service_impl.cpp:1032: No result was received from the tablet 6 2025-05-29T15:24:00.186084Z node 1 :STATISTICS DEBUG: service_impl.cpp:1063: Tablet 6 is not local. 2025-05-29T15:24:00.186088Z node 1 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 1 2025-05-29T15:24:00.186103Z node 1 :STATISTICS DEBUG: service_impl.cpp:252: Event round 1 is different from the current 0 2025-05-29T15:24:00.186106Z node 1 :STATISTICS DEBUG: service_impl.cpp:1021: Skip TEvStatisticsRequestTimeout 2025-05-29T15:24:00.186115Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 2, status = ERROR 2025-05-29T15:24:00.186118Z node 1 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.186124Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [0:0:0], tablet id = 4, status = ERROR 2025-05-29T15:24:00.186127Z node 1 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.186132Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2025-05-29T15:24:00.186135Z node 1 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] Test command err: 2025-05-29T15:24:00.058130Z node 1 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-05-29T15:24:00.059105Z node 1 :STATISTICS DEBUG: service_impl.cpp:1121: EvClientDestroyed, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2 2025-05-29T15:24:00.059118Z node 1 :STATISTICS DEBUG: service_impl.cpp:1063: Tablet 2 is not local. 2025-05-29T15:24:00.059131Z node 1 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 3 2025-05-29T15:24:00.059228Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = ERROR 2025-05-29T15:24:00.059234Z node 1 :STATISTICS DEBUG: service_impl.cpp:1063: Tablet 1 is not local. 2025-05-29T15:24:00.059244Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-05-29T15:24:00.059246Z node 1 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.059250Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = ERROR 2025-05-29T15:24:00.059253Z node 1 :STATISTICS DEBUG: service_impl.cpp:1063: Tablet 4 is not local. 2025-05-29T15:24:00.059258Z node 1 :STATISTICS DEBUG: service_impl.cpp:1121: EvClientDestroyed, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5 2025-05-29T15:24:00.059260Z node 1 :STATISTICS DEBUG: service_impl.cpp:1063: Tablet 5 is not local. 
2025-05-29T15:24:00.059265Z node 1 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 6 2025-05-29T15:24:00.059271Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = ERROR 2025-05-29T15:24:00.059273Z node 1 :STATISTICS DEBUG: service_impl.cpp:1063: Tablet 7 is not local. 2025-05-29T15:24:00.059277Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2025-05-29T15:24:00.059279Z node 1 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.059282Z node 1 :STATISTICS DEBUG: service_impl.cpp:1121: EvClientDestroyed, node id = 1, client id = [1:15:2062], server id = [1:15:2062], tablet id = 8 2025-05-29T15:24:00.059284Z node 1 :STATISTICS DEBUG: service_impl.cpp:1063: Tablet 8 is not local. 2025-05-29T15:24:00.059287Z node 1 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 1 |64.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/conveyor_composite/ut/unittest >> CompositeConveyorTests::Test10xDistribution [GOOD] Test command err: {I:1753};{S:15410};{N:153892}; {I:1950};{S:20110};{N:206980}; {I:2943};{S:30005};{N:305560}; {I:4737};{S:47632};{N:480690}; {I:9354};{S:93822};{N:674028}; {I:18654};{S:187957};{N:815550}; {I:34526};{S:311412};{N:921417}; {I:56155};{S:456831};{N:997726}; {I:85209};{S:645551};{N:1000000}; {I:159458};{S:794202};{N:1000000}; {I:249666};{S:926198};{N:1000000}; {I:388692};{S:1000000};{N:1000000}; {I:610706};{S:1000000};{N:1000000}; {I:809720};{S:1000000};{N:1000000}; {I:981302};{S:1000000};{N:1000000}; {I:1000000};{S:1000000};{N:1000000}; 47us per task 15.001787s;11.001343s;8.001012s; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] Test command err: 2025-05-29T15:24:00.050367Z node 1 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-05-29T15:24:00.051552Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-05-29T15:24:00.051611Z node 1 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:00.051633Z node 1 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 1 2025-05-29T15:24:00.051686Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-05-29T15:24:00.051703Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-05-29T15:24:00.051707Z node 1 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.051731Z node 3 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-05-29T15:24:00.051748Z node 3 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 3, client id = [3:45:2057], server id = [3:45:2057], tablet id = 3, status = OK 
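
The aggregation traces above all share one shape: the root node fans TEvAggregateStatistics out across a tree of nodes, each node queries its local tablets with TEvStatisticsRequest, and partial results are folded upward ("Send aggregate statistics response to node: N") until the root answers. A simplified, non-actor C++ sketch of that fan-out/fan-in; it counts covered tablets where the real service merges statistics payloads, and all names are illustrative.

    #include <cstdint>
    #include <vector>

    struct TNode {
        uint64_t NodeId = 0;
        std::vector<TNode*> Children;        // nodes this one fans out to
        std::vector<uint64_t> LocalTablets;  // tablets queried locally
    };

    // Folds the subtree bottom-up; a real implementation would merge
    // statistics payloads instead of counting tablets.
    uint64_t Aggregate(const TNode& node) {
        uint64_t covered = node.LocalTablets.size();  // local TEvStatisticsRequest fan-out
        for (const TNode* child : node.Children) {
            covered += Aggregate(*child);             // child's TEvAggregateStatisticsResponse
        }
        return covered;                               // folded into the reply to the parent
    }
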
2025-05-29T15:24:00.051756Z node 3 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [3:45:2057], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:00.051770Z node 4 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-05-29T15:24:00.051779Z node 3 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 3 2025-05-29T15:24:00.051784Z node 3 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 1 2025-05-29T15:24:00.051803Z node 4 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2025-05-29T15:24:00.051809Z node 4 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:00.051815Z node 3 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 3, client id = [3:45:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-05-29T15:24:00.051818Z node 3 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.051825Z node 4 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 4 2025-05-29T15:24:00.051829Z node 4 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-05-29T15:24:00.051840Z node 1 :STATISTICS DEBUG: service_impl.cpp:448: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-05-29T15:24:00.051855Z node 4 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-05-29T15:24:00.051859Z node 4 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:00.051867Z node 2 :STATISTICS DEBUG: service_impl.cpp:448: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-05-29T15:24:00.062012Z node 4 :STATISTICS DEBUG: service_impl.cpp:252: Event round 1 is different from the current 0 2025-05-29T15:24:00.062029Z node 4 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-05-29T15:24:00.062042Z node 3 :STATISTICS DEBUG: service_impl.cpp:252: Event round 1 is different from the current 0 2025-05-29T15:24:00.062047Z node 3 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-05-29T15:24:00.072221Z node 1 :STATISTICS INFO: service_impl.cpp:416: Node 2 is unavailable 2025-05-29T15:24:00.072239Z node 1 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 1 2025-05-29T15:24:00.072261Z node 2 :STATISTICS DEBUG: service_impl.cpp:401: Skip TEvKeepAliveTimeout 2025-05-29T15:24:00.072273Z node 1 :STATISTICS DEBUG: service_impl.cpp:252: Event round 1 is different from the current 0 2025-05-29T15:24:00.072277Z node 1 :STATISTICS DEBUG: service_impl.cpp:393: Skip TEvKeepAliveTimeout 2025-05-29T15:24:00.072301Z node 1 :STATISTICS DEBUG: service_impl.cpp:252: Event round 1 is different from the current 0 2025-05-29T15:24:00.072305Z node 1 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-05-29T15:24:00.072315Z node 1 :STATISTICS DEBUG: service_impl.cpp:252: Event round 1 is different from the current 0 2025-05-29T15:24:00.072319Z node 1 :STATISTICS DEBUG: service_impl.cpp:428: Skip TEvAggregateKeepAlive ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> 
TExportToS3WithRebootsTests::ShouldSucceedOnSingleTopic [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:23:38.039008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:38.039034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:38.039039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:38.039044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:38.039055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:38.039059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:38.039068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:38.039083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:38.039169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:38.039244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:38.053344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:23:38.053367Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:38.053465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: 
[1:15:2062] 2025-05-29T15:23:38.056599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:38.056640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:38.056678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:38.059800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:38.059891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:38.060013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:38.060202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:38.060898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:38.060954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:38.061185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:38.061195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:38.061229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:38.061237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:38.061244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:38.061265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:23:38.062942Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:23:38.081888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:38.081946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, 
opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:38.081996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:38.082034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:38.082043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:38.082635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:38.082660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:38.082711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:38.082722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:38.082728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:38.082734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:38.083149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:38.083160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:38.083164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:38.083533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:38.083543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:38.083549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:38.083556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:38.084051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:38.084421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:38.084455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:38.084617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:38.084637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:38.084643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:38.084695Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... d, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-29T15:23:52.341816Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-05-29T15:23:52.341833Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-05-29T15:23:52.341923Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:52.341939Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 219043334252 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:52.341944Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:129: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000005, at schemeshard: 72057594046678944 2025-05-29T15:23:52.341960Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:180: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-05-29T15:23:52.341966Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-29T15:23:52.341970Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-29T15:23:52.341975Z node 51 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-29T15:23:52.341978Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-29T15:23:52.341985Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:23:52.341992Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:23:52.341997Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-05-29T15:23:52.342002Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-29T15:23:52.342006Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710758:0 2025-05-29T15:23:52.342010Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710758:0 2025-05-29T15:23:52.342016Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:23:52.342021Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-05-29T15:23:52.342025Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-29T15:23:52.342029Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-29T15:23:52.342297Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-29T15:23:52.342311Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-29T15:23:52.342539Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:52.342546Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:52.342565Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:23:52.342584Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:52.342589Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard 
Send, to populator: [51:208:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-05-29T15:23:52.342593Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [51:208:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-05-29T15:23:52.342695Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-29T15:23:52.342704Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-29T15:23:52.342709Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-29T15:23:52.342714Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-29T15:23:52.342718Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:23:52.342809Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-29T15:23:52.342819Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-29T15:23:52.342823Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-29T15:23:52.342827Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:23:52.342834Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:23:52.342844Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-05-29T15:23:52.342848Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [51:123:2148] 2025-05-29T15:23:52.342880Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 
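
The publication bookkeeping above ("Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1", two TEvUpdateAck entries counting down, then "Publication complete, notify & remove") is a per-transaction countdown: the operation is reported complete only once every scheme-board path publication it produced has been acknowledged, after which each subscriber receives a TEvNotifyTxCompletionResult. An illustrative C++ sketch of such a countdown; these are not schemeshard's real types.

    #include <cstdint>
    #include <functional>
    #include <map>
    #include <utility>
    #include <vector>

    struct TPublicationTracker {
        std::map<uint64_t, uint64_t> InFlight;  // txId -> unacked path publications
        std::map<uint64_t, std::vector<std::function<void()>>> Subscribers;

        void StartPublication(uint64_t txId, uint64_t paths) {
            InFlight[txId] = paths;  // e.g. "publications: 2"
        }

        void Subscribe(uint64_t txId, std::function<void()> onDone) {
            Subscribers[txId].push_back(std::move(onDone));
        }

        void OnUpdateAck(uint64_t txId) {  // one ack per published path
            auto it = InFlight.find(txId);
            if (it == InFlight.end() || it->second == 0) {
                return;  // unknown or already completed transaction
            }
            if (--it->second == 0) {  // "Publication in-flight, count: 1" -> 0
                for (auto& notify : Subscribers[txId]) {
                    notify();  // TEvNotifyTxCompletionResult per subscriber
                }
                Subscribers.erase(txId);
                InFlight.erase(it);
            }
        }
    };
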
2025-05-29T15:23:52.342885Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:23:52.342892Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:23:52.343211Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-29T15:23:52.343417Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-29T15:23:52.343432Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-05-29T15:23:52.343440Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710758 2025-05-29T15:23:52.343446Z node 51 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute 2025-05-29T15:23:52.343451Z node 51 :EXPORT DEBUG: schemeshard_export__create.cpp:1232: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-05-29T15:23:52.343455Z node 51 :EXPORT DEBUG: schemeshard_export__create.cpp:1263: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 1003, itemIdx# 4294967295 2025-05-29T15:23:52.343484Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:23:52.343694Z node 51 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-29T15:23:52.343728Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:23:52.343734Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:23:52.343780Z node 51 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:23:52.343791Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:23:52.343795Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:683:2611] TestWaitNotification: OK eventTxId 1003 >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows [FAIL] >> DataShardReadIterator::ShouldReadRangeLeftInclusive [FAIL] >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne [FAIL] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite [FAIL] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 [FAIL] >> DataShardReadIterator::ShouldReadRangeRightInclusive >> DataShardReadIterator::ShouldReadKeyPrefix1 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite >> DataShardReadIteratorConsistency::LeaseConfirmationNotOutOfOrder >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 3068, msgbus: 8884 2025-05-29T15:23:11.910769Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888578507427330:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:11.910790Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b41/r3tmp/tmpJ77Uqt/pdisk_1.dat 2025-05-29T15:23:11.983281Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3068, node 1 2025-05-29T15:23:11.996971Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:11.996988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:11.996990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:11.997034Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:23:12.011306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:12.011339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:12.013047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8884 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-05-29T15:23:12.018524Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509888578507427551:2114] Handle TEvNavigate describe path dc-1 2025-05-29T15:23:12.020259Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509888582802395338:2430] HANDLE EvNavigateScheme dc-1 2025-05-29T15:23:12.020606Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:7509888582802395338:2430] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.028307Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:7509888582802395338:2430] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-05-29T15:23:12.029827Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:7509888582802395338:2430] Handle TEvDescribeSchemeResult Forward to# [1:7509888582802395337:2429] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
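
The "WaitRootIsUp 'dc-1'..." / "success." pair above is the test client confirming that the root path is served before the test proceeds, conceptually a describe-and-check loop. A minimal sketch under that assumption, using a hypothetical client type whose Ls() returns a result with a Success flag; the real TClient interface may differ.

    #include <chrono>
    #include <thread>

    template <typename TClientLike>
    bool WaitRootIsUp(TClientLike& client, const char* root, int attempts = 100) {
        for (int i = 0; i < attempts; ++i) {
            if (client.Ls(root).Success) {  // "TClient::Ls request: dc-1"
                return true;                // "WaitRootIsUp 'dc-1' success."
            }
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
        }
        return false;  // root never came up within the polling budget
    }
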
2025-05-29T15:23:12.033468Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888578507427551:2114] Handle TEvProposeTransaction 2025-05-29T15:23:12.033480Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:327: actor# [1:7509888578507427551:2114] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-05-29T15:23:12.053681Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:7509888578507427551:2114] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:23:12.054427Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:23:12.054440Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888578507427551:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:23:12.054493Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888578507427551:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7509888582802395356:2441] 2025-05-29T15:23:12.065599Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888582802395356:2441] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:12.065648Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888582802395356:2441] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:12.065653Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888582802395356:2441] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:12.065666Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888582802395356:2441] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:12.065785Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888582802395356:2441] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.065841Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888582802395356:2441] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-05-29T15:23:12.065861Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888582802395356:2441] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:23:12.065914Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888582802395356:2441] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:23:12.066137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.067118Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888582802395356:2441] txid# 281474976715657 Status 
StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:23:12.067134Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509888582802395356:2441] txid# 281474976715657 SEND to# [1:7509888582802395350:2435] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-05-29T15:23:12.075958Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888578507427551:2114] Handle TEvProposeTransaction 2025-05-29T15:23:12.075971Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888578507427551:2114] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:23:12.075981Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888578507427551:2114] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509888582802395396:2477] 2025-05-29T15:23:12.076718Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888582802395396:2477] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:12.076742Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888582802395396:2477] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:12.076746Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888582802395396:2477] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:12.076761Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888582802395396:2477] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:12.076850Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888582802395396:2477] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.076871Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888582802395396:2477] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:12.076883Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888582802395396:2477] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:23:12.076933Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888582802395396:2477] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:23:12.077013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.077526Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888582802395396:2477] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResul ... 
9T15:23:44.449677Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888720891777078:2553] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:44.449688Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888720891777078:2553] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-05-29T15:23:44.449738Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888720891777078:2553] txid# 281474976715661 HANDLE EvClientConnected 2025-05-29T15:23:44.453907Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888720891777078:2553] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-05-29T15:23:44.453924Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888720891777078:2553] txid# 281474976715661 SEND to# [59:7509888720891777077:2329] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-05-29T15:23:44.615746Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888716596809069:2113] Handle TEvProposeTransaction 2025-05-29T15:23:44.615761Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888716596809069:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-05-29T15:23:44.615776Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888716596809069:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7509888720891777099:2568] 2025-05-29T15:23:44.616552Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888720891777099:2568] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:49558" 2025-05-29T15:23:44.616567Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888720891777099:2568] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:44.616572Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888720891777099:2568] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:44.616590Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888720891777099:2568] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:44.616705Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888720891777099:2568] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:44.616729Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888720891777099:2568] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 RedirectRequired# true 2025-05-29T15:23:44.616740Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888720891777099:2568] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-05-29T15:23:44.616777Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888720891777099:2568] txid# 281474976715662 HANDLE EvClientConnected 2025-05-29T15:23:44.617038Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:23:44.623307Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888720891777099:2568] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-05-29T15:23:44.623327Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888720891777099:2568] txid# 281474976715662 SEND to# [59:7509888720891777098:2343] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-05-29T15:23:44.639877Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888716596809069:2113] Handle TEvProposeTransaction 2025-05-29T15:23:44.639889Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888716596809069:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-05-29T15:23:44.639904Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888716596809069:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7509888720891777132:2587] 2025-05-29T15:23:44.640655Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888720891777132:2587] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:49558" 2025-05-29T15:23:44.640671Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888720891777132:2587] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:44.640675Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888720891777132:2587] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:44.640687Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888720891777132:2587] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:44.640784Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888720891777132:2587] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:44.640808Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888720891777132:2587] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:44.640820Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888720891777132:2587] txid# 
281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-05-29T15:23:44.640863Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888720891777132:2587] txid# 281474976715663 HANDLE EvClientConnected 2025-05-29T15:23:44.643333Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888720891777132:2587] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-05-29T15:23:44.643349Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888720891777132:2587] txid# 281474976715663 SEND to# [59:7509888720891777131:2345] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-05-29T15:23:44.653247Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888716596809069:2113] Handle TEvProposeTransaction 2025-05-29T15:23:44.653261Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888716596809069:2113] TxId# 281474976715664 ProcessProposeTransaction 2025-05-29T15:23:44.653270Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888716596809069:2113] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7509888720891777159:2599] 2025-05-29T15:23:44.654107Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888720891777159:2599] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0ODU3NTQyNCwiaWF0IjoxNzQ4NTMyMjI0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.SNcj0fOD-V-y2S6uFlpWh5TzzZp_iUfGScxqVLzgF09-8a_tVdCBEAryx2EewRkzxXpa5auwMoGmSYUKBDya-KiT6pInDCX12Me5G_mcUAYEHr80MJIWu83Ewtt6GXkmAhQspEjU3Grpp2EM5p7Ol25S6XavQ1COKhptTGGcXeIRLZ2XKQEfpSeZPOkF9ByZXdX6qbHzEU5sNjzWbtEHzAxRqhiqVu4-cyOoabEMI2kshPtBVsKuRY_3NVuCihdywGLYzzX3t8AA7OzSnbuaormw1cxwiL6HQE6bd3KKShdO6vLdNwnJ3bNlGEX6_uDXDlyBEo_eSHFvE5FC0QqtSA\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0ODU3NTQyNCwiaWF0IjoxNzQ4NTMyMjI0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:49558" 2025-05-29T15:23:44.654124Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888720891777159:2599] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:44.654129Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888720891777159:2599] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-05-29T15:23:44.654182Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1401: Actor# [59:7509888720891777159:2599] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-05-29T15:23:44.654193Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1436: Actor# [59:7509888720891777159:2599] txid# 281474976715664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-05-29T15:23:44.654204Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888720891777159:2599] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:44.654265Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# 
[59:7509888720891777159:2599] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:44.654270Z node 59 :TX_PROXY ERROR: schemereq.cpp:1079: Actor# [59:7509888720891777159:2599] txid# 281474976715664, Access denied for ordinaryuser, attempt to manage user 2025-05-29T15:23:44.654290Z node 59 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [59:7509888720891777159:2599] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-05-29T15:23:44.654295Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888720891777159:2599] txid# 281474976715664 SEND to# [59:7509888720891777158:2356] Source {TEvProposeTransactionStatus Status# 5} 2025-05-29T15:23:44.654571Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:2583: SessionId: ydb://session/3?node_id=59&id=NWIwZTgwZmYtZjgxNmZiYTUtMzg2MzY4OGMtMmUyMWE3, ActorId: [59:7509888720891777149:2356], ActorState: ExecuteState, TraceId: 01jwea8rma9pbj5e1wnpvy45wb, Create QueryResponse for error on request, msg: 2025-05-29T15:23:44.654724Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:353: actor# [59:7509888716596809069:2113] Handle TEvExecuteKqpTransaction 2025-05-29T15:23:44.654731Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:342: actor# [59:7509888716596809069:2113] TxId# 281474976715665 ProcessProposeKqpTransaction ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveBasicSelectAll 2025-05-29 15:23:30,317 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-29 15:23:30,388 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. Process tree before termination: pid rss ref pdirt 3582157 46.0M 46.0M 23.2M test_tool run_ut @/home/runner/.ya/build/build_root/ciyv/0019d5/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/te 3582813 371M 372M 127M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/ciyv/0019d5/ydb/core/kqp/ut/federated_query/generic_ut/test-results/uni Test command err: Trying to start YDB, gRPC: 62581, MsgBus: 29384 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0019d5/r3tmp/tmpGpw0By/pdisk_1.dat 2025-05-29T15:22:30.998213Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888402218749443:2265];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:22:30.998240Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:22:31.059615Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888402218749217:2079] 1748532150996058 != 1748532150996061 2025-05-29T15:22:31.063229Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62581, node 1 2025-05-29T15:22:31.078899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:31.078917Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:31.078919Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:31.078971Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:31.102105Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:31.102146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:29384 2025-05-29T15:22:31.107091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29384 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:22:31.169697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:22:31.175209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:22:31.462208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888406513717171:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:31.462825Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:32.000138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-05-29T15:22:32.070588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888410808684598:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:32.070622Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:32.070686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888410808684604:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:22:32.071517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-05-29T15:22:32.073316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-05-29T15:22:32.073445Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888410808684606:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-05-29T15:22:32.166309Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888410808684646:2389] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:22:32.237195Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888410808684697:2362], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:32.237424Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTJkNWI4MzktMmFlNGYyNTItYWQ3YmE0MzUtYjA2OTRkZGY=, ActorId: [1:7509888410808684593:2338], ActorState: ExecuteState, TraceId: 01jwea6hwx47kb24z8ednmyf04, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea6hwr4gm93447v7mw1m82 2025-05-29T15:22:32.238925Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZTJkNWI4MzktMmFlNGYyNTItYWQ3YmE0MzUtYjA2OTRkZGY=" tx_control { tx_id: "01jwea6hwr4gm93447v7mw1m82" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532152 } } } } } ; 2025-05-29T15:22:32.239007Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:22:33.294715Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888415103652075:2396], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:22:33.294901Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjM0NTkyZDMtNTU1MzRlMWMtZjVlZjhiYjQtMTM2MWFlYTM=, ActorId: [1:7509888415103652045:2383], ActorState: ExecuteState, TraceId: 01jwea6jy443q6pngc7t5xy8xv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea6jy2et8stwm79gj5smqy 2025-05-29T15:22:33.295517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NjM0NTkyZDMtNTU1MzRlMWMtZjVlZjhiYjQtMTM2MWFlYTM=" tx_control { tx_id: "01jwea6jy2et8stwm79gj5smqy" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532153 } } } } } ; 2025-05-29T15:22:33.295799Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:22:34.347373Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888419398619 ... or=cannot UPSERT objects:
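Note on the failure pattern in this chunk: every compilation of the metadata initializer's UPSERT into `//Root/.metadata/initialization/migrations` fails with the same "yql/essentials/ast/yql_expr.h:1874: index out of range" issue, the initializer logs "cannot UPSERT objects" and retries roughly once per second, and the loop only ends when the test wrapper's 60-second timeout kills the process (the traceback appears further below). A minimal sketch of that retry-until-deadline shape — hypothetical helper, not YDB source:

```cpp
// Hypothetical sketch of a retry loop with a wall-clock budget, mirroring
// the ~1 s retry cadence and 60 s wrapper deadline visible in this log.
#include <chrono>
#include <functional>
#include <thread>

bool RetryUntilDeadline(const std::function<bool()>& attempt,
                        std::chrono::seconds budget,
                        std::chrono::milliseconds pause) {
    const auto deadline = std::chrono::steady_clock::now() + budget;
    while (std::chrono::steady_clock::now() < deadline) {
        if (attempt()) {
            return true;  // e.g. the migrations UPSERT finally succeeded
        }
        std::this_thread::sleep_for(pause);  // ~1 s between attempts above
    }
    return false;  // budget exhausted: the wrapper timeout fires instead
}
```

Because the compile error here is deterministic, no amount of retrying can succeed, so the run is bounded only by the external timeout — which is what the wrapper's "Wrapper execution timed out" warning records.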
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:26.419811Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888642736924291:4561], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:26.420766Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjI3ZDljMmUtMzNmYTQ4ZWYtZTc3NGEzNTAtNjA3ZDUzN2M=, ActorId: [1:7509888642736924261:4548], ActorState: ExecuteState, TraceId: 01jwea86t8cdvf7a72gavxkgrm, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea86t51n58vgfqp5e6yb47 2025-05-29T15:23:26.421324Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NjI3ZDljMmUtMzNmYTQ4ZWYtZTc3NGEzNTAtNjA3ZDUzN2M=" tx_control { tx_id: "01jwea86t51n58vgfqp5e6yb47" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532206 } } } } } ; 2025-05-29T15:23:26.421372Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:27.507406Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888647031891696:4604], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:27.508368Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDQ3YzlhMWItODUyMjJjYjgtZWVmNmE0ZDItODJjNDJiZmU=, ActorId: [1:7509888647031891666:4591], ActorState: ExecuteState, TraceId: 01jwea87w79rwte172t1wb0yz1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea87w26h62paj08ka5jysz 2025-05-29T15:23:27.514419Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZDQ3YzlhMWItODUyMjJjYjgtZWVmNmE0ZDItODJjNDJiZmU=" tx_control { tx_id: "01jwea87w26h62paj08ka5jysz" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532207 } } } } } ; 2025-05-29T15:23:27.514515Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:28.567251Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888651326859103:4648], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:28.567465Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTdlM2NkODQtNjM3MGU5NzctMWNjYzRkZTUtMTRjYTYzNzg=, ActorId: [1:7509888651326859071:4634], ActorState: ExecuteState, TraceId: 01jwea88xc973v6q1kmas578d2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea88xa82y8sz263eb6dbg9 2025-05-29T15:23:28.568040Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZTdlM2NkODQtNjM3MGU5NzctMWNjYzRkZTUtMTRjYTYzNzg=" tx_control { tx_id: "01jwea88xa82y8sz263eb6dbg9" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532208 } } } } } ; 2025-05-29T15:23:28.568146Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:29.712137Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888655621826508:4691], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:29.712524Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YmNjMDY1NzYtNzdjYTRmYjgtNDc4YjE5OGQtYmFmZGUyYWM=, ActorId: [1:7509888655621826478:4678], ActorState: ExecuteState, TraceId: 01jwea8a10d09ywhzfrygnb8cs, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea8a0cfc3hy402cggenpr1 2025-05-29T15:23:29.713253Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=YmNjMDY1NzYtNzdjYTRmYjgtNDc4YjE5OGQtYmFmZGUyYWM=" tx_control { tx_id: "01jwea8a0cfc3hy402cggenpr1" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532209 } } } } } ; 2025-05-29T15:23:29.713366Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/0019d5/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/0019d5/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadAndDeleteConsumer [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:109:2057] recipient: [1:102:2135] 2025-05-29T15:22:15.451617Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.451649Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927938 is [1:154:2174] sender: [1:155:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:180:2057] recipient: [1:14:2061] 2025-05-29T15:22:15.456100Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.458926Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 
Generation: 1 Important: false } Consumers { Name: "important_user" Generation: 1 Important: true } 2025-05-29T15:22:15.459141Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2198] 2025-05-29T15:22:15.459803Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:186:2198] 2025-05-29T15:22:15.462985Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|2681755b-e8234517-b168bae8-77add4a7_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:178:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:178:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:104:2057] recipient: [2:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:104:2057] recipient: [2:102:2135] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:109:2057] recipient: [2:102:2135] 2025-05-29T15:22:23.547018Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:23.547047Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:150:2057] recipient: [2:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:150:2057] recipient: [2:148:2170] Leader for TabletID 72057594037927938 is [2:154:2174] sender: [2:155:2057] recipient: [2:148:2170] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:178:2057] recipient: [2:14:2061] 2025-05-29T15:22:23.550886Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:23.551038Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 2 actor [2:176:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Consumers { Name: "important_user" Generation: 2 Important: true } 2025-05-29T15:22:23.551140Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2196] 2025-05-29T15:22:23.551788Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:184:2196] 2025-05-29T15:22:23.557011Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie 
default|c92b43b7-24b6369b-ac6edcbb-f4b4ea32_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [2:176:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [2:176:2190] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:104:2057] recipient: [3:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:104:2057] recipient: [3:102:2135] Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:109:2057] recipient: [3:102:2135] 2025-05-29T15:22:31.266551Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:31.266576Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:150:2057] recipient: [3:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:150:2057] recipient: [3:148:2170] Leader for TabletID 72057594037927938 is [3:154:2174] sender: [3:155:2057] recipient: [3:148:2170] Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:180:2057] recipient: [3:14:2061] 2025-05-29T15:22:31.269704Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:31.269864Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 3 actor [3:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 3 ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } Consumers { Name: "important_user" Generation: 3 Important: true } 2025-05-29T15:22:31.269980Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:186:2198] 2025-05-29T15:22:31.270535Z node 3 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:186:2198] 2025-05-29T15:22:31.273691Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|bb7d3661-c0ce3d0b-ed2cd99d-15d8d3ed_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [3:178:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [3:178:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:104:2057] recipient: [4:102:2135] IGNORE Leader for 
TabletID 72057594037927937 is [0:0:0] sender: [4:104:2057] recipient: [4:102:2135] Leader for TabletID 72057594037927937 is [4:108:2139] sender: [4:109:2057] recipient: [4:102:2135] 2025-05-29T15:22:39.458766Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:39.458796Z node 4 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:150:2057] recipient: [4:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:150:2057] recipient: [4:148:2170] Leader for TabletID 72057594037927938 is [4:154:2174] sender: [4:155:2057] recipient: [4:148:2170] Leader for TabletID 72057594037927937 is [4:108:2139] sender: [4:180:2057] recipient: [4:14:2061] 2025-05-29T15:22:39.462801Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:39.462957Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 4 actor [4:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 4 ReadRuleGenerations: 4 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 Important: false } Consumers { Name: "important_user" Generation: 4 Important: true } 2025-05-29T15:22:39.463064Z node 4 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:186:2198] 2025-05-29T15:22:39.463692Z node 4 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [4:186:2198] 2025-05-29T15:22:39.466884Z node 4 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ba990691-29427890-31f27177-738c3001_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 1 ... e.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Leader for TabletID 72057594037927937 is [38:246:2244] sender: [38:318:2057] recipient: [38:14:2061] 2025-05-29T15:23:50.146472Z node 38 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:50.147226Z node 38 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1001 actor [38:315:2298] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1001 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1000 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1000 Important: true } Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:104:2057] recipient: [39:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:104:2057] recipient: [39:102:2135] Leader for TabletID 72057594037927937 is [39:108:2139] sender: [39:109:2057] recipient: [39:102:2135] 2025-05-29T15:23:50.267519Z node 39 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:50.267541Z node 39 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [39:150:2057] recipient: [39:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [39:150:2057] recipient: [39:148:2170] Leader for TabletID 72057594037927938 is [39:154:2174] sender: [39:155:2057] recipient: [39:148:2170] Leader for TabletID 72057594037927937 is [39:108:2139] sender: [39:178:2057] recipient: [39:14:2061] 2025-05-29T15:23:50.270089Z node 39 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:50.270193Z node 39 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1002 actor [39:176:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1002 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1002 ReadRuleGenerations: 1002 ReadRuleGenerations: 1002 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1002 Important: false } Consumers { Name: "user1" Generation: 1002 Important: true } Consumers { Name: "user2" Generation: 1002 Important: true } 2025-05-29T15:23:50.270275Z node 39 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [39:184:2196] 2025-05-29T15:23:50.270676Z node 39 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [39:184:2196] 2025-05-29T15:23:50.272518Z node 39 :PERSQUEUE INFO: ownerinfo.cpp:30: 
new Cookie default|5874af1-d6d9b55d-3009b738-26e5308e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:23:50.378688Z node 39 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-05-29T15:23:50.388307Z node 39 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Leader for TabletID 72057594037927937 is [39:108:2139] sender: [39:239:2057] recipient: [39:100:2134] Leader for TabletID 72057594037927937 is [39:108:2139] sender: [39:242:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:108:2139] sender: [39:243:2057] recipient: [39:241:2241] Leader for TabletID 72057594037927937 is [39:244:2242] sender: [39:245:2057] recipient: [39:241:2241] 2025-05-29T15:23:50.393858Z node 39 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:50.393877Z node 39 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:23:50.393936Z node 39 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [39:293:2283] 2025-05-29T15:23:50.397562Z node 39 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:50.397587Z node 39 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [39:293:2283] 2025-05-29T15:23:50.401410Z node 39 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Leader for TabletID 72057594037927937 is [39:244:2242] sender: [39:316:2057] recipient: [39:14:2061] 2025-05-29T15:23:50.402628Z node 39 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:50.403205Z node 39 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1003 actor [39:313:2296] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1003 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1002 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1002 Important: true } Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:104:2057] recipient: [40:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:104:2057] recipient: [40:102:2135] Leader for TabletID 72057594037927937 is [40:108:2139] sender: [40:109:2057] recipient: [40:102:2135] 2025-05-29T15:23:50.512527Z node 40 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:50.512549Z node 40 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [40:150:2057] recipient: [40:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [40:150:2057] recipient: [40:148:2170] Leader for TabletID 72057594037927938 is [40:154:2174] sender: [40:155:2057] recipient: [40:148:2170] Leader for TabletID 72057594037927937 is [40:108:2139] sender: [40:180:2057] recipient: [40:14:2061] 2025-05-29T15:23:50.516016Z node 40 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:50.516136Z node 40 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1004 actor [40:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1004 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1004 ReadRuleGenerations: 1004 ReadRuleGenerations: 1004 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1004 Important: false } Consumers { Name: "user1" Generation: 1004 Important: true } Consumers { Name: "user2" Generation: 1004 Important: true } 2025-05-29T15:23:50.516227Z node 40 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [40:186:2198] 2025-05-29T15:23:50.516681Z node 40 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [40:186:2198] 2025-05-29T15:23:50.518590Z node 40 :PERSQUEUE INFO: ownerinfo.cpp:30: 
new Cookie default|dae1e80a-ffddaef7-41548875-6cfc025f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:23:50.593661Z node 40 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-05-29T15:23:50.600154Z node 40 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Leader for TabletID 72057594037927937 is [40:108:2139] sender: [40:241:2057] recipient: [40:100:2134] Leader for TabletID 72057594037927937 is [40:108:2139] sender: [40:244:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:108:2139] sender: [40:245:2057] recipient: [40:243:2243] Leader for TabletID 72057594037927937 is [40:246:2244] sender: [40:247:2057] recipient: [40:243:2243] 2025-05-29T15:23:50.605483Z node 40 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:50.605502Z node 40 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:23:50.605581Z node 40 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [40:295:2285] 2025-05-29T15:23:50.609059Z node 40 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:23:50.609080Z node 40 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [40:295:2285] 2025-05-29T15:23:50.613168Z node 40 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Leader for TabletID 72057594037927937 is [40:246:2244] sender: [40:318:2057] recipient: [40:14:2061] 2025-05-29T15:23:50.614349Z node 40 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:23:50.614889Z node 40 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1005 actor [40:315:2298] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1005 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1004 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1004 Important: true } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::ManyDeletes [GOOD] Test command err: 2025-05-29T15:23:20.582896Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888617971882056:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:20.582914Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027d1/r3tmp/tmpBfjgwf/pdisk_1.dat 2025-05-29T15:23:20.677276Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:20.677461Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888617971882036:2079] 1748532200582713 != 1748532200582716 TServer::EnableGrpc on GrpcPort 12471, node 1 2025-05-29T15:23:20.693711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:20.693728Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:20.693731Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:20.693792Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5169 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-29T15:23:20.734595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:20.734639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:20.735398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:20.735608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... 2025-05-29T15:23:20.747799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:23:22.456233Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888626270108338:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:22.456253Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027d1/r3tmp/tmpnLbGi5/pdisk_1.dat 2025-05-29T15:23:22.471848Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:22.472953Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888626270108311:2079] 1748532202456050 != 1748532202456053 TServer::EnableGrpc on GrpcPort 16553, node 2 2025-05-29T15:23:22.496349Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:22.496365Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:22.496367Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:22.496420Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3913 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
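The HIVE warnings above walk a node's volatile state through Unknown -> Disconnected -> Connecting -> Connected. A small transition-check sketch using the state names from the log; the set of legal edges is inferred from this trace alone and is an assumption, not HIVE's actual state machine:

```cpp
#include <iostream>

// States as they appear in the HIVE log lines above.
enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

// Assumed legal transitions, inferred from the observed sequence; the real
// state machine may allow more edges (e.g. reconnect paths).
bool IsAllowed(EVolatileState from, EVolatileState to) {
    switch (from) {
        case EVolatileState::Unknown:      return to == EVolatileState::Disconnected;
        case EVolatileState::Disconnected: return to == EVolatileState::Connecting;
        case EVolatileState::Connecting:   return to == EVolatileState::Connected;
        case EVolatileState::Connected:    return to == EVolatileState::Disconnected;
    }
    return false;
}

int main() {
    // Replays the chain traced for the node above: all three checks print 1.
    std::cout << IsAllowed(EVolatileState::Unknown, EVolatileState::Disconnected)
              << IsAllowed(EVolatileState::Disconnected, EVolatileState::Connecting)
              << IsAllowed(EVolatileState::Connecting, EVolatileState::Connected)
              << '\n';
}
```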
2025-05-29T15:23:22.560858Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:22.560884Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:22.561183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:22.561768Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:23:22.566982Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 .....2025-05-29T15:23:27.458939Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7509888626270108338:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:27.459083Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; ...2025-05-29T15:23:37.471287Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:23:37.471331Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded .. 2025-05-29T15:23:47.135486Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-29T15:23:47.135513Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037890 2025-05-29T15:23:47.135720Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976719700 at tablet 72075186224037889 2025-05-29T15:23:47.135721Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976719700 at tablet 72075186224037890 2025-05-29T15:23:47.135852Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037891 2025-05-29T15:23:47.135877Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037892 2025-05-29T15:23:47.136010Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976719700 at tablet 72075186224037892 2025-05-29T15:23:47.136031Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976719700 at tablet 72075186224037891 2025-05-29T15:23:47.136757Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-29T15:23:47.136769Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2025-05-29T15:23:47.137221Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037891 2025-05-29T15:23:47.137233Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037892 2025-05-29T15:23:47.138866Z node 2 :TX_DATASHARD DEBUG: 
datashard__plan_step.cpp:69: Planned transaction txId 281474976719700 at step 1748532227183 at tablet 72075186224037890 { Transactions { TxId: 281474976719700 AckTo { RawX1: 0 RawX2: 0 } } Step: 1748532227183 MediatorID: 72057594046382081 TabletID: 72075186224037890 } 2025-05-29T15:23:47.138866Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976719700 at step 1748532227183 at tablet 72075186224037892 { Transactions { TxId: 281474976719700 AckTo { RawX1: 0 RawX2: 0 } } Step: 1748532227183 MediatorID: 72057594046382081 TabletID: 72075186224037892 } 2025-05-29T15:23:47.138876Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-05-29T15:23:47.138879Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-05-29T15:23:47.138917Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2025-05-29T15:23:47.138917Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2025-05-29T15:23:47.138922Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:23:47.138922Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:23:47.138929Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1748532227183:281474976719700] in PlanQueue unit at 72075186224037890 2025-05-29T15:23:47.138940Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1748532227183:281474976719700] in PlanQueue unit at 72075186224037892 2025-05-29T15:23:47.138942Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037890 got data tx from cache 1748532227183:281474976719700 2025-05-29T15:23:47.138952Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037892 got data tx from cache 1748532227183:281474976719700 2025-05-29T15:23:47.139477Z node 2 :TX_D ... 
mmediate 0 planned 1 2025-05-29T15:24:02.421324Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-05-29T15:24:02.421370Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976721711 at 72075186224037892 restored its data 2025-05-29T15:24:02.421401Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976721711 at 72075186224037889 restored its data 2025-05-29T15:24:02.421496Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976721711 released its data 2025-05-29T15:24:02.421502Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:02.421562Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2025-05-29T15:24:02.421597Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976721711 released its data 2025-05-29T15:24:02.421603Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:02.421629Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976721711 at 72075186224037890 restored its data 2025-05-29T15:24:02.421704Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037892 step# 1748532242464} 2025-05-29T15:24:02.421716Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2025-05-29T15:24:02.421744Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976721711 released its data 2025-05-29T15:24:02.421749Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:02.421782Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976721711 at 72075186224037892 restored its data 2025-05-29T15:24:02.421824Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1748532242464} 2025-05-29T15:24:02.421892Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:02.422051Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1748532242464} 2025-05-29T15:24:02.422051Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 1748532242464} 2025-05-29T15:24:02.422055Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2025-05-29T15:24:02.422064Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1748532242464 : 281474976721711] from 72075186224037891 at tablet 72075186224037891 send result to client [2:7509888798068866539:13971], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:02.422069Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-05-29T15:24:02.422158Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2025-05-29T15:24:02.422164Z node 2 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-05-29T15:24:02.422225Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976721711 at 72075186224037890 restored its data 2025-05-29T15:24:02.422234Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976721711 at 72075186224037889 restored its data 2025-05-29T15:24:02.422313Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:02.422990Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-05-29T15:24:02.423006Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1748532242464 : 281474976721711] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7509888798068866539:13971], exec latency: 1 ms, propose latency: 1 ms 2025-05-29T15:24:02.423014Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-05-29T15:24:02.423046Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 281474976721711 released its data 2025-05-29T15:24:02.423050Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2025-05-29T15:24:02.423053Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1748532242464 : 281474976721711] from 72075186224037892 at tablet 72075186224037892 send result to client [2:7509888798068866539:13971], exec latency: 1 ms, propose latency: 2 ms 2025-05-29T15:24:02.423058Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-05-29T15:24:02.423059Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:02.430842Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-05-29T15:24:02.431066Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 281474976721711 at 72075186224037889 restored its data 2025-05-29T15:24:02.434112Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:02.436816Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:24:02.436838Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1748532242464 : 281474976721711] from 72075186224037889 at tablet 72075186224037889 send result to client [2:7509888798068866539:13971], exec latency: 13 ms, propose latency: 16 ms 2025-05-29T15:24:02.436853Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:02.448868Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-05-29T15:24:02.449028Z node 2 :TX_DATASHARD DEBUG: 
datashard__object_storage_listing.cpp:152: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2025-05-29T15:24:02.449095Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2025-05-29T15:24:02.449147Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "" contents: 0 common prefixes: 0 2025-05-29T15:24:02.449186Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037889 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 1 2025-05-29T15:24:02.449261Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037891 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-05-29T15:24:02.449298Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037891 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 0 2025-05-29T15:24:02.449368Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-05-29T15:24:02.449447Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2025-05-29T15:24:02.449498Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2025-05-29T15:24:02.449559Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Godfather.avi") 
(type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "/Videos/Godfather.avi" contents: 2 common prefixes: 0 2025-05-29T15:24:02.449609Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/House of Cards/Season 1/Chapter 1.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 4 last path: "/Videos/House of Cards/Season 1/Chapter 1.avi" contents: 3 common prefixes: 1 2025-05-29T15:24:02.449653Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Terminator 2.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 5 last path: "/Videos/Terminator 2.avi" contents: 4 common prefixes: 1 2025-05-29T15:24:02.449670Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037892 S3 Listing: finished status: 0 description: "" contents: 4 common prefixes: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 26622, msgbus: 23462 2025-05-29T15:23:12.826920Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888583823230551:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:12.826941Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b27/r3tmp/tmpSWabDK/pdisk_1.dat 2025-05-29T15:23:12.895449Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26622, node 1 2025-05-29T15:23:12.908358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:23:12.908368Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:23:12.908370Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:23:12.908413Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:23:12.928269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:12.928301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:23462 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-05-29T15:23:12.929790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:12.930250Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509888583823230778:2139] Handle TEvNavigate describe path dc-1 2025-05-29T15:23:12.932063Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509888583823231235:2435] HANDLE EvNavigateScheme dc-1 2025-05-29T15:23:12.932289Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:7509888583823231235:2435] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.939444Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:7509888583823231235:2435] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-05-29T15:23:12.941702Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:7509888583823231235:2435] Handle TEvDescribeSchemeResult Forward to# [1:7509888583823231232:2432] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
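The TObjectStorageListingTest traces further up (datashard__object_storage_listing.cpp) show delimiter-style listing: leaf paths under "/Videos/" are returned as contents, while subtrees such as "/Videos/House of Cards/..." are folded into a single common prefix, with "restarted" and "last path" tracking page continuation. Note the trace's end key "/Videos0", which is "/Videos/" with its last byte incremented to bound the scan. A sketch of the grouping step only, assuming '/' as the delimiter; continuation handling is omitted and the function names are illustrative:

```cpp
#include <iostream>
#include <set>
#include <string>
#include <vector>

// Delimiter-based listing over a sorted key range: each path under the prefix
// is either emitted as a leaf object or folded into a common prefix at the
// first delimiter past the prefix.
void ListObjects(const std::vector<std::string>& sortedPaths,
                 const std::string& prefix, char delimiter,
                 std::vector<std::string>& contents,
                 std::set<std::string>& commonPrefixes) {
    for (const auto& path : sortedPaths) {
        if (path.compare(0, prefix.size(), prefix) != 0) {
            continue;  // outside the [prefix, prefix-with-last-byte+1) range
        }
        auto pos = path.find(delimiter, prefix.size());
        if (pos == std::string::npos) {
            contents.push_back(path);                        // leaf object
        } else {
            commonPrefixes.insert(path.substr(0, pos + 1));  // fold subtree
        }
    }
}

int main() {
    // Illustrative subset of the keys visible in the trace.
    std::vector<std::string> paths = {
        "/Videos/Godfather.avi",
        "/Videos/House of Cards/Season 1/Chapter 1.avi",
        "/Videos/Terminator 2.avi",
    };
    std::vector<std::string> contents;
    std::set<std::string> commonPrefixes;
    ListObjects(paths, "/Videos/", '/', contents, commonPrefixes);
    std::cout << "contents: " << contents.size()
              << " common prefixes: " << commonPrefixes.size() << '\n';
    // -> contents: 2 common prefixes: 1 ("/Videos/House of Cards/")
}
```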
2025-05-29T15:23:12.946031Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888583823230778:2139] Handle TEvProposeTransaction 2025-05-29T15:23:12.946045Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:327: actor# [1:7509888583823230778:2139] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-05-29T15:23:12.960637Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:7509888583823230778:2139] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:23:12.961282Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:23:12.961298Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888583823230778:2139] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:23:12.961345Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888583823230778:2139] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7509888583823231253:2446] 2025-05-29T15:23:12.974139Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888583823231253:2446] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:12.974193Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888583823231253:2446] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:12.974200Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888583823231253:2446] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:12.974217Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888583823231253:2446] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:12.974386Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888583823231253:2446] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.974439Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888583823231253:2446] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-05-29T15:23:12.974456Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888583823231253:2446] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:23:12.974540Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888583823231253:2446] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:23:12.974730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.976786Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888583823231253:2446] txid# 281474976715657 Status 
StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:23:12.976805Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509888583823231253:2446] txid# 281474976715657 SEND to# [1:7509888583823231247:2440] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-05-29T15:23:12.983268Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888583823230778:2139] Handle TEvProposeTransaction 2025-05-29T15:23:12.983284Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888583823230778:2139] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:23:12.983298Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888583823230778:2139] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509888583823231291:2480] 2025-05-29T15:23:12.984039Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888583823231291:2480] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:12.984059Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888583823231291:2480] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:12.984062Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888583823231291:2480] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:12.984081Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888583823231291:2480] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:12.984174Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888583823231291:2480] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.984200Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888583823231291:2480] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:12.984216Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888583823231291:2480] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:23:12.984261Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888583823231291:2480] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:23:12.984391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.985239Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888583823231291:2480] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionR ... 
_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888718594361368:2542] txid# 281474976715660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:23:44.427059Z node 59 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [59:7509888718594361368:2542] txid# 281474976715660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:44.427064Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888718594361368:2542] txid# 281474976715660 SEND to# [59:7509888718594361298:2337] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 48} 2025-05-29T15:23:44.430187Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888714299393186:2113] Handle TEvProposeTransaction 2025-05-29T15:23:44.430209Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888714299393186:2113] TxId# 281474976715661 ProcessProposeTransaction 2025-05-29T15:23:44.430219Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888714299393186:2113] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7509888718594361386:2554] 2025-05-29T15:23:44.431178Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888718594361386:2554] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:46768" 2025-05-29T15:23:44.431203Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888718594361386:2554] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:44.431207Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888718594361386:2554] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:44.431220Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888718594361386:2554] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:44.431326Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888718594361386:2554] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:44.431350Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888718594361386:2554] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:44.431366Z node 59 :TX_PROXY DEBUG: 
schemereq.cpp:103: Actor# [59:7509888718594361386:2554] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-05-29T15:23:44.431407Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888718594361386:2554] txid# 281474976715661 HANDLE EvClientConnected 2025-05-29T15:23:44.437687Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888718594361386:2554] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-05-29T15:23:44.437710Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888718594361386:2554] txid# 281474976715661 SEND to# [59:7509888718594361385:2328] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-05-29T15:23:44.667175Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888714299393186:2113] Handle TEvProposeTransaction 2025-05-29T15:23:44.667197Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888714299393186:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-05-29T15:23:44.667215Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888714299393186:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7509888718594361413:2569] 2025-05-29T15:23:44.668101Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888718594361413:2569] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:46768" 2025-05-29T15:23:44.668125Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888718594361413:2569] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:44.668130Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888718594361413:2569] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:44.668146Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888718594361413:2569] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:44.668249Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888718594361413:2569] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:44.668284Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888718594361413:2569] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:44.668303Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888718594361413:2569] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-05-29T15:23:44.668356Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888718594361413:2569] txid# 281474976715662 HANDLE EvClientConnected 
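Earlier in this test's trace, the proxy received TEvAllocateResult with RangeBegin# 281474976715656 and RangeEnd# 281474976720656: a block of roughly 5000 txids handed out by the allocator tablet and then consumed locally (txid# 281474976715657, 281474976715658, ... above). A sketch of such a range-block client; whether the bounds are inclusive and how refills are requested are assumptions here, not the real allocator protocol:

```cpp
#include <cstdint>
#include <iostream>
#include <stdexcept>

// Range-block id allocation: the proxy holds [Next_, End_] and hands out ids
// without contacting the allocator tablet until the block runs out.
class TTxIdBlock {
public:
    TTxIdBlock(uint64_t begin, uint64_t end) : Next_(begin), End_(end) {}

    bool Exhausted() const { return Next_ > End_; }

    uint64_t Allocate() {
        if (Exhausted()) {
            // In the real system this would trigger another allocate request.
            throw std::runtime_error("txid block exhausted; request a new range");
        }
        return Next_++;
    }

private:
    uint64_t Next_;
    uint64_t End_;  // treated as inclusive here; an assumption
};

int main() {
    TTxIdBlock block(281474976715656ULL, 281474976720656ULL);
    std::cout << block.Allocate() << '\n';  // first id drawn from the block
}
```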
2025-05-29T15:23:44.668513Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:23:44.673255Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888718594361413:2569] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-05-29T15:23:44.673281Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888718594361413:2569] txid# 281474976715662 SEND to# [59:7509888718594361412:2343] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-05-29T15:23:44.688682Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888714299393186:2113] Handle TEvProposeTransaction 2025-05-29T15:23:44.688696Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888714299393186:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-05-29T15:23:44.688712Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888714299393186:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7509888718594361457:2596] 2025-05-29T15:23:44.689496Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888718594361457:2596] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0ODU3NTQyNCwiaWF0IjoxNzQ4NTMyMjI0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.MK4H0sLyHyz585aHK1DXf78CPREk9omICSDSUi2-Cw_psAR0QnxWug0XtuAyBVEEXtfUJEIns-NvtgtNgVtm1C1sw5hugiO7z3ErBN-9qtHRDvScgesYSWU2UFHgDt9l_VKeqMd0dgpcb_WmKSrCePWC2sTIpuqHmB2XXCkN3Y34hpb5HzLFU_xqs2lOBEDZsHcERCfw_wtDxtl8EGZCjV8BOTeBe1EBq_yvDkmawp7ClUmIMzgCSqGhOvxJ39dK2ev29M_qpuipyPyeHx1DVScM1dEqSr-8ZhApZAzXyhjyJZ8G1aqR68UQ23jFP6U5S4GVEy-kXF7gYTROA6NCuw\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0ODU3NTQyNCwiaWF0IjoxNzQ4NTMyMjI0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:46768" 2025-05-29T15:23:44.689513Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888718594361457:2596] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:44.689516Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888718594361457:2596] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-05-29T15:23:44.689580Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1401: Actor# [59:7509888718594361457:2596] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-05-29T15:23:44.689591Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1436: Actor# [59:7509888718594361457:2596] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-05-29T15:23:44.689601Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888718594361457:2596] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:44.689666Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# 
[59:7509888718594361457:2596] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:44.689672Z node 59 :TX_PROXY ERROR: schemereq.cpp:1079: Actor# [59:7509888718594361457:2596] txid# 281474976715663, Access denied for ordinaryuser, attempt to manage user 2025-05-29T15:23:44.689696Z node 59 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [59:7509888718594361457:2596] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-05-29T15:23:44.689701Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888718594361457:2596] txid# 281474976715663 SEND to# [59:7509888718594361456:2348] Source {TEvProposeTransactionStatus Status# 5} 2025-05-29T15:23:44.689960Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:2583: SessionId: ydb://session/3?node_id=59&id=YzMyZTBmNi0yZTc1Zjk0OC05M2U2MzQ3NS1iMTc5MGU4ZQ==, ActorId: [59:7509888718594361442:2348], ActorState: ExecuteState, TraceId: 01jwea8rnd9d97fpamwndnyx8w, Create QueryResponse for error on request, msg: 2025-05-29T15:23:44.690092Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:353: actor# [59:7509888714299393186:2113] Handle TEvExecuteKqpTransaction 2025-05-29T15:23:44.690098Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:342: actor# [59:7509888714299393186:2113] TxId# 281474976715664 ProcessProposeKqpTransaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::AutoSplitMergeQueue [GOOD] Test command err: 2025-05-29T15:23:22.157481Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888627141748973:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:22.157514Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002797/r3tmp/tmpP96SJH/pdisk_1.dat 2025-05-29T15:23:22.221157Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:28855 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
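The denial above ("Access denied for ordinaryuser, attempt to manage user") follows from the flags the proxy logs: CheckAdministrator and CheckDatabaseAdministrator are both enabled, and ordinaryuser is neither cluster administrator, database administrator, nor database owner (root@builtin). A sketch of that decision with inputs named after the log fields; the rule is inferred from this trace, not taken from schemereq.cpp:

```cpp
#include <iostream>
#include <string>

// Inputs as they appear in the schemereq.cpp log lines above.
struct TUserCtx {
    std::string UserSID;
    bool IsClusterAdministrator = false;
    bool IsDatabaseAdministrator = false;
    std::string DatabaseOwner;
};

// Inferred rule: when admin checks are enabled, managing users requires being
// a cluster admin, a database admin, or the database owner. This reproduces
// both outcomes in the trace (root@builtin allowed, ordinaryuser denied).
bool MayManageUsers(const TUserCtx& ctx, bool checkAdministrator) {
    if (!checkAdministrator) {
        return true;
    }
    return ctx.IsClusterAdministrator
        || ctx.IsDatabaseAdministrator
        || ctx.UserSID == ctx.DatabaseOwner;
}

int main() {
    TUserCtx root{"root@builtin", /*cluster*/ true, /*db*/ false, "root@builtin"};
    TUserCtx ordinary{"ordinaryuser", false, false, "root@builtin"};
    std::cout << MayManageUsers(root, true)      // 1: allowed
              << MayManageUsers(ordinary, true)  // 0: access denied
              << '\n';
}
```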
2025-05-29T15:23:22.292113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:22.292148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:22.292734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:22.293140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:23:22.299494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532202396 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "String" TypeId: 4097 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) A-0 B-0 2025-05-29T15:23:22.504755Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.9, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-29T15:23:22.510390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2025-05-29T15:23:22.510619Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-29T15:23:22.511220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2025-05-29T15:23:22.610604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-05-29T15:23:22.610678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 6291502 row count 1 2025-05-29T15:23:22.610714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 1, DataSize 6291502 2025-05-29T15:23:22.610783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:485: Do not want to split tablet 72075186224037888 2025-05-29T15:23:22.610863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-1 2025-05-29T15:23:22.657632Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.14, eph 2} end=Done, 2 blobs 1r (max 1), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-29T15:23:22.660305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2025-05-29T15:23:22.664994Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 3 blobs 2r (max 2), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (12583126 0 0)b }, ecr=1.000 2025-05-29T15:23:22.665756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2025-05-29T15:23:22.762899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-05-29T15:23:22.762967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 12583004 row count 2 2025-05-29T15:23:22.762993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 2, DataSize 12583004 2025-05-29T15:23:22.763029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:485: Do not want to split tablet 72075186224037888 2025-05-29T15:23:22.763437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 B-1 2025-05-29T15:23:22.810560Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.19, eph 3} end=Done, 2 blobs 1r (max 1), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-29T15:23:22.813240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 18874506 rowCount 3 cpuUsage 0 2025-05-29T15:23:22.828431Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 3r (max 3), put Spent{time=0.016s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (18874656 0 0)b }, ecr=1.000 2025-05-29T15:23:22.828904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 18874506 rowCount 3 cpuUsage 0 2025-05-29T15:23:22.913467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-05-29T15:23:22.913515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 18874506 row count 3 2025-05-29T15:23:22.913550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 3, DataSize 18874506 2025-05-29T15:23:22.913584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:485: Do not want to split tablet 72075186224037888 2025-05-29T15:23:22.913945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-2 2025-05-29T15:23:22.943356Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 4} end=Done, 2 blobs 1r (max 1), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-29T15:23:22.943882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 25166008 rowCount 4 cpuUsage 0 2025-05-29T15:23:22.971680Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 4} end=Done, 5 blobs 4r (max 4), put Spent{time=0.028s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (25166186 0 0)b }, ecr=1.000 2025-05-29T15:23:22.972215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats 
at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 25166008 rowCount 4 cpuUsage 0 2025-05-29T15:23:23.044040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-05-29T15:23:23.044096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 25166008 row count 4 2025-05-29T15:23:23.044124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 4, DataSize 25166008 2025-05-29T15:23:23.044166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:485: Do not want to split tablet 72 ... neralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 6 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumn... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532222563 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 6 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumn... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532222563 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 6 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumn... 
(TRUNCATED) 2025-05-29T15:23:57.362340Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:23:57.362358Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532222563 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 6 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumn...
(TRUNCATED) 2025-05-29T15:23:58.991494Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-05-29T15:23:58.991507Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-05-29T15:23:58.991509Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-05-29T15:23:58.991511Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-05-29T15:23:59.049020Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037898 not found 2025-05-29T15:23:59.049033Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-05-29T15:23:59.087159Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037900 not found 2025-05-29T15:23:59.087175Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037901 not found 2025-05-29T15:23:59.087177Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037899 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532222563 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 10 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532222563 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 10 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) >> Secret::ValidationQueryService [FAIL] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow [FAIL] >> DataShardReadIterator::ShouldReadNonExistingKey |64.3%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_39_Query [FAIL] Test command err: 2025-05-29T15:23:27.400488Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888647431691495:2148];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:27.440644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:23:27.440675Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00230d/r3tmp/tmpBNJ3nN/pdisk_1.dat 2025-05-29T15:23:27.476928Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888647431691368:2079] 1748532207396770 != 1748532207396773 2025-05-29T15:23:27.480068Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23528, node 1 2025-05-29T15:23:27.493506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/00230d/r3tmp/yandexbrsyTV.tmp 2025-05-29T15:23:27.493520Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/00230d/r3tmp/yandexbrsyTV.tmp 2025-05-29T15:23:27.493615Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/00230d/r3tmp/yandexbrsyTV.tmp 2025-05-29T15:23:27.493655Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:23:27.499359Z INFO: TTestServer started on Port 20871 GrpcPort 23528 TClient is connected to server localhost:20871 PQClient connected to localhost:23528 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:23:27.545679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:27.545707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:27.547075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:23:27.560533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:27.563619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:27.570793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:27.633793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:27.799978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888647431692162:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:27.800012Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:27.800117Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888647431692189:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:27.800962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-05-29T15:23:27.803197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-05-29T15:23:27.803255Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888647431692191:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-05-29T15:23:27.841585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:27.850346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:23:27.903957Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888647431692383:2507] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:27.951559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:23:27.957342Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888647431692393:2359], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:23:27.957883Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjVhNTRiNzAtYTIxODg3MmMtYzY3ZjJhYjUtNjQ4MTAwOWM=, ActorId: [1:7509888647431692159:2334], ActorState: ExecuteState, TraceId: 01jwea885pcd6z0d06xa6tmcj9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:23:27.958300Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:23:28.018101Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888647431692506:2379], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:28.018885Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzU0ODUyYmQtMjkzODc5NTYtZDkyYTE3ZmYtZjA5YTE4Mjc=, ActorId: [1:7509888647431692503:2377], ActorState: ExecuteState, TraceId: 01jwea88be73x40zgczy1dhqe1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C00AEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DB87C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x26260E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x26260598) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x2625F7E2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x2625D837) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x26255FB8) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262550B6) std::__y1::__unique_if::__unique_single std::__y1::make_unique[abi:fe200000](char const*&, NKikimr::Tests::TServerSettings&)+ ... /pdisk_1.dat 2025-05-29T15:23:47.476054Z node 21 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:23:47.484003Z node 21 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5049, node 21 2025-05-29T15:23:47.494820Z node 21 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/00230d/r3tmp/yandexZ8KLAP.tmp 2025-05-29T15:23:47.494841Z node 21 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/00230d/r3tmp/yandexZ8KLAP.tmp 2025-05-29T15:23:47.494893Z node 21 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/00230d/r3tmp/yandexZ8KLAP.tmp 2025-05-29T15:23:47.494939Z node 21 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:23:47.498865Z INFO: TTestServer started on Port 10159 GrpcPort 5049 TClient is connected to server localhost:10159 PQClient connected to localhost:5049 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:23:47.574592Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:47.574616Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:47.574967Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:47.575567Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:47.583334Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:23:47.808008Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509888731696590103:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.808029Z node 21 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.808060Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509888731696590115:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.808678Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:23:47.808856Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509888731696590147:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.808871Z node 21 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.810635Z node 21 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [21:7509888731696590117:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:23:47.812563Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:47.863937Z node 21 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [21:7509888731696590240:2460] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:47.867075Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:23:47.867817Z node 21 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [21:7509888731696590258:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:23:47.867900Z node 21 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=21&id=N2Y5YjAxYzctYjFkNDljYzktOGMyMmE1NzYtNzFkNGY0YmM=, ActorId: [21:7509888731696590100:2334], ActorState: ExecuteState, TraceId: 01jwea8vpz42rse3yfd7dygee0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:23:47.868030Z node 21 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:23:47.927174Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:23:47.947124Z node 21 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [21:7509888731696590436:2380], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:47.947229Z node 21 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=21&id=NjU5MTE3ZWEtM2YwYmQ3NjAtM2M2OWQ5MDMtN2I0M2JmZDk=, ActorId: [21:7509888731696590433:2378], ActorState: ExecuteState, TraceId: 01jwea8vv047x2f8xgxqfx5rnk, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C00AEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DB87C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x26260E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x26260598) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x2625F7E2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x2625D837) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x26255FB8) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262550B6) std::__y1::__unique_if::__unique_single std::__y1::make_unique[abi:fe200000](char const*&, NKikimr::Tests::TServerSettings&)+70 (0x13A73A36) NYdb::NTopic::NTests::NTestSuiteTxUsage::TFixture::SetUp(NUnitTest::TTestContext&)+392 (0x13A73508) NYdb::NTopic::NTests::NTestSuiteTxUsage::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13AE86D1) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13DBA67E) NYdb::NTopic::NTests::NTestSuiteTxUsage::TCurrentTest::Execute()+422 (0x13AE80A6) NUnitTest::TTestFactory::Execute()+803 (0x13DBADF3) NUnitTest::RunMain(int, char**)+3021 (0x13DCC99D) ??+0 (0x7FEF98B6FD90) __libc_start_main+128 (0x7FEF98B6FE40) _start+41 (0x12ADC029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Query [FAIL] Test command err: 2025-05-29T15:23:26.711069Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888642419573047:2265];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:26.711098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:23:26.745839Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0022f8/r3tmp/tmpKrUgrW/pdisk_1.dat 2025-05-29T15:23:26.783607Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:26.783962Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888642419572821:2079] 1748532206708936 != 1748532206708939 TServer::EnableGrpc on GrpcPort 64344, node 1 2025-05-29T15:23:26.803820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/0022f8/r3tmp/yandex5y3TEj.tmp 2025-05-29T15:23:26.803835Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/0022f8/r3tmp/yandex5y3TEj.tmp 2025-05-29T15:23:26.803896Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from 
file: /home/runner/.ya/build/build_root/ciyv/0022f8/r3tmp/yandex5y3TEj.tmp 2025-05-29T15:23:26.803936Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:23:26.809285Z INFO: TTestServer started on Port 31397 GrpcPort 64344 TClient is connected to server localhost:31397 PQClient connected to localhost:64344 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:23:26.850673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:26.855173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:26.855207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:26.857382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:23:26.859174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:26.869382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-05-29T15:23:26.870908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:23:27.130303Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888646714540910:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:27.130343Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888646714540937:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:27.130356Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:27.131222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-05-29T15:23:27.131541Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888646714540969:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:27.131561Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:27.133406Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888646714540939:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-05-29T15:23:27.171911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:27.180110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:23:27.228896Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888646714541124:2505] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:27.242289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:23:27.245036Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888646714541133:2359], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:23:27.245628Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzBhOTBlZjktMjQxYzcyNi02MzE5YjNmMy03YTM0ZDgwYg==, ActorId: [1:7509888646714540907:2334], ActorState: ExecuteState, TraceId: 01jwea87greqhrta8hgnkhc4bk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:23:27.246084Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:23:27.273171Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888646714541257:2380], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:27.273286Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=N2Q4MTVlMTctMzlmMTkzZWUtYzc1M2RhODQtZTgwMDQzYzM=, ActorId: [1:7509888646714541254:2378], ActorState: ExecuteState, TraceId: 01jwea87mm1n1d85m4kmt077a6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C00AEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DB87C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x26260E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x26260598) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x2625F7E2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x2625D837) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x26255FB8) NYdb::NTo ... itTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13DBA67E) NYdb::NTopic::NTests::NTestSuiteTxUsage::TCurrentTest::Execute()+422 (0x13AE80A6) NUnitTest::TTestFactory::Execute()+803 (0x13DBADF3) NUnitTest::RunMain(int, char**)+3021 (0x13DCC99D) ??+0 (0x7F20D07C8D90) __libc_start_main+128 (0x7F20D07C8E40) _start+41 (0x12ADC029) 2025-05-29T15:23:46.769726Z node 21 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[21:7509888728108907888:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:46.769742Z node 21 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0022f8/r3tmp/tmpHYUgPu/pdisk_1.dat 2025-05-29T15:23:46.774753Z node 21 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:23:46.779672Z node 21 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:46.779906Z node 21 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [21:7509888728108907870:2079] 1748532226769635 != 1748532226769638 TServer::EnableGrpc on GrpcPort 7988, node 21 2025-05-29T15:23:46.792560Z node 21 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/0022f8/r3tmp/yandexQh4Y98.tmp 2025-05-29T15:23:46.792576Z node 21 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/0022f8/r3tmp/yandexQh4Y98.tmp 2025-05-29T15:23:46.792636Z node 21 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/0022f8/r3tmp/yandexQh4Y98.tmp 2025-05-29T15:23:46.792698Z node 21 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:23:46.796095Z INFO: TTestServer started on Port 10287 GrpcPort 7988 TClient is connected to server localhost:10287 PQClient connected to localhost:7988 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:23:46.873129Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:46.873153Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:46.873415Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:46.874216Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:46.882967Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:23:47.069395Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509888732403875973:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.069413Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509888732403875953:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.069434Z node 21 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.070040Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:23:47.071729Z node 21 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [21:7509888732403875986:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:23:47.074466Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:47.087564Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:23:47.115022Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:23:47.141927Z node 21 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [21:7509888732403876281:2374], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:47.142708Z node 21 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=21&id=YzcyYTA3ZTgtMjAzOGVhNDItNjNlNzFhNzctYmE4Nzc0ZjI=, ActorId: [21:7509888732403876278:2372], ActorState: ExecuteState, TraceId: 01jwea8v1r39k4rmkw2x56fh5d, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:23:47.143556Z node 21 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [21:7509888732403876292:2579] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:47.182945Z node 21 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 24 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C00AEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DB87C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x26260E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x26260598) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x2625F7E2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x2625D837) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x26255FB8) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262550B6) std::__y1::__unique_if::__unique_single std::__y1::make_unique[abi:fe200000](char const*&, NKikimr::Tests::TServerSettings&)+70 (0x13A73A36) NYdb::NTopic::NTests::NTestSuiteTxUsage::TFixture::SetUp(NUnitTest::TTestContext&)+392 (0x13A73508) NYdb::NTopic::NTests::NTestSuiteTxUsage::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13AE86D1) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13DBA67E) NYdb::NTopic::NTests::NTestSuiteTxUsage::TCurrentTest::Execute()+422 (0x13AE80A6) NUnitTest::TTestFactory::Execute()+803 (0x13DBADF3) NUnitTest::RunMain(int, char**)+3021 (0x13DCC99D) ??+0 (0x7F20D07C8D90) __libc_start_main+128 (0x7F20D07C8E40) _start+41 (0x12ADC029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other [FAIL] Test command err: 2025-05-29T15:23:27.474662Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888645241698271:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:27.474712Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002382/r3tmp/tmpKdzLLS/pdisk_1.dat 2025-05-29T15:23:27.544694Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:23:27.565659Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16661, node 1 2025-05-29T15:23:27.591054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/002382/r3tmp/yandexuYjf2h.tmp 2025-05-29T15:23:27.591068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/002382/r3tmp/yandexuYjf2h.tmp 2025-05-29T15:23:27.591140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/002382/r3tmp/yandexuYjf2h.tmp 2025-05-29T15:23:27.591177Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2025-05-29T15:23:27.596960Z INFO: TTestServer started on Port 14895 GrpcPort 16661 TClient is connected to server localhost:14895 PQClient connected to localhost:16661 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:23:27.636361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:27.636400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:27.637455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:23:27.645632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:27.653196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:27.664954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:23:27.669139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:23:27.710178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:27.923267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888645241698876:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:27.923308Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:27.923696Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888645241698911:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:27.924616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:23:27.927707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-05-29T15:23:27.927807Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888645241698913:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:23:27.991167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:27.994327Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888645241699015:2446] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:28.010419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:23:28.011589Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888645241699046:2352], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:23:28.012056Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWMwNjMxZGEtMjI3NGM2MDUtZDI3YTE1M2UtZmFkNTIyYzM=, ActorId: [1:7509888645241698872:2333], ActorState: ExecuteState, TraceId: 01jwea889g1g4fwpnhgs4bq4nf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:23:28.012572Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:23:28.047485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:23:28.110090Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888649536666522:2379], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:28.111101Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzRjZDNhZmItZGQwMGFlYzItZjlmY2Q5OC01MGUxM2NhOQ==, ActorId: [1:7509888649536666519:2377], ActorState: ExecuteState, TraceId: 01jwea88e07g1dzc6dw1ye1d7s, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C00AEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DB87C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x26260E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x26260598) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x2625F7E2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x2625D837) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x26255FB8) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262550B6) std::__y1::__unique_if::__unique_single std::__y1::make_unique[abi:fe200000](char const*&, NKikimr::Tests::TServerSettings&)+70 (0x13A73A36 ... 58146131:7762515]; 2025-05-29T15:23:47.470179Z node 21 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002382/r3tmp/tmpp2o8lQ/pdisk_1.dat 2025-05-29T15:23:47.475273Z node 21 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:23:47.480340Z node 21 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:47.480577Z node 21 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [21:7509888731543697744:2079] 1748532227470065 != 1748532227470068 TServer::EnableGrpc on GrpcPort 21575, node 21 2025-05-29T15:23:47.490684Z node 21 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/002382/r3tmp/yandex6Mm5La.tmp 2025-05-29T15:23:47.490704Z node 21 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/002382/r3tmp/yandex6Mm5La.tmp 2025-05-29T15:23:47.490770Z node 21 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/002382/r3tmp/yandex6Mm5La.tmp 2025-05-29T15:23:47.490823Z node 21 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:23:47.494796Z INFO: TTestServer started on Port 27653 GrpcPort 21575 TClient is connected to server localhost:27653 PQClient connected to localhost:21575 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:23:47.573158Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:47.573193Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:47.573593Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:47.574121Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:47.582885Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:23:47.747537Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509888731543698552:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.747565Z node 21 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.747640Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509888731543698564:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.748327Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:23:47.748405Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509888731543698595:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.748419Z node 21 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.749806Z node 21 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [21:7509888731543698566:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:23:47.752209Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:47.765270Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:23:47.778445Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:23:47.800168Z node 21 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [21:7509888731543698864:2375], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:47.800317Z node 21 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=21&id=MTgzOTY2OTMtNGE5NWRlZDgtY2ZmODQ1M2UtMjA0MDQ3MzE=, ActorId: [21:7509888731543698861:2373], ActorState: ExecuteState, TraceId: 01jwea8vpc0vh2k0eh8gynxa43, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:23:47.809099Z node 21 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [21:7509888731543698876:2580] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:47.838944Z node 21 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 18 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C00AEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DB87C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x26260E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x26260598) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x2625F7E2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x2625D837) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x26255FB8) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262550B6) std::__y1::__unique_if::__unique_single std::__y1::make_unique[abi:fe200000](char const*&, NKikimr::Tests::TServerSettings&)+70 (0x13A73A36) NYdb::NTopic::NTests::NTestSuiteTxUsage::TFixture::SetUp(NUnitTest::TTestContext&)+392 (0x13A73508) NYdb::NTopic::NTests::NTestSuiteTxUsage::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13AE86D1) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13DBA67E) NYdb::NTopic::NTests::NTestSuiteTxUsage::TCurrentTest::Execute()+422 (0x13AE80A6) NUnitTest::TTestFactory::Execute()+803 (0x13DBADF3) NUnitTest::RunMain(int, char**)+3021 (0x13DCC99D) ??+0 (0x7F800A7F2D90) __libc_start_main+128 (0x7F800A7F2E40) _start+41 (0x12ADC029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_29_Query [FAIL] Test command err: 2025-05-29T15:23:27.401712Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888646673933699:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:23:27.401736Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00238c/r3tmp/tmpYTPLBd/pdisk_1.dat 2025-05-29T15:23:27.437425Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:23:27.469258Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888646673933681:2079] 1748532207401583 != 1748532207401586 2025-05-29T15:23:27.470263Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6135, node 1 2025-05-29T15:23:27.487989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/00238c/r3tmp/yandexDnd9D6.tmp 2025-05-29T15:23:27.488004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/00238c/r3tmp/yandexDnd9D6.tmp 2025-05-29T15:23:27.488065Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: 
/home/runner/.ya/build/build_root/ciyv/00238c/r3tmp/yandexDnd9D6.tmp 2025-05-29T15:23:27.488111Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:23:27.500830Z INFO: TTestServer started on Port 29106 GrpcPort 6135 TClient is connected to server localhost:29106 2025-05-29T15:23:27.541903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:27.541938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:27.542845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:6135 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:23:27.566533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:27.570101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:27.577316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:23:27.844586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888646673934467:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:27.844621Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:27.845211Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888646673934502:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:27.851053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888646673934507:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:27.851079Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:27.851696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-05-29T15:23:27.854114Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888646673934505:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-05-29T15:23:27.911591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:27.920533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:23:27.948129Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888646673934726:2528] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:27.949800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:23:27.987488Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888646673934821:2381], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:27.988151Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTE0MWZhOWQtMmIxZjU3ODItOGQ5YzhlMTgtY2U3ODUwOGM=, ActorId: [1:7509888646673934818:2379], ActorState: ExecuteState, TraceId: 01jwea88b0a9s1r8hvanqw9xmm, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:23:28.019357Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 42 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C00AEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DB87C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x26260E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x26260598) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x2625F7E2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x2625D837) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x26255FB8) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262550B6) std::__y1::__unique_if::__unique_single std::__y1::make_unique[abi:fe200000](char const*&, NKikimr::Tests::TServerSettings&)+70 (0x13A73A36) NYdb::NTopic::NTests::NTestSuiteTxUsage::TFixture::SetUp(NUnitTest::TTestContext&)+392 (0x13A73508) NYdb::NTopic::NTests::NTestSuiteTxUsage::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13AE86D1) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString Disconnected 2025-05-29T15:23:47.096862Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:47.097162Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:23:47.097837Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:23:47.107446Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:23:47.368873Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509888733864479576:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.368893Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509888733864479550:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.368902Z node 21 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.369452Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509888733864479607:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.369465Z node 21 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:47.369471Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:23:47.370831Z node 21 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [21:7509888733864479579:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:23:47.373293Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:23:47.379853Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:23:47.392556Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:23:47.417811Z node 21 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [21:7509888733864479878:2375], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:47.417910Z node 21 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=21&id=YTJlMDc0ZjQtYTU2NWNmODEtMTM0ZTEwN2EtNDNiMTc4Mg==, ActorId: [21:7509888733864479875:2373], ActorState: ExecuteState, TraceId: 01jwea8vab3629ch4seg0fqcna, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:23:47.468748Z node 21 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [21:7509888733864479890:2580] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:47.496009Z node 21 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 16 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C00AEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DB87C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x26260E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x26260598) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x2625F7E2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x2625D837) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x26255FB8) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262550B6) std::__y1::__unique_if::__unique_single std::__y1::make_unique[abi:fe200000](char const*&, NKikimr::Tests::TServerSettings&)+70 (0x13A73A36) NYdb::NTopic::NTests::NTestSuiteTxUsage::TFixture::SetUp(NUnitTest::TTestContext&)+392 (0x13A73508) NYdb::NTopic::NTests::NTestSuiteTxUsage::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13AE86D1) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13DBA67E) NYdb::NTopic::NTests::NTestSuiteTxUsage::TCurrentTest::Execute()+422 (0x13AE80A6) NUnitTest::TTestFactory::Execute()+803 (0x13DBADF3) NUnitTest::RunMain(int, char**)+3021 (0x13DCC99D) ??+0 (0x7F9A4BA0CD90) __libc_start_main+128 (0x7F9A4BA0CE40) _start+41 (0x12ADC029) |64.3%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 [FAIL] >> DataShardReadIterator::ShouldFailUknownColumns >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks [FAIL] >> DataShardReadIterator::TryWriteManyRows+Commit |64.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |64.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Secret::Validation [FAIL] >> DataShardReadIteratorConsistency::LeaseConfirmationNotOutOfOrder [FAIL] >> DataShardReadIteratorConsistency::BrokenWriteLockBeforeIteration >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] |64.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |64.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |64.3%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |64.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |64.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |64.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] >> KqpPg::NoTableQuery+useSink >> DataShardReadIterator::ShouldReadRangeRightInclusive [FAIL] >> DataShardReadIterator::ShouldReadRangeOneByOne >> KqpPg::TypeCoercionInsert-useSink >> KqpPg::JoinWithQueryService+StreamLookup >> KqpPg::EmptyQuery+useSink >> KqpPg::CreateTableSerialColumns+useSink |64.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |64.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |64.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows >> KqpPg::ReadPgArray [GOOD] >> KqpPg::TableArrayInsert+useSink >> TRtmrTest::CreateWithoutTimeCastBuckets >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 [FAIL] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite [FAIL] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 >> KqpPg::InsertFromSelect_Simple+useSink >> KqpPg::TypeCoercionBulkUpsert |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |64.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |64.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |64.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> DataShardReadIterator::ShouldReadKeyPrefix1 [FAIL] >> DataShardReadIterator::ShouldReadKeyPrefix2 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite [FAIL] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] Test command err: 2025-05-29T15:24:07.440376Z node 1 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-05-29T15:24:07.441119Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-05-29T15:24:07.441163Z 
node 1 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:07.441178Z node 1 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 1 2025-05-29T15:24:07.441210Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-05-29T15:24:07.441221Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-05-29T15:24:07.441223Z node 1 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:07.441237Z node 3 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-05-29T15:24:07.441249Z node 3 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 3, client id = [3:45:2057], server id = [3:45:2057], tablet id = 3, status = OK 2025-05-29T15:24:07.441253Z node 3 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [3:45:2057], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:07.441262Z node 4 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-05-29T15:24:07.441267Z node 3 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 3 2025-05-29T15:24:07.441271Z node 3 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 1 2025-05-29T15:24:07.441282Z node 4 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2025-05-29T15:24:07.441285Z node 4 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2025-05-29T15:24:07.441289Z node 3 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 3, client id = [3:45:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-05-29T15:24:07.441291Z node 3 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:07.441295Z node 4 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 4 2025-05-29T15:24:07.441298Z node 4 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-05-29T15:24:07.441305Z node 1 :STATISTICS DEBUG: service_impl.cpp:448: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-05-29T15:24:07.441313Z node 4 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-05-29T15:24:07.441315Z node 4 :STATISTICS DEBUG: service_impl.cpp:1110: Skip EvClientConnected 2025-05-29T15:24:07.441320Z node 2 :STATISTICS DEBUG: service_impl.cpp:448: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-05-29T15:24:07.451484Z node 4 :STATISTICS DEBUG: service_impl.cpp:252: Event round 1 is different from the current 0 2025-05-29T15:24:07.451509Z node 4 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-05-29T15:24:07.451526Z node 3 :STATISTICS DEBUG: service_impl.cpp:252: Event round 1 is different from the current 0 2025-05-29T15:24:07.451531Z node 3 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-05-29T15:24:07.461725Z node 1 :STATISTICS INFO: service_impl.cpp:416: Node 2 is unavailable 
2025-05-29T15:24:07.461751Z node 1 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 1 2025-05-29T15:24:07.461774Z node 2 :STATISTICS DEBUG: service_impl.cpp:401: Skip TEvKeepAliveTimeout 2025-05-29T15:24:07.461790Z node 1 :STATISTICS DEBUG: service_impl.cpp:252: Event round 1 is different from the current 0 2025-05-29T15:24:07.461793Z node 1 :STATISTICS DEBUG: service_impl.cpp:393: Skip TEvKeepAliveTimeout 2025-05-29T15:24:07.461818Z node 1 :STATISTICS DEBUG: service_impl.cpp:252: Event round 1 is different from the current 0 2025-05-29T15:24:07.461822Z node 1 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-05-29T15:24:07.461831Z node 1 :STATISTICS DEBUG: service_impl.cpp:252: Event round 1 is different from the current 0 2025-05-29T15:24:07.461835Z node 1 :STATISTICS DEBUG: service_impl.cpp:428: Skip TEvAggregateKeepAlive |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest |64.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |64.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |64.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest |64.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |64.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |64.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |64.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |64.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |64.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest |64.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut >> DataShardReadIterator::ShouldReadNonExistingKey [FAIL] >> DataShardReadIterator::ShouldReadNotExistingRange >> DataShardReadIterator::ShouldFailUknownColumns [FAIL] >> DataShardReadIterator::ShouldFailWrongSchema |64.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |64.5%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut >> KqpPg::NoTableQuery+useSink [GOOD] >> KqpPg::NoTableQuery-useSink |64.5%| [TA] $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:07.739191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:07.739220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:07.739226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:07.739233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:07.739238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:07.739243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:07.739252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:07.739267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:07.739381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:07.739453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:07.752241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:07.752264Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:07.754391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:07.754489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:07.754521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:07.755935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:07.756072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:07.756177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2025-05-29T15:24:07.756245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:07.756656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:07.756696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:07.756996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:07.757010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:07.757037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:07.757046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:07.757052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:07.757087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:07.758289Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:07.779409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:07.779495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:07.779569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:07.779626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:07.779635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:07.780545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:07.780583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-05-29T15:24:07.780662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:07.780675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:07.780682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:07.780688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:07.781214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:07.781226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:07.781233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:07.781606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:07.781619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:07.781626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:07.781634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:07.782364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:07.782809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:07.782860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:07.783080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:07.783107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:07.783115Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:07.783180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:07.783188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:07.783224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:07.783238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:07.783704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:07.783716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:07.783770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... :24:07.791277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_rtmr.cpp:162: TCreateRTMR TPropose, operationId: 100:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:07.791288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-05-29T15:24:07.791322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:07.791680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-05-29T15:24:07.791717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-05-29T15:24:07.791797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:07.791815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:07.791824Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_create_rtmr.cpp:130: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2025-05-29T15:24:07.791844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 100:0 128 -> 240 2025-05-29T15:24:07.791872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:07.791882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 100 2025-05-29T15:24:07.792295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:07.792304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:07.792344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:24:07.792363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:07.792369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-05-29T15:24:07.792375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-05-29T15:24:07.792440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-05-29T15:24:07.792448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 100:0 ProgressState 2025-05-29T15:24:07.792461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#100:0 progress is 1/1 2025-05-29T15:24:07.792465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:24:07.792471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#100:0 progress is 1/1 2025-05-29T15:24:07.792474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:24:07.792479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-05-29T15:24:07.792488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-05-29T15:24:07.792493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 100:0 2025-05-29T15:24:07.792497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 100:0 
2025-05-29T15:24:07.792509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:24:07.792516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-05-29T15:24:07.792521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-05-29T15:24:07.792524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-05-29T15:24:07.792649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:24:07.792663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:24:07.792668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-05-29T15:24:07.792673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:24:07.792678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:24:07.792822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:24:07.792836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-05-29T15:24:07.792841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-05-29T15:24:07.792846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-05-29T15:24:07.792850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:07.792861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-05-29T15:24:07.793367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-05-29T15:24:07.793621Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-05-29T15:24:07.793676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-05-29T15:24:07.793683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-05-29T15:24:07.793756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-05-29T15:24:07.793773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-05-29T15:24:07.793778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:314:2304] TestWaitNotification: OK eventTxId 100 2025-05-29T15:24:07.793862Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:07.793896Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/rtmr1" took 41us result status StatusSuccess 2025-05-29T15:24:07.793974Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "rtmr1" PathId: 2 PartitionsCount: 0 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardReadIterator::TryWriteManyRows+Commit [FAIL] >> DataShardReadIterator::TryWriteManyRows-Commit >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnOnSingleTopic >> DataShardReadIteratorConsistency::BrokenWriteLockBeforeIteration [FAIL] >> DataShardReadIteratorConsistency::BrokenWriteLockDuringIteration >> KqpPg::CreateTableBulkUpsertAndRead |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/kqp/ut/data/unittest >> AsyncIndexChangeCollector::InsertSingleRow >> KqpPg::NoTableQuery-useSink [GOOD] >> KqpPg::PgCreateTable >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite [FAIL] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite >> CdcStreamChangeCollector::InsertSingleRow >> DataShardReadIterator::ShouldReadRangeOneByOne [FAIL] >> DataShardReadIterator::ShouldReadRangePrefix1 >> KqpUserConstraint::KqpReadNull-UploadNull |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest >> AsyncIndexChangeCollector::DeleteNothing |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest |64.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPg::InsertNoTargetColumns_Simple+useSink >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 [FAIL] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 >> CdcStreamChangeCollector::UpsertToSameKey |64.6%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |64.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpUserConstraint::KqpReadNull+UploadNull >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite [FAIL] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite >> DataShardReadIterator::ShouldReadKeyPrefix2 [FAIL] >> DataShardReadIterator::ShouldReadKeyPrefix3 >> CdcStreamChangeCollector::UpsertManyRows >> DataShardReadIterator::ShouldReadNotExistingRange [FAIL] >> DataShardReadIterator::ShouldReadRangeChunk1_100 >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTableWithChangefeed [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] Test command err: 2025-05-29T15:21:37.589465Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:21:37.593455Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:21:37.593546Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:21:37.594199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:37.594241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.594273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.594287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.594300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:37.594317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:37.594328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:37.594340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:37.594352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:37.594365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:37.594381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.594396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:37.602821Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:21:37.602912Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:21:37.602925Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:21:37.602960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.603000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:37.603014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:37.603020Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:21:37.603031Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:21:37.603040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:37.603048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:37.603053Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:21:37.603073Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.603081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:37.603089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:37.603094Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:21:37.603105Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:21:37.603112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:37.603120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:37.603125Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:21:37.603138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:37.603157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:37.603162Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:21:37.603172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-05-29T15:21:37.603182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:37.603187Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:21:37.603215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:37.603224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.603229Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:21:37.603251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.603260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.603264Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:21:37.603278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.603286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.603290Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.603299Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:37.603308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:37.603316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:37.603321Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:21:37.603401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=10; 2025-05-29T15:21:37.603412Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=7; 2025-05-29T15:21:37.603420Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute ... et_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=3415; 2025-05-29T15:24:06.787063Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-05-29T15:24:06.787293Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=226; 2025-05-29T15:24:06.787297Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=3701; 2025-05-29T15:24:06.787314Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=13; 2025-05-29T15:24:06.787417Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=13; 2025-05-29T15:24:06.787426Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=109; 2025-05-29T15:24:06.787440Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=10; 2025-05-29T15:24:06.787451Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=8; 2025-05-29T15:24:06.787469Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=14; 2025-05-29T15:24:06.787481Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=7; 2025-05-29T15:24:06.789870Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2381; 2025-05-29T15:24:06.792998Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=3104; 2025-05-29T15:24:06.793027Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=7; 2025-05-29T15:24:06.793033Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=2; 2025-05-29T15:24:06.793038Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2025-05-29T15:24:06.793045Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-05-29T15:24:06.793050Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-05-29T15:24:06.793062Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=8; 2025-05-29T15:24:06.793067Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2025-05-29T15:24:06.793080Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=9; 2025-05-29T15:24:06.793085Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2025-05-29T15:24:06.793094Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=6; 2025-05-29T15:24:06.793106Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=8; 2025-05-29T15:24:06.793118Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=8; 2025-05-29T15:24:06.793123Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=10399; 2025-05-29T15:24:06.793174Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=104296632;raw_bytes=191125560;count=2;records=1845000} inactive {blob_bytes=1252684680;raw_bytes=2245386718;count=44;records=22220000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-05-29T15:24:06.793216Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-05-29T15:24:06.793228Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-05-29T15:24:06.793244Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-05-29T15:24:06.793251Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];process=SwitchToWork;fline=column_engine_logs.cpp:493;event=OnTieringModified;new_count_tierings=0; 2025-05-29T15:24:06.793283Z node 1 :TX_COLUMNSHARD 
DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-05-29T15:24:06.793294Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:24:06.793310Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=22; 2025-05-29T15:24:06.793322Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:288;event=StartCleanupStop;snapshot=plan_step=1748531812301;tx_id=18446744073709551615;;current_snapshot_ts=1748532098706; 2025-05-29T15:24:06.793330Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:24:06.793340Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:24:06.793344Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:24:06.793362Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-05-29T15:24:06.795669Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:24:06.795705Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: self_id=[1:12702:14624];tablet_id=9437184;parent=[1:12665:14595];fline=manager.cpp:85;event=ask_data;request=request_id=112;1={portions_count=46};; 2025-05-29T15:24:06.797743Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-05-29T15:24:06.798626Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:243;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-05-29T15:24:06.798636Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 
2025-05-29T15:24:06.798640Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-05-29T15:24:06.798645Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-05-29T15:24:06.798662Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:24:06.798682Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=22; 2025-05-29T15:24:06.798694Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:288;event=StartCleanupStop;snapshot=plan_step=1748531812301;tx_id=18446744073709551615;;current_snapshot_ts=1748532098706; 2025-05-29T15:24:06.798702Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:24:06.798713Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:24:06.798716Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:24:06.798756Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:164;event=skip_actualization;waiting=0.999000s; 2025-05-29T15:24:06.798766Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; >> DataShardReadIterator::ShouldFailWrongSchema [FAIL] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange >> DataShardReadIterator::TryWriteManyRows-Commit [FAIL] >> DataShardReadIteratorBatchMode::RangeFull >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn >> DataShardReadIteratorConsistency::BrokenWriteLockDuringIteration [FAIL] >> DataShardReadIteratorConsistency::WriteLockThenUncommittedReadUpgradeRetryAndRestart >> TGroupMapperTest::MonteCarlo [GOOD] >> DataShardReadIteratorSysTables::ShouldRead >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors >> AsyncIndexChangeCollector::UpsertToSameKey >> AsyncIndexChangeCollector::UpsertSingleRow >> KqpPg::InsertFromSelect_Simple+useSink [FAIL] >> KqpPg::InsertFromSelect_Simple-useSink >> KqpPg::JoinWithQueryService+StreamLookup [FAIL] >> KqpPg::Insert_Serial+useSink >> 
KqpPg::CreateTableSerialColumns+useSink [FAIL] >> KqpPg::CreateTableSerialColumns-useSink >> DataShardReadIterator::ShouldReadRangePrefix1 [FAIL] >> DataShardReadIterator::ShouldReadRangePrefix2 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite [FAIL] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 [FAIL] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] >> AsyncIndexChangeCollector::DeleteNothing [FAIL] >> AsyncIndexChangeCollector::DeleteSingleRow >> KqpPg::EmptyQuery+useSink [FAIL] >> KqpPg::EmptyQuery-useSink >> KqpPg::TableArrayInsert+useSink [FAIL] >> KqpPg::TableArrayInsert-useSink >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds [FAIL] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 |64.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MonteCarlo [GOOD] >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] >> AsyncIndexChangeCollector::InsertSingleRow [FAIL] >> AsyncIndexChangeCollector::InsertManyRows >> CdcStreamChangeCollector::InsertSingleRow [FAIL] >> CdcStreamChangeCollector::InsertSingleUuidRow >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 >> KqpPg::TypeCoercionInsert-useSink [FAIL] >> KqpPg::V1CreateTable >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite [FAIL] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite >> DataShardReadIterator::ShouldReadKeyPrefix3 [FAIL] >> DataShardReadIterator::ShouldReadHeadFromFollower >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnViewsAndTables [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange [FAIL] >> CdcStreamChangeCollector::UpsertToSameKey [FAIL] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted >> DataShardReadIteratorBatchMode::RangeFull [FAIL] >> DataShardReadIteratorBatchMode::RangeFromInclusive >> KqpPg::TypeCoercionBulkUpsert [GOOD] >> KqpPg::TypeCoercionInsert+useSink >> DataShardReadIterator::ShouldReadRangeChunk1_100 [FAIL] >> DataShardReadIterator::ShouldReadRangeChunk1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] Test command err: 2025-05-29T15:24:09.244384Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:09.244416Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:09.244426Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002457/r3tmp/tmpUmOGQG/pdisk_1.dat 2025-05-29T15:24:09.345832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.359374Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:09.362361Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532248889923 != 1748532248889927 2025-05-29T15:24:09.404147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:09.404175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:09.414758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:09.488474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.741941Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:854:2701], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:09.741966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:865:2706], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:09.741976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:09.742727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:09.878855Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:868:2709], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:09.913971Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:938:2748] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:09.961687Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976715660. Ctx: { TraceId: 01jwea9h4dcm6k0yb2ta6875ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2U0NmI3YWYtOGQ5ZGJmZTUtZTk3NzZkNzUtMzEzMWJkOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] Test command err: 2025-05-29T15:24:09.403726Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:09.403758Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:09.403768Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00244a/r3tmp/tmplLRjro/pdisk_1.dat 2025-05-29T15:24:09.509845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.523332Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:09.526444Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532248994581 != 1748532248994585 2025-05-29T15:24:09.568205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:09.568246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:09.578836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:09.651737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.903823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:854:2701], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:09.903855Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:865:2706], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:09.903869Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:09.904971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:10.042100Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:868:2709], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:10.077052Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:938:2748] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:10.125879Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976715660. Ctx: { TraceId: 01jwea9h9f9gcb4qa43dbxkzqw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWVmZDAwYzUtZWFlYTBjZDMtOTI5ODc5ZmUtMTg2M2VkNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:24:10.127326Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1546: SelfId: [1:969:2769], TxId: 281474976715660, task: 1. Ctx: { TraceId : 01jwea9h9f9gcb4qa43dbxkzqw. SessionId : ydb://session/3?node_id=1&id=MWVmZDAwYzUtZWFlYTBjZDMtOTI5ODc5ZmUtMTg2M2VkNDg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Source[0] fatal error: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 } 2025-05-29T15:24:10.127787Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:678: SelfId: [1:969:2769], TxId: 281474976715660, task: 1. Ctx: { TraceId : 01jwea9h9f9gcb4qa43dbxkzqw. SessionId : ydb://session/3?node_id=1&id=MWVmZDAwYzUtZWFlYTBjZDMtOTI5ODc5ZmUtMTg2M2VkNDg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. InternalError: INTERNAL_ERROR KIKIMR_CONSTRAINT_VIOLATION: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 }. 2025-05-29T15:24:10.128413Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:678: SelfId: [1:970:2770], TxId: 281474976715660, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWVmZDAwYzUtZWFlYTBjZDMtOTI5ODc5ZmUtMTg2M2VkNDg=. TraceId : 01jwea9h9f9gcb4qa43dbxkzqw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-05-29T15:24:10.129303Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2583: SessionId: ydb://session/3?node_id=1&id=MWVmZDAwYzUtZWFlYTBjZDMtOTI5ODc5ZmUtMTg2M2VkNDg=, ActorId: [1:852:2699], ActorState: ExecuteState, TraceId: 01jwea9h9f9gcb4qa43dbxkzqw, Create QueryResponse for error on request, msg: 2025-05-29T15:24:10.129746Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976715661. Ctx: { TraceId: 01jwea9h9f9gcb4qa43dbxkzqw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWVmZDAwYzUtZWFlYTBjZDMtOTI5ODc5ZmUtMTg2M2VkNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpPg::Insert_Serial+useSink [FAIL] >> KqpPg::Insert_Serial-useSink >> KqpPg::CreateTableSerialColumns-useSink [FAIL] >> KqpPg::DropIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTableWithChangefeed [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:23:30.977712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:30.977740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:30.977746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:30.977752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:30.977766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:30.977770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:30.977780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:30.977796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:30.977916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:30.978000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:30.994451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:23:30.994478Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:30.994581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:23:30.998372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:30.998425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:30.998472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:31.001460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:31.001549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:31.001673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:31.001834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:31.002403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:31.002450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:31.002684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:31.002693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:31.002727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:31.002796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:31.002806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:31.002829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE 
Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:23:31.004195Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:23:31.032656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:31.032742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.032811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:31.032864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:31.032876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.035323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:31.035381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:31.035459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.035476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:31.035484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:31.035490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:31.041139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.041171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:31.041181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:31.041748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.041760Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.041766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:31.041775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:31.042629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:31.043272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:31.043314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:31.043524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:31.043553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:31.043563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:31.043639Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
OORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000009 2025-05-29T15:24:09.145458Z node 122 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:09.145473Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 523986012268 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:09.145478Z node 122 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:129: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000009, at schemeshard: 72057594046678944 2025-05-29T15:24:09.145498Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:180: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-29T15:24:09.145505Z node 122 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-29T15:24:09.145507Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:24:09.145511Z node 122 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-29T15:24:09.145522Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:24:09.145530Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:24:09.145537Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-29T15:24:09.145541Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-29T15:24:09.145545Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:24:09.145548Z node 122 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-29T15:24:09.145551Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710761:0 2025-05-29T15:24:09.145556Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-29T15:24:09.145562Z node 122 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-29T15:24:09.145565Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-05-29T15:24:09.145567Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 
281474976710761, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2025-05-29T15:24:09.146039Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:09.146082Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:24:09.146089Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-29T15:24:09.146098Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:09.146263Z node 122 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:24:09.146339Z node 122 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:09.146344Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:09.146373Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-05-29T15:24:09.146390Z node 122 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:09.146393Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [122:209:2210], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-29T15:24:09.146396Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [122:209:2210], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 6 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-29T15:24:09.146507Z node 122 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:09.146515Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:09.146518Z node 122 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-29T15:24:09.146521Z node 122 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-29T15:24:09.146527Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:24:09.146619Z node 122 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:09.146626Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:09.146628Z node 122 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-29T15:24:09.146631Z node 122 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-05-29T15:24:09.146633Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-29T15:24:09.146640Z node 122 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-29T15:24:09.146643Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [122:126:2151] 2025-05-29T15:24:09.146655Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:24:09.146658Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-29T15:24:09.146663Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:24:09.147059Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:09.147281Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:09.147296Z node 122 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-29T15:24:09.147304Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710761 2025-05-29T15:24:09.147309Z node 122 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute 2025-05-29T15:24:09.147313Z node 122 :EXPORT DEBUG: schemeshard_export__create.cpp:1232: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-29T15:24:09.147316Z node 122 :EXPORT DEBUG: schemeshard_export__create.cpp:1263: 
TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1004, itemIdx# 4294967295 2025-05-29T15:24:09.147352Z node 122 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:24:09.147597Z node 122 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-29T15:24:09.147639Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:24:09.147644Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:24:09.147687Z node 122 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:24:09.147698Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:24:09.147701Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [122:976:2879] TestWaitNotification: OK eventTxId 1004 >> KqpPg::InsertFromSelect_Simple-useSink [FAIL] >> KqpPg::InsertFromSelect_NoReorder-useSink >> DataShardReadIteratorConsistency::WriteLockThenUncommittedReadUpgradeRetryAndRestart [FAIL] >> DataShardReadIteratorConsistency::WriteLockThenUncommittedReadUpgradeRestartWithStateMigrationRetryAndRestartWithoutStateMigration >> KqpPg::EmptyQuery-useSink [FAIL] >> KqpPg::DuplicatedColumns+useSink >> KqpPg::TableArrayInsert-useSink [FAIL] >> KqpPg::Returning+useSink >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldErase >> CdcStreamChangeCollector::UpsertManyRows [FAIL] >> CdcStreamChangeCollector::UpsertIntoTwoStreams >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn [FAIL] >> AsyncIndexChangeCollector::CoveredIndexUpsert >> AsyncIndexChangeCollector::UpsertToSameKey [FAIL] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue >> KqpPg::V1CreateTable [FAIL] >> KqpPg::ValuesInsert+useSink >> DataShardReadIteratorSysTables::ShouldRead [FAIL] >> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid >> AsyncIndexChangeCollector::UpsertSingleRow [FAIL] >> AsyncIndexChangeCollector::UpsertManyRows >> EraseRowsTests::EraseRowsShouldSuccess >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 [FAIL] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 >> DataShardReadIterator::ShouldReadRangePrefix2 [FAIL] >> DataShardReadIterator::ShouldReadRangePrefix3 >> KqpPg::InsertFromSelect_NoReorder-useSink [FAIL] >> KqpPg::InsertFromSelect_Serial+useSink |64.7%| [TA] $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... 
results_accumulator.log} >> AsyncIndexChangeCollector::DeleteSingleRow [FAIL] >> KqpPg::InsertNoTargetColumns_Simple+useSink [FAIL] >> KqpPg::InsertNoTargetColumns_Simple-useSink >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow >> KqpPg::Insert_Serial-useSink [FAIL] >> KqpPg::InsertValuesFromTableWithDefaultText+useSink >> KqpPg::DropIndex [GOOD] >> KqpPg::PgCreateTable [FAIL] >> KqpPg::CreateUniqPgColumn+useSink >> KqpPg::PgUpdate+useSink >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite [FAIL] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 [FAIL] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 >> KqpPg::DuplicatedColumns+useSink [GOOD] >> KqpPg::DuplicatedColumns-useSink >> TBSV::ShardsNotLeftInShardsToDelete >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup >> AsyncIndexChangeCollector::InsertManyRows [FAIL] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 [FAIL] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite [FAIL] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite >> KqpPg::Returning+useSink [GOOD] >> KqpPg::Returning-useSink >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted [FAIL] >> DataShardReadIterator::NoErrorOnFinalACK ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnViewsAndTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:23:31.474779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:31.474808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:31.474814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:31.474820Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:31.474832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:31.474837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:31.474847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:31.474863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:31.474980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:31.475073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:31.491032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:23:31.491056Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:31.491146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:23:31.494601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:31.494643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:31.494681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:31.513920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:31.514042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:31.514184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:31.514577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:31.515909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:31.515970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:31.516298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:31.516318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:31.516356Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:31.516368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:31.516374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:31.516396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:23:31.518618Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:23:31.539735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:31.539803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.539861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:31.539919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:31.539931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.540876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:31.540908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:31.540952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.540963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:31.540968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:31.540977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state 
for txid 1:0 2 -> 3 2025-05-29T15:23:31.541699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.541716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:31.541723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:31.543313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.543330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.543339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:31.543346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:31.544197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:31.551084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:31.551145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:31.551371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:31.551412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:31.551422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:31.551506Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
OORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000009 2025-05-29T15:24:10.354097Z node 150 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:10.354119Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 644245096557 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:10.354129Z node 150 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:129: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000009, at schemeshard: 72057594046678944 2025-05-29T15:24:10.354154Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:180: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-29T15:24:10.354163Z node 150 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-29T15:24:10.354167Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:24:10.354173Z node 150 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-29T15:24:10.354176Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:24:10.354187Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:24:10.354197Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:24:10.354203Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-29T15:24:10.354209Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:24:10.354214Z node 150 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-29T15:24:10.354218Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710761:0 2025-05-29T15:24:10.354228Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:24:10.354235Z node 150 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-29T15:24:10.354239Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-05-29T15:24:10.354243Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 
281474976710761, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-05-29T15:24:10.354700Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:10.354728Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:24:10.354755Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:24:10.355211Z node 150 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:24:10.355230Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:10.355287Z node 150 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:10.355294Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:10.355333Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:24:10.355358Z node 150 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:10.355364Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [150:206:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-29T15:24:10.355368Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [150:206:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 5 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-29T15:24:10.355526Z node 150 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:10.355538Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:10.355543Z node 150 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-29T15:24:10.355548Z node 150 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-29T15:24:10.355553Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-05-29T15:24:10.355625Z node 150 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:10.355636Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:10.355641Z node 150 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-29T15:24:10.355645Z node 150 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:24:10.355650Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:24:10.355661Z node 150 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-29T15:24:10.355667Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [150:125:2150] 2025-05-29T15:24:10.355726Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:24:10.355732Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:24:10.355742Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:24:10.356307Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:10.356407Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:10.356425Z node 150 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-29T15:24:10.356435Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710761 2025-05-29T15:24:10.356444Z node 150 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute 2025-05-29T15:24:10.356449Z node 150 :EXPORT DEBUG: schemeshard_export__create.cpp:1232: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-29T15:24:10.356454Z node 150 :EXPORT DEBUG: schemeshard_export__create.cpp:1263: 
TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1004, itemIdx# 4294967295 2025-05-29T15:24:10.356515Z node 150 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:24:10.356792Z node 150 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-29T15:24:10.356840Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:24:10.356847Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:24:10.356913Z node 150 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:24:10.356929Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:24:10.356934Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [150:750:2707] TestWaitNotification: OK eventTxId 1004 >> DataShardReadIterator::ShouldReadHeadFromFollower [FAIL] >> DataShardReadIterator::ShouldReadFromHead >> CdcStreamChangeCollector::InsertSingleUuidRow [FAIL] >> CdcStreamChangeCollector::IndexAndStreamUpsert >> KqpPg::ValuesInsert+useSink [FAIL] >> KqpPg::ValuesInsert-useSink >> DataShardReadIterator::ShouldReadRangeChunk1 [FAIL] >> DataShardReadIterator::ShouldReadRangeChunk2 >> DataShardReadIteratorBatchMode::RangeFromInclusive [FAIL] >> DataShardReadIteratorBatchMode::RangeFromNonInclusive >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] >> DataShardReadIteratorConsistency::WriteLockThenUncommittedReadUpgradeRestartWithStateMigrationRetryAndRestartWithoutStateMigration [FAIL] >> DataShardReadIteratorFastCancel::ShouldProcessFastCancel >> TConsoleTests::TestCreateTenant >> CdcStreamChangeCollector::UpsertToSameKeyWithImages [FAIL] >> CdcStreamChangeCollector::UpsertModifyDelete >> KqpPg::InsertNoTargetColumns_Simple-useSink [FAIL] >> KqpPg::InsertNoTargetColumns_Serial-useSink >> KqpPg::InsertValuesFromTableWithDefaultText+useSink [FAIL] >> KqpPg::InsertValuesFromTableWithDefaultText-useSink >> KqpPg::CreateUniqPgColumn+useSink [FAIL] >> KqpPg::CreateUniqPgColumn-useSink >> EraseRowsTests::ConditionalEraseRowsShouldErase [FAIL] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks >> KqpPg::DuplicatedColumns-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder+useSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:11.703245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:11.703266Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:11.703270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:11.703273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:11.703284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:11.703287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:11.703293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:11.703305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:11.703398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:11.703457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:11.712893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:11.712911Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:11.715319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:11.715431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:11.715462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:11.716824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:11.716958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:11.717059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:11.717104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:11.717533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:11.717583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:11.717855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:11.717865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:11.717885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:11.717894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:11.717900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:11.717934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:11.719010Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:11.733530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:11.733608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:11.733660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:11.733698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:11.733709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:11.734379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:11.734401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:11.734443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:11.734453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:11.734457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:11.734462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:11.734839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-05-29T15:24:11.734849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:11.734854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:11.735161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:11.735170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:11.735174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:11.735181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:11.735650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:11.735977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:11.736012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:11.736181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:11.736200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:11.736205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:11.736271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:11.736278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:11.736311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:11.736324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:11.736699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:11.736710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:11.736755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 7594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:11.752043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_bsv.cpp:40: TDropBlockStoreVolume TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-05-29T15:24:11.752068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:11.752089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:24:11.752093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:24:11.752098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:24:11.752101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:24:11.752109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:11.752116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:24:11.752123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-05-29T15:24:11.752128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:24:11.752132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:24:11.752136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:24:11.752152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:11.752158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-05-29T15:24:11.752161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-05-29T15:24:11.752165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 
18446744073709551615 2025-05-29T15:24:11.752732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:24:11.752743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:24:11.752763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:24:11.752768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:24:11.752873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:24:11.752879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:24:11.752899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:11.752905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:11.752929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:24:11.752950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:11.752954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-05-29T15:24:11.752959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-05-29T15:24:11.753046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:11.753058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:11.753063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:24:11.753068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:24:11.753073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 2] was 1 2025-05-29T15:24:11.753127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:24:11.753134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:24:11.753143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:24:11.753182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:11.753190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:11.753194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:24:11.753198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:24:11.753202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:11.753211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-05-29T15:24:11.753288Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-05-29T15:24:11.753333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:11.753360Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2025-05-29T15:24:11.753412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:24:11.753892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:24:11.753984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:24:11.753997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:24:11.754281Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:24:11.754301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-05-29T15:24:11.754364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:24:11.754371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:24:11.754423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:24:11.754438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:24:11.754442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:395:2374] TestWaitNotification: OK eventTxId 102 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-05-29T15:24:11.754497Z node 1 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:24:11.754522Z node 1 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 { Type { Kind: Struct Struct { Member { Name: "ShardsToDelete" Type { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "List" Type { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ShardIdx" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } Member { Name: "Truncated" Type { Kind: Data Data { Scheme: 6 } } } } } } } } } } Value { Struct { Optional { Struct { } Struct { Bool: false } } } } }
>> AsyncIndexChangeCollector::CoveredIndexUpsert [FAIL]
>> AsyncIndexChangeCollector::AllColumnsInPk
>> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [FAIL]
>> CdcStreamChangeCollector::DeleteNothing
>> KqpPg::PgUpdate+useSink [FAIL]
>> KqpPg::PgUpdate-useSink
>> KqpPg::InsertFromSelect_Serial+useSink [FAIL]
>> KqpPg::InsertFromSelect_Serial-useSink
>> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid [FAIL]
>> AsyncIndexChangeCollector::UpsertManyRows [FAIL]
>> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion
>> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn
>> KqpPg::ValuesInsert-useSink [FAIL]
>> PgCatalog::PgType
>> DataShardReadIterator::ShouldReadRangePrefix3 [FAIL]
>> DataShardReadIterator::ShouldReadRangePrefix4
>> CdcStreamChangeCollector::UpsertIntoTwoStreams [FAIL]
>> CdcStreamChangeCollector::PageFaults
|64.7%| [TA] $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ...
results_accumulator.log}
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite [FAIL]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite
>> EraseRowsTests::EraseRowsShouldSuccess [FAIL]
>> EraseRowsTests::EraseRowsShouldFailOnVariousErrors
>> TConfigsCacheTests::TestNoNotificationIfConfigIsCached
>> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [FAIL]
>> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn
>> KqpPg::InsertValuesFromTableWithDefaultText-useSink [FAIL]
>> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink
>> KqpPg::CreateUniqPgColumn-useSink [FAIL]
>> KqpPg::CreateUniqComplexPgColumn+useSink
>> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 [FAIL]
>> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001
>> KqpPg::InsertNoTargetColumns_Serial-useSink [FAIL]
>> KqpPg::InsertValuesFromTableWithDefault+useSink
>> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [FAIL]
>> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow
>> KqpPg::Returning-useSink [GOOD]
>> KqpPg::SelectIndex+useSink
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [FAIL]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [FAIL]
>> KqpPg::InsertFromSelect_NoReorder+useSink [FAIL]
>> KqpPg::DropTablePg
>> KqpPg::PgUpdate-useSink [FAIL]
>> KqpPg::JoinWithQueryService-StreamLookup
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite [FAIL]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite
>> DataShardReadIterator::NoErrorOnFinalACK [FAIL]
>> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture
>> DataShardReadIterator::ShouldReadFromHead [FAIL]
>> DataShardReadIterator::ShouldReadFromHeadWithConflict+UseSink
>> DataShardReadIterator::ShouldReadRangeChunk2 [FAIL]
>> DataShardReadIterator::ShouldReadRangeChunk3
>> KqpPg::InsertFromSelect_Serial-useSink [FAIL]
>> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink
>> DataShardReadIteratorBatchMode::RangeFromNonInclusive [FAIL]
>> DataShardReadIteratorBatchMode::RangeToInclusive
>> DataShardReadIteratorFastCancel::ShouldProcessFastCancel [FAIL]
>> DataShardReadIteratorLatency::ReadSplitLatency
>> TConsoleConfigTests::TestModifyConfigItem
>> PgCatalog::PgType [GOOD]
>> PgCatalog::InformationSchema
|64.7%| [TA] $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ...
results_accumulator.log}
>> CdcStreamChangeCollector::IndexAndStreamUpsert [FAIL]
>> CdcStreamChangeCollector::NewImage
>> KqpPg::TypeCoercionInsert+useSink [FAIL]
>> KqpPg::TableSelect+useSink
>> CdcStreamChangeCollector::UpsertModifyDelete [FAIL]
>> AsyncIndexChangeCollector::AllColumnsInPk [FAIL]
>> AsyncIndexChangeCollector::CoverIndexedColumn
>> TConfigsCacheTests::TestNoNotificationIfConfigIsCached [GOOD]
>> TConfigsCacheTests::TestFullConfigurationRestore
>> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [FAIL]
>> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink [FAIL]
>> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink
>> KqpPg::CreateUniqComplexPgColumn+useSink [FAIL]
>> KqpPg::CreateUniqComplexPgColumn-useSink
>> KqpPg::SelectIndex+useSink [FAIL]
>> KqpPg::SelectIndex-useSink
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [FAIL]
Test command err: 2025-05-29T15:23:45.682518Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:23:45.682553Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:23:45.682568Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001662/r3tmp/tmp5xllX6/pdisk_1.dat 2025-05-29T15:23:45.799753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:23:45.816251Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:782: Updated table service config: ComputeActorsCount: 10000 ChannelBufferSize: 8388608 MkqlLightProgramMemoryLimit: 1048576 MkqlHeavyProgramMemoryLimit: 31457280 QueryMemoryLimit: 32212254720 PublishStatisticsIntervalSec: 2 MaxTotalChannelBuffersSize: 2147483648 MinChannelBufferSize: 2048 2025-05-29T15:23:45.816313Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:519: Updated table service config. 2025-05-29T15:23:45.816324Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1665: Updated YQL logs priority to current level: 7 2025-05-29T15:23:45.816353Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:382: Updated table service config. 2025-05-29T15:23:45.816412Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:45.817333Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532225193597 != 1748532225193601 2025-05-29T15:23:45.859012Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:23:45.859363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:45.859389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:45.859478Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:23:45.870060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:45.942889Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:59:2106] Handle TEvProposeTransaction 2025-05-29T15:23:45.942919Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:23:45.942962Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-05-29T15:23:45.960723Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value1" Type: "Uint32" 
FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-05-29T15:23:45.960770Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:23:45.960992Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:23:45.961011Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:45.961094Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:45.961152Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:45.961173Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:23:45.961660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:45.961828Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:23:45.961999Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:23:45.962012Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:592:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-05-29T15:23:45.990619Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:23:45.990985Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:23:45.991131Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-05-29T15:23:45.991242Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:23:45.992533Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:23:46.001042Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:23:46.001103Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:23:46.001284Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 
2025-05-29T15:23:46.001301Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:23:46.001310Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:23:46.001390Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:23:46.001437Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:23:46.001457Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-05-29T15:23:46.011820Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:23:46.015507Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:23:46.015582Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:23:46.015604Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-05-29T15:23:46.015608Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:23:46.015611Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:23:46.015615Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:23:46.015669Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:23:46.015674Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:23:46.015755Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:23:46.015774Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:23:46.015780Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:23:46.015785Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:23:46.015791Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:23:46.015795Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:23:46.015798Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:23:46.015801Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:23:46.015805Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:23:46.015819Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:46.015823Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:46.015827Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-05-29T15:23:46.015889Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:673:2574] 2025-05-29T15:23:46.015893Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:23:46.015913Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:23:46.015954Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:23:46.015962Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo ... p:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.560 DEBUG ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [perf] yql_expr_csee.cpp:620: Execution of [UpdateCompletness] took 3us 2025-05-29T15:23:50.560936Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.560 DEBUG ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [perf] yql_expr_csee.cpp:633: Execution of [EliminateCommonSubExpressionsForSubGraph] took 11us 2025-05-29T15:23:50.561044Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 DEBUG ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [perf] type_ann_expr.cpp:48: Execution of [TypeAnnotationTransformer::DoTransform] took 1us 2025-05-29T15:23:50.561050Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 DEBUG ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [perf] yql_expr_constraint.cpp:3248: Execution of [ConstraintTransformer::DoTransform] took 1us 2025-05-29T15:23:50.561056Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 DEBUG ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [perf] yql_expr_csee.cpp:620: Execution of [UpdateCompletness] took 2us 2025-05-29T15:23:50.561069Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 DEBUG ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [perf] yql_expr_csee.cpp:633: Execution of [EliminateCommonSubExpressionsForSubGraph] took 9us 2025-05-29T15:23:50.561130Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 DEBUG ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [perf] type_ann_expr.cpp:48: Execution of [TypeAnnotationTransformer::DoTransform] took 10us 2025-05-29T15:23:50.561142Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 DEBUG ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [perf] 
yql_expr_constraint.cpp:3248: Execution of [ConstraintTransformer::DoTransform] took 7us 2025-05-29T15:23:50.561150Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 DEBUG ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [perf] yql_expr_csee.cpp:620: Execution of [UpdateCompletness] took 4us 2025-05-29T15:23:50.561172Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 DEBUG ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [perf] yql_expr_csee.cpp:633: Execution of [EliminateCommonSubExpressionsForSubGraph] took 19us 2025-05-29T15:23:50.561220Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 DEBUG ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [KQP] kqp_opt_peephole.cpp:489: >>> TKqpTxPeepholeTransformer[skip]: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value1" (OptionalType (DataType 'Uint32))) '('"value2" (OptionalType (DataType 'Uint32)))))) (let $1 '"%kqp%tx_result_binding_0_0") (let $2 (OptionalType (DataType 'Uint32))) (let $3 (ListType (StructType '('"key" $2) '('"value1" $2) '('"value2" $2)))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $6 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $7 '('"key" '"value1" '"value2")) (return (KqpEffects (KqpUpsertRows $6 (Iterator %kqp%tx_result_binding_0_0) $7 '('('"Mode" '"upsert"))))) ))) '('('"_logical_id" '317) '('"_id" '"dc7bf505-bbaf59a4-1409b969-14570980")))) (let $5 (KqpTxResultBinding $3 '"0" '"0")) (return (KqpPhysicalTx '($4) '() '('($1 $5)) '('('"type" '"data") '('"with_effects")))) ) 2025-05-29T15:23:50.561253Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 DEBUG ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [perf] type_ann_expr.cpp:48: Execution of [TypeAnnotationTransformer::DoTransform] took 1us 2025-05-29T15:23:50.561260Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 DEBUG ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [perf] yql_expr_constraint.cpp:3248: Execution of [ConstraintTransformer::DoTransform] took 1us 2025-05-29T15:23:50.561268Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 DEBUG ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [perf] yql_expr_csee.cpp:620: Execution of [UpdateCompletness] took 4us 2025-05-29T15:23:50.561286Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 DEBUG ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [perf] yql_expr_csee.cpp:633: Execution of [EliminateCommonSubExpressions] took 13us 2025-05-29T15:23:50.561498Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 DEBUG ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [perf] type_ann_expr.cpp:48: Execution of [TypeAnnotationTransformer::DoTransform] took 166us 2025-05-29T15:23:50.561574Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 INFO 
ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [core exec] yql_execution.cpp:466: Register async execution for node #168 2025-05-29T15:23:50.561589Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 INFO ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [core exec] yql_execution.cpp:87: Finish, output #170, status: Async 2025-05-29T15:23:50.561652Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 INFO ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [core exec] yql_execution.cpp:133: Completed async execution for node #168 2025-05-29T15:23:50.561660Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 INFO ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [core exec] yql_execution.cpp:153: State is ExecutionRequired after apply async changes for node #168 2025-05-29T15:23:50.561668Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 INFO ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [core exec] yql_execution.cpp:59: Begin, root #170 2025-05-29T15:23:50.561677Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 INFO ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [core exec] yql_execution.cpp:72: Collect unused nodes for root #170, status: Ok 2025-05-29T15:23:50.561711Z node 2 :KQP_YQL NOTICE: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 NOTE ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [common provider] yql_provider_gateway.cpp:21:
: Fatal: Execution, code: 1060 2025-05-29T15:23:50.561718Z node 2 :KQP_YQL NOTICE: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 NOTE ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [common provider] yql_provider_gateway.cpp:21:
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:50.561732Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 INFO ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [core exec] yql_execution.cpp:87: Finish, output #170, status: Error 2025-05-29T15:23:50.561748Z node 2 :KQP_YQL NOTICE: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 NOTE ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [common provider] yql_provider_gateway.cpp:21:
: Fatal: Execution, code: 1060 2025-05-29T15:23:50.561754Z node 2 :KQP_YQL NOTICE: log.cpp:67: TraceId: 01jwea8y6k4bw9k3440611pcp0, SessionId: CompileActor 2025-05-29 15:23:50.561 NOTE ydb-core-tx-datashard-ut_order(pid=3678837, tid=0x00007F9D5D910DC0) [common provider] yql_provider_gateway.cpp:21:
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:50.561864Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:826:2670], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:50.562339Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=NjdmNGQ4N2UtM2FjNjA3NC1jYWVkYzNhMi1iN2E1NTk1Nw==, ActorId: [2:730:2612], ActorState: ExecuteState, TraceId: 01jwea8y6k4bw9k3440611pcp0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:23:50.562432Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:953: Forwarded response to sender actor, requestId: 2, sender: [2:591:2517], selfId: [2:57:2104], source: [2:730:2612] 2025-05-29T15:23:50.562520Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1353: Session closed, sessionId: ydb://session/3?node_id=2&id=NjdmNGQ4N2UtM2FjNjA3NC1jYWVkYzNhMi1iN2E1NTk1Nw==, workerId: [2:730:2612], local sessions count: 0 assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13BB0A2C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13D686B9)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x263E5704)
NKikimr::NTestSuiteDataShardOutOfOrder::TestLateKqpQueryAfterColumnDrop(bool, bool, TBasicString> const&)+3848 (0x13A20D88)
NKikimr::NTestSuiteDataShardOutOfOrder::TTestCaseTestLateKqpScanAfterColumnDrop::Execute_(NUnitTest::TTestContext&)+117 (0x13A93CE5)
NKikimr::NTestSuiteDataShardOutOfOrder::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13A62687)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13D6A56E)
NKikimr::NTestSuiteDataShardOutOfOrder::TCurrentTest::Execute()+426 (0x13A6204A)
NUnitTest::TTestFactory::Execute()+803 (0x13D6ACE3)
NUnitTest::RunMain(int, char**)+3021 (0x13D7C86D)
??+0 (0x7F9D5DA25D90)
__libc_start_main+128 (0x7F9D5DA25E40)
_start+41 (0x12A57029)
>> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD]
>> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion [FAIL]
>> DataShardReadIteratorSysTables::ShouldNotAllowArrow
>> KqpPg::InsertValuesFromTableWithDefault+useSink [FAIL]
>> KqpPg::InsertValuesFromTableWithDefault-useSink
>> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [FAIL]
>> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow
>> KqpPg::JoinWithQueryService-StreamLookup [FAIL]
>> KqpPg::PgAggregate+useSink
>> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink [FAIL]
>> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink
>> TConsoleConfigTests::TestModifyConfigItem [GOOD]
>> TConsoleConfigTests::TestRemoveConfigItem
>> CdcStreamChangeCollector::DeleteNothing [FAIL]
>> CdcStreamChangeCollector::DeleteSingleRow
>> PgCatalog::InformationSchema [GOOD]
>> DataShardReadIterator::ShouldReadRangePrefix4 [FAIL]
>> TConfigsCacheTests::TestFullConfigurationRestore [GOOD]
>> DataShardReadIterator::ShouldReadRangePrefix5
>> PgCatalog::CheckSetConfig
>> TConfigsCacheTests::TestConfigurationSaveOnNotification
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >>
TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] Test command err: 2025-05-29T15:21:37.149891Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:21:37.154619Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:21:37.154708Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:21:37.155593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:37.155654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:37.155696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:37.155715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:37.155740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:37.155759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:37.155776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:37.155795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:37.155813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:37.155832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:37.155853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.155880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:37.164462Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:21:37.164540Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:21:37.164551Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:21:37.164585Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.164626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:37.164640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:37.164647Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:21:37.164657Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:21:37.164667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:37.164676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:37.164680Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:21:37.164700Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:21:37.164710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:37.164718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:37.164722Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:21:37.164733Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:21:37.164741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:37.164750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:37.164755Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:21:37.164768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:37.164777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:37.164895Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:21:37.164905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:37.164914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:37.164918Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:21:37.164938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:37.164944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:37.164948Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:21:37.164964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:37.164970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21:37.164974Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:21:37.164984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:21:37.164989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.164992Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:21:37.164998Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:21:37.165005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:21:37.165011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:21:37.165014Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:21:37.165094Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=10; 2025-05-29T15:21:37.165102Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-05-29T15:21:37.165109Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute ... =9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=4004; 2025-05-29T15:24:11.097594Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=2; 2025-05-29T15:24:11.097855Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=257; 2025-05-29T15:24:11.097861Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=4319; 2025-05-29T15:24:11.097880Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=14; 2025-05-29T15:24:11.097988Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=12; 2025-05-29T15:24:11.097996Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=113; 2025-05-29T15:24:11.098011Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=11; 2025-05-29T15:24:11.098023Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=7; 2025-05-29T15:24:11.098043Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=16; 2025-05-29T15:24:11.098054Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=7; 2025-05-29T15:24:11.100829Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2761; 2025-05-29T15:24:11.106108Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5238; 2025-05-29T15:24:11.106161Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=10; 2025-05-29T15:24:11.106173Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=3; 2025-05-29T15:24:11.106181Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2025-05-29T15:24:11.106187Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-05-29T15:24:11.106195Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-05-29T15:24:11.106212Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=11; 2025-05-29T15:24:11.106221Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-05-29T15:24:11.106240Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=13; 2025-05-29T15:24:11.106248Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-05-29T15:24:11.106261Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=6; 2025-05-29T15:24:11.106280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=12; 2025-05-29T15:24:11.106298Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=11; 2025-05-29T15:24:11.106305Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=13535; 2025-05-29T15:24:11.106365Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=104296632;raw_bytes=191125560;count=2;records=1845000} inactive {blob_bytes=1252684688;raw_bytes=2245386718;count=44;records=22220000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-05-29T15:24:11.106405Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-05-29T15:24:11.106419Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-05-29T15:24:11.106438Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-05-29T15:24:11.106448Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];process=SwitchToWork;fline=column_engine_logs.cpp:493;event=OnTieringModified;new_count_tierings=0; 2025-05-29T15:24:11.106484Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-05-29T15:24:11.106498Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:24:11.106521Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=22; 2025-05-29T15:24:11.106537Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:288;event=StartCleanupStop;snapshot=plan_step=1748531811863;tx_id=18446744073709551615;;current_snapshot_ts=1748532098269; 2025-05-29T15:24:11.106545Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:24:11.106557Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:24:11.106563Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:24:11.106588Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-05-29T15:24:11.108541Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: self_id=[1:12702:14624];tablet_id=9437184;parent=[1:12665:14595];fline=manager.cpp:85;event=ask_data;request=request_id=112;1={portions_count=46};; 2025-05-29T15:24:11.109400Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:24:11.111486Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-05-29T15:24:11.112228Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: 
tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:243;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;
2025-05-29T15:24:11.112240Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats.
2025-05-29T15:24:11.112246Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184
2025-05-29T15:24:11.112254Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-05-29T15:24:11.112272Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0;
2025-05-29T15:24:11.112298Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=22;
2025-05-29T15:24:11.112314Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:288;event=StartCleanupStop;snapshot=plan_step=1748531811863;tx_id=18446744073709551615;;current_snapshot_ts=1748532098269;
2025-05-29T15:24:11.112324Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-05-29T15:24:11.112337Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes;
2025-05-29T15:24:11.112344Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes;
2025-05-29T15:24:11.112365Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:164;event=skip_actualization;waiting=0.999000s;
2025-05-29T15:24:11.112375Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:12665:14595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes;
>> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [FAIL]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite [FAIL]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite [FAIL]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite
>> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture [FAIL]
>> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction
>> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 [FAIL]
>> DataShardReadIterator::ShouldReadFromFollower
>> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink [FAIL]
>> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink
>> KqpPg::SelectIndex-useSink [FAIL]
>> KqpPg::TableDeleteAllData+useSink
>> DataShardReadIterator::ShouldReadRangeChunk3 [FAIL]
>> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [FAIL]
>> DataShardReadIterator::ShouldReadRangeChunk5
>> KqpPg::CreateUniqComplexPgColumn-useSink [FAIL]
>> KqpPg::CreateTempTable
>> DataShardReadIterator::ShouldReadFromHeadWithConflict+UseSink [FAIL]
>> DataShardReadIterator::ShouldReadFromHeadWithConflict-UseSink
>> DataShardReadIteratorBatchMode::RangeToInclusive [FAIL]
>> DataShardReadIteratorBatchMode::RangeToNonInclusive
>> DataShardReadIteratorLatency::ReadSplitLatency [FAIL]
>> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable
>> KqpPg::PgAggregate+useSink [FAIL]
>> KqpPg::PgAggregate-useSink
>> KqpPg::CreateTableBulkUpsertAndRead [GOOD]
>> KqpPg::CopyTableSerialColumns+useSink
>> TConsoleConfigTests::TestRemoveConfigItem [GOOD]
>> TConsoleConfigTests::TestRemoveConfigItems
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate
>> PgCatalog::CheckSetConfig [FAIL]
>> TConfigsCacheTests::TestConfigurationSaveOnNotification [GOOD]
>> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification
>> PgCatalog::PgDatabase+useSink
>> CdcStreamChangeCollector::NewImage [FAIL]
>> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink [FAIL]
>> KqpPg::InsertNoTargetColumns_NotOneSize+useSink
>> AsyncIndexChangeCollector::CoverIndexedColumn [FAIL]
>> KqpPg::InsertValuesFromTableWithDefault-useSink [FAIL]
>> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink
>> DataShardReadIteratorSysTables::ShouldNotAllowArrow [FAIL]
>> ReadIteratorExternalBlobs::ExtBlobs
>> KqpPg::CreateTempTable [GOOD]
>> KqpPg::CreateTempTableSerial
>> CdcStreamChangeCollector::PageFaults [FAIL]
>> CdcStreamChangeCollector::OldImage
>> TConsoleConfigTests::TestRemoveConfigItems [GOOD]
>> TConsoleConfigTests::TestConfigureOrderConflicts
>> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink [GOOD]
>> TConsoleConfigSubscriptionTests::TestAddConfigSubscription
>> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink
>> KqpPg::TableDeleteAllData+useSink [FAIL]
>> KqpPg::TableDeleteAllData-useSink
>> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [FAIL]
>> CdcStreamChangeCollector::DeleteSingleRow [FAIL]
>> KqpPg::PgAggregate-useSink [FAIL]
>> KqpPg::MkqlTerminate
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient
>> DataShardReadIterator::ShouldReadRangePrefix5 [FAIL]
>> DataShardReadIterator::ShouldReceiveErrorAfterSplit
>> PgCatalog::PgDatabase+useSink [GOOD]
>> PgCatalog::PgDatabase-useSink
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite [FAIL]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite
>> TConsoleTests::TestCreateTenant [GOOD]
>> TConsoleTests::TestCreateTenantExtSubdomain
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite [FAIL]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite
>> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction [FAIL]
>> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions
>> DataShardReadIterator::ShouldReadFromFollower [FAIL]
>> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower
>> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification [GOOD]
>> TConfigsCacheTests::TestConfigurationChangeSensor
>> DataShardReadIterator::ShouldReadRangeChunk5 [FAIL]
>> DataShardReadIterator::ShouldReadRangeChunk7
>> DataShardReadIterator::ShouldReadFromHeadWithConflict-UseSink [FAIL]
>> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict+UseSink
>> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable [FAIL]
>> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault
>> KqpPg::InsertNoTargetColumns_NotOneSize+useSink [FAIL]
>> DataShardReadIteratorBatchMode::RangeToNonInclusive [FAIL]
>> KqpPg::InsertNoTargetColumns_NotOneSize-useSink
>> DataShardReadIteratorBatchMode::MultipleRanges
>> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink [FAIL]
>> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink
>> TConsoleConfigTests::TestConfigureOrderConflicts [GOOD]
>> TConsoleConfigTests::TestGetItems
>> KqpPg::CreateTempTableSerial [GOOD]
>> KqpPg::DropSequence
>> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink [GOOD]
>> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink
>> KqpPg::TableDeleteAllData-useSink [FAIL]
>> KqpPg::PgUpdateCompoundKey+useSink
>> TConsoleConfigSubscriptionTests::TestAddConfigSubscription [GOOD]
>> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates
>> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD]
>> TConfigsDispatcherTests::TestSubscriptionNotification
>> KqpPg::MkqlTerminate [FAIL]
>> KqpPg::NoSelectFullScan
>> PgCatalog::PgDatabase-useSink [GOOD]
>> PgCatalog::PgRoles
>> TConfigsDispatcherTests::TestSubscriptionNotification [GOOD]
>> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate
>> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup
>> KqpPg::DropTablePgMultiple
>> KqpPg::DropSequence [GOOD]
>> KqpPg::DeleteWithQueryService+useSink
>> TConsoleConfigTests::TestGetItems [GOOD]
>> TConsoleConfigTests::TestGetNodeItems
>> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink [GOOD]
>> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink
>> KqpPg::InsertNoTargetColumns_NotOneSize-useSink [FAIL]
>> KqpPg::InsertNoTargetColumns_Alter+useSink
>> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink [FAIL]
>> KqpPg::InsertValuesFromTableWithDefaultBool+useSink
>> ReadIteratorExternalBlobs::ExtBlobs [FAIL]
>> ReadIteratorExternalBlobs::ExtBlobsWithSpecificKeys
>> KqpPg::PgUpdateCompoundKey+useSink [FAIL]
>> KqpPg::PgUpdateCompoundKey-useSink
>> CdcStreamChangeCollector::OldImage [FAIL]
>> DataShardReadIterator::ShouldReceiveErrorAfterSplit [FAIL]
>> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted
>> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription [GOOD]
>> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite [FAIL]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite
>> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate [GOOD]
>> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate
>> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault [FAIL]
>> DataShardReadIteratorState::ShouldCalculateQuota [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite [FAIL]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite
>> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions [FAIL]
>> DataShardReadIterator::HandlePersistentSnapshotGoneInContinue [GOOD]
>> DataShardReadIterator::HandleMvccGoneInContinue [GOOD]
>> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower [FAIL]
>> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc+UseSink
>> PgCatalog::PgRoles [GOOD]
>> PgCatalog::PgTables
>> DataShardReadIterator::ShouldReadRangeChunk7 [FAIL]
>> DataShardReadIterator::ShouldReadRangeChunk100
>> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict+UseSink [FAIL]
>> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink
>> KqpPg::DropTablePgMultiple [GOOD]
>> KqpPg::DropTableIfExists
>> DataShardReadIteratorBatchMode::MultipleRanges [FAIL]
>> DataShardReadIteratorBatchMode::SelectingColumns
>> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD]
>> TConfigsDispatcherTests::TestRemoveSubscription
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [FAIL]
Test command err:
2025-05-29T15:24:10.661573Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:10.661605Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:10.661616Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e9c/r3tmp/tmp33i7JT/pdisk_1.dat 2025-05-29T15:24:10.763179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:10.776938Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:10.780690Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532250331061 != 1748532250331065 2025-05-29T15:24:10.822491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:10.822521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:10.833077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:10.906657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:10.924767Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:10.924855Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:10.935080Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:10.935129Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:10.935261Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:10.935269Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:10.935274Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:10.935317Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:10.935329Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:10.935339Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:10.945663Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:10.948737Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:10.948828Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast 
registration request in state WaitScheme: missing processing params 2025-05-29T15:24:10.948860Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:10.948867Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:10.948872Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:10.948877Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:10.949014Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:10.949035Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:10.949047Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:10.949054Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:10.949061Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:10.949064Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:10.949152Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:10.949175Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:10.949226Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:10.949241Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:10.949529Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:10.959845Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:10.959910Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:11.103773Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:697:2587], serverId# [1:699:2589], sessionId# [0:0:0] 2025-05-29T15:24:11.104884Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:24:11.104918Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:11.104987Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 
72075186224037888 2025-05-29T15:24:11.104998Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:24:11.105011Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:24:11.105095Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:24:11.105135Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:11.105268Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:11.105285Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:24:11.105761Z node 1 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:11.105871Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:11.106283Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:11.106296Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:11.106534Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:11.106551Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:11.106836Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:11.106851Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:11.106860Z node 1 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:11.106880Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:411:2405], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:11.106891Z node 1 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:11.106906Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:11.108004Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:11.108326Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:11.108343Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 
72075186224037888 2025-05-29T15:24:11.108514Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:11.112916Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:11.112950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:11.113038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:11.114145Z nod ... HARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:12.341234Z node 2 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:12.341243Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.341447Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:12.341491Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:12.341666Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:12.341678Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:12.341689Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:12.341701Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:12.341724Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [2:662:2567], serverId# [2:671:2572], sessionId# [0:0:0] 2025-05-29T15:24:12.341777Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:12.341845Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:12.341873Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:12.342525Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:12.353024Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:12.353099Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:12.497756Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [2:697:2587], serverId# [2:699:2589], sessionId# [0:0:0] 2025-05-29T15:24:12.497941Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:24:12.497954Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.498131Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:12.498141Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 
active planned 0 immediate 0 planned 1 2025-05-29T15:24:12.498150Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:24:12.498215Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:24:12.498256Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:12.498441Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:12.498457Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:24:12.498549Z node 2 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:12.498645Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:12.498978Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:12.498989Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.499150Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:12.499162Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:12.499278Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:12.499285Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:12.499291Z node 2 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:12.499308Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:12.499319Z node 2 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:12.499329Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.499579Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:12.499977Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:12.499994Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:12.500053Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 
72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:12.503225Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.503248Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:742:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.503257Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.504196Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:12.505144Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:12.650810Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:12.651307Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:12.682869Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:815:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:12.693038Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:825:2669], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:12.693955Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=YzhjYTI3NGQtNTQ3ZTIwNWMtMThiYjBhMy04MGFmZDY3OQ==, ActorId: [2:729:2611], ActorState: ExecuteState, TraceId: 01jwea9ktp1b1vrf61c72f54v2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x15E3C6EC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x15FF0019)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x286779A4)
NKikimr::NTestSuiteEraseRowsTests::ConditionalEraseShouldSuccess(TBasicString> const&, NKikimrSchemeOp::TTTLSettings_EUnit, TBasicString> const&, TBasicString> const&, bool)+930 (0x15CEB692)
NKikimr::NTestSuiteEraseRowsTests::TTestCaseConditionalEraseRowsShouldEraseOnTimestamp64::Execute_(NUnitTest::TTestContext&)+283 (0x15CF001B)
NKikimr::NTestSuiteEraseRowsTests::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x15D22657)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x15FF1ECE)
NKikimr::NTestSuiteEraseRowsTests::TCurrentTest::Execute()+436 (0x15D21EB4)
NUnitTest::TTestFactory::Execute()+803 (0x15FF2643)
NUnitTest::RunMain(int, char**)+3021 (0x160041DD)
??+0 (0x7F562A1E3D90)
__libc_start_main+128 (0x7F562A1E3E40)
_start+41 (0x14D4D029)
>> TConsoleConfigTests::TestGetNodeItems [GOOD]
>> KqpPg::NoSelectFullScan [FAIL]
>> TConsoleConfigTests::TestGetNodeConfig
>> KqpPg::LongDomainName
>> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD]
>> KqpPg::InsertNoTargetColumns_Alter+useSink [FAIL]
>> KqpPg::InsertNoTargetColumns_Alter-useSink
>> TConfigsDispatcherTests::TestRemoveSubscription [GOOD]
>> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess
>> KqpPg::CopyTableSerialColumns+useSink [FAIL]
>> KqpPg::CopyTableSerialColumns-useSink
>> KqpPg::DeleteWithQueryService+useSink [FAIL]
>> KqpPg::DeleteWithQueryService-useSink
>> KqpPg::InsertValuesFromTableWithDefaultBool+useSink [FAIL]
>> KqpPg::InsertValuesFromTableWithDefaultBool-useSink
>> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions [GOOD]
>> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [FAIL]
Test command err:
2025-05-29T15:24:09.267461Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:09.267494Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:09.267508Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ebc/r3tmp/tmpBrTRaT/pdisk_1.dat 2025-05-29T15:24:09.382450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.396431Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:09.400581Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532248816950 != 1748532248816954 2025-05-29T15:24:09.442333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:09.442364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:09.452850Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:09.525712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.540907Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:09.541003Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:09.547464Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:09.547501Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:09.547619Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:09.547625Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:09.547629Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:09.547666Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:09.547676Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:09.547685Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:09.557915Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:09.560773Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:09.560837Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast 
registration request in state WaitScheme: missing processing params 2025-05-29T15:24:09.560857Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:09.560861Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:09.560864Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:09.560868Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:09.560974Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:09.560992Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:09.561002Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:09.561007Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:09.561014Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:09.561017Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:09.561088Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:09.561107Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:09.561146Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:09.561159Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:09.561399Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:09.571659Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:09.571698Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:09.714854Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:697:2587], serverId# [1:699:2589], sessionId# [0:0:0] 2025-05-29T15:24:09.715801Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:24:09.715823Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:09.715874Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 
72075186224037888 2025-05-29T15:24:09.715882Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:24:09.715891Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:24:09.715954Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:24:09.715986Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:09.716105Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:09.716119Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:24:09.716533Z node 1 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:09.716624Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:09.716949Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:09.716959Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:09.717144Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:09.717156Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:09.717357Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:09.717366Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:09.717372Z node 1 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:09.717386Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:411:2405], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:09.717395Z node 1 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:09.717407Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:09.718322Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:09.718572Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:09.718586Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 
72075186224037888 2025-05-29T15:24:09.718759Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:09.722526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:09.722547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:09.722614Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:09.723547Z nod ... SHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:12.091916Z node 3 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:12.091921Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.092014Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:12.092035Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:12.092052Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:12.092058Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:12.092084Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:12.092090Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:12.092169Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [3:661:2566], serverId# [3:671:2572], sessionId# [0:0:0] 2025-05-29T15:24:12.092201Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:12.092248Z node 3 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:12.092266Z node 3 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:12.092545Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:12.102822Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:12.102860Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:12.250476Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [3:697:2587], serverId# [3:699:2589], sessionId# [0:0:0] 2025-05-29T15:24:12.250592Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:24:12.250601Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.250679Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:12.250688Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 
active planned 0 immediate 0 planned 1 2025-05-29T15:24:12.250696Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:24:12.250780Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:24:12.250811Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:12.250835Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:12.250849Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:24:12.250943Z node 3 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:12.251021Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:12.251337Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:12.251347Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.251543Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:12.251555Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:12.251779Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:12.251788Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:12.251794Z node 3 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:12.251810Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:413:2406], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:12.251820Z node 3 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:12.251829Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.252206Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:12.252438Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:12.252453Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:12.252560Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 
72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:12.255242Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.255263Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:741:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.255270Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.260544Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:12.265494Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:12.411655Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:12.412077Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:12.444464Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:815:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:12.453363Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:825:2669], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:12.453891Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=ZWZiNmMzZjctMzMzYmExZi0xYmExMGEyMy0yMjBhZmVjYg==, ActorId: [3:729:2611], ActorState: ExecuteState, TraceId: 01jwea9kjzfdz91ey543rv5qfm, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x15E3C6EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x15FF0019) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x286779A4) NKikimr::NTestSuiteEraseRowsTests::ConditionalEraseShouldSuccess(TBasicString> const&, NKikimrSchemeOp::TTTLSettings_EUnit, TBasicString> const&, TBasicString> const&, bool)+930 (0x15CEB692) NKikimr::NTestSuiteEraseRowsTests::TTestCaseConditionalEraseRowsShouldEraseOnDatetime64::Execute_(NUnitTest::TTestContext&)+278 (0x15CEFDE6) NKikimr::NTestSuiteEraseRowsTests::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x15D22657) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x15FF1ECE) NKikimr::NTestSuiteEraseRowsTests::TCurrentTest::Execute()+436 (0x15D21EB4) NUnitTest::TTestFactory::Execute()+803 (0x15FF2643) NUnitTest::RunMain(int, char**)+3021 (0x160041DD) ??+0 (0x7F4E9F402D90) __libc_start_main+128 (0x7F4E9F402E40) _start+41 (0x14D4D029) >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification |64.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |64.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |64.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |64.7%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConsoleConfigTests::TestGetNodeConfig [GOOD] >> TConsoleConfigTests::TestAutoOrder >> KqpPg::PgUpdateCompoundKey-useSink [FAIL] >> KqpPg::LongDomainName [GOOD] >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions >> KqpPg::DropTableIfExists [GOOD] >> KqpPg::DropTableIfExists_GenericQuery >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] >> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted [FAIL] >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [FAIL] Test command err: 2025-05-29T15:24:10.191930Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:10.191974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:10.191990Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ea7/r3tmp/tmptB79Si/pdisk_1.dat 2025-05-29T15:24:10.296903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:10.310677Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:10.313702Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532249835139 != 1748532249835143 2025-05-29T15:24:10.355597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:10.355643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:10.366319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:10.440025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:10.460337Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:687:2585] 2025-05-29T15:24:10.460443Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:10.470298Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:10.470369Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:10.470566Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:10.470577Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:10.470585Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:10.470652Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:10.471028Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:10.471045Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:720:2585] in generation 1 2025-05-29T15:24:10.471241Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:690:2587] 2025-05-29T15:24:10.471277Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:10.473016Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 
2025-05-29T15:24:10.473059Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:10.473203Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-05-29T15:24:10.473212Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037889 2025-05-29T15:24:10.473219Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037889 2025-05-29T15:24:10.473263Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:10.473358Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2589] 2025-05-29T15:24:10.473391Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:10.474673Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:10.474692Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037889 persisting started state actor id [1:733:2587] in generation 1 2025-05-29T15:24:10.474857Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:10.474877Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:10.475013Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-05-29T15:24:10.475022Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037890 2025-05-29T15:24:10.475028Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037890 2025-05-29T15:24:10.475067Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:10.475084Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:10.475094Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037890 persisting started state actor id [1:738:2589] in generation 1 2025-05-29T15:24:10.485455Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:10.488518Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:10.488594Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:10.488616Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:742:2616] 2025-05-29T15:24:10.488620Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:10.488624Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:10.488628Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:10.488727Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:10.488735Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037889 2025-05-29T15:24:10.488746Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037889 not sending 
time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:10.488755Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037889, actorId: [1:743:2617] 2025-05-29T15:24:10.488758Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037889 2025-05-29T15:24:10.488762Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-05-29T15:24:10.488765Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:10.488844Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:10.488848Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037890 2025-05-29T15:24:10.488854Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:10.488859Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037890, actorId: [1:744:2618] 2025-05-29T15:24:10.488862Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037890 2025-05-29T15:24:10.488864Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-05-29T15:24:10.488866Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-05-29T15:24:10.488907Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:10.488938Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:10.488955Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:10.488961Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:10.488968Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:10.488971Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:10.488975Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-05-29T15:24:10.488984Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-05-29T15:24:10.489001Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2025-05-29T15:24:10.489007Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-05-29T15:24:10.489090Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:676:2580], serverId# [1:702:2593], sessionId# [0:0:0] 2025-05-29T15:24:10.489094Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 
2025-05-29T15:24:10.489097Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:10.489100Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-05-29T15:24:10.489103Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:24:10.489125Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:10.489175Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:10.489195Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:10.489277Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037889, clientId# [1:678:2581], serverId# [1:710:2599], sessionId# [0:0:0] 2025-05-29T15:24:10.489282Z node 1 :TX_D ... 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:12.779718Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [3:682:2578] 2025-05-29T15:24:12.779724Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:12.779730Z node 3 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:12.779736Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.779858Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:12.779887Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:12.779904Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:12.779912Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:12.779922Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:12.779927Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:12.780030Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [3:661:2566], serverId# [3:671:2572], sessionId# [0:0:0] 2025-05-29T15:24:12.780071Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:12.780133Z node 3 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:12.780154Z node 3 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:12.780510Z node 3 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:12.790859Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:12.790909Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:12.935568Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [3:697:2587], serverId# [3:699:2589], sessionId# [0:0:0] 2025-05-29T15:24:12.935713Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:24:12.935725Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.935821Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:12.935831Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:24:12.935863Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:24:12.935951Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:24:12.935989Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:12.936020Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:12.936036Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:24:12.936184Z node 3 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:12.936280Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:12.936656Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:12.936668Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.936913Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:12.936928Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:12.937209Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:12.937222Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 
72075186224037888 2025-05-29T15:24:12.937229Z node 3 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:12.937248Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:413:2406], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:12.937259Z node 3 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:12.937270Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.937746Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:12.938044Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:12.938061Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:12.938181Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:12.942475Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.942507Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:741:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.942519Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.943731Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:12.944988Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:13.096628Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:13.097180Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:13.132020Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:815:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:13.148350Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:825:2669], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:13.148906Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=Y2Q5N2IwM2EtNTlkYjllYjAtZWE3ZTI3NGItM2I1MTVhMzM=, ActorId: [3:729:2611], ActorState: ExecuteState, TraceId: 01jwea9m8e5atqbg1s3myqmy4b, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x15E3C6EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x15FF0019) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x286779A4) NKikimr::NTestSuiteEraseRowsTests::TTestCaseConditionalEraseRowsShouldBreakLocks::Execute_(NUnitTest::TTestContext&)+1172 (0x15CF2654) NKikimr::NTestSuiteEraseRowsTests::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x15D22657) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x15FF1ECE) NKikimr::NTestSuiteEraseRowsTests::TCurrentTest::Execute()+436 (0x15D21EB4) NUnitTest::TTestFactory::Execute()+803 (0x15FF2643) NUnitTest::RunMain(int, char**)+3021 (0x160041DD) ??+0 (0x7FFA40D8ED90) __libc_start_main+128 (0x7FFA40D8EE40) _start+41 (0x14D4D029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [FAIL] Test command err: 2025-05-29T15:24:09.330778Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:09.330809Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:09.330821Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000f97/r3tmp/tmpazyUms/pdisk_1.dat 2025-05-29T15:24:09.426952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.440160Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:09.440877Z node 1 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1191: Update config MemoryLimit: 33554432 2025-05-29T15:24:09.443932Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532248968121 != 1748532248968125 2025-05-29T15:24:09.485878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:09.485907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:09.496426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:09.569487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.585143Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:09.585198Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:09.592703Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:09.592745Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:09.592913Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:09.592921Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:09.592925Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:09.592972Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:09.592986Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:09.592995Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:09.603229Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:09.606321Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 
72075186224037888 2025-05-29T15:24:09.606373Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:09.606389Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:09.606394Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:09.606398Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:09.606402Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:09.606503Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:09.606518Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:09.606529Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:09.606533Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:09.606540Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:09.606543Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:09.606613Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:09.606628Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:09.606656Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:09.606667Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:09.606904Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:09.617162Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:09.617201Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:09.760200Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:697:2587], serverId# [1:699:2589], sessionId# [0:0:0] 2025-05-29T15:24:09.760907Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:24:09.760924Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2025-05-29T15:24:09.760971Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:09.760978Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:24:09.760987Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:24:09.761049Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:24:09.761081Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:09.761165Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:09.761176Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:24:09.761488Z node 1 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:09.761599Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:09.761875Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:09.761882Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:09.762048Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:09.762061Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:09.762220Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:09.762226Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:09.762232Z node 1 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:09.762245Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:411:2405], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:09.762252Z node 1 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:09.762260Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:09.762988Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:09.763196Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 
2025-05-29T15:24:09.763207Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:09.763322Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:09.766486Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:09.766526Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-05-29T15:24:09.766538Z node 1 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:234: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-05-29T15:24:09.766543Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-05-29T15:24:09.766661Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Dis ... iveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:12.720434Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:12.720444Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:24:12.720535Z node 3 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:12.720609Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:12.720968Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:12.720977Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.721162Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:12.721177Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:12.721394Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:12.721402Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:12.721407Z node 3 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:12.721423Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:413:2406], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:12.721431Z node 3 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:12.721438Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.721767Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:12.722025Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:12.722040Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:12.722137Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:12.725160Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:12.725197Z node 3 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-05-29T15:24:12.725207Z node 3 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:234: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-05-29T15:24:12.725211Z node 3 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-05-29T15:24:12.725424Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:12.746515Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:12.908431Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:24:12.908458Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.908676Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:12.908685Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:24:12.908693Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-05-29T15:24:12.908751Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-05-29T15:24:12.908784Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:12.908823Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:12.908958Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:12.943226Z node 3 :TX_DATASHARD DEBUG: 
datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-05-29T15:24:12.943261Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:12.943270Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:12.943283Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.943309Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [3:413:2406], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:12.943328Z node 3 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-05-29T15:24:12.943342Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.944026Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-05-29T15:24:12.944042Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:12.947473Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:876:2714], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.947497Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:886:2719], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.947507Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.948489Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:24:12.949592Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:13.108784Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:13.109607Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:890:2722], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:24:13.135844Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:946:2759] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:13.144653Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:955:2767], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:13.145269Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=MzkzMWU4ZGMtMjUyMjg5NDctYmI5YmQwNGEtMTZkOGYxMTY=, ActorId: [3:874:2712], ActorState: ExecuteState, TraceId: 01jwea9m8k4ekd6f9za1a9trg4, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13AF911C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13CAC049) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x26340864) ??+0 (0x139D4D45) NKikimr::NTestSuiteCdcStreamChangeCollector::TTestCaseUpsertModifyDelete::Execute_(NUnitTest::TTestContext&)+2763 (0x139CEA1B) NKikimr::NTestSuiteCdcStreamChangeCollector::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139E4357) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13CADEFE) NKikimr::NTestSuiteCdcStreamChangeCollector::TCurrentTest::Execute()+484 (0x139E3CF4) NUnitTest::TTestFactory::Execute()+803 (0x13CAE673) NUnitTest::RunMain(int, char**)+3021 (0x13CC021D) ??+0 (0x7FAC7103AD90) __libc_start_main+128 (0x7FAC7103AE40) _start+41 (0x12A09029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2025-05-29T15:24:11.624759Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:11.624808Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:11.624825Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e8b/r3tmp/tmp6dBkiu/pdisk_1.dat 2025-05-29T15:24:11.739884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:11.754058Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:11.757912Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532251216621 != 1748532251216625 2025-05-29T15:24:11.799891Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:11.799922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:11.811235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:11.885961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:11.903882Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:11.903973Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:11.913596Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:11.913656Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:11.913866Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:11.913877Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:11.913885Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:11.913952Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:11.913973Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:11.913989Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:11.924341Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:11.929494Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:11.929617Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast 
registration request in state WaitScheme: missing processing params 2025-05-29T15:24:11.929648Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:11.929654Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:11.929659Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:11.929666Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:11.929852Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:11.929882Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:11.929898Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:11.929905Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:11.929915Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:11.929920Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:11.930042Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:11.930073Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:11.930134Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:11.930156Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:11.930534Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:11.940875Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:11.940926Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:12.087836Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:697:2587], serverId# [1:699:2589], sessionId# [0:0:0] 2025-05-29T15:24:12.088791Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:24:12.088814Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.088867Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 
72075186224037888 2025-05-29T15:24:12.088876Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:24:12.088885Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:24:12.088950Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:24:12.088981Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:12.089098Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:12.089110Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:24:12.089500Z node 1 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:12.089599Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:12.089932Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:12.089941Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.090114Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:12.090125Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:12.090329Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:12.090340Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:12.090346Z node 1 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:12.090361Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:411:2405], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:12.090371Z node 1 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:12.090383Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:12.091257Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:12.091521Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:12.091533Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 
72075186224037888 2025-05-29T15:24:12.091672Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:12.095137Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.095161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.095232Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.096122Z nod ... shard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:13.283460Z node 2 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:13.283467Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:13.283602Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:13.283633Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:13.283750Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:13.283760Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:13.283770Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:13.283777Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:13.283790Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [2:662:2567], serverId# [2:671:2572], sessionId# [0:0:0] 2025-05-29T15:24:13.283824Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:13.283888Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:13.283912Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:13.284405Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:13.294815Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:13.294869Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:13.438514Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [2:697:2587], serverId# [2:699:2589], sessionId# [0:0:0] 2025-05-29T15:24:13.438651Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:24:13.438659Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:13.438887Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:13.438898Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 
immediate 0 planned 1 2025-05-29T15:24:13.438907Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:24:13.438973Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:24:13.439003Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:13.439138Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:13.439150Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:24:13.439224Z node 2 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:13.439323Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:13.439544Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:13.439549Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:13.439689Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:13.439698Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:13.439777Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:13.439783Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:13.439787Z node 2 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:13.439804Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:13.439814Z node 2 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:13.439826Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:13.440043Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:13.440361Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:13.440375Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:13.440428Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 
coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:13.443448Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [2:733:2615], serverId# [2:734:2616], sessionId# [0:0:0] 2025-05-29T15:24:13.443509Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:26: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-05-29T15:24:13.464012Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:80: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-05-29T15:24:13.464046Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:13.464126Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037888, clientId# [2:733:2615], serverId# [2:734:2616], sessionId# [0:0:0] 2025-05-29T15:24:13.465848Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-05-29T15:24:13.465891Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:26: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-05-29T15:24:13.465941Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:80: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-05-29T15:24:13.465947Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:13.465972Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-05-29T15:24:13.467204Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [2:744:2626], serverId# [2:745:2627], sessionId# [0:0:0] 2025-05-29T15:24:13.467235Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:26: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-05-29T15:24:13.467258Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:80: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-05-29T15:24:13.467262Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:13.467283Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037888, clientId# [2:744:2626], serverId# [2:745:2627], sessionId# [0:0:0] 2025-05-29T15:24:13.468356Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [2:749:2631], serverId# [2:750:2632], sessionId# [0:0:0] 2025-05-29T15:24:13.468376Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:26: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-05-29T15:24:13.468395Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:80: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-05-29T15:24:13.468398Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:13.468417Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037888, clientId# [2:749:2631], serverId# [2:750:2632], sessionId# [0:0:0] 2025-05-29T15:24:13.469434Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [2:754:2636], serverId# [2:755:2637], sessionId# 
[0:0:0] 2025-05-29T15:24:13.469457Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:26: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-05-29T15:24:13.469499Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:80: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-05-29T15:24:13.469505Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:13.469543Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037888, clientId# [2:754:2636], serverId# [2:755:2637], sessionId# [0:0:0] 2025-05-29T15:24:13.470644Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [2:759:2641], serverId# [2:760:2642], sessionId# [0:0:0] 2025-05-29T15:24:13.470660Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:26: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-05-29T15:24:13.470682Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:80: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-05-29T15:24:13.470686Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:13.470702Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037888, clientId# [2:759:2641], serverId# [2:760:2642], sessionId# [0:0:0] >> ReadIteratorExternalBlobs::ExtBlobsWithSpecificKeys [FAIL] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc+UseSink [FAIL] >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink [FAIL] >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink >> DataShardReadIterator::ShouldReadRangeChunk100 [FAIL] >> TJaegerTracingConfiguratorTests::DefaultConfig >> KqpPg::InsertNoTargetColumns_Alter-useSink [FAIL] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink >> KqpPg::InsertNoTargetColumns_Serial+useSink >> TConsoleConfigTests::TestAutoOrder [GOOD] >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink [FAIL] >> KqpPg::CopyTableSerialColumns-useSink [FAIL] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [FAIL] >> DataShardReadIteratorBatchMode::SelectingColumns [FAIL] >> DataShardReadIteratorBatchMode::ShouldHandleReadAck >> KqpPg::CreateIndex >> TConsoleConfigTests::TestAutoSplit >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist [GOOD] >> KqpPg::DeleteWithQueryService-useSink [FAIL] >> TConfigsDispatcherTests::TestYamlEndToEnd >> PgCatalog::PgTables [GOOD] |64.7%| [TA] {RESULT} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |64.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [FAIL] Test command err: 2025-05-29T15:24:09.264280Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:09.264308Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:09.264318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000fa7/r3tmp/tmpPhUF2w/pdisk_1.dat 2025-05-29T15:24:09.363057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.376362Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:09.379569Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532248867840 != 1748532248867844 2025-05-29T15:24:09.421192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:09.421228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:09.431697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:09.504465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.521233Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2577] 2025-05-29T15:24:09.521304Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:09.528472Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:09.528533Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:09.528685Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:09.528692Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:09.528697Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:09.528745Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:09.528950Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:09.528960Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:704:2577] in generation 1 2025-05-29T15:24:09.529071Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:679:2579] 2025-05-29T15:24:09.529102Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:09.530148Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 
2025-05-29T15:24:09.530170Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:09.530255Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-05-29T15:24:09.530260Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037889 2025-05-29T15:24:09.530264Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037889 2025-05-29T15:24:09.530290Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:09.530302Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:09.530309Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037889 persisting started state actor id [1:710:2579] in generation 1 2025-05-29T15:24:09.540569Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:09.543848Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:09.543927Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:09.543948Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-05-29T15:24:09.543953Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:09.543956Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:09.543960Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:09.544048Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:09.544056Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037889 2025-05-29T15:24:09.544065Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:09.544073Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-05-29T15:24:09.544079Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037889 2025-05-29T15:24:09.544083Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-05-29T15:24:09.544086Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:09.544205Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:09.544233Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:09.544259Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:09.544266Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 0 2025-05-29T15:24:09.544277Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:09.544282Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:09.544288Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-05-29T15:24:09.544296Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-05-29T15:24:09.544401Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2573], serverId# [1:687:2583], sessionId# [0:0:0] 2025-05-29T15:24:09.544409Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-05-29T15:24:09.544413Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:09.544417Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-05-29T15:24:09.544421Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:24:09.544454Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:09.544504Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:09.544524Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:09.544625Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2574], serverId# [1:695:2589], sessionId# [0:0:0] 2025-05-29T15:24:09.544656Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-29T15:24:09.544681Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-05-29T15:24:09.544693Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-05-29T15:24:09.545002Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:09.545019Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:09.555336Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:09.555381Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:09.555537Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-29T15:24:09.555550Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:467: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:09.698592Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037889, clientId# [1:731:2610], serverId# [1:734:2613], sessionId# [0:0:0] 2025-05-29T15:24:09.698632Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:732:2611], serverId# [1:735:2614], sessionId# [0:0:0] 2025-05-29T15:24:09.699319Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-05-29T15:24:09.699335Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:09.699420Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 100 ... ashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:13.556658Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:24:13.556661Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:24:13.556685Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:24:13.556695Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:13.556703Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-05-29T15:24:13.556712Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:24:13.556822Z node 4 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:13.556903Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:13.557243Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:13.557256Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-05-29T15:24:13.557306Z node 4 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:13.557352Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:13.557468Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got 
TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-05-29T15:24:13.557472Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:13.557650Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-05-29T15:24:13.557660Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:24:13.557799Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:13.557807Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:13.557922Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:13.557927Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:13.557971Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:24:13.557978Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037889 2025-05-29T15:24:13.557983Z node 4 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037889 2025-05-29T15:24:13.557998Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:13.558005Z node 4 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:13.558013Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:13.558337Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:13.558345Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:13.558348Z node 4 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:13.558355Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:13.558360Z node 4 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:13.558366Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:13.558416Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:13.558425Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:13.558820Z node 4 
:TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:13.558861Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:13.558868Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-05-29T15:24:13.558874Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-05-29T15:24:13.558948Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:13.558952Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:13.561473Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:779:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:13.561495Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:788:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:13.561506Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:13.562284Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:13.563192Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:13.563212Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:13.708818Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:13.708864Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:13.709390Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:793:2658], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:13.744390Z node 4 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [4:864:2698] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:13.763056Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [4:873:2706], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:13.763658Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=4&id=ZmE1MWRlOTctYTg1Yzk5OWMtY2E2YmNkNTYtOTQ3OTQ3N2U=, ActorId: [4:777:2648], ActorState: ExecuteState, TraceId: 01jwea9mvs4j8edterdnzaytzg, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13AF911C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13CAC049) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x26340864) ??+0 (0x139A990C) NKikimr::NTestSuiteAsyncIndexChangeCollector::TTestCaseImplicitlyUpdateCoveredColumn::Execute_(NUnitTest::TTestContext&)+5193 (0x139C1D59) NKikimr::NTestSuiteAsyncIndexChangeCollector::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139E09F7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13CADEFE) NKikimr::NTestSuiteAsyncIndexChangeCollector::TCurrentTest::Execute()+481 (0x139E0231) NUnitTest::TTestFactory::Execute()+803 (0x13CAE673) NUnitTest::RunMain(int, char**)+3021 (0x13CC021D) ??+0 (0x7FBC8153AD90) __libc_start_main+128 (0x7FBC8153AE40) _start+41 (0x12A09029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [FAIL] Test command err: 2025-05-29T15:24:09.291369Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:09.291407Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:09.291420Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000fa1/r3tmp/tmpkVVYtQ/pdisk_1.dat 2025-05-29T15:24:09.400170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.413062Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:09.416200Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532248836586 != 1748532248836590 2025-05-29T15:24:09.457953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:09.458001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:09.468512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:09.541867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.558524Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2577] 2025-05-29T15:24:09.558596Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:09.565966Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:09.566020Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:09.566155Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:09.566162Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:09.566166Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:09.566211Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:09.566385Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:09.566395Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:704:2577] in generation 1 2025-05-29T15:24:09.566491Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:679:2579] 2025-05-29T15:24:09.566519Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:09.567418Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 
2025-05-29T15:24:09.567440Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:09.567544Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-05-29T15:24:09.567554Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037889 2025-05-29T15:24:09.567560Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037889 2025-05-29T15:24:09.567593Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:09.567611Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:09.567621Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037889 persisting started state actor id [1:710:2579] in generation 1 2025-05-29T15:24:09.577917Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:09.582164Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:09.582233Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:09.582254Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-05-29T15:24:09.582257Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:09.582261Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:09.582264Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:09.582334Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:09.582339Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037889 2025-05-29T15:24:09.582345Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:09.582354Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-05-29T15:24:09.582357Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037889 2025-05-29T15:24:09.582360Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-05-29T15:24:09.582363Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:09.582458Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:09.582478Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:09.582495Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:09.582501Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 0 2025-05-29T15:24:09.582508Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:09.582511Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:09.582515Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-05-29T15:24:09.582520Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-05-29T15:24:09.582602Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2573], serverId# [1:687:2583], sessionId# [0:0:0] 2025-05-29T15:24:09.582606Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-05-29T15:24:09.582609Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:09.582611Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-05-29T15:24:09.582614Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:24:09.582644Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:09.582692Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:09.582705Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:09.582811Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2574], serverId# [1:695:2589], sessionId# [0:0:0] 2025-05-29T15:24:09.582837Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-29T15:24:09.582855Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-05-29T15:24:09.582866Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-05-29T15:24:09.583086Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:09.583096Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:09.593387Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:09.593420Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:09.593543Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-29T15:24:09.593549Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:467: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:09.736478Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037889, clientId# [1:731:2610], serverId# [1:734:2613], sessionId# [0:0:0] 2025-05-29T15:24:09.736515Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:732:2611], serverId# [1:735:2614], sessionId# [0:0:0] 2025-05-29T15:24:09.737125Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-05-29T15:24:09.737136Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:09.737187Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 100 ... cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:13.746826Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:24:13.746831Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:24:13.746860Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:24:13.746876Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:13.746890Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-05-29T15:24:13.746902Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:24:13.747023Z node 4 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:13.747110Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:13.747542Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:13.747560Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-05-29T15:24:13.747630Z node 4 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:13.747695Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:13.747873Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 
72075186224037889 time 0 2025-05-29T15:24:13.747880Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:13.748116Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-05-29T15:24:13.748130Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:24:13.748279Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:13.748288Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:13.748437Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:13.748444Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:13.748505Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:24:13.748513Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037889 2025-05-29T15:24:13.748519Z node 4 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037889 2025-05-29T15:24:13.748535Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:13.748545Z node 4 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:13.748556Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:13.748881Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:13.748891Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:13.748896Z node 4 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:13.748906Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:13.748913Z node 4 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:13.748922Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:13.748991Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:13.749003Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:13.749535Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got 
TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:13.749597Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:13.749605Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-05-29T15:24:13.749613Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-05-29T15:24:13.749722Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:13.749729Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:13.753343Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:779:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:13.753369Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:788:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:13.753378Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:13.754310Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:13.755686Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:13.755712Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:13.924458Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:13.924505Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:13.925018Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:793:2658], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:13.959852Z node 4 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [4:864:2698] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:13.977269Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [4:873:2706], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:13.977819Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=4&id=MTBlMTk0MTYtNmJmYzk1Ny01MWZhZjA4OS0xMGM0OTNjYw==, ActorId: [4:777:2648], ActorState: ExecuteState, TraceId: 01jwea9n1s5hq1wceajv8d5t6y, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13AF911C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13CAC049)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x26340864)
??+0 (0x139A990C)
??+0 (0x139A2D52)
NKikimr::NTestSuiteAsyncIndexChangeCollector::TTestCaseIndexedPrimaryKeyInsertSingleRow::Execute_(NUnitTest::TTestContext&)+856 (0x139B4128)
NKikimr::NTestSuiteAsyncIndexChangeCollector::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139E09F7)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13CADEFE)
NKikimr::NTestSuiteAsyncIndexChangeCollector::TCurrentTest::Execute()+481 (0x139E0231)
NUnitTest::TTestFactory::Execute()+803 (0x13CAE673)
NUnitTest::RunMain(int, char**)+3021 (0x13CC021D)
??+0 (0x7F48CF185D90)
__libc_start_main+128 (0x7F48CF185E40)
_start+41 (0x12A09029)
>> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_NONE [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_NONE [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS
>> TConsoleTests::TestCreateTenantExtSubdomain [GOOD]
>> TConsoleTests::TestCreateSharedTenant
>> KqpPg::DropTableIfExists_GenericQuery [GOOD]
>> KqpPg::EquiJoin+useSink
>> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD]
>> TConsoleConfigTests::TestAutoSplit [GOOD]
>> TConsoleConfigTests::TestValidation
>> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD]
>> TConsoleConfigHelpersTests::TestConfigCourier
>> TJaegerTracingConfiguratorTests::DefaultConfig [GOOD]
>> TJaegerTracingConfiguratorTests::GlobalRules
>> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink [FAIL]
>> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink
>> KqpPg::InsertNoTargetColumns_Serial+useSink [FAIL]
>> KqpPg::CreateIndex [GOOD]
>> KqpPg::CreateNotNullPgColumn
|64.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD]
|64.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/ut/ydb-core-cms-ut
|64.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut
|64.8%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut
>> TConsoleTests::TestGetUnknownTenantStatus
>> KqpPg::TableSelect+useSink [GOOD]
>> KqpPg::TableSelect-useSink
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [FAIL]
Test command err:
2025-05-29T15:24:09.154676Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:09.154715Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:09.154730Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000fad/r3tmp/tmp0vQl1S/pdisk_1.dat 2025-05-29T15:24:09.259694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.272705Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:09.273329Z node 1 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1191: Update config MemoryLimit: 33554432 2025-05-29T15:24:09.275777Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532248816902 != 1748532248816906 2025-05-29T15:24:09.317427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:09.317468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:09.328005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:09.401220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.419086Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:09.419170Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:09.429532Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:09.429581Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:09.429768Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:09.429780Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:09.429788Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:09.429850Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:09.429870Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:09.429883Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:09.440160Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:09.445337Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 
72075186224037888 2025-05-29T15:24:09.445418Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:09.445445Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:09.445451Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:09.445456Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:09.445462Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:09.445629Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:09.445654Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:09.445669Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:09.445676Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:09.445686Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:09.445691Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:09.445801Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:09.445830Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:09.445887Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:09.445906Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:09.446263Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:09.456503Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:09.456542Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:09.599536Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:697:2587], serverId# [1:699:2589], sessionId# [0:0:0] 2025-05-29T15:24:09.600233Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:24:09.600251Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2025-05-29T15:24:09.600293Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:09.600301Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:24:09.600309Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:24:09.600363Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:24:09.600392Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:09.600479Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:09.600490Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:24:09.600815Z node 1 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:09.600890Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:09.601146Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:09.601152Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:09.601337Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:09.601350Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:09.601509Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:09.601531Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:09.601536Z node 1 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:09.601550Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:411:2405], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:09.601558Z node 1 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:09.601568Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:09.602239Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:09.602453Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 
2025-05-29T15:24:09.602464Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:09.602576Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:09.605412Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:09.605445Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-05-29T15:24:09.605454Z node 1 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:234: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-05-29T15:24:09.605457Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-05-29T15:24:09.605558Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Dis ... GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:14.112379Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:14.112396Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:24:14.112516Z node 4 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:14.112613Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:14.113024Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:14.113036Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:14.113190Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:14.113203Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:14.113549Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:14.113611Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:14.113622Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:14.113629Z node 4 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:14.113649Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:14.113661Z node 4 :TX_DATASHARD INFO: 
datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:14.113672Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:14.114128Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:14.114254Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:14.114265Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:14.127227Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:14.127286Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-05-29T15:24:14.127301Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:234: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-05-29T15:24:14.127308Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-05-29T15:24:14.127668Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:14.154528Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:14.325873Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:24:14.325903Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:14.326155Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:14.326169Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:24:14.326180Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-05-29T15:24:14.326240Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-05-29T15:24:14.326278Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:14.326340Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:14.326521Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:14.367078Z node 4 
:TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-05-29T15:24:14.367118Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:14.367128Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:14.367141Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:14.367167Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:14.367185Z node 4 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-05-29T15:24:14.367203Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:14.367778Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-05-29T15:24:14.367795Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:14.371511Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:876:2714], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:14.371543Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:886:2719], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:14.371555Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:14.372616Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:24:14.373976Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:14.529589Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:14.530114Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:890:2722], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:24:14.552253Z node 4 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [4:946:2759] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:14.561133Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [4:956:2768], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:14.561662Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=4&id=YjNhZTM2OGItY2FmOWYxMWYtMzZhOWU3NTYtOGJjNGE3ODI=, ActorId: [4:874:2712], ActorState: ExecuteState, TraceId: 01jwea9nn3625kkdp1axn8hsfq, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13AF911C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13CAC049)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x26340864)
??+0 (0x139D4D45)
NKikimr::NTestSuiteCdcStreamChangeCollector::TTestCaseNewImage::Execute_(NUnitTest::TTestContext&)+4166 (0x139D9696)
NKikimr::NTestSuiteCdcStreamChangeCollector::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139E4357)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13CADEFE)
NKikimr::NTestSuiteCdcStreamChangeCollector::TCurrentTest::Execute()+484 (0x139E3CF4)
NUnitTest::TTestFactory::Execute()+803 (0x13CAE673)
NUnitTest::RunMain(int, char**)+3021 (0x13CC021D)
??+0 (0x7F33986D5D90)
__libc_start_main+128 (0x7F33986D5E40)
_start+41 (0x12A09029)
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [FAIL]
Test command err:
2025-05-29T15:24:09.940458Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:09.940495Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:09.940511Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000f7b/r3tmp/tmpR8IXTR/pdisk_1.dat 2025-05-29T15:24:10.055900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:10.069919Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:10.074057Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532249636646 != 1748532249636650 2025-05-29T15:24:10.115632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:10.115663Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:10.126176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:10.199362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:10.215789Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2577] 2025-05-29T15:24:10.215857Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:10.222110Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:10.222158Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:10.222282Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:10.222288Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:10.222293Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:10.222335Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:10.222495Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:10.222505Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:704:2577] in generation 1 2025-05-29T15:24:10.222585Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:679:2579] 2025-05-29T15:24:10.222615Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:10.223546Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 
2025-05-29T15:24:10.223573Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:10.223655Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-05-29T15:24:10.223660Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037889 2025-05-29T15:24:10.223664Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037889 2025-05-29T15:24:10.223686Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:10.223698Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:10.223704Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037889 persisting started state actor id [1:710:2579] in generation 1 2025-05-29T15:24:10.233975Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:10.237468Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:10.237558Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:10.237578Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-05-29T15:24:10.237583Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:10.237586Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:10.237591Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:10.237670Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:10.237679Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037889 2025-05-29T15:24:10.237689Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:10.237699Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-05-29T15:24:10.237701Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037889 2025-05-29T15:24:10.237704Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-05-29T15:24:10.237706Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:10.237793Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:10.237822Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:10.237846Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:10.237853Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 0 2025-05-29T15:24:10.237859Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:10.237863Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:10.237867Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-05-29T15:24:10.237872Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-05-29T15:24:10.237978Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2573], serverId# [1:687:2583], sessionId# [0:0:0] 2025-05-29T15:24:10.237986Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-05-29T15:24:10.237990Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:10.237995Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-05-29T15:24:10.238000Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:24:10.238032Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:10.238089Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:10.238108Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:10.238197Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2574], serverId# [1:695:2589], sessionId# [0:0:0] 2025-05-29T15:24:10.238231Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-29T15:24:10.238257Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-05-29T15:24:10.238268Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-05-29T15:24:10.238611Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:10.238632Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:10.248988Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:10.249031Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:10.249177Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-29T15:24:10.249186Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:467: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:10.392383Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037889, clientId# [1:731:2610], serverId# [1:734:2613], sessionId# [0:0:0] 2025-05-29T15:24:10.392434Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:732:2611], serverId# [1:735:2614], sessionId# [0:0:0] 2025-05-29T15:24:10.393339Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-05-29T15:24:10.393358Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:10.393454Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 100 ... UG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:24:14.427605Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:14.427617Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:14.427629Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:24:14.427637Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037889 2025-05-29T15:24:14.427643Z node 4 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037889 2025-05-29T15:24:14.427662Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:14.427674Z node 4 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:14.427686Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:14.427765Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:14.427772Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:14.427889Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2025-05-29T15:24:14.427896Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-05-29T15:24:14.427996Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2025-05-29T15:24:14.428002Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-05-29T15:24:14.428235Z node 4 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:14.428243Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:14.428247Z node 4 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:14.428257Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:14.428263Z node 4 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:14.428271Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:14.428478Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:14.428491Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:14.428500Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-05-29T15:24:14.428687Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-05-29T15:24:14.428694Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037890 2025-05-29T15:24:14.428698Z node 4 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037890 2025-05-29T15:24:14.428707Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:14.428712Z node 4 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:14.428720Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-05-29T15:24:14.429257Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-05-29T15:24:14.429271Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-05-29T15:24:14.429330Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:14.429355Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:14.429423Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:14.429443Z node 4 :TX_DATASHARD DEBUG: 
datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:14.429448Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:14.429570Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-05-29T15:24:14.429577Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-05-29T15:24:14.432836Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:828:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:14.432858Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:838:2692], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:14.432867Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:14.433724Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:14.434666Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:14.434685Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:14.434696Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-05-29T15:24:14.588375Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:14.588417Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:14.588433Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-05-29T15:24:14.588894Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:842:2695], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:14.622533Z node 4 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [4:914:2736] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:14.637206Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [4:924:2745], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:14.637764Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=4&id=YTdmMjY5ZGQtZmQ4ZTExZDUtMzcwNDgzNjUtZGVjZDY1M2M=, ActorId: [4:826:2685], ActorState: ExecuteState, TraceId: 01jwea9nq0f0febqbffmrrr309, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13AF911C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13CAC049)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x26340864)
??+0 (0x139A990C)
NKikimr::NTestSuiteAsyncIndexChangeCollector::TTestCaseCoverIndexedColumn::Execute_(NUnitTest::TTestContext&)+5873 (0x139BF7E1)
NKikimr::NTestSuiteAsyncIndexChangeCollector::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139E09F7)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13CADEFE)
NKikimr::NTestSuiteAsyncIndexChangeCollector::TCurrentTest::Execute()+481 (0x139E0231)
NUnitTest::TTestFactory::Execute()+803 (0x13CAE673)
NUnitTest::RunMain(int, char**)+3021 (0x13CC021D)
??+0 (0x7F4FD6058D90)
__libc_start_main+128 (0x7F4FD6058E40)
_start+41 (0x12A09029)
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [FAIL]
Test command err: 2025-05-29T15:24:10.173595Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:10.173630Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:10.173642Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000f39/r3tmp/tmpVGUjoT/pdisk_1.dat 2025-05-29T15:24:10.279307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:10.292689Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:10.295832Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532249841245 != 1748532249841249 2025-05-29T15:24:10.337743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:10.337781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:10.348366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:10.421124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:10.437505Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2577] 2025-05-29T15:24:10.437590Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:10.445305Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:10.445359Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:10.445505Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:10.445528Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:10.445533Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:10.445581Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:10.445789Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:10.445800Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:704:2577] in generation 1 2025-05-29T15:24:10.445895Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:679:2579] 2025-05-29T15:24:10.445927Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:10.446884Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 
2025-05-29T15:24:10.446904Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:10.446983Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-05-29T15:24:10.446988Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037889 2025-05-29T15:24:10.446992Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037889 2025-05-29T15:24:10.447016Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:10.447026Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:10.447033Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037889 persisting started state actor id [1:710:2579] in generation 1 2025-05-29T15:24:10.457364Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:10.462457Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:10.462544Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:10.462571Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-05-29T15:24:10.462577Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:10.462582Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:10.462588Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:10.462687Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:10.462697Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037889 2025-05-29T15:24:10.462706Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:10.462718Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-05-29T15:24:10.462722Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037889 2025-05-29T15:24:10.462726Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-05-29T15:24:10.462730Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:10.462863Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:10.462890Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:10.462916Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:10.462923Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 0 2025-05-29T15:24:10.462933Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:10.462938Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:10.462944Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-05-29T15:24:10.462953Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-05-29T15:24:10.463056Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2573], serverId# [1:687:2583], sessionId# [0:0:0] 2025-05-29T15:24:10.463065Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-05-29T15:24:10.463069Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:10.463074Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-05-29T15:24:10.463079Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:24:10.463112Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:10.463163Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:10.463182Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:10.463283Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2574], serverId# [1:695:2589], sessionId# [0:0:0] 2025-05-29T15:24:10.463321Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-29T15:24:10.463345Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-05-29T15:24:10.463356Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-05-29T15:24:10.463658Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:10.463673Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:10.474002Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:10.474041Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:10.474172Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-29T15:24:10.474179Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:467: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:10.617332Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037889, clientId# [1:731:2610], serverId# [1:734:2613], sessionId# [0:0:0] 2025-05-29T15:24:10.617370Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:732:2611], serverId# [1:735:2614], sessionId# [0:0:0] 2025-05-29T15:24:10.618124Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-05-29T15:24:10.618139Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:10.618225Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 100 ... progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:24:14.764425Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:14.764435Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:14.764443Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:24:14.764449Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037889 2025-05-29T15:24:14.764454Z node 4 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037889 2025-05-29T15:24:14.764470Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:14.764483Z node 4 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:14.764494Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:14.764579Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:14.764589Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:14.764697Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2025-05-29T15:24:14.764705Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-05-29T15:24:14.764790Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2025-05-29T15:24:14.764794Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-05-29T15:24:14.765161Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:14.765173Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:14.765178Z node 4 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:14.765188Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:14.765195Z node 4 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:14.765204Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:14.765403Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:14.765415Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:14.765422Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-05-29T15:24:14.765548Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-05-29T15:24:14.765553Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037890 2025-05-29T15:24:14.765556Z node 4 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037890 2025-05-29T15:24:14.765563Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:14.765567Z node 4 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:14.765573Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-05-29T15:24:14.766124Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-05-29T15:24:14.766142Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-05-29T15:24:14.766213Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:14.766243Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:14.766302Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:14.766318Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle 
TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:14.766321Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:14.766385Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-05-29T15:24:14.766389Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-05-29T15:24:14.769487Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:828:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:14.769507Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:838:2692], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:14.769529Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:14.770262Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:14.771209Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:14.771236Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:14.771249Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-05-29T15:24:14.932547Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:14.932596Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:14.932610Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-05-29T15:24:14.933081Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:842:2695], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:14.967555Z node 4 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [4:914:2736] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:14.978308Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [4:924:2745], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:14.978775Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=4&id=MWMxNTY2ZTAtZjVhMzM0YzMtZTM0ZTYzMi1kNTUxZWU3ZA==, ActorId: [4:826:2685], ActorState: ExecuteState, TraceId: 01jwea9p1hdqq4wmybgbkzb9b4, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13AF911C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13CAC049)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x26340864)
??+0 (0x139A990C)
NKikimr::NTestSuiteAsyncIndexChangeCollector::TTestCaseMultiIndexedTableReplaceSingleRow::Execute_(NUnitTest::TTestContext&)+3651 (0x139B1B03)
NKikimr::NTestSuiteAsyncIndexChangeCollector::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139E09F7)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13CADEFE)
NKikimr::NTestSuiteAsyncIndexChangeCollector::TCurrentTest::Execute()+481 (0x139E0231)
NUnitTest::TTestFactory::Execute()+803 (0x13CAE673)
NUnitTest::RunMain(int, char**)+3021 (0x13CC021D)
??+0 (0x7FC826C84D90)
__libc_start_main+128 (0x7FC826C84E40)
_start+41 (0x12A09029)
>> TModificationsValidatorTests::TestIsValidationRequired_NONE [GOOD]
>> TModificationsValidatorTests::TestIsValidationRequired_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestIsValidationRequired_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIsValidationRequired_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_NONE [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_NONE [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_NONE [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS_AND_NODE_TYPES [GOOD]
>> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer
>> TConsoleConfigHelpersTests::TestConfigCourier [GOOD]
>> TConsoleConfigHelpersTests::TestConfigSubscriber
>> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo}
ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 22322, MsgBus: 63447 2025-05-29T15:24:07.644958Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888818515339613:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:07.644985Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025d5/r3tmp/tmpmKMvUS/pdisk_1.dat 2025-05-29T15:24:07.701106Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22322, node 1 2025-05-29T15:24:07.718172Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:07.718185Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:07.718187Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:07.718228Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63447 2025-05-29T15:24:07.746959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:07.746987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:07.748166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63447 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:07.780527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:07.989024Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888818515340234:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:07.989047Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:07.998442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:24:08.062337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888822810307636:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.062358Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.065298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:24:08.072476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888822810307714:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.072490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.072509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888822810307719:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.072984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-05-29T15:24:08.078554Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888822810307721:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-05-29T15:24:08.171720Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888822810307772:2431] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:08.192316Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888822810307788:2359], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:08.192433Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWI4MjhjLTYzM2IxNjMwLTVhYTI4ZTBlLTUyMGFjMzA1, ActorId: [1:7509888822810307712:2350], ActorState: ExecuteState, TraceId: 01jwea9fg7968s8xeqhr4gap29, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4127, virtual void NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseJoinWithQueryService::Execute_(NUnitTest::TTestContext &) [StreamLookup = true]: (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8
2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4127: Execute_ @ 0x15E0EF6C
3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686
4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D
5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0
6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2
7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C
8. ??:0: ?? @ 0x7FA00C332D8F
9. ??:0: ?? @ 0x7FA00C332E3F
10. ??:0: ?? @ 0x14D5E028
Trying to start YDB, gRPC: 22694, MsgBus: 23685
2025-05-29T15:24:09.979054Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888826212479268:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:09.979080Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025d5/r3tmp/tmpwyQtsc/pdisk_1.dat 2025-05-29T15:24:09.989981Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22694, node 2 2025-05-29T15:24:09.999998Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:10.000007Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:10.000008Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:10.000044Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23685 TClient is connected to server localhost:23685 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-05-29T15:24:10.079954Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:10.079988Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:10.081069Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:10.081853Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... ... :24:16.090084Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:16.090086Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:16.090147Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12287 2025-05-29T15:24:16.133413Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:16.133449Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:16.135004Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12287 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:16.198051Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:16.202179Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:24:16.508093Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7509888858081477230:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.508120Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.508344Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7509888858081477265:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.509094Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:16.511672Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-05-29T15:24:16.511749Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7509888858081477267:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-05-29T15:24:16.571840Z node 10 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [10:7509888858081477318:2326] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-05-29T15:24:16.575622Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [10:7509888858081477327:2335], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-05-29T15:24:16.576308Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=10&id=ZTQxODY4MTQtNmFkN2I2NWYtY2FjMTQ4NmUtODBhMTlhNmU=, ActorId: [10:7509888858081477226:2324], ActorState: ExecuteState, TraceId: 01jwea9qekcnheqyzn8r9scfvf, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 7300, MsgBus: 6792 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025d5/r3tmp/tmpAoUpK7/pdisk_1.dat 2025-05-29T15:24:16.778970Z node 11 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:16.791893Z node 11 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7300, node 11 2025-05-29T15:24:16.822974Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:16.822992Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:16.822994Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:16.823054Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6792 2025-05-29T15:24:16.872542Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:16.872575Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:16.875957Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6792 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:16.907861Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:24:17.176687Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7509888862693187773:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.176720Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7509888862693187788:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.176732Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.177543Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:17.179416Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7509888862693187790:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:17.242426Z node 11 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [11:7509888862693187841:2326] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:17.245534Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [11:7509888862693187858:2335], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-05-29T15:24:17.245615Z node 11 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=11&id=OTI0ZmVkZTUtZTRkMTE0OTQtNjFiODYxZmItMTAxMDIxNA==, ActorId: [11:7509888862693187759:2326], ActorState: ExecuteState, TraceId: 01jwea9r4rbavjt1cxy9sb5449, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text"
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::PgUpdateCompoundKey-useSink [FAIL]
Test command err: Trying to start YDB, gRPC: 16275, MsgBus: 31842 2025-05-29T15:24:07.639785Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888817849790727:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:07.639843Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026d1/r3tmp/tmpJlhg3y/pdisk_1.dat 2025-05-29T15:24:07.666851Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888817849790694:2079] 1748532247639661 != 1748532247639664 2025-05-29T15:24:07.669510Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16275, node 1 2025-05-29T15:24:07.686973Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:07.686994Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:07.686997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:07.687050Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31842 TClient is connected to server localhost:31842 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:24:07.741373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:07.741401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:07.742474Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-05-29T15:24:07.749726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 16 2025-05-29T15:24:08.063086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:24:08.124176Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '0'::int2, ARRAY ['false'::bool, 'false'::bool] ); 2025-05-29T15:24:08.126347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888822144758758:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.126354Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888822144758747:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.126370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.126940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:24:08.128339Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888822144758761:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:24:08.181911Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888822144758812:2383] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:08.194004Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888822144758828:2346], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:08.194094Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTJmMDk5N2YtYmI1MjQzNzgtNGM1NjUyY2MtNjhmZWYwYTM=, ActorId: [1:7509888822144758744:2336], ActorState: ExecuteState, TraceId: 01jwea9fhxa18ba42jr7dehs5y, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:115, void NKikimr::NKqp::ExecutePgArrayInsert(NKikimr::NKqp::TKikimrRunner &, const TString &, const TPgTypeTestSpec &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:115: ExecutePgArrayInsert @ 0x15D90920 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:1194: operator() @ 0x15D8EBC9 4. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:1215: operator() @ 0x15D8EBC9 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:1220: Execute_ @ 0x15D8EBC9 6. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 7. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 8. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 9. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 11. ??:0: ?? @ 0x7F8295F86D8F 12. ??:0: ?? @ 0x7F8295F86E3F 13. ??:0: ?? @ 0x14D5E028 Trying to start YDB, gRPC: 11732, MsgBus: 62014 2025-05-29T15:24:10.177369Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888830034490240:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:10.177392Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026d1/r3tmp/tmpbx1Rxa/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11732, node 2 2025-05-29T15:24:10.191429Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:10.202909Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:10.202923Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:10.202925Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:10.202968Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62014 TClient is connected to server localhost:62014 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
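[annotation] The ExecutePgArrayInsert assertion above (node 1) fails while compiling the INSERT echoed in the log. The table DDL is not shown; assuming Pg1000_b follows the suite's oid-based naming (pg type oid 1000 is _bool, the array type of bool, oid 16), a self-contained repro would be roughly:

    --!syntax_pg
    CREATE TABLE Pg1000_b (
        key int2 PRIMARY KEY,
        value _bool  -- bool[] column; the table name presumably encodes oid 1000 = _bool
    );
    INSERT INTO Pg1000_b (key, value)
    VALUES ('0'::int2, ARRAY['false'::bool, 'false'::bool]);

Compilation aborts with the internal "yql/essentials/ast/yql_expr.h:1874: index out of range" instead of returning a regular diagnostic, which is why the test observes INTERNAL_ERROR rather than a clean failure status.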
2025-05-29T15:24:10.279913Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:10.279946Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:10.280245Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:10.280915Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 16 2025-05-29T15:24:10.528171Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:24:10.535145Z node 2 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '0'::int2, ARRAY ['false'::bool, 'false'::bool] ); 2025-05-29T15:24:10.536621Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888830034490972:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:10.536636Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [T ... 715658:0, at schemeshard: 72057594046644480 2025-05-29T15:24:16.529101Z node 9 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7509888859692005270:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.529137Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.529260Z node 9 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7509888859692005275:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.530823Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:24:16.535327Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7509888859692005277:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:24:16.607059Z node 9 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [9:7509888859692005328:2384] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:16.621878Z node 9 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [9:7509888859692005337:2347], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:16.622280Z node 9 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=9&id=OTVmODg1ZTktODc5MmE1MjgtY2RkYTY5OTMtOWEwMThhYjg=, ActorId: [9:7509888859692005268:2338], ActorState: ExecuteState, TraceId: 01jwea9qrf3f89ybcyrw6d10c7, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:3670, virtual void NKikimr::NKqp::NTestSuiteKqpPg::TTestCasePgUpdateCompoundKey::Execute_(NUnitTest::TTestContext &) [useSink = true]: (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:3670: Execute_ @ 0x15E03C4F 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 8. ??:0: ?? @ 0x7F8295F86D8F 9. ??:0: ?? @ 0x7F8295F86E3F 10. ??:0: ?? @ 0x14D5E028 Trying to start YDB, gRPC: 19676, MsgBus: 27121 2025-05-29T15:24:16.970312Z node 10 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7509888857300752730:2140];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:16.971417Z node 10 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026d1/r3tmp/tmpiVTCGG/pdisk_1.dat 2025-05-29T15:24:16.997749Z node 10 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19676, node 10 2025-05-29T15:24:17.015533Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:17.015549Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:17.015551Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:17.015614Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27121 TClient is connected to server localhost:27121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
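[annotation] The unittest framework prints a compact character diff between the two sides of the failed comparison: segments in parentheses alternate left|right, text outside them is shared. "(INT|SUCC)E(RNAL_ERROR|SS)" therefore decodes as:

    actual:   INT  E RNAL_ERROR  -> INTERNAL_ERROR
    expected: SUCC E SS          -> SUCCESS

The PgUpdateCompoundKey statement itself is not echoed here; going by the test name, it presumably updates a pg table with a composite primary key, along the lines of (a sketch under that assumption, not the actual test query):

    --!syntax_pg
    CREATE TABLE t (
        key1 int4,
        key2 text,
        value int4,
        PRIMARY KEY (key1, key2)  -- compound key, as the test name suggests
    );
    UPDATE t SET value = 1 WHERE key1 = 0 AND key2 = 'a';

As with the other failures in this run, compilation dies at yql_expr.h:1874 before the statement executes.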
2025-05-29T15:24:17.076516Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:17.076551Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:17.077003Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:17.077853Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:24:17.078999Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:17.373464Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7509888861595720573:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.373506Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.379559Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:24:17.451581Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7509888861595720679:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.451608Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.451720Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7509888861595720684:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.452498Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:24:17.455307Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7509888861595720686:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:24:17.523550Z node 10 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [10:7509888861595720737:2382] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:17.559454Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [10:7509888861595720746:2347], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:17.559587Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=10&id=ZTEzOWNhMWUtNDNmMTZmZDctNTFjZWEwMWYtMWFiMzZiYWM=, ActorId: [10:7509888861595720677:2338], ActorState: ExecuteState, TraceId: 01jwea9rna6p3gy0zp8a5258nb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:3670, virtual void NKikimr::NKqp::NTestSuiteKqpPg::TTestCasePgUpdateCompoundKey::Execute_(NUnitTest::TTestContext &) [useSink = false]: (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:3670: Execute_ @ 0x15E091AF 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 8. ??:0: ?? @ 0x7F8295F86D8F 9. ??:0: ?? @ 0x7F8295F86E3F 10. ??:0: ?? @ 0x14D5E028 >> TBSV::CleanupDroppedVolumesOnRestart >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling >> TConsoleConfigTests::TestValidation [GOOD] >> TConsoleConfigTests::TestCheckConfigUpdates ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [FAIL] Test command err: 2025-05-29T15:24:10.131108Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:10.131142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:10.131153Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000f66/r3tmp/tmpF92gfQ/pdisk_1.dat 2025-05-29T15:24:10.233983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:10.247706Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:10.251925Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532249765982 != 1748532249765986 2025-05-29T15:24:10.293877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:10.293915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:10.304431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:10.377579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:10.394599Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2577] 2025-05-29T15:24:10.394674Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:10.401967Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:10.402021Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:10.402155Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:10.402162Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:10.402167Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:10.402213Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:10.402443Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:10.402458Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:704:2577] in generation 1 2025-05-29T15:24:10.402588Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:679:2579] 2025-05-29T15:24:10.402628Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:10.404126Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 
2025-05-29T15:24:10.404155Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:10.404291Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-05-29T15:24:10.404300Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037889 2025-05-29T15:24:10.404308Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037889 2025-05-29T15:24:10.404351Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:10.404369Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:10.404381Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037889 persisting started state actor id [1:710:2579] in generation 1 2025-05-29T15:24:10.414655Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:10.417665Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:10.417731Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:10.417750Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-05-29T15:24:10.417754Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:10.417758Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:10.417761Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:10.417837Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:10.417842Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037889 2025-05-29T15:24:10.417848Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:10.417856Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-05-29T15:24:10.417858Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037889 2025-05-29T15:24:10.417860Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-05-29T15:24:10.417862Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:10.417950Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:10.417968Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:10.417986Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:10.417991Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 0 2025-05-29T15:24:10.417999Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:10.418003Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:10.418007Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-05-29T15:24:10.418012Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-05-29T15:24:10.418091Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2573], serverId# [1:687:2583], sessionId# [0:0:0] 2025-05-29T15:24:10.418097Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-05-29T15:24:10.418099Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:10.418102Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-05-29T15:24:10.418105Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:24:10.418129Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:10.418172Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:10.418187Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:10.418266Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2574], serverId# [1:695:2589], sessionId# [0:0:0] 2025-05-29T15:24:10.418291Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-29T15:24:10.418314Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-05-29T15:24:10.418328Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-05-29T15:24:10.418556Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:10.418567Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:10.428837Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:10.428875Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:10.428980Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-29T15:24:10.428986Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:467: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:10.571956Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037889, clientId# [1:731:2610], serverId# [1:734:2613], sessionId# [0:0:0] 2025-05-29T15:24:10.572007Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:732:2611], serverId# [1:735:2614], sessionId# [0:0:0] 2025-05-29T15:24:10.572709Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-05-29T15:24:10.572726Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:24:10.572819Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 100 ... ActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:14.739805Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:14.739823Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:24:14.739941Z node 4 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:14.740049Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:14.740401Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:14.740412Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:14.740558Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:14.740571Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:14.740889Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:14.740946Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:14.740955Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:14.740964Z node 4 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:14.740982Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:14.740994Z node 4 :TX_DATASHARD INFO: 
datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:14.741008Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:14.741463Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:14.741611Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:14.741624Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:14.745595Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:14.745638Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-05-29T15:24:14.745653Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:234: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-05-29T15:24:14.745658Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-05-29T15:24:14.745941Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:14.766940Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:14.937447Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:24:14.937481Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:14.937766Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:14.937781Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:24:14.937791Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-05-29T15:24:14.937849Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-05-29T15:24:14.937884Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:14.937933Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:14.938117Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:14.977032Z node 4 
:TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-05-29T15:24:14.977072Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:14.977081Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:14.977093Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:14.977116Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:14.977132Z node 4 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-05-29T15:24:14.977147Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:14.977733Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-05-29T15:24:14.977749Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:14.980920Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:876:2714], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:14.980948Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:886:2719], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:14.980958Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:14.981936Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:24:14.983176Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:15.141089Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:15.141636Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:890:2722], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:24:15.165069Z node 4 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [4:946:2759] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:15.173797Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [4:956:2768], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:15.174383Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=4&id=ZDkyMDliM2MtZjFlZmNhNmYtNWU3YWRlYzQtYTcwYTZhNQ==, ActorId: [4:874:2712], ActorState: ExecuteState, TraceId: 01jwea9p84ep85kg4jpfh48t8e, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13AF911C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13CAC049) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x26340864) ??+0 (0x139D4D45) NKikimr::NTestSuiteCdcStreamChangeCollector::TTestCaseDeleteSingleRow::Execute_(NUnitTest::TTestContext&)+2107 (0x139CD19B) NKikimr::NTestSuiteCdcStreamChangeCollector::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139E4357) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13CADEFE) NKikimr::NTestSuiteCdcStreamChangeCollector::TCurrentTest::Execute()+484 (0x139E3CF4) NUnitTest::TTestFactory::Execute()+803 (0x13CAE673) NUnitTest::RunMain(int, char**)+3021 (0x13CC021D) ??+0 (0x7FECD0577D90) __libc_start_main+128 (0x7FECD0577E40) _start+41 (0x12A09029) >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning [FAIL] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain >> DataShardReadIteratorBatchMode::ShouldHandleReadAck [FAIL] >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [FAIL] |64.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> KqpPg::CreateNotNullPgColumn [GOOD] >> KqpPg::CreateSequence ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::LongDomainName [GOOD] Test command err: Trying to start YDB, gRPC: 30687, MsgBus: 21593 2025-05-29T15:24:07.509262Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888820216184980:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:07.509289Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026e7/r3tmp/tmpt30EM5/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30687, node 1 2025-05-29T15:24:07.565784Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:07.572179Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:07.572192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:07.572193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2025-05-29T15:24:07.572228Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21593 2025-05-29T15:24:07.610822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:07.610856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:07.611880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21593 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:07.638644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:07.843441Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888820216185605:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:07.843457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888820216185613:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:07.843461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:07.844159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:07.846541Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888820216185619:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:07.942516Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888820216185670:2324] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Trying to start YDB, gRPC: 3792, MsgBus: 8647 2025-05-29T15:24:08.164725Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888822735474768:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:08.164762Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026e7/r3tmp/tmpLOHVk4/pdisk_1.dat 2025-05-29T15:24:08.178859Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3792, node 2 2025-05-29T15:24:08.195612Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:08.195629Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:08.195631Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:08.195683Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8647 TClient is connected to server localhost:8647 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:24:08.265051Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:08.265080Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:08.266200Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:08.267779Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:08.268510Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:08.506949Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888822735475379:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.506963Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888822735475389:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.506970Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.507811Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:08.510015Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888822735475393:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:08.602207Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888822735475444:2323] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Trying to start YDB, gRPC: 22949, MsgBus: 12729 2025-05-29T15:24:08.839958Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888822767262617:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:08.839975Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026e7/r3tmp/tmp1m2Exk/pdisk_1.dat 2025-05-29T15:24:08.851860Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:08.852004Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888822767262588:2079] 1748532248839862 != 1748532248839865 TServer::EnableGrpc on GrpcPort 22949, node 3 2025-05-29T15:24:08.867721Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:08.867734Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:08.867737Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:08.867791Z node 3 :NET_CLASSIFI ... erver localhost:1120 2025-05-29T15:24:16.563002Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:16.563032Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:16.567153Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1120 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:24:16.584776Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:24:16.923865Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7509888859262677500:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:16.923894Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:16.924026Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7509888859262677527:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:16.924889Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-05-29T15:24:16.927817Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7509888859262677529:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-05-29T15:24:17.002301Z node 10 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [10:7509888863557644876:2326] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:24:17.009686Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
2025-05-29T15:24:17.047840Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [10:7509888863557644993:2347], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:17.048050Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=10&id=OWYxZDc4YjctZmRlZjYzYzYtNTllMDY0ZmUtY2VjYTVlMjE=, ActorId: [10:7509888863557644986:2343], ActorState: ExecuteState, TraceId: 01jwea9r7z97ra32vvfsvgb7nv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:5065, virtual void NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseNoSelectFullScan::Execute_(NUnitTest::TTestContext &): (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:5065: Execute_ @ 0x15D629B2 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 8. ??:0: ?? @ 0x7F057546BD8F 9. ??:0: ?? @ 0x7F057546BE3F 10. ??:0: ?? @ 0x14D5E028 Trying to start YDB, gRPC: 62110, MsgBus: 25350 2025-05-29T15:24:17.414595Z node 11 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7509888860546188749:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:17.414830Z node 11 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026e7/r3tmp/tmp0NAaXz/pdisk_1.dat 2025-05-29T15:24:17.439622Z node 11 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62110, node 11 2025-05-29T15:24:17.462959Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:17.462976Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:17.462978Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:17.463033Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25350 TClient is connected to server localhost:25350 WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'... 
TClient::Ls request: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa 2025-05-29T15:24:17.519256Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:17.519290Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_D... (TRUNCATED) WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' success. 2025-05-29T15:24:17.520258Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:17.520550Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:24:17.815956Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7509888860546189361:2327], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.815997Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.816103Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7509888860546189373:2330], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.817038Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:17.819404Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7509888860546189375:2331], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:17.919635Z node 11 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [11:7509888860546189426:2323] txid# 281474976715659, issues: { message: "Check failed: path: \'/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:17.924693Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] |64.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |64.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |64.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes >> TConsoleConfigTests::TestCheckConfigUpdates [GOOD] >> TConsoleConfigTests::TestManageValidators >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeThrottler >> TSchemeShardServerLessReboots::TestServerlessComputeResourcesModeWithReboots [GOOD] >> TBSV::ShouldLimitBlockStoreVolumeDropRate |64.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:20.713559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:20.713604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:20.713610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:20.713616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:20.713635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:20.713639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-05-29T15:24:20.713649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:20.713665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:20.713786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:20.713881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:20.727804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:20.727838Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:20.731476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:20.731653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:20.731718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:20.734053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:20.734227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:20.734363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:20.734435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:20.734991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:20.735054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:20.735375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:20.735387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:20.735411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:20.735420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:20.735427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:20.735472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.737088Z node 1 :HIVE 
INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:20.757868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:20.757974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.758061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:20.758111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:20.758123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.759136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:20.759180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:20.759248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.759265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:20.759272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:20.759278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:20.759937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.759956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:20.759962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:20.760374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.760386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.760392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:20.760400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:20.761164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:20.761621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:20.761671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:20.761886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:20.761916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:20.761924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:20.762005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:20.762013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:20.762052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:20.762068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:20.762498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:20.762507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:20.762601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
473:2058] recipient: [1:15:2062] 2025-05-29T15:24:20.875146Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:20.875224Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 99us result status StatusPathDoesNotExist 2025-05-29T15:24:20.875268Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:24:20.875469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:406:2381] sender: [1:474:2058] recipient: [1:103:2137] Leader for TabletID 72057594046678944 is [1:406:2381] sender: [1:477:2058] recipient: [1:476:2434] Leader for TabletID 72057594046678944 is [1:406:2381] sender: [1:478:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:479:2435] sender: [1:480:2058] recipient: [1:476:2434] 2025-05-29T15:24:20.892213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:20.892246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:20.892252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:20.892258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:20.892264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:20.892269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:20.892279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:20.892292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: 
[RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:20.892402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:20.892473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:20.896199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:20.896726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:20.896787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:20.896807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:20.896814Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:20.896899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:20.897010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1457: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1483: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1785: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_data_erasure_manager.cpp:452: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-05-29T15:24:20.897148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2033: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2151: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2237: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2303: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2453: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2832: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2911: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897333Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3409: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3445: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3659: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3804: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3821: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3981: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3997: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4282: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4587: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4645: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4740: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4767: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.897555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4794: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-05-29T15:24:20.902468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:20.903072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:20.903093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:20.903326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:20.903339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:20.903347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:20.903577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:479:2435] sender: [1:540:2058] recipient: [1:15:2062] 2025-05-29T15:24:20.934495Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:20.934584Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 108us result status StatusPathDoesNotExist 2025-05-29T15:24:20.934626Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TBlobStorageWardenTest::TestHttpMonPage |64.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::DeleteWithQueryService-useSink [FAIL] Test command err: Trying to start YDB, gRPC: 28227, MsgBus: 10089 2025-05-29T15:24:07.637702Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888817393290063:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:07.637730Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026c3/r3tmp/tmp0S9ict/pdisk_1.dat 2025-05-29T15:24:07.715001Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28227, node 1 2025-05-29T15:24:07.734140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:07.734153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:07.734155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:07.734200Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:24:07.739316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:07.739347Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:07.740384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10089 TClient is connected to server localhost:10089 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:07.805304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:08.009367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888821688257985:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:08.009387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:08.016739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-05-29T15:24:08.078770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888821688258119:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:08.078800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888821688258124:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:08.078800Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:08.079413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-05-29T15:24:08.085349Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888821688258126:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-05-29T15:24:08.148468Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888821688258177:2404] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:24:08.162144Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888821688258193:2346], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:08.162259Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTM0YmNjOWItYjdjMjcyNy0yYTMwZGJlMi1kN2FlOTkyNg==, ActorId: [1:7509888821688257967:2326], ActorState: ExecuteState, TraceId: 01jwea9fge7q7z2sh66at0y9n2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:1687, virtual void NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseCreateTableSerialColumns::Execute_(NUnitTest::TTestContext &) [useSink = true]: (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:1687: Execute_ @ 0x15DB23F5 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 8. ??:0: ?? @ 0x7FE856CDBD8F 9. ??:0: ?? @ 0x7FE856CDBE3F 10. ??:0: ?? @ 0x14D5E028 Trying to start YDB, gRPC: 1559, MsgBus: 18506 2025-05-29T15:24:09.966001Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888826161853515:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:09.966034Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026c3/r3tmp/tmpLPV1Dv/pdisk_1.dat 2025-05-29T15:24:09.977179Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:09.977375Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888826161853478:2079] 1748532249965818 != 1748532249965821 TServer::EnableGrpc on GrpcPort 1559, node 2 2025-05-29T15:24:09.990840Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:09.990852Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:09.990854Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:09.990895Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18506 TClient is connected to server localhost:18506 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:24:10.068845Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:10.068882Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:10.069224Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:24:10.069868Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:24:10.278486Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888830456821432:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:10.278512Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:10.280732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976 ... t, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:17.189599Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7509888864104081729:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:17.189605Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:17.190246Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-05-29T15:24:17.192389Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7509888864104081731:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-05-29T15:24:17.275185Z node 11 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [11:7509888864104081782:2385] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:24:17.288798Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [11:7509888864104081798:2347], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:17.289796Z node 11 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=11&id=MjFhYTVkMDQtZDAxYTA4ZjktOTgwMzQzMGItZTk1MmMzMTQ=, ActorId: [11:7509888864104081722:2338], ActorState: ExecuteState, TraceId: 01jwea9rd41fm5tfbcv6ykamdk, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:3562, virtual void NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseDeleteWithQueryService::Execute_(NUnitTest::TTestContext &) [useSink = true]: (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:3562: Execute_ @ 0x15DF49D4 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 8. ??:0: ?? @ 0x7FE856CDBD8F 9. ??:0: ?? @ 0x7FE856CDBE3F 10. ??:0: ?? @ 0x14D5E028 Trying to start YDB, gRPC: 28739, MsgBus: 11908 2025-05-29T15:24:17.670713Z node 12 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7509888861477421317:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:17.671047Z node 12 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026c3/r3tmp/tmpTADioc/pdisk_1.dat 2025-05-29T15:24:17.685975Z node 12 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:17.686979Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [12:7509888861477421294:2079] 1748532257670570 != 1748532257670573 TServer::EnableGrpc on GrpcPort 28739, node 12 2025-05-29T15:24:17.702954Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:17.702969Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:17.702971Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:17.703027Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11908 TClient is connected to server localhost:11908 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:24:17.777272Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:17.777301Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:17.778657Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:24:17.779188Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:24:17.783683Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-05-29T15:24:18.570346Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7509888865772389248:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:18.570399Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:18.572143Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-05-29T15:24:18.604808Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7509888865772389352:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:18.604839Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:18.606970Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7509888865772389357:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:24:18.609221Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-05-29T15:24:18.611839Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7509888865772389359:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-05-29T15:24:18.673273Z node 12 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [12:7509888865772389412:2383] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:24:18.684520Z node 12 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [12:7509888865772389435:2348], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:18.684610Z node 12 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=12&id=NWRjODg5MjMtZWY5ZDRkMTMtOGViZTZmMzItYjU3YjZhMWY=, ActorId: [12:7509888865772389350:2338], ActorState: ExecuteState, TraceId: 01jwea9ssb9yteg6c6etyr53eh, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:3562, virtual void NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseDeleteWithQueryService::Execute_(NUnitTest::TTestContext &) [useSink = false]: (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:3562: Execute_ @ 0x15DF7004 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 8. ??:0: ?? @ 0x7FE856CDBD8F 9. ??:0: ?? @ 0x7FE856CDBE3F 10. ??:0: ?? @ 0x14D5E028 >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring >> KqpPg::CreateSequence [GOOD] >> KqpPg::AlterSequence >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriber [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> PgCatalog::PgTables [GOOD] Test command err: Trying to start YDB, gRPC: 62299, MsgBus: 25505 2025-05-29T15:24:07.614639Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888818620267977:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:07.614782Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025e0/r3tmp/tmpb5vGln/pdisk_1.dat 2025-05-29T15:24:07.672334Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62299, node 1 2025-05-29T15:24:07.689681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:07.689692Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:07.689694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:07.689747Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25505 2025-05-29T15:24:07.716052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:07.716077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:07.717329Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25505 WaitRootIsUp 'Root'... 
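The failure above ends with "with diff: (INT|SUCC)E(RNAL_ERROR|SS)": the unittest library factors the two compared strings, keeping shared characters bare (here the single E) and printing each differing run as (actual|expected), so the diff decodes to INTERNAL_ERROR vs SUCCESS. A minimal sketch of the assertion pattern behind such output, assuming only the stock library/cpp/testing/unittest macros (the real check sits at kqp_pg_ut.cpp:3562):

    #include <library/cpp/testing/unittest/registar.h>
    #include <util/generic/string.h>

    Y_UNIT_TEST_SUITE(StatusDiffSketch) {
        Y_UNIT_TEST(StatusMismatch) {
            // Stand-ins for result.GetStatus() and the expected status.
            const TString actual = "INTERNAL_ERROR";
            const TString expected = "SUCCESS";
            // On mismatch this reports "(INTERNAL_ERROR != SUCCESS)" plus the
            // factored diff "(INT|SUCC)E(RNAL_ERROR|SS)" seen in the trace.
            UNIT_ASSERT_VALUES_EQUAL(actual, expected);
        }
    }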
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:07.754077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 1042 2025-05-29T15:24:08.043503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce_pgbpchar_17472595041006102391_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) 2025-05-29T15:24:08.109832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888822915235991:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.109843Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888822915235999:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.109855Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.110433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-05-29T15:24:08.112021Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888822915236005:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-05-29T15:24:08.190225Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888822915236056:2385] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:08.220992Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888822915236072:2346], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:08.221124Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWJhMmQ5OWItNjhlMTU0MTgtMTI4MTFkMmYtNGQ0NTk5NjQ=, ActorId: [1:7509888822915235988:2336], ActorState: ExecuteState, TraceId: 01jwea9fhdfkq7k2gs80vh1p58, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:08.224443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Typemod mismatch, got type _pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce__pgbpchar_17472595041006102391_5352544928909966465 (key, value) VALUES ( '0'::int2, '{abcd,abcd}'::_bpchar ) 2025-05-29T15:24:08.260521Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888822915236169:2367], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:08.260628Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2YxZGYzYTMtYWM5NWIzN2YtNGZjMDM5YmYtNmVjMzZkMGM=, ActorId: [1:7509888822915236161:2362], ActorState: ExecuteState, TraceId: 01jwea9fneehqqqg9m511yar31, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 1042 2025-05-29T15:24:08.263854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
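Both bulk-upsert rejections above have the same shape: the target column was declared with an explicit bpchar length (typmod 2 in the first table, 4 in the next one), while the literal 'abcd'::bpchar carries no typmod at all, which the log prints as the empty "type mod ,". A toy version of that comparison, with assumed names rather than YDB's actual validation code:

    #include <optional>
    #include <stdexcept>
    #include <string>

    // The typmod of bpchar(n) is the declared length n; std::nullopt models a
    // bare ::bpchar literal, whose typmod the log above prints as empty.
    void CheckBpcharTypmod(std::optional<int> columnTypmod,
                           std::optional<int> valueTypmod) {
        if (columnTypmod && valueTypmod != columnTypmod) {
            throw std::runtime_error("Typemod mismatch, expected " +
                                     std::to_string(*columnTypmod));
        }
    }

    // CheckBpcharTypmod(2, std::nullopt) throws, mirroring the first rejection;
    // the spec-driven assertion later in this report expected the value to be
    // coerced instead ("(success == spec.ShouldPass) failed: (0 != 1)").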
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_2169371982377735806_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 4 --!syntax_pg INSERT INTO Coerce_pgbpchar_2169371982377735806_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) 2025-05-29T15:24:08.299046Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888822915236261:2385], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:08.299138Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODk2OTNhZjktYmQ3MTJhZDctMWM4YWFiYjQtODE1YTBkZQ==, ActorId: [1:7509888822915236258:2383], ActorState: ExecuteState, TraceId: 01jwea9fpm2ja0g5bw5ef3rcjq, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:997, auto NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseTypeCoercionInsert::Execute_(NUnitTest::TTestContext &)::(anonymous class)::operator()(const TPgTypeCoercionTestSpec &) const [useSink = false]: (success == spec.ShouldPass) failed: (0 != 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:997: operator() @ 0x15D818A1 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:1021: operator() @ 0x15D80F69 4. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:1027: Execute_ @ 0x15D80F69 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 7. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 8. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 9. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 10. ??:0: ?? @ 0x7FD6B0BEFD8F 11. ??:0: ?? @ 0x7FD6B0BEFE3F 12. ??:0: ?? @ 0x14D5E028 Trying to start YDB, gRPC: 19242, MsgBus: 63474 2025-05-29T15:24:10.338208Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888831541262724:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:10.338253Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025e0/r3tmp/tmphRGnk8/pdisk_1.dat 2025-05-29T15:24:10.351785Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19242, node 2 2025-05-29T15:24:10.366723Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:10.366751Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:10.366753Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:10.366804Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63474 TClient is connected to server localhost:63474 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 Schemeshard ... 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24666 2025-05-29T15:24:16.626264Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:16.626300Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:16.628207Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24666 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:24:16.640462Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:16.642454Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:16.963357Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7509888855883219777:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.963384Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7509888855883219785:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.963418Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.967678Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:16.970675Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7509888855883219814:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:17.059259Z node 10 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [10:7509888860178187161:2324] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Trying to start YDB, gRPC: 20605, MsgBus: 62184 2025-05-29T15:24:17.256636Z node 11 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7509888862479305682:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:17.256677Z node 11 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025e0/r3tmp/tmprTl5LL/pdisk_1.dat 2025-05-29T15:24:17.276768Z node 11 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20605, node 11 2025-05-29T15:24:17.302420Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:17.302438Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:17.302440Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:17.302498Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62184 TClient is connected to server localhost:62184 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:24:17.359691Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:17.359725Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:17.362917Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:24:17.371343Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:17.373986Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:17.610367Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7509888862479306282:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.610438Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.610539Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7509888862479306309:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.611492Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:17.613868Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7509888862479306311:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:17.703590Z node 11 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [11:7509888862479306362:2323] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:17.709888Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:24:17.722817Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:24:18.601811Z node 11 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 11, TabletId: 72075186224037888 not found 2025-05-29T15:24:18.602307Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-05-29T15:24:18.683372Z node 11 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:678: SelfId: [11:7509888866774274176:2432], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jwea9sv3dkkwenhmfam1xg93. SessionId : ydb://session/3?node_id=11&id=ZTlkY2JkMDctNWZjNGI2YzYtOWExZTMyYzAtY2M4OTVkMjk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: "pg_proc" }. 2025-05-29T15:24:18.683560Z node 11 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [11:7509888866774274177:2433], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01jwea9sv3dkkwenhmfam1xg93. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=11&id=ZTlkY2JkMDctNWZjNGI2YzYtOWExZTMyYzAtY2M4OTVkMjk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [11:7509888866774274173:2429], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-05-29T15:24:18.683645Z node 11 :KQP_SESSION WARN: kqp_session_actor.cpp:2583: SessionId: ydb://session/3?node_id=11&id=ZTlkY2JkMDctNWZjNGI2YzYtOWExZTMyYzAtY2M4OTVkMjk=, ActorId: [11:7509888866774274167:2429], ActorState: ExecuteState, TraceId: 01jwea9sv3dkkwenhmfam1xg93, Create QueryResponse for error on request, msg: |64.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] >> TConsoleConfigTests::TestManageValidators [GOOD] >> TConsoleConfigTests::TestDryRun >> TJaegerTracingConfiguratorTests::RequestTypeThrottler [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeSampler |64.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |64.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |64.9%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader >> TBlobStorageWardenTest::TestFilterBadSerials [GOOD] >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_Serial+useSink [FAIL] Test command err: Trying to start YDB, gRPC: 22366, MsgBus: 4476 2025-05-29T15:24:07.676738Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888818446100009:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:07.676812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025c9/r3tmp/tmptcBI84/pdisk_1.dat 2025-05-29T15:24:07.747555Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:07.747616Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888818446099975:2079] 1748532247676629 != 1748532247676632 TServer::EnableGrpc on GrpcPort 22366, node 1 2025-05-29T15:24:07.770076Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:07.770089Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:07.770091Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:07.770136Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:24:07.778460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:07.778487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:07.779469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4476 TClient is connected to server localhost:4476 WaitRootIsUp 'Root'... 
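The PRECONDITION_FAILED above is a runtime PostgreSQL cast error: the string "pg_proc" reached a boolean conversion, and the pg parser rejected it with exactly the wording in the log. A toy parser over PostgreSQL's documented boolean literals (simplified to full words; real PostgreSQL also accepts unique prefixes such as "tru"; this is not YDB code):

    #include <algorithm>
    #include <cctype>
    #include <stdexcept>
    #include <string>

    bool ParsePgBool(const std::string& input) {
        std::string s(input);
        std::transform(s.begin(), s.end(), s.begin(),
                       [](unsigned char c) { return std::tolower(c); });
        for (const char* t : {"t", "true", "y", "yes", "on", "1"})
            if (s == t) return true;
        for (const char* f : {"f", "false", "n", "no", "off", "0"})
            if (s == f) return false;
        // Matches the message in the trace: Terminate was called, reason(57):
        // ERROR: invalid input syntax for type boolean: "pg_proc"
        throw std::invalid_argument(
            "invalid input syntax for type boolean: \"" + input + "\"");
    }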
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:07.828241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 16 2025-05-29T15:24:08.091957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:24:08.161308Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-05-29T15:24:08.162175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:24:08.171806Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-05-29T15:24:08.174214Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888822741068108:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.174217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888822741068119:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.174241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.175031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:24:08.183595Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888822741068122:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:24:08.234878Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888822741068173:2436] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:08.251625Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888822741068189:2354], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:08.251739Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OGRhOTNiY2QtZTZkYzgxMjMtMmViMzhlMGYtNTczOWI1YmE=, ActorId: [1:7509888822741068104:2343], ActorState: ExecuteState, TraceId: 01jwea9fkd6195qpk5bd2rjtw0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:1244, auto NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseInsertFromSelect_Simple::Execute_(NUnitTest::TTestContext &)::(anonymous class)::operator()(const TPgTypeTestSpec &) const [useSink = true]: (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:1244: operator() @ 0x15D941EC 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:1251: Execute_ @ 0x15D941EC 4. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 5. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 6. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 7. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 8. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 9. ??:0: ?? @ 0x7F8EE3F05D8F 10. ??:0: ?? @ 0x7F8EE3F05E3F 11. ??:0: ?? @ 0x14D5E028 Trying to start YDB, gRPC: 21140, MsgBus: 13999 2025-05-29T15:24:10.018887Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888831574293744:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:10.018911Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025c9/r3tmp/tmptyyLC7/pdisk_1.dat 2025-05-29T15:24:10.030895Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:10.031021Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888831574293706:2079] 1748532250018693 != 1748532250018696 TServer::EnableGrpc on GrpcPort 21140, node 2 2025-05-29T15:24:10.048142Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:10.048153Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:10.048154Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:10.048191Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13999 TClient is connected to server localhost:13999 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
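The repeated WorkloadService warnings in these reports (NOT_FOUND for pool "default", a "Scheduled retry ... doublechecking", then TX_PROXY's "path exist, request accepts it") trace one benign create-if-missing race: several actors miss the pool, each proposes CreateResourcePool, and the losers find the path already created by the winner. A generic sketch of that idiom, with hypothetical names rather than the actual TPoolCreatorActor logic:

    enum class EStatus { Success, NotFound, AlreadyExists };

    EStatus FetchPool(const char* pool);   // hypothetical lookup
    EStatus CreatePool(const char* pool);  // hypothetical create

    bool EnsureDefaultPool() {
        if (FetchPool("default") == EStatus::Success) {
            return true;  // pool already present, nothing to do
        }
        const EStatus st = CreatePool("default");
        // AlreadyExists is the expected outcome for every racer that lost;
        // "path exist, request accepts it" is the log-side spelling of it.
        return st == EStatus::Success || st == EStatus::AlreadyExists;
    }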
2025-05-29T15:24:10.119298Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:10.119334Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:10.120373Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:10.122126Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 16 2025-05-29T15:24:10.355751Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:24:10.367326Z node 2 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-05-29T15:24:10.368338Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at s ... 94046644480 2025-05-29T15:24:18.143318Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7509888867481215924:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:18.143365Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:18.147331Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7509888867481215960:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:18.148506Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:18.152115Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:24:18.152195Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7509888867481215962:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:18.243103Z node 11 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [11:7509888867481216013:2324] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:18.250625Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:24:18.372828Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [11:7509888867481216130:2347], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:18.373875Z node 11 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=11&id=ZjAwNDAyNmYtYzVhOTUyOTctOGE0OGZhZWYtMThmZDBjNTM=, ActorId: [11:7509888867481216123:2343], ActorState: ExecuteState, TraceId: 01jwea9sfn0f1tz3pryks49fpw, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4361, virtual void NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseInsertNoTargetColumns_Alter::Execute_(NUnitTest::TTestContext &) [useSink = false]: (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4361: Execute_ @ 0x15E3590A 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 8. ??:0: ?? @ 0x7F8EE3F05D8F 9. ??:0: ?? @ 0x7F8EE3F05E3F 10. ??:0: ?? @ 0x14D5E028 Trying to start YDB, gRPC: 30689, MsgBus: 4128 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025c9/r3tmp/tmpT7x6Ic/pdisk_1.dat 2025-05-29T15:24:18.845116Z node 12 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:18.856150Z node 12 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:18.859030Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [12:7509888867875811427:2079] 1748532258816739 != 1748532258816742 TServer::EnableGrpc on GrpcPort 30689, node 12 2025-05-29T15:24:18.881259Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:18.881275Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:18.881278Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:18.881334Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4128 TClient is connected to server localhost:4128 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:24:18.941020Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:18.941051Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:18.941620Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
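Every INTERNAL_ERROR in these reports cites the same check: yql_expr.h:1874 "index out of range", surfaced as issue code 1 under "Execution, code: 1060" while compiling the pg queries. The message reads like a bounds guard in an indexed accessor; a generic sketch of that shape (illustrative only, not the actual TExprNode code):

    #include <stdexcept>
    #include <vector>

    struct TNode {
        std::vector<TNode*> Children;

        // A guarded accessor of this shape would produce the
        // "index out of range" text that the compile actor reports.
        TNode* Child(size_t index) const {
            if (index >= Children.size()) {
                throw std::out_of_range("index out of range");
            }
            return Children[index];
        }
    };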
2025-05-29T15:24:18.944545Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:18.946447Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:19.279502Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7509888872170779358:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:19.279554Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:19.279762Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7509888872170779393:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:19.280692Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:19.283887Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:24:19.283969Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7509888872170779395:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:19.375999Z node 12 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [12:7509888872170779446:2326] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:19.380992Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:24:19.461484Z node 12 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [12:7509888872170779599:2348], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:19.462443Z node 12 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=12&id=YzdhMGU1NmMtYTMxYmI5OTktZjc4YjkyNzEtNTI2M2E1Y2U=, ActorId: [12:7509888872170779592:2344], ActorState: ExecuteState, TraceId: 01jwea9tkq4b7c423xakshkgrj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4445, virtual void NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseInsertNoTargetColumns_Serial::Execute_(NUnitTest::TTestContext &) [useSink = true]: (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4445: Execute_ @ 0x15E3D989 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 8. ??:0: ?? @ 0x7F8EE3F05D8F 9. ??:0: ?? @ 0x7F8EE3F05E3F 10. ??:0: ?? @ 0x14D5E028 >> TBlobStorageWardenTest::TestSendUsefulMonitoring >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] |64.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:21.401087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:21.401112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:21.401117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:21.401122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:21.401137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:21.401142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:21.401150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:21.401180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:21.401306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:21.401390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 
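In the schemeshard trace that follows, the "Change state for txid N:0 A -> B" lines can be decoded by pairing each numeric value with the ProgressState message printed beside it. The mapping observed in this run, written out as a reading aid (values read off the log, not schemeshard's real enum):

    enum class EObservedTxState : int {
        CreateParts    = 2,    // TCreateParts ProgressState ("no shards to create")
        ConfigureParts = 3,    // NSubDomainState::TConfigureParts
        Propose        = 128,  // NSubDomainState::TPropose, waits for the coordinator plan
        Done           = 240,  // set after TPropose HandleReply TEvOperationPlan
    };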
2025-05-29T15:24:21.410509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:21.410532Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:21.412675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:21.412761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:21.412793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:21.414552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:21.414731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:21.414848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:21.414913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:21.415361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:21.415405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:21.415640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:21.415649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:21.415664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:21.415670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:21.415674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:21.415702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:21.416918Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:21.436741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:21.436828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:21.436893Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:21.436935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:21.436947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:21.437840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:21.437875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:21.437933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:21.437945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:21.437950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:21.437955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:21.438420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:21.438431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:21.438435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:21.438850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:21.438861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:21.438867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:21.438875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:21.439500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:21.439875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:21.439909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:21.440050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:21.440070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:21.440076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:21.440130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:21.440135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:21.440163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:21.440172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:21.440520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:21.440526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:21.440564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
707050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000028 2025-05-29T15:24:21.707209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000028, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:21.707244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000028 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:21.707253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_bsv.cpp:40: TDropBlockStoreVolume TPropose, operationId: 129:0 HandleReply TEvOperationPlan, step: 5000028, at schemeshard: 72057594046678944 2025-05-29T15:24:21.707292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-05-29T15:24:21.707323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#129:0 progress is 1/1 2025-05-29T15:24:21.707328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-05-29T15:24:21.707335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#129:0 progress is 1/1 2025-05-29T15:24:21.707338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-05-29T15:24:21.707351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:21.707363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-05-29T15:24:21.707373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-05-29T15:24:21.707381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-05-29T15:24:21.707386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 129:0 2025-05-29T15:24:21.707390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 129:0 2025-05-29T15:24:21.707419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-05-29T15:24:21.707425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-05-29T15:24:21.707429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 54 2025-05-29T15:24:21.707432Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 13], 18446744073709551615 2025-05-29T15:24:21.708056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:24 2025-05-29T15:24:21.708071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-05-29T15:24:21.708260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:23 2025-05-29T15:24:21.708268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-05-29T15:24:21.708414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:24 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:24:21.708421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:23 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:24:21.708462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:21.708469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:21.708514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 13] 2025-05-29T15:24:21.708543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:21.708548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-05-29T15:24:21.708554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 129, path id: 13 FAKE_COORDINATOR: Erasing txId 129 2025-05-29T15:24:21.708680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-05-29T15:24:21.708694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-05-29T15:24:21.708700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-05-29T15:24:21.708705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 13], version: 18446744073709551615 
2025-05-29T15:24:21.708710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-05-29T15:24:21.708791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:24:21.708797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2025-05-29T15:24:21.708807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:24:21.708853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2025-05-29T15:24:21.708861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2025-05-29T15:24:21.708865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-05-29T15:24:21.708869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 54 2025-05-29T15:24:21.708873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:21.708882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-05-29T15:24:21.708916Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 2025-05-29T15:24:21.709029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2025-05-29T15:24:21.709073Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 2025-05-29T15:24:21.709138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2025-05-29T15:24:21.709457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-05-29T15:24:21.710039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:24:21.710061Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-05-29T15:24:21.710076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:24 2025-05-29T15:24:21.710089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:23 TestModificationResult got TxId: 129, wait until txId: 129 TestWaitNotification wait txId: 129 2025-05-29T15:24:21.710229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 129: send EvNotifyTxCompletion 2025-05-29T15:24:21.710236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 129 2025-05-29T15:24:21.710337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2025-05-29T15:24:21.710354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-05-29T15:24:21.710359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:1672:3541] TestWaitNotification: OK eventTxId 129 >> TCmsTest::RequestReplaceDevices >> TConsoleConfigTests::TestDryRun [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd [FAIL] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle |64.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |64.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |64.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs >> TJaegerTracingConfiguratorTests::RequestTypeSampler [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] Test command err: 2025-05-29T15:24:00.379399Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:00.379427Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:00.379437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001583/r3tmp/tmpFQ0MsX/pdisk_1.dat 2025-05-29T15:24:00.473843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.486645Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:00.489735Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532240027272 != 1748532240027276 2025-05-29T15:24:00.531285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:00.531328Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:00.541864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:00.614874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.630097Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:00.630308Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:00.630378Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:00.630421Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:00.636660Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:00.636787Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:00.636810Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:00.636931Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:00.636936Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:00.636940Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:00.636979Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:00.636990Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:00.636998Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:00.647260Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:00.650310Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:00.650360Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:00.650385Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:00.650389Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:00.650392Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:00.650396Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:00.650449Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:00.650458Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:00.650531Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:00.650548Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:00.650559Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:00.650564Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:00.650572Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:24:00.650576Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:00.650578Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:00.650582Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:00.650585Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:00.650658Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.650662Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.650667Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:00.650672Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:24:00.650675Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:24:00.650689Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:00.650756Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:24:00.650765Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:00.650777Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:00.650783Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:24:00.650786Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:24:00.650790Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:24:00.650793Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:00.650826Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-05-29T15:24:00.650829Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-05-29T15:24:00.650832Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-05-29T15:24:00.650834Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:00.650842Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:00.650844Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-05-29T15:24:00.650848Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-05-29T15:24:00.650851Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-05-29T15:24:00.650854Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1832: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-05-29T15:24:00.651043Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [1:684:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:00.651049Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at 
datashard 72075186224037888 2025-05-29T15:24:00.661292Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:00.661315Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:00.661322Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:00.661332Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-05-29T15:24:00.661347Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:00.804438Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:699:2589], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.804459Z node 1 :TX_DATASHARD TRACE: datashard_impl. ... 7Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:16.537833Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompleteOperation 2025-05-29T15:24:16.537837Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000:281474976715657] at 72075186224037888 to execution unit CompletedOperations 2025-05-29T15:24:16.537841Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000:281474976715657] at 72075186224037888 on unit CompletedOperations 2025-05-29T15:24:16.537847Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-05-29T15:24:16.537851Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompletedOperations 2025-05-29T15:24:16.537855Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [1000:281474976715657] at 72075186224037888 has finished 2025-05-29T15:24:16.537859Z node 14 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:16.537862Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-05-29T15:24:16.537866Z node 14 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:16.537870Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:16.538248Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270976, Sender [14:24:2071], Recipient [14:663:2568]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 0} 2025-05-29T15:24:16.538262Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3167: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-05-29T15:24:16.538267Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 
2025-05-29T15:24:16.538273Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:16.538470Z node 14 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:16.538486Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:16.538629Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:16.538638Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CreateTable 2025-05-29T15:24:16.538644Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:16.538651Z node 14 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:16.538670Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CompleteOperation 2025-05-29T15:24:16.538686Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [14:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:16.538698Z node 14 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:16.538710Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:16.539055Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [14:683:2579], Recipient [14:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:16.539069Z node 14 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:16.539322Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877760, Sender [14:706:2594], Recipient [14:663:2568]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [14:712:2600] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:24:16.539331Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3162: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:24:16.539546Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269552132, Sender [14:410:2404], Recipient [14:663:2568]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715657 2025-05-29T15:24:16.539553Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3133: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-05-29T15:24:16.539558Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:16.539565Z node 14 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:16.539636Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270978, Sender [14:24:2071], Recipient [14:663:2568]: 
NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 1000 ReadStep# 1000 } 2025-05-29T15:24:16.539642Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3168: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-05-29T15:24:16.539648Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:16.543360Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.543388Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:742:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.543400Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.544594Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:16.545993Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [14:683:2579], Recipient [14:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:16.546016Z node 14 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:16.575109Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [14:706:2594], Recipient [14:663:2568]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [14:706:2594] ServerId: [14:712:2600] } 2025-05-29T15:24:16.575139Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:24:16.708509Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [14:683:2579], Recipient [14:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:16.708539Z node 14 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:16.709168Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:16.741982Z node 14 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [14:815:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:16.760466Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [14:825:2669], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:16.761124Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=14&id=NTg5MmQ5ZTItOTEwMjY2MzYtMzVkNWFiNTktOWY4MjM1ZWY=, ActorId: [14:729:2611], ActorState: ExecuteState, TraceId: 01jwea9qrz8mvb7a3ehjgjchev, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13C95FEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E49919) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264C9A24) ??+0 (0x13ADB793) ??+0 (0x13A2F9D1) NKikimr::NTestSuiteDataShardReadIterator::TTestCaseShouldCommitLocksWhenReadWriteInSeparateTransactions::Execute_(NUnitTest::TTestContext&)+34 (0x13A82302) NKikimr::NTestSuiteDataShardReadIterator::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13AD4527) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E4B7CE) NKikimr::NTestSuiteDataShardReadIterator::TCurrentTest::Execute()+433 (0x13AD3D81) NUnitTest::TTestFactory::Execute()+803 (0x13E4BF43) NUnitTest::RunMain(int, char**)+3021 (0x13E5DAED) ??+0 (0x7FCFC0962D90) __libc_start_main+128 (0x7FCFC0962E40) _start+41 (0x12A96029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup Test command err: Trying to start YDB, gRPC: 24172, MsgBus: 19732 2025-05-29T15:24:11.624324Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888836698463175:2196];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:11.625079Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00242f/r3tmp/tmp8oJSe2/pdisk_1.dat 2025-05-29T15:24:11.693238Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888836698463018:2079] 1748532251623495 != 1748532251623498 2025-05-29T15:24:11.695903Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24172, node 1 2025-05-29T15:24:11.707744Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:11.707755Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:11.707757Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:11.707797Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19732 TClient 
is connected to server localhost:19732 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:24:11.767730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:11.767759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:11.769013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:11.780528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:11.797205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:11.864671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:11.927493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:11.991577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:12.024909Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888840993431948:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.024935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.070074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:12.079522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:12.091028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:12.147895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:12.160950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:12.176572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:12.190939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:12.207159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888840993432601:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.207193Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.207310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888840993432606:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:12.208219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:12.216839Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888840993432608:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:24:12.279416Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888840993432659:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:12.364600Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888840993432675:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:12.365829Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjE3MGY4MzUtNzIwMDlmZWEtOTg3ZDRkYTItNTk0MTgyMTY=, ActorId: [1:7509888840993431945:2401], ActorState: ExecuteState, TraceId: 01jwea9khed2pba4jgc8yaz5sv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:24:12.366412Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13BCB6C5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13BC26C6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13D67F66 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A29E22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26213602 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2623494C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2623494C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2623494C 8. /-S/util/thread/pool.h:71: Process @ 0x2623494C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13BD3049 10. /-S/util/thread/factory.h:15: Execute @ 0x13BD1A39 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13BD1A39 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13BCCEAC 13. ??:0: ?? @ 0x7F071FEB1AC2 14. ??:0: ?? @ 0x7F071FF4384F Trying to start YDB, gRPC: 9975, MsgBus: 8183 2025-05-29T15:24:16.666211Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888858878968620:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:16.666917Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00242f/r3tmp/tmpo4pgll/pdisk_1.dat 2025-05-29T15:24:16.739609Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888858878968458:2079] 1748532256665396 != 1748532256665399 2025-05-29T15:24:16.741380Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9975, node 1 2025-05-29T15:24:16.756528Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:16.756541Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:16.756543Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:16.756587Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8183 TClient is connected to server localhost:2025-05-29T15:24:16.807190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:16.807225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 8183 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:24:16.808149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:16.836738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:16.839484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:16.851298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:16.877563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:16.939568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:24:16.960786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:17.082622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888863173937400:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.082654Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.137673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:17.146897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:17.203366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:17.214987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:17.229366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:17.243219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:17.257327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:17.275543Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888863173938055:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.275567Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.275650Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888863173938060:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.276533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:17.284425Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888863173938062:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:24:17.356112Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888863173938113:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:17.455251Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888863173938122:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:17.456592Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODM5ZjNmZjktNzU4ZGZmOTgtNGI3MjJiMy0zYTlmNTEwYw==, ActorId: [1:7509888863173937397:2401], ActorState: ExecuteState, TraceId: 01jwea9rfv826qvv4c7nz0zj9b, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:24:17.458130Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
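The VERIFY above is the test harness, not the shard, failing: CreateSampleTables (frame 4 of the backtrace below) runs setup SQL and funnels the result through AssertSuccessResult, which insists on result.IsSuccess(); the compile error (yql_expr.h:1874: index out of range) surfaced as INTERNAL_ERROR, so the check fired, and because it fired on a thread-pool thread (frames 5-11) the unittest registar additionally reports "requirement UnittestThread failed". The two ": Fatal:" issue lines just below are the payload attached to that status. A minimal sketch of the pattern, with a simplified stand-in TStatus; the real helper at ydb/core/kqp/ut/common/kqp_ut_common.h:375 takes NYdb::TStatus and raises through the unittest registar rather than throwing:

    #include <iostream>
    #include <stdexcept>
    #include <string>

    // Simplified stand-in for NYdb::TStatus (assumption: the real type
    // carries a richer issue tree, not a flat string).
    struct TStatus {
        bool Success;
        std::string Issues; // e.g. "Execution, code: 1060"
        bool IsSuccess() const { return Success; }
    };

    void AssertSuccessResult(const TStatus& result) {
        // This is the check that fired above: (result.IsSuccess())
        if (!result.IsSuccess()) {
            throw std::runtime_error("assertion failed: " + result.Issues);
        }
    }

    int main() {
        try {
            AssertSuccessResult({false, "Execution, code: 1060"});
        } catch (const std::exception& e) {
            std::cerr << e.what() << "\n"; // mirrors the issue dump above
        }
    }
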
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13BCB6C5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13BC26C6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13D67F66 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A29E22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26213602 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2623494C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2623494C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2623494C 8. /-S/util/thread/pool.h:71: Process @ 0x2623494C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13BD3049 10. /-S/util/thread/factory.h:15: Execute @ 0x13BD1A39 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13BD1A39 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13BCCEAC 13. ??:0: ?? @ 0x7FDB6E7B8AC2 14. ??:0: ?? @ 0x7FDB6E84A84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] Test command err: 2025-05-29T15:24:21.559220Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:24:21.560021Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/ciyv/001940/r3tmp/tmpIehdSA/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2025-05-29T15:24:21.560104Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/ciyv/001940/r3tmp/tmpIehdSA/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-05-29T15:24:21.560482Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-05-29T15:24:21.560574Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:21.560827Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} 
StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2025-05-29T15:24:21.560847Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:21.560961Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2025-05-29T15:24:21.560970Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:21.561087Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2025-05-29T15:24:21.561098Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:21.561193Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2025-05-29T15:24:21.561202Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 33554432 2025-05-29T15:24:21.561376Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 1 PipeClientId# [1:47:2076] ControllerId# 72057594037932033 2025-05-29T15:24:21.561381Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:24:21.561415Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:24:21.561435Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:24:21.565552Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:24:21.565826Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:24:21.566423Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2025-05-29T15:24:21.566456Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 33554432 
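Both wardens in this test follow the same bootstrap shape: load the service set, start one local VDisk actor per configured slot (markers NW23/NW24), start a DS proxy for the group (NW12), then open a pipe to the controller and register the node (NW21/NW20). A condensed sketch of that loop; TVDiskSlot and ApplyServiceSet are invented stand-ins, not the warden's real signatures from node_warden_resource.cpp:

    #include <cstdint>
    #include <iostream>
    #include <vector>

    // Invented slot descriptor; the real config is the ServiceSet
    // protobuf dumped in the log above.
    struct TVDiskSlot { uint32_t NodeId, PDiskId, SlotId; uint64_t PDiskGuid; };

    void ApplyServiceSet(const std::vector<TVDiskSlot>& vdisks, uint32_t groupId) {
        for (const auto& v : vdisks) {
            // NW23/NW24: one local VDisk actor per configured slot
            std::cout << "StartLocalVDiskActor VSlotId# " << v.NodeId << ":"
                      << v.PDiskId << ":" << v.SlotId
                      << " PDiskGuid# " << v.PDiskGuid << "\n";
        }
        // NW12: a single DS proxy per group, then a controller pipe
        std::cout << "StartLocalProxy GroupId# " << groupId << "\n";
    }

    int main() {
        // The four slots of group 33554432 as configured above
        ApplyServiceSet({{1, 0, 0, 1}, {1, 0, 1, 1}, {1, 0, 2, 1}, {1, 0, 3, 1}},
                        33554432);
    }
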
2025-05-29T15:24:21.566637Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 1 PipeClientId# [2:95:2070] ControllerId# 72057594037932033 2025-05-29T15:24:21.566644Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:24:21.566659Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:24:21.566676Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:24:21.567900Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:24:21.568031Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:24:21.568196Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:24:21.574838Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:21.574861Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:24:21.574982Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:21.574989Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:24:21.575784Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:21.575794Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-05-29T15:24:21.576437Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-05-29T15:24:21.576521Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:21.576527Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-05-29T15:24:21.576540Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-05-29T15:24:21.576570Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-05-29T15:24:21.576576Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-05-29T15:24:21.576848Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:21.576938Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 
GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2025-05-29T15:24:21.576974Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:21.577036Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-05-29T15:24:21.577098Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-05-29T15:24:21.577102Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:24:21.577115Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "DS\"\t\367u.n$\276\205j\r\267\033\007\201\236-`" } 2025-05-29T15:24:21.577129Z node 1 :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:237} SubscribeToPeerNode NodeId# 2 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 1 2025-05-29T15:24:21.577137Z node 1 :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:80} Initiated bind NodeId# 2 Binding# {2.0/15902121711099483241@[0:0:0]} SessionId# [0:0:0] 2025-05-29T15:24:21.577182Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-05-29T15:24:21.577187Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:361} StateFunc Type# 2146435075 Sender# [1:140:2121] SessionId# [0:0:0] Cookie# 0 2025-05-29T15:24:21.577191Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.001371s 2025-05-29T15:24:21.578463Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-05-29T15:24:21.578472Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:24:21.578490Z node 2 :BS_NODE DEBUG: {NWDC18@distconf_binding.c ... 
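The interesting part of this test follows the elision: node 2 receives group 2181038082 with EncryptionMode: 1 and LifeCyclePhase: 3 but has no tenant key, so group parsing fails with LifeCyclePhase# KEY_NOT_LOADED (NW19) and the proxy comes up with IsLimitedKeyless# true (DSP58). In that mode control traffic (TEvVBlock) still succeeds while data writes are refused, which is exactly the TEvPutResult ERROR seen below. A hypothetical condensation of that decision; EGroupMode, ClassifyGroup and HandlePut are invented names, the real checks live around node_warden_group.cpp (NW19) and the DS proxy state machine (DSP31):

    #include <iostream>
    #include <string>

    // Invented two-state model of the proxy mode; the real proxy has a
    // full session state machine (DSP03/DSP11/DSP15 above and below).
    enum class EGroupMode { Full, LimitedKeyless };

    EGroupMode ClassifyGroup(bool encrypted, bool tenantKeyLoaded) {
        // EncryptionMode: 1 plus a missing tenant key => limited keyless
        return (encrypted && !tenantKeyLoaded) ? EGroupMode::LimitedKeyless
                                               : EGroupMode::Full;
    }

    std::string HandlePut(EGroupMode mode) {
        if (mode == EGroupMode::LimitedKeyless) {
            // Matches the TEvPutResult ERROR text emitted by the proxy
            return "ERROR: Created as LIMITED without keys. It happens "
                   "when tenant keys are missing on the node.";
        }
        return "OK";
    }

    int main() {
        std::cout << HandlePut(ClassifyGroup(true, false)) << "\n";
    }
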
{NodeID: 2 GroupIDs: 2181038082 } 2025-05-29T15:24:21.915622Z node 2 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:800} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 2 ServiceSet { Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 15628991030950777129 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/ciyv/001940/r3tmp/tmpIehdSA//key.txt" EncryptedGroupKey: "\350\351\251\264\322\306\262\235\221\251\204\206\276\010\2345p\'\324\357\214m\301x\315}*\365h\367\314\247-\272XY" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } } 2025-05-29T15:24:21.915642Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 15628991030950777129 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/ciyv/001940/r3tmp/tmpIehdSA//key.txt" EncryptedGroupKey: "\350\351\251\264\322\306\262\235\221\251\204\206\276\010\2345p\'\324\357\214m\301x\315}*\365h\367\314\247-\272XY" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } 2025-05-29T15:24:21.915675Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/ciyv/001940/r3tmp/tmpIehdSA//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-05-29T15:24:21.915887Z node 2 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2181038082 2025-05-29T15:24:21.915900Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-05-29T15:24:21.915905Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:294: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:24:21.916240Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [2:604:2105] Create Queue# [2:606:2106] targetNodeId# 1 Marker# DSP01 2025-05-29T15:24:21.916265Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [2:604:2105] Create Queue# [2:607:2107] targetNodeId# 1 Marker# DSP01 2025-05-29T15:24:21.916287Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [2:604:2105] Create Queue# [2:608:2108] targetNodeId# 1 Marker# DSP01 2025-05-29T15:24:21.916309Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [2:604:2105] Create Queue# [2:609:2109] targetNodeId# 1 Marker# DSP01 2025-05-29T15:24:21.916332Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [2:604:2105] Create Queue# [2:610:2110] targetNodeId# 1 Marker# DSP01 2025-05-29T15:24:21.916354Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [2:604:2105] Create Queue# [2:611:2111] targetNodeId# 1 Marker# DSP01 2025-05-29T15:24:21.916376Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [2:604:2105] Create Queue# [2:612:2112] targetNodeId# 1 Marker# DSP01 2025-05-29T15:24:21.916380Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 2181038082 
SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:24:21.916637Z node 2 :BS_PROXY DEBUG: dsproxy_state.cpp:209: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-05-29T15:24:21.916692Z node 2 :BS_PROXY DEBUG: dsproxy_state.cpp:209: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-05-29T15:24:21.916704Z node 2 :BS_PROXY DEBUG: dsproxy_state.cpp:209: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-05-29T15:24:21.916746Z node 2 :BS_PROXY DEBUG: dsproxy_state.cpp:209: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-05-29T15:24:21.916759Z node 2 :BS_PROXY DEBUG: dsproxy_state.cpp:209: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-05-29T15:24:21.916768Z node 2 :BS_PROXY DEBUG: dsproxy_state.cpp:209: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-05-29T15:24:21.916778Z node 2 :BS_PROXY DEBUG: dsproxy_state.cpp:209: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-05-29T15:24:21.916783Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:183: Group# 2181038082 -> StateWork Marker# DSP11 2025-05-29T15:24:21.916788Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:78: Group# 2181038082 SetStateWork Marker# DSP15 2025-05-29T15:24:21.916814Z node 2 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:150: [efc53170c63234c6] bootstrap ActorId# [2:613:2113] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-05-29T15:24:21.916821Z node 2 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:111: [efc53170c63234c6] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 
Marker# DSPB03 2025-05-29T15:24:21.916861Z node 2 :BS_PROXY DEBUG: group_sessions.h:165: Send to queueActorId# [2:606:2106] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 13399503064704693959 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-05-29T15:24:21.918172Z node 2 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:43: [efc53170c63234c6] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-05-29T15:24:21.918199Z node 2 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:100: [efc53170c63234c6] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2025-05-29T15:24:21.918298Z node 2 :BS_PROXY INFO: dsproxy_impl.h:309: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-05-29T15:24:21.918331Z node 2 :BS_PROXY DEBUG: dsproxy_impl.h:309: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-05-29T15:24:21.918412Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [c85e1a21dcb31b54] bootstrap ActorId# [1:614:2513] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-05-29T15:24:21.918460Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [c85e1a21dcb31b54] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:24:21.918468Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [c85e1a21dcb31b54] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:24:21.918478Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [c85e1a21dcb31b54] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2025-05-29T15:24:21.918484Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [c85e1a21dcb31b54] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2025-05-29T15:24:21.918510Z node 1 :BS_PROXY DEBUG: group_sessions.h:165: Send to queueActorId# [1:593:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-05-29T15:24:21.918569Z node 1 :BS_VDISK_PUT ERROR: blobstorage_skeleton.cpp:568: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2025-05-29T15:24:21.918627Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [c85e1a21dcb31b54] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 
ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-05-29T15:24:21.918640Z node 1 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [c85e1a21dcb31b54] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2025-05-29T15:24:21.918647Z node 1 :BS_PROXY_PUT NOTICE: dsproxy_put.cpp:486: [c85e1a21dcb31b54] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:24:21.918669Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.133 sample PartId# [1234:2:0:0:0:11:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } ] } 2025-05-29T15:24:21.918768Z node 2 :BS_PROXY DEBUG: group_sessions.h:165: Send to queueActorId# [2:606:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::OldImage [FAIL] Test command err: 2025-05-29T15:24:09.669824Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:09.669859Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:09.669872Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000f8a/r3tmp/tmp062Xf5/pdisk_1.dat 2025-05-29T15:24:09.785306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.799280Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:09.800143Z node 1 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1191: Update config MemoryLimit: 33554432 2025-05-29T15:24:09.802980Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532249249093 != 1748532249249097 2025-05-29T15:24:09.844726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:09.844762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:09.855299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:09.928427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.945562Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:09.945664Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:09.954343Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:09.954393Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:09.954565Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:09.954574Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:09.954581Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:09.954641Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:09.954661Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:09.954672Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:09.964893Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:09.969911Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 
72075186224037888 2025-05-29T15:24:09.969990Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:09.970013Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:09.970019Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:09.970024Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:09.970030Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:09.970167Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:09.970193Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:09.970207Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:09.970212Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:09.970219Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:09.970223Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:09.970317Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:09.970338Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:09.970388Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:09.970402Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:09.970671Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:09.980897Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:09.980934Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:10.123834Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:697:2587], serverId# [1:699:2589], sessionId# [0:0:0] 2025-05-29T15:24:10.124649Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:24:10.124667Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2025-05-29T15:24:10.124708Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:10.124716Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:24:10.124723Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:24:10.124780Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:24:10.124806Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:10.124911Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:10.124925Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:24:10.125332Z node 1 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:10.125409Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:10.125716Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:10.125727Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:10.125921Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:10.125933Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:10.126088Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:10.126095Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:10.126099Z node 1 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:10.126113Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:411:2405], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:10.126121Z node 1 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:10.126129Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:10.126902Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:10.127150Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 
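The CDC failure itself is the same yql_expr.h compile error as in the first test; everything before it is the ordinary datashard scheme-tx pipeline: propose walks CheckSchemeTx, StoreSchemeTx, FinishPropose and WaitForPlan (those unit names appear verbatim in the read-iterator trace further down), the coordinator plans the tx at a step (1000 here), and the PlanQueue unit then executes the CREATE TABLE and notifies the schemeshard. A toy traversal of those units; the execute-side names and the Next() driver are simplifications, not datashard_pipeline.cpp's real logic:

    #include <cassert>
    #include <iostream>

    // Propose-side unit names as logged; CreateTable/CompleteOperation
    // approximate the execute side ("Trying to CREATE TABLE",
    // "Sending notify to schemeshard").
    enum class EUnit {
        CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan,
        PlanQueue, CreateTable, CompleteOperation, Done
    };

    EUnit Next(EUnit u) {
        switch (u) {
            case EUnit::CheckSchemeTx: return EUnit::StoreSchemeTx;
            case EUnit::StoreSchemeTx: return EUnit::FinishPropose;
            case EUnit::FinishPropose: return EUnit::WaitForPlan;
            // WaitForPlan blocks until the coordinator plans a step
            // (step 1000 above); the planned tx enters the PlanQueue.
            case EUnit::WaitForPlan:   return EUnit::PlanQueue;
            case EUnit::PlanQueue:     return EUnit::CreateTable;
            case EUnit::CreateTable:   return EUnit::CompleteOperation;
            default:                   return EUnit::Done;
        }
    }

    int main() {
        int hops = 0;
        for (EUnit u = EUnit::CheckSchemeTx; u != EUnit::Done; u = Next(u))
            ++hops;
        assert(hops == 7); // seven units from propose to completion
        std::cout << "scheme tx traversed " << hops << " units\n";
    }
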
2025-05-29T15:24:10.127162Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:10.127294Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:10.130462Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:10.130494Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-05-29T15:24:10.130503Z node 1 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:234: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-05-29T15:24:10.130506Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-05-29T15:24:10.130601Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Dis ... GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:16.076617Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:16.076637Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:24:16.076786Z node 4 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:24:16.076890Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:16.077297Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:16.077315Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:16.077493Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:16.077509Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:16.077936Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:16.078006Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:16.078016Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:16.078023Z node 4 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:16.078045Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:16.078060Z node 4 :TX_DATASHARD INFO: 
datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:16.078074Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:16.078677Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:16.083044Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:16.083081Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:16.087742Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:16.087804Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-05-29T15:24:16.087819Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:234: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-05-29T15:24:16.087826Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-05-29T15:24:16.088192Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:16.110344Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:16.291437Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:24:16.291470Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:16.291774Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:16.291792Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:24:16.291807Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-05-29T15:24:16.291880Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-05-29T15:24:16.291921Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:24:16.291972Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:16.292154Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:16.328363Z node 4 
:TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-05-29T15:24:16.328408Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:16.328416Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:16.328429Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:16.328457Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:16.328475Z node 4 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-05-29T15:24:16.328494Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:16.329109Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-05-29T15:24:16.329131Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:16.332729Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:876:2714], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.332754Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:886:2719], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.332762Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.333581Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:24:16.335231Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:16.501686Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:16.502314Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:890:2722], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:24:16.525614Z node 4 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [4:946:2759] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:16.540215Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [4:956:2768], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:16.540854Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=4&id=MTE2MjYzYTYtMzc1OTJkN2EtNTJmZGNlY2ItYTdkOThkMTI=, ActorId: [4:874:2712], ActorState: ExecuteState, TraceId: 01jwea9qjcbgk98m31015x6zfy, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13AF911C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13CAC049) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x26340864) ??+0 (0x139D4D45) NKikimr::NTestSuiteCdcStreamChangeCollector::TTestCaseOldImage::Execute_(NUnitTest::TTestContext&)+4171 (0x139DB5FB) NKikimr::NTestSuiteCdcStreamChangeCollector::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139E4357) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13CADEFE) NKikimr::NTestSuiteCdcStreamChangeCollector::TCurrentTest::Execute()+484 (0x139E3CF4) NUnitTest::TTestFactory::Execute()+803 (0x13CAE673) NUnitTest::RunMain(int, char**)+3021 (0x13CC021D) ??+0 (0x7FD4CC038D90) __libc_start_main+128 (0x7FD4CC038E40) _start+41 (0x12A09029) >> KqpPg::AlterSequence [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequence >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 >> TCmsTenatsTest::TestClusterRatioLimit >> TCmsTenatsTest::TestTenantRatioLimit >> TCmsTest::TestOutdatedState ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorState::ShouldCalculateQuota [GOOD] Test command err: 2025-05-29T15:24:00.311232Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:00.311259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:00.311270Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001596/r3tmp/tmp0ikXLi/pdisk_1.dat 2025-05-29T15:24:00.407789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.420553Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:00.423347Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532239944098 != 1748532239944102 2025-05-29T15:24:00.464756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:00.464784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:00.475181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:00.547966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.563029Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:00.563202Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:00.563269Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:00.563319Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:00.570303Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:00.570438Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:00.570461Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:00.570592Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:00.570599Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:00.570603Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:00.570642Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:00.570656Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:00.570664Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:00.570717Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:00.573903Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:00.573953Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:00.573976Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:00.573979Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:00.573982Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:00.573986Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:00.574035Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:00.574042Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:00.574108Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:00.574125Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:00.574137Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:00.574142Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:00.574150Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:24:00.574155Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:00.574157Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:00.574161Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:00.574164Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:00.574248Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.574253Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.574258Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:00.574264Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:24:00.574267Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:24:00.574283Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:00.574335Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:24:00.574343Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:00.574356Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:00.574362Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:24:00.574366Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:24:00.574369Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:24:00.574372Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:00.574418Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-05-29T15:24:00.574421Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-05-29T15:24:00.574424Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-05-29T15:24:00.574426Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:00.574434Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:00.574436Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-05-29T15:24:00.574440Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-05-29T15:24:00.574443Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-05-29T15:24:00.574447Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1832: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-05-29T15:24:00.574556Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:00.574561Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:00.574564Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: 
Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:00.574569Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-05-29T15:24:00.574577Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:00.574956Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:00.574966Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:00.718057Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:699:2589], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.718084Z node 1 :TX_DATASHARD TRACE: datashard_impl. ... eration 2025-05-29T15:24:16.654397Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:16.654414Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompleteOperation 2025-05-29T15:24:16.654419Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000:281474976715657] at 72075186224037888 to execution unit CompletedOperations 2025-05-29T15:24:16.654423Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000:281474976715657] at 72075186224037888 on unit CompletedOperations 2025-05-29T15:24:16.654432Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-05-29T15:24:16.654436Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompletedOperations 2025-05-29T15:24:16.654440Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [1000:281474976715657] at 72075186224037888 has finished 2025-05-29T15:24:16.654446Z node 14 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:16.654450Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-05-29T15:24:16.654454Z node 14 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:16.654458Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:16.654956Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270976, Sender [14:24:2071], Recipient [14:663:2568]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 0} 2025-05-29T15:24:16.654969Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3167: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-05-29T15:24:16.654975Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 
72075186224037888 time 0 2025-05-29T15:24:16.654981Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:16.655141Z node 14 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:16.655345Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:16.655594Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:16.655605Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CreateTable 2025-05-29T15:24:16.655612Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:16.655618Z node 14 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:16.655626Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CompleteOperation 2025-05-29T15:24:16.655643Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [14:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:16.655656Z node 14 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:16.655668Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:16.655860Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [14:683:2579], Recipient [14:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:16.655871Z node 14 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:16.656042Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877760, Sender [14:708:2596], Recipient [14:663:2568]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [14:713:2601] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:24:16.656050Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3162: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:24:16.656250Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269552132, Sender [14:410:2404], Recipient [14:663:2568]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715657 2025-05-29T15:24:16.656259Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3133: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-05-29T15:24:16.656264Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:16.656272Z node 14 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:16.656464Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270978, Sender [14:24:2071], 
Recipient [14:663:2568]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 1000 ReadStep# 1000 } 2025-05-29T15:24:16.656474Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3168: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-05-29T15:24:16.656480Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:16.656531Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [14:708:2596], Recipient [14:663:2568]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [14:708:2596] ServerId: [14:713:2601] } 2025-05-29T15:24:16.656537Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:24:16.662433Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:733:2615], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.662459Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:743:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.662469Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:16.663495Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:16.665202Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [14:683:2579], Recipient [14:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:16.665227Z node 14 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:16.812796Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [14:683:2579], Recipient [14:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:16.812826Z node 14 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:16.813396Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:747:2623], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:16.850344Z node 14 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [14:816:2661] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:16.861225Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [14:826:2670], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:16.861836Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=14&id=Nzc4MzQxMDMtZTVmZTM3MDEtYTlkNzc5ZTgtODUzNDkzY2Y=, ActorId: [14:731:2613], ActorState: ExecuteState, TraceId: 01jwea9qwp5cd8haz3j76vac2a, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13C95FEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E49919) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264C9A24) NKikimr::NTestSuiteDataShardReadIteratorPageFaults::TTestCaseLocksNotLostOnPageFault::Execute_(NUnitTest::TTestContext&)+3577 (0x13A97F09) NKikimr::NTestSuiteDataShardReadIteratorPageFaults::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13B48B67) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E4B7CE) NKikimr::NTestSuiteDataShardReadIteratorPageFaults::TCurrentTest::Execute()+481 (0x13B48501) NUnitTest::TTestFactory::Execute()+803 (0x13E4BF43) NUnitTest::RunMain(int, char**)+3021 (0x13E5DAED) ??+0 (0x7FB1B3D89D90) __libc_start_main+128 (0x7FB1B3D89E40) _start+41 (0x12A96029) |64.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb >> TConsoleTests::TestGetUnknownTenantStatus [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain >> KqpPg::EquiJoin+useSink [FAIL] >> KqpPg::EquiJoin-useSink |65.0%| [TA] $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [FAIL] Test command err: Trying to start YDB, gRPC: 19043, MsgBus: 5518 2025-05-29T15:24:08.891514Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888822933592980:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:08.891534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025ae/r3tmp/tmpiPzMTU/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19043, node 1 2025-05-29T15:24:08.949352Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:08.955358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:08.955367Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:08.955369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:08.955408Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5518 2025-05-29T15:24:08.992854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:08.992897Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:08.993909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5518 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:09.026391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:09.176676Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888827228560905:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:09.176702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888827228560894:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:09.176773Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:09.177460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:09.179254Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888827228560908:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:09.237706Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888827228560959:2324] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:09.250769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.326764Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888827228561078:2347], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:09.326860Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Yzc4NmI1MDMtZmQxMmM1ZC1mM2EzZDQ0MC1hOGI0OGM0ZA==, ActorId: [1:7509888827228561071:2343], ActorState: ExecuteState, TraceId: 01jwea9gpz0845shy6hb4pkv56, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4230, virtual void NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseInsertNoTargetColumns_Simple::Execute_(NUnitTest::TTestContext &) [useSink = true]: (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4230: Execute_ @ 0x15E1DD91 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 8. ??:0: ?? @ 0x7FA8BFF63D8F 9. ??:0: ?? @ 0x7FA8BFF63E3F 10. ??:0: ?? @ 0x14D5E028 Trying to start YDB, gRPC: 30141, MsgBus: 23885 2025-05-29T15:24:11.409306Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888834683112964:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:11.409331Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025ae/r3tmp/tmpHLQ6Fw/pdisk_1.dat 2025-05-29T15:24:11.426344Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30141, node 2 2025-05-29T15:24:11.440528Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:11.440540Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:11.440542Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:11.440580Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23885 TClient is connected to server localhost:23885 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:24:11.509797Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:11.509897Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:11.510967Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:11.514442Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:11.515962Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:11.743336Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888834683113563:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:11.743359Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:11.743442Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888834683113590:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:11.744159Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:11.745800Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [Worklo ... ctor# [10:7509888871170974973:2324] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:19.208812Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:24:19.245393Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:24:19.275777Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [10:7509888871170975207:2357], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-05-29T15:24:19.276436Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=10&id=NjdkN2I3ZWItMjRmYjUyNGUtYzViNmEwYTUtNDA0MjNiZWY=, ActorId: [10:7509888871170975205:2356], ActorState: ExecuteState, TraceId: 01jwea9te6aaq16exba4pjqtbg, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-05-29T15:24:19.311144Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [10:7509888871170975218:2362], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:19.311979Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=10&id=NDA4NjM1ODgtMWM5NjIwMWYtY2JjMzJlMDgtN2EwMTFkMDU=, ActorId: [10:7509888871170975216:2361], ActorState: ExecuteState, TraceId: 01jwea9tenfdzcn6yqfbmk4wmb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4714, virtual void NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseInsertNoTargetColumns_SerialNotNull::Execute_(NUnitTest::TTestContext &) [useSink = true]: (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4714: Execute_ @ 0x15E5CC8F 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 8. ??:0: ?? @ 0x7FA8BFF63D8F 9. ??:0: ?? @ 0x7FA8BFF63E3F 10. ??:0: ?? @ 0x14D5E028 Trying to start YDB, gRPC: 9006, MsgBus: 61902 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025ae/r3tmp/tmpGZLxGp/pdisk_1.dat 2025-05-29T15:24:19.733905Z node 11 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:19.747185Z node 11 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9006, node 11 2025-05-29T15:24:19.773559Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:19.773573Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:19.773575Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:19.773629Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:61902 2025-05-29T15:24:19.835231Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:19.835258Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:19.836854Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61902 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:24:19.895454Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:19.900945Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:20.177082Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7509888876803500724:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:20.177116Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:20.178829Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7509888876803500759:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:20.179918Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:20.182938Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:24:20.182998Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7509888876803500761:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:20.280708Z node 11 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [11:7509888876803500812:2326] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:20.286106Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:24:20.318707Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:24:20.347495Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [11:7509888876803501045:2357], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-05-29T15:24:20.348088Z node 11 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=11&id=YzM0NjAwMmQtYjI3OTRiZGYtNDlhZTNlZDMtODEyZGViZDU=, ActorId: [11:7509888876803501043:2356], ActorState: ExecuteState, TraceId: 01jwea9vfj69tbd21yce57dvs8, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-05-29T15:24:20.387763Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [11:7509888876803501056:2362], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:20.387962Z node 11 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=11&id=ZDFlNDZkZjctZmU4YjIzZTUtOTRjYmQ2NGQtMjgzMzUwNGM=, ActorId: [11:7509888876803501054:2361], ActorState: ExecuteState, TraceId: 01jwea9vfxepxd2dryqw0jvs8a, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4714, virtual void NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseInsertNoTargetColumns_SerialNotNull::Execute_(NUnitTest::TTestContext &) [useSink = false]: (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4714: Execute_ @ 0x15E5FD6F 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 8. ??:0: ?? @ 0x7FA8BFF63D8F 9. ??:0: ?? @ 0x7FA8BFF63E3F 10. ??:0: ?? @ 0x14D5E028 >> TConsoleTests::TestCreateSharedTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenant >> TCmsTest::TestKeepAvailableMode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] Test command err: 2025-05-29T15:24:22.080059Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:24:22.080882Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/ciyv/001914/r3tmp/tmpnQLQ7C/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2025-05-29T15:24:22.080959Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/ciyv/001914/r3tmp/tmpnQLQ7C/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-05-29T15:24:22.081192Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-05-29T15:24:22.081261Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:22.081434Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2025-05-29T15:24:22.081447Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] 
VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:22.081571Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2025-05-29T15:24:22.081577Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:22.081649Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2025-05-29T15:24:22.081657Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:22.081747Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2025-05-29T15:24:22.081753Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 33554432 2025-05-29T15:24:22.082415Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 1 PipeClientId# [1:47:2076] ControllerId# 72057594037932033 2025-05-29T15:24:22.082425Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:24:22.082456Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:24:22.082474Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:24:22.086848Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:24:22.087165Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:24:22.087788Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2025-05-29T15:24:22.087826Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 33554432 2025-05-29T15:24:22.088045Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 1 PipeClientId# [2:95:2070] ControllerId# 72057594037932033 2025-05-29T15:24:22.088050Z node 2 :BS_NODE DEBUG: 
{NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:24:22.088065Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:24:22.088084Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:24:22.089301Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:24:22.089446Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:24:22.089638Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:24:22.096306Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:22.096325Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:24:22.096416Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:22.096424Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:24:22.097688Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:22.097702Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-05-29T15:24:22.098592Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-05-29T15:24:22.098709Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-05-29T15:24:22.098792Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:22.098803Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-05-29T15:24:22.098825Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-05-29T15:24:22.098925Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:22.098944Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-05-29T15:24:22.099212Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { 
NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2025-05-29T15:24:22.099247Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-05-29T15:24:22.099307Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:22.099333Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-05-29T15:24:22.099342Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:24:22.099358Z node 2 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 2 NodeId# ::1:12002/2 Meta# {Fingerprint: "\037\333G\274~`\021\312\255\356\031b\206\336z\361\177u\211\202" } 2025-05-29T15:24:22.099373Z node 2 :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:237} SubscribeToPeerNode NodeId# 1 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 1 2025-05-29T15:24:22.099381Z node 2 :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:80} Initiated bind NodeId# 1 Binding# {1.0/8684487695858997022@[0:0:0]} SessionId# [0:0:0] 2025-05-29T15:24:22.099457Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-05-29T15:24:22.099462Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:24:22.099473Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "b\004P\211\274\241J:Pg\324#8n\305\261.\356J\205" } 2025-05-29T15:24:22.099481Z node 1 :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:237} SubscribeToPeerNode NodeId# 2 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 1 2025-05-29T15:24:22.099488Z node 1 :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:80} Initiated bind NodeId# 2 Binding# {2.0/13997 ... 
1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-05-29T15:24:22.660266Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:183: Group# 2181038082 -> StateWork Marker# DSP11 2025-05-29T15:24:22.660271Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:78: Group# 2181038082 SetStateWork Marker# DSP15 2025-05-29T15:24:22.660278Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:290: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2025-05-29T15:24:22.660399Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [d70ef3c23a1a2346] bootstrap ActorId# [1:600:2510] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-05-29T15:24:22.660435Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [d70ef3c23a1a2346] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:24:22.660441Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [d70ef3c23a1a2346] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:24:22.660451Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [d70ef3c23a1a2346] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-05-29T15:24:22.660457Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [d70ef3c23a1a2346] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-05-29T15:24:22.660484Z node 1 :BS_PROXY DEBUG: group_sessions.h:165: Send to queueActorId# [1:593:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-05-29T15:24:22.665067Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [d70ef3c23a1a2346] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-05-29T15:24:22.665119Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [d70ef3c23a1a2346] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-05-29T15:24:22.665177Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [d70ef3c23a1a2346] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:24:22.665210Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.119 sample PartId# [1234:2:0:0:0:5:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 4.73 VDiskId# [82000002:1:0:0:0] NodeId# 1 Status# OK } ] } 2025-05-29T15:24:22.665309Z node 2 :BS_NODE DEBUG: 
{NW46@node_warden_proxy.cpp:130} HandleForwarded GroupId# 2181038082 EnableProxyMock# false NoGroup# false 2025-05-29T15:24:22.665322Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 2181038082 2025-05-29T15:24:22.665328Z node 2 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:265} RequestGroupConfig GroupId# 2181038082 2025-05-29T15:24:22.665404Z node 2 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2181038082 2025-05-29T15:24:22.665419Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:24:22.665424Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:55: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-05-29T15:24:22.665441Z node 2 :BS_PROXY DEBUG: dsproxy_impl.h:205: Group# 2181038082 HandleEnqueue# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Marker# DSP17 2025-05-29T15:24:22.665547Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [2:30:2059] Cookie# 0 Recipient# [1:441:2379] RecipientRewrite# [1:400:2347] Request# {NodeID: 2 GroupIDs: 2181038082 } StopGivingGroups# false 2025-05-29T15:24:22.665574Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 2 GroupIDs: 2181038082 } 2025-05-29T15:24:22.665690Z node 2 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:800} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 2 ServiceSet { Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 14113799756207155617 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/ciyv/001914/r3tmp/tmpnQLQ7C//key.txt" EncryptedGroupKey: "\277~Ji\370\366\340\206\314^\\Z\r\177\2026Mr\021/?\000A\311\301\355\300-\2469\001\367v\310\306\236" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } } 2025-05-29T15:24:22.665715Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 14113799756207155617 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/ciyv/001914/r3tmp/tmpnQLQ7C//key.txt" EncryptedGroupKey: "\277~Ji\370\366\340\206\314^\\Z\r\177\2026Mr\021/?\000A\311\301\355\300-\2469\001\367v\310\306\236" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } 2025-05-29T15:24:22.668368Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/ciyv/001914/r3tmp/tmpnQLQ7C//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-05-29T15:24:22.668672Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-05-29T15:24:22.668683Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:294: 
EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:24:22.669043Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [2:602:2105] Create Queue# [2:604:2106] targetNodeId# 1 Marker# DSP01 2025-05-29T15:24:22.669069Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [2:602:2105] Create Queue# [2:605:2107] targetNodeId# 1 Marker# DSP01 2025-05-29T15:24:22.669091Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [2:602:2105] Create Queue# [2:606:2108] targetNodeId# 1 Marker# DSP01 2025-05-29T15:24:22.669113Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [2:602:2105] Create Queue# [2:607:2109] targetNodeId# 1 Marker# DSP01 2025-05-29T15:24:22.669136Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [2:602:2105] Create Queue# [2:608:2110] targetNodeId# 1 Marker# DSP01 2025-05-29T15:24:22.669160Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [2:602:2105] Create Queue# [2:609:2111] targetNodeId# 1 Marker# DSP01 2025-05-29T15:24:22.669182Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [2:602:2105] Create Queue# [2:610:2112] targetNodeId# 1 Marker# DSP01 2025-05-29T15:24:22.669187Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:24:22.669311Z node 2 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2181038082 2025-05-29T15:24:22.671441Z node 2 :BS_PROXY DEBUG: dsproxy_state.cpp:209: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-05-29T15:24:22.671481Z node 2 :BS_PROXY DEBUG: dsproxy_state.cpp:209: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-05-29T15:24:22.671513Z node 2 :BS_PROXY DEBUG: dsproxy_state.cpp:209: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-05-29T15:24:22.671526Z node 2 :BS_PROXY DEBUG: dsproxy_state.cpp:209: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-05-29T15:24:22.671564Z node 2 :BS_PROXY DEBUG: dsproxy_state.cpp:209: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} 
Duration# 0.000000s Marker# DSP04 2025-05-29T15:24:22.671576Z node 2 :BS_PROXY DEBUG: dsproxy_state.cpp:209: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-05-29T15:24:22.671586Z node 2 :BS_PROXY DEBUG: dsproxy_state.cpp:209: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-05-29T15:24:22.671633Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:183: Group# 2181038082 -> StateWork Marker# DSP11 2025-05-29T15:24:22.671640Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:78: Group# 2181038082 SetStateWork Marker# DSP15 2025-05-29T15:24:22.671685Z node 2 :BS_PROXY DEBUG: group_sessions.h:165: Send to queueActorId# [2:604:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestHttpMonPage [GOOD]
Test command err: 2025-05-29T15:24:21.466348Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:24:21.467058Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/ciyv/00192a/r3tmp/tmprTX28p/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2025-05-29T15:24:21.467128Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/ciyv/00192a/r3tmp/tmprTX28p/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-05-29T15:24:21.467431Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-05-29T15:24:21.467492Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId#
[2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:21.467668Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2025-05-29T15:24:21.467681Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:21.467772Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2025-05-29T15:24:21.467777Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:21.467859Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2025-05-29T15:24:21.467866Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:21.467957Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2025-05-29T15:24:21.467963Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 33554432 2025-05-29T15:24:21.468104Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 1 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2025-05-29T15:24:21.468107Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:24:21.468127Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:24:21.468152Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:24:21.472659Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:24:21.472935Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:24:21.472980Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:21.472984Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:24:21.478041Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:21.478062Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-05-29T15:24:21.478689Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-05-29T15:24:21.479381Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-05-29T15:24:21.479493Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:21.479833Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event 
Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-05-29T15:24:21.479839Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:24:21.479860Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "#\333\325l\026f)x\212\236AG\272~,\255\'\3632p" } 2025-05-29T15:24:21.479886Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-05-29T15:24:21.479893Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:361} StateFunc Type# 2146435075 Sender# [1:72:2116] SessionId# [0:0:0] Cookie# 0 2025-05-29T15:24:21.479899Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.001804s 2025-05-29T15:24:21.483038Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/ciyv/00192a/r3tmp/tmprTX28p/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2025-05-29T15:24:21.483097Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:361} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2025-05-29T15:24:21.487613Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:21.488106Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:21.489104Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:21.490221Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:177: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-05-29T15:24:21.490885Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:21.490953Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:21.491222Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:21.491455Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:21.491742Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:177: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) 
Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-05-29T15:24:21.491981Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:177: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-05-29T15:24:21.492005Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:21.492320Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:177: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-05-29T15:24:21.492625Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:177: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-05-29T15:24:21.493308Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:177: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-05-29T15:24:21.497696Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:21.502971Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:21.503095Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:24:21.503330Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:21.503450Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:24:21.503583Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:24:21.503589Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:24:21.503624Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:24:21.508163Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:24:21.508911Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:24:21.508963Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:24:21.509020Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp ... 
skStatus { VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } NodeId: 2 PDiskId: 0 VSlotId: 2 PDiskGuid: 1 Status: READY OnlyPhantomsRemain: false } } 2025-05-29T15:24:22.473114Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.473139Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } NodeId: 2 PDiskId: 0 VSlotId: 3 PDiskGuid: 1 Status: READY OnlyPhantomsRemain: false } } 2025-05-29T15:24:22.473146Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.473160Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 0 VSlotId: 0 PDiskGuid: 1 Status: READY OnlyPhantomsRemain: false } } 2025-05-29T15:24:22.473175Z node 2 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 1 } Success: true } 2025-05-29T15:24:22.473186Z node 2 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 2 } 2025-05-29T15:24:22.473251Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 1 } } 2025-05-29T15:24:22.473263Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.473385Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.473402Z node 2 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 2 } Success: true } 2025-05-29T15:24:22.473410Z node 2 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 3 } 2025-05-29T15:24:22.473441Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.473452Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 2 } } 2025-05-29T15:24:22.473493Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.473622Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.473646Z node 2 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 3 } Success: true } 2025-05-29T15:24:22.473653Z node 2 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 0 } 2025-05-29T15:24:22.473673Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.473687Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 3 } } 2025-05-29T15:24:22.473730Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.473835Z node 2 :BS_NODE DEBUG: 
{NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.473851Z node 2 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 0 } Success: true } 2025-05-29T15:24:22.473868Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.473883Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 0 } } 2025-05-29T15:24:22.501651Z node 2 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-05-29T15:24:22.501766Z node 2 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:800} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 2 ServiceSet { PDisks { NodeID: 2 PDiskID: 1000 Path: "/home/runner/.ya/build/build_root/ciyv/00192a/r3tmp/tmpWa5OZO/pdisk_1.dat" PDiskGuid: 9684823154808868943 PDiskCategory: 0 EntityStatus: CREATE ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 9684823154808868943 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "pool-1" } VDisks { VDiskID { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1001 PDiskGuid: 9684823154808868943 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "pool-2" } Groups { GroupID: 2181038080 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 9684823154808868943 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2181038080 MainKeyVersion: 0 StoragePoolName: "pool-1" DeviceType: ROT } Groups { GroupID: 2181038081 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1001 PDiskGuid: 9684823154808868943 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2181038081 MainKeyVersion: 0 StoragePoolName: "pool-2" DeviceType: ROT } } InstanceId: "2d57b872-5ee34e82-2e2f2811-4c9302ac" AvailDomain: 1 } 2025-05-29T15:24:22.501807Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {PDisks { NodeID: 2 PDiskID: 1000 Path: "/home/runner/.ya/build/build_root/ciyv/00192a/r3tmp/tmpWa5OZO/pdisk_1.dat" PDiskGuid: 9684823154808868943 PDiskCategory: 0 EntityStatus: CREATE ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 9684823154808868943 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "pool-1" } VDisks { VDiskID { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1001 PDiskGuid: 9684823154808868943 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "pool-2" } Groups { GroupID: 2181038080 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 9684823154808868943 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" 
GroupKeyNonce: 2181038080 MainKeyVersion: 0 StoragePoolName: "pool-1" DeviceType: ROT } Groups { GroupID: 2181038081 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1001 PDiskGuid: 9684823154808868943 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2181038081 MainKeyVersion: 0 StoragePoolName: "pool-2" DeviceType: ROT } } 2025-05-29T15:24:22.501853Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 2 PDiskId# 1000 Path# "/home/runner/.ya/build/build_root/ciyv/00192a/r3tmp/tmpWa5OZO/pdisk_1.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-05-29T15:24:22.502026Z node 2 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [82000000:1:0:0:0] VSlotId# 2:1000:1000 PDiskGuid# 9684823154808868943 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:22.502154Z node 2 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [82000000:1:0:0:0] VSlotId# 2:1000:1000 PDiskGuid# 9684823154808868943 2025-05-29T15:24:22.502168Z node 2 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [82000001:1:0:0:0] VSlotId# 2:1000:1001 PDiskGuid# 9684823154808868943 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:22.502255Z node 2 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [82000001:1:0:0:0] VSlotId# 2:1000:1001 PDiskGuid# 9684823154808868943 2025-05-29T15:24:22.779946Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 9684823154808868943 Status: INIT_PENDING OnlyPhantomsRemain: false } VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 9684823154808868943 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2025-05-29T15:24:22.781419Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 2 PDiskId: 1000 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2025-05-29T15:24:22.781470Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 2 PDiskId: 1000 VSlotId: 1001 } State: Initial Replicated: false DiskSpace: Green } } 2025-05-29T15:24:22.787224Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 1000 VSlotId: 1000 } } 2025-05-29T15:24:22.787266Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 1000 VSlotId: 1001 } } 2025-05-29T15:24:22.789943Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.790036Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.790208Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 
2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 9684823154808868943 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-05-29T15:24:22.790391Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 9684823154808868943 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-05-29T15:24:22.790450Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.790608Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.790653Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 9684823154808868943 Status: READY OnlyPhantomsRemain: false } } 2025-05-29T15:24:22.790694Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 9684823154808868943 Status: READY OnlyPhantomsRemain: false } }
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose
>> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants [GOOD]
>> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain
>> TCmsTenatsTest::TestTenantLimit
>> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem
>> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart
>> KqpPg::AlterColumnSetDefaultFromSequence [FAIL]
>> KqpPg::CreateTableIfNotExists_GenericQuery
>> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD]
>> TCmsTest::ManagePermissions
>> TSchemeShardUserAttrsTest::SetAttrs
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [FAIL]
Test command err: 2025-05-29T15:24:00.309052Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:00.309080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:00.309089Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00158b/r3tmp/tmp9tlJRJ/pdisk_1.dat 2025-05-29T15:24:00.407422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.419901Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:00.422481Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532240010388 != 1748532240010392 2025-05-29T15:24:00.463915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:00.463950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:00.474460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:00.546946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.561371Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:00.561551Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:00.561620Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:00.561675Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:00.567628Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:00.567723Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:00.567744Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:00.567851Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:00.567857Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:00.567860Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:00.567894Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:00.567904Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:00.567912Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:00.578122Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:00.580870Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:00.580910Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:00.580933Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:00.580937Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:00.580940Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:00.580943Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:00.580980Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:00.580985Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:00.581046Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:00.581063Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:00.581072Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:00.581077Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:00.581080Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:24:00.581084Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:00.581087Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:00.581089Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:00.581093Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:00.581162Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.581167Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.581172Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:00.581177Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:24:00.581180Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:24:00.581193Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:00.581239Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:24:00.581249Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:00.581264Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:00.581271Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:24:00.581274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:24:00.581277Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:24:00.581280Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:00.581309Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-05-29T15:24:00.581312Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-05-29T15:24:00.581315Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-05-29T15:24:00.581317Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:00.581323Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:00.581326Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-05-29T15:24:00.581328Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-05-29T15:24:00.581330Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-05-29T15:24:00.581334Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1832: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-05-29T15:24:00.581492Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [1:684:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:00.581497Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at 
datashard 72075186224037888 2025-05-29T15:24:00.591721Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:00.591740Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:00.591745Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:00.591755Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-05-29T15:24:00.591766Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:00.734236Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:699:2589], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.734256Z node 1 :TX_DATASHARD TRACE: datashard_impl. ... TASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:18.079849Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompleteOperation 2025-05-29T15:24:18.079853Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000:281474976715657] at 72075186224037888 to execution unit CompletedOperations 2025-05-29T15:24:18.079858Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000:281474976715657] at 72075186224037888 on unit CompletedOperations 2025-05-29T15:24:18.079864Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-05-29T15:24:18.079868Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompletedOperations 2025-05-29T15:24:18.079873Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [1000:281474976715657] at 72075186224037888 has finished 2025-05-29T15:24:18.079878Z node 15 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:18.079882Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-05-29T15:24:18.079886Z node 15 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:18.079890Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:18.080426Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270976, Sender [15:24:2071], Recipient [15:664:2568]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 0} 2025-05-29T15:24:18.080439Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3167: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-05-29T15:24:18.080444Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:18.080449Z node 
15 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:18.080568Z node 15 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:18.080581Z node 15 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:18.080816Z node 15 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:18.080827Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CreateTable 2025-05-29T15:24:18.080832Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:18.080860Z node 15 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:18.080868Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CompleteOperation 2025-05-29T15:24:18.080885Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [15:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:18.080898Z node 15 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:18.080909Z node 15 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:18.081169Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [15:683:2579], Recipient [15:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:18.081181Z node 15 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:18.081402Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877760, Sender [15:707:2595], Recipient [15:664:2568]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [15:711:2599] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:24:18.081410Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3162: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:24:18.081678Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269552132, Sender [15:410:2404], Recipient [15:664:2568]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715657 2025-05-29T15:24:18.081687Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3133: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-05-29T15:24:18.081695Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:18.081703Z node 15 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:18.081802Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270978, Sender [15:24:2071], Recipient [15:664:2568]: 
NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 1000 ReadStep# 1000 } 2025-05-29T15:24:18.081808Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3168: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-05-29T15:24:18.081814Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:18.126260Z node 15 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [15:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:18.126291Z node 15 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [15:740:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:18.126302Z node 15 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:18.147951Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:18.149334Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [15:683:2579], Recipient [15:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:18.149361Z node 15 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:18.175151Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [15:707:2595], Recipient [15:664:2568]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [15:707:2595] ServerId: [15:711:2599] } 2025-05-29T15:24:18.175185Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:24:18.311039Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [15:683:2579], Recipient [15:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:18.311064Z node 15 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:18.311637Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [15:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:18.344237Z node 15 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [15:815:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:18.405451Z node 15 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [15:825:2669], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:18.406367Z node 15 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=15&id=OGZmZWQwZjItNDdkYzdmZWEtYjIzYWRiZDMtM2Q5NzE0N2M=, ActorId: [15:729:2611], ActorState: ExecuteState, TraceId: 01jwea9sad978e3czdjcfhrk2f, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13C95FEC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E49919)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264C9A24)
??+0 (0x13ADB793)
??+0 (0x13A2F9D1)
NKikimr::NTestSuiteDataShardReadIterator::TTestCaseShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips::Execute_(NUnitTest::TTestContext&)+34 (0x13B42012)
NKikimr::NTestSuiteDataShardReadIterator::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13AD4527)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E4B7CE)
NKikimr::NTestSuiteDataShardReadIterator::TCurrentTest::Execute()+433 (0x13AD3D81)
NUnitTest::TTestFactory::Execute()+803 (0x13E4BF43)
NUnitTest::RunMain(int, char**)+3021 (0x13E5DAED)
??+0 (0x7FF63394DD90)
__libc_start_main+128 (0x7FF63394DE40)
_start+41 (0x12A96029)
>> TJaegerTracingConfiguratorTests::ThrottlingByDb [GOOD]
>> TJaegerTracingConfiguratorTests::SamplingByDb
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink [FAIL]
Test command err: 2025-05-29T15:24:00.150177Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:00.150203Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:00.150215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0015db/r3tmp/tmpYjaNA3/pdisk_1.dat 2025-05-29T15:24:00.243438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.256745Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:00.260080Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532239860323 != 1748532239860327 2025-05-29T15:24:00.301822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:00.301847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:00.312350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:00.385129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.400598Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:00.400782Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:00.400857Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:00.400903Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:00.406513Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:00.406640Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:00.406663Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:00.406803Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:00.406810Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:00.406814Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:00.406853Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:00.406866Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:00.406876Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:00.417110Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:00.420330Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:00.420377Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:00.420403Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:00.420407Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:00.420410Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:00.420416Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:00.420456Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:00.420462Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:00.420525Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:00.420544Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:00.420554Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:00.420560Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:00.420566Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:24:00.420570Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:00.420573Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:00.420576Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:00.420580Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:00.420653Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.420658Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.420663Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:00.420669Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:24:00.420672Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:24:00.420688Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:00.420741Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:24:00.420749Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:00.420762Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:00.420769Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:24:00.420773Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:24:00.420776Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:24:00.420779Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:00.420814Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-05-29T15:24:00.420817Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-05-29T15:24:00.420820Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-05-29T15:24:00.420822Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:00.420829Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:00.420832Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-05-29T15:24:00.420835Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-05-29T15:24:00.420837Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-05-29T15:24:00.420841Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1832: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-05-29T15:24:00.421035Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [1:684:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:00.421041Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at 
datashard 72075186224037888 2025-05-29T15:24:00.431287Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:00.431310Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:00.431318Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:00.431330Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-05-29T15:24:00.431345Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:00.574266Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:699:2589], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.574287Z node 1 :TX_DATASHARD TRACE: datashard_impl. ... 2025-05-29T15:24:18.108040Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:18.108045Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompleteOperation 2025-05-29T15:24:18.108050Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000:281474976715657] at 72075186224037888 to execution unit CompletedOperations 2025-05-29T15:24:18.108053Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000:281474976715657] at 72075186224037888 on unit CompletedOperations 2025-05-29T15:24:18.108059Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-05-29T15:24:18.108063Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompletedOperations 2025-05-29T15:24:18.108068Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [1000:281474976715657] at 72075186224037888 has finished 2025-05-29T15:24:18.108072Z node 15 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:18.108076Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-05-29T15:24:18.108079Z node 15 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:18.108082Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:18.108521Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270976, Sender [15:24:2071], Recipient [15:664:2568]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 0} 2025-05-29T15:24:18.108534Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3167: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-05-29T15:24:18.108540Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 
72075186224037888 time 0 2025-05-29T15:24:18.108545Z node 15 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:18.108643Z node 15 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:18.108654Z node 15 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:18.108843Z node 15 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:18.108852Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CreateTable 2025-05-29T15:24:18.108858Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:18.108864Z node 15 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:18.108871Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CompleteOperation 2025-05-29T15:24:18.108885Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [15:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:18.108895Z node 15 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:18.108905Z node 15 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:18.109099Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [15:683:2579], Recipient [15:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:18.109109Z node 15 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:18.109305Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877760, Sender [15:707:2595], Recipient [15:664:2568]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [15:711:2599] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:24:18.109313Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3162: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:24:18.109530Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269552132, Sender [15:410:2404], Recipient [15:664:2568]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715657 2025-05-29T15:24:18.109538Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3133: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-05-29T15:24:18.109543Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:18.109550Z node 15 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:18.109635Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270978, Sender [15:24:2071], 
Recipient [15:664:2568]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 1000 ReadStep# 1000 } 2025-05-29T15:24:18.109641Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3168: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-05-29T15:24:18.109647Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:18.123274Z node 15 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [15:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:18.123304Z node 15 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [15:740:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:18.123315Z node 15 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:18.124392Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:18.141277Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [15:683:2579], Recipient [15:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:18.141307Z node 15 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:18.165276Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [15:707:2595], Recipient [15:664:2568]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [15:707:2595] ServerId: [15:711:2599] } 2025-05-29T15:24:18.165304Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:24:18.310047Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [15:683:2579], Recipient [15:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:18.310082Z node 15 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:18.311686Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [15:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:18.346344Z node 15 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [15:815:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:18.435631Z node 15 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [15:825:2669], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:18.436384Z node 15 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=15&id=NTY3ZmJjYzgtMWZmMzc4NWEtN2Y3Yzk2NGUtY2I2YTA0OWU=, ActorId: [15:729:2611], ActorState: ExecuteState, TraceId: 01jwea9saa42nkt8s9m1wmdp1v, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13C95FEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E49919) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264C9A24) ??+0 (0x13ADB793) NKikimr::NTestSuiteDataShardReadIterator::TTestCaseShouldReadFromHeadToMvccWithConflict::Execute_(NUnitTest::TTestContext&)+585 (0x13AF4B79) NKikimr::NTestSuiteDataShardReadIterator::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13AD4527) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E4B7CE) NKikimr::NTestSuiteDataShardReadIterator::TCurrentTest::Execute()+433 (0x13AD3D81) NUnitTest::TTestFactory::Execute()+803 (0x13E4BF43) NUnitTest::RunMain(int, char**)+3021 (0x13E5DAED) ??+0 (0x7FAF004B6D90) __libc_start_main+128 (0x7FAF004B6E40) _start+41 (0x12A96029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [FAIL] Test command err: 2025-05-29T15:24:00.040240Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:00.040274Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:00.040284Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0015a8/r3tmp/tmpEFm6NJ/pdisk_1.dat 2025-05-29T15:24:00.134030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.146917Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:00.149711Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532239757165 != 1748532239757169 2025-05-29T15:24:00.191098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:00.191129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:00.201553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:00.274165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.288873Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:00.289038Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:00.289104Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:00.289160Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:00.295192Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:00.295295Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:00.295316Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:00.295450Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:00.295456Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:00.295461Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:00.295497Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:00.295508Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:00.295516Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:00.305711Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:00.308366Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:00.308409Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:00.308437Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:00.308441Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:00.308444Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:00.308448Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:00.308486Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:00.308492Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:00.308548Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:00.308565Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:00.308575Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:00.308581Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:00.308587Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:24:00.308591Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:00.308594Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:00.308597Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:00.308601Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:00.308672Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.308677Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.308683Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:00.308688Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:24:00.308691Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:24:00.308704Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:00.308758Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:24:00.308768Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:00.308784Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:00.308793Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:24:00.308798Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:24:00.308803Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:24:00.308807Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:00.308839Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-05-29T15:24:00.308842Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-05-29T15:24:00.308845Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-05-29T15:24:00.308847Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:00.308855Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:00.308857Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-05-29T15:24:00.308861Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-05-29T15:24:00.308863Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-05-29T15:24:00.308867Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1832: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-05-29T15:24:00.309044Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [1:684:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:00.309051Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at 
datashard 72075186224037888 2025-05-29T15:24:00.319288Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:00.319310Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:00.319318Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:00.319328Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-05-29T15:24:00.319342Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:00.462147Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:699:2589], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.462169Z node 1 :TX_DATASHARD TRACE: datashard_impl. ... Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:17.969007Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompleteOperation 2025-05-29T15:24:17.969012Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000:281474976715657] at 72075186224037888 to execution unit CompletedOperations 2025-05-29T15:24:17.969015Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000:281474976715657] at 72075186224037888 on unit CompletedOperations 2025-05-29T15:24:17.969020Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-05-29T15:24:17.969024Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompletedOperations 2025-05-29T15:24:17.969028Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [1000:281474976715657] at 72075186224037888 has finished 2025-05-29T15:24:17.969032Z node 15 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:17.969035Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-05-29T15:24:17.969039Z node 15 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:17.969042Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:17.969453Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270976, Sender [15:24:2071], Recipient [15:664:2568]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 0} 2025-05-29T15:24:17.969463Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3167: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-05-29T15:24:17.969469Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 
2025-05-29T15:24:17.969475Z node 15 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:17.969595Z node 15 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:17.969606Z node 15 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:17.969799Z node 15 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:17.969807Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CreateTable 2025-05-29T15:24:17.969813Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:17.969820Z node 15 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:17.969828Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CompleteOperation 2025-05-29T15:24:17.969845Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [15:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:17.969855Z node 15 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:17.969865Z node 15 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:17.970059Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [15:683:2579], Recipient [15:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:17.970069Z node 15 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:17.970266Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877760, Sender [15:707:2595], Recipient [15:664:2568]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [15:711:2599] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:24:17.970274Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3162: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:24:17.970488Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269552132, Sender [15:410:2404], Recipient [15:664:2568]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715657 2025-05-29T15:24:17.970496Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3133: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-05-29T15:24:17.970501Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:17.970508Z node 15 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:17.970590Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270978, Sender [15:24:2071], Recipient [15:664:2568]: 
NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 1000 ReadStep# 1000 } 2025-05-29T15:24:17.970596Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3168: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-05-29T15:24:17.970602Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:17.974206Z node 15 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [15:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.974230Z node 15 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [15:740:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.974242Z node 15 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.983571Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:17.984747Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [15:683:2579], Recipient [15:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:17.984767Z node 15 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:18.010985Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [15:707:2595], Recipient [15:664:2568]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [15:707:2595] ServerId: [15:711:2599] } 2025-05-29T15:24:18.011011Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:24:18.152411Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [15:683:2579], Recipient [15:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:18.152441Z node 15 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:18.152957Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [15:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:18.191900Z node 15 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [15:815:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:18.240725Z node 15 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [15:825:2669], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:18.241465Z node 15 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=15&id=MTVmZjgxYTktOTJkMWQyNWYtNmQzY2VhMzUtOWJhYjJhMWU=, ActorId: [15:729:2611], ActorState: ExecuteState, TraceId: 01jwea9s5nbn2v5ke6meqjdbjh, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13C95FEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E49919) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264C9A24) ??+0 (0x13ADB793) ??+0 (0x13A2F9D1) NKikimr::NTestSuiteDataShardReadIterator::TTestCaseShouldReturnBrokenLockWhenReadRangeRightBorder::Execute_(NUnitTest::TTestContext&)+28 (0x13B3ABFC) NKikimr::NTestSuiteDataShardReadIterator::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13AD4527) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E4B7CE) NKikimr::NTestSuiteDataShardReadIterator::TCurrentTest::Execute()+433 (0x13AD3D81) NUnitTest::TTestFactory::Execute()+803 (0x13E4BF43) NUnitTest::RunMain(int, char**)+3021 (0x13E5DAED) ??+0 (0x7FF6BFC25D90) __libc_start_main+128 (0x7FF6BFC25E40) _start+41 (0x12A96029) >> TSchemeShardUserAttrsTest::Boot >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] >> TSchemeShardUserAttrsTest::VariousUse ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] Test command err: 2025-05-29T15:24:22.043686Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:24:22.044453Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/ciyv/001915/r3tmp/tmpQms46r/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 
PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2025-05-29T15:24:22.044527Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/ciyv/001915/r3tmp/tmpQms46r/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-05-29T15:24:22.044848Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-05-29T15:24:22.044904Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:22.045078Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2025-05-29T15:24:22.045092Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:22.045189Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2025-05-29T15:24:22.045194Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:22.045270Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2025-05-29T15:24:22.045282Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:24:22.045362Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2025-05-29T15:24:22.045368Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 33554432 2025-05-29T15:24:22.045529Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 1 PipeClientId# [1:47:2076] ControllerId# 72057594037932033 2025-05-29T15:24:22.045533Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:24:22.045558Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:24:22.045579Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:24:22.049081Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:24:22.049300Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:24:22.049899Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { 
GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2025-05-29T15:24:22.049937Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 33554432 2025-05-29T15:24:22.050075Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 1 PipeClientId# [2:95:2070] ControllerId# 72057594037932033 2025-05-29T15:24:22.050079Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:24:22.050090Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:24:22.050106Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:24:22.051143Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:24:22.051253Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:24:22.051383Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:24:22.069990Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:22.070015Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:24:22.070148Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:22.070157Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:24:22.071390Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:22.071409Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-05-29T15:24:22.072316Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-05-29T15:24:22.072414Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:22.072422Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-05-29T15:24:22.072440Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-05-29T15:24:22.072479Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-05-29T15:24:22.072490Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-05-29T15:24:22.077406Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} 
StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:22.077619Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2025-05-29T15:24:22.077684Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:24:22.077796Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-05-29T15:24:22.077897Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-05-29T15:24:22.077905Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:24:22.077928Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\2775L_EK[fI\3531a\266\200_\004iC|\301" } 2025-05-29T15:24:22.077949Z node 1 :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:237} SubscribeToPeerNode NodeId# 2 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 1 2025-05-29T15:24:22.077959Z node 1 :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:80} Initiated bind NodeId# 2 Binding# {2.0/17978818938535626086@[0:0:0]} SessionId# [0:0:0] 2025-05-29T15:24:22.078029Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-05-29T15:24:22.078038Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:361} StateFunc Type# 2146435075 Sender# [1:140:2121] SessionId# [0:0:0] Cookie# 0 2025-05-29T15:24:22.078047Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.006599s 2025-05-29T15:24:22.080483Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-05-29T15:24:22.080499Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:24:22.080527Z node 2 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} U ... 
{VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 7676227744158286547 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-05-29T15:24:22.366570Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.366591Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1141} Handle(TEvStatusUpdate) 2025-05-29T15:24:22.366623Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 7676227744158286547 Status: READY OnlyPhantomsRemain: false } } 2025-05-29T15:24:22.366661Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 7676227744158286547 Status: READY OnlyPhantomsRemain: false } } Formatting pdisk Creating PDisk Creating pdisk Verify that PDisk returns ERROR 2025-05-29T15:24:22.431587Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:800} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1001 Path: "/home/runner/.ya/build/build_root/ciyv/001915/r3tmp/tmpJew7Id//new_pdisk.dat" PDiskGuid: 12674177115071753817 PDiskCategory: 0 EntityStatus: CREATE } } } 2025-05-29T15:24:22.431612Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {PDisks { NodeID: 1 PDiskID: 1001 Path: "/home/runner/.ya/build/build_root/ciyv/001915/r3tmp/tmpJew7Id//new_pdisk.dat" PDiskGuid: 12674177115071753817 PDiskCategory: 0 EntityStatus: CREATE } } 2025-05-29T15:24:22.431635Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1001 Path# "/home/runner/.ya/build/build_root/ciyv/001915/r3tmp/tmpJew7Id//new_pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-05-29T15:24:22.433038Z node 1 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2856} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ciyv/001915/r3tmp/tmpJew7Id//new_pdisk.dat": no such file. PDiskId# 1001 2025-05-29T15:24:22.433102Z node 1 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1001 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ciyv/001915/r3tmp/tmpJew7Id//new_pdisk.dat": no such file. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/ciyv/001915/r3tmp/tmpJew7Id//new_pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12674177115071753817 PDiskId# 1001 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 HashedMainKey[0]# 0x221976E60BD392C7 StartOwnerRound# 10 SectorMap# false EnableSectorEncryption # 1 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# Enable WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0} PDiskId# 1001 2025-05-29T15:24:22.471138Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [b6b2c6548553d7a5] bootstrap ActorId# [1:542:2461] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:349:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-05-29T15:24:22.471188Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:24:22.471194Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:24:22.471198Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:24:22.471232Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:24:22.471237Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:24:22.471242Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:24:22.471248Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [b6b2c6548553d7a5] restore Id# [72057594037932033:2:8:0:0:349:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:24:22.471260Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 
blob Id# [72057594037932033:2:8:0:0:349:1] Marker# BPG33 2025-05-29T15:24:22.471265Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:349:1] Marker# BPG32 2025-05-29T15:24:22.471270Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:349:2] Marker# BPG33 2025-05-29T15:24:22.471274Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [b6b2c6548553d7a5] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:349:2] Marker# BPG32 2025-05-29T15:24:22.471279Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:349:3] Marker# BPG33 2025-05-29T15:24:22.471283Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [b6b2c6548553d7a5] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:349:3] Marker# BPG32 2025-05-29T15:24:22.471314Z node 1 :BS_PROXY DEBUG: group_sessions.h:165: Send to queueActorId# [1:65:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:349:3] FDS# 349 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-05-29T15:24:22.471322Z node 1 :BS_PROXY DEBUG: group_sessions.h:165: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:349:2] FDS# 349 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-05-29T15:24:22.471329Z node 1 :BS_PROXY DEBUG: group_sessions.h:165: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:349:1] FDS# 349 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-05-29T15:24:22.471888Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:349:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 82748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-05-29T15:24:22.471918Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:349:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 82748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-05-29T15:24:22.471932Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:349:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 82748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-05-29T15:24:22.471951Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [b6b2c6548553d7a5] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:349:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-05-29T15:24:22.471959Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: 
[b6b2c6548553d7a5] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:349:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:24:22.471987Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.232 sample PartId# [72057594037932033:2:8:0:0:349:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.232 sample PartId# [72057594037932033:2:8:0:0:349:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.232 sample PartId# [72057594037932033:2:8:0:0:349:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 0.8 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 0.821 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 0.839 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-05-29T15:24:23.125045Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 0 AvailableSize: 68557996032 TotalSize: 68719476736 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 17112760320 State: Normal } } 2025-05-29T15:24:23.238195Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 68557996032 TotalSize: 68719476736 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34225520640 State: Normal } } 2025-05-29T15:24:23.270106Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1001 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2025-05-29T15:24:23.270140Z node 1 :BS_CONTROLLER NOTICE: {BSCTXUDM03@disk_metrics.cpp:114} PDisk not found PDiskId# 1:1001 |65.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest |65.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest |65.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle [FAIL] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded >> KqpPg::EquiJoin-useSink [FAIL] >> KqpPg::ExplainColumnsReorder ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted [FAIL] Test command err: 2025-05-29T15:24:00.055657Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:00.055694Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:00.055711Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0015c1/r3tmp/tmpZx6azN/pdisk_1.dat 2025-05-29T15:24:00.159396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.172601Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:00.176398Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532239757163 != 1748532239757167 2025-05-29T15:24:00.217925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:00.217953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:00.228455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:00.300897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.315691Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:00.315856Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:00.315924Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:00.315967Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:00.322224Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:00.322338Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:00.322359Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:00.322478Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:00.322484Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:00.322489Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:00.322526Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:00.322537Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:00.322545Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:00.332767Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:00.336056Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:00.336112Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:00.336144Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:00.336149Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:00.336154Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:00.336159Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:00.336211Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:00.336219Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:00.336307Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:00.336332Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:00.336345Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:00.336353Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:00.336360Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:24:00.336365Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:00.336369Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:00.336374Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:00.336379Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:00.336482Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.336491Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.336497Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:00.336505Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:24:00.336510Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:24:00.336531Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:00.336590Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:24:00.336599Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:00.336618Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:00.336625Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:24:00.336630Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:24:00.336635Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:24:00.336639Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:00.336680Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-05-29T15:24:00.336685Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-05-29T15:24:00.336689Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-05-29T15:24:00.336692Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:00.336702Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:00.336705Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-05-29T15:24:00.336709Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-05-29T15:24:00.336712Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-05-29T15:24:00.336717Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1832: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-05-29T15:24:00.336957Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [1:684:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:00.336966Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at 
datashard 72075186224037888 2025-05-29T15:24:00.347193Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:00.347216Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:00.347221Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:00.347230Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-05-29T15:24:00.347245Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:00.490118Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:699:2589], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.490141Z node 1 :TX_DATASHARD TRACE: datashard_impl. ... 24:17.831801Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:17.831806Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompleteOperation 2025-05-29T15:24:17.831809Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000:281474976715657] at 72075186224037888 to execution unit CompletedOperations 2025-05-29T15:24:17.831813Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000:281474976715657] at 72075186224037888 on unit CompletedOperations 2025-05-29T15:24:17.831818Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-05-29T15:24:17.831822Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompletedOperations 2025-05-29T15:24:17.831827Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [1000:281474976715657] at 72075186224037888 has finished 2025-05-29T15:24:17.831831Z node 15 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:17.831835Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-05-29T15:24:17.831839Z node 15 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:17.831842Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:17.832239Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270976, Sender [15:24:2071], Recipient [15:664:2568]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 0} 2025-05-29T15:24:17.832249Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3167: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-05-29T15:24:17.832255Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 
2025-05-29T15:24:17.832261Z node 15 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:17.832375Z node 15 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:17.832386Z node 15 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:17.832585Z node 15 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:17.832593Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CreateTable 2025-05-29T15:24:17.832598Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:17.832606Z node 15 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:17.832614Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CompleteOperation 2025-05-29T15:24:17.832630Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [15:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:17.832639Z node 15 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:17.832650Z node 15 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:17.832851Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [15:683:2579], Recipient [15:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:17.832861Z node 15 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:17.833080Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877760, Sender [15:707:2595], Recipient [15:664:2568]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [15:711:2599] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:24:17.833089Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3162: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:24:17.833301Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269552132, Sender [15:410:2404], Recipient [15:664:2568]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715657 2025-05-29T15:24:17.833309Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3133: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-05-29T15:24:17.833316Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:17.833323Z node 15 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:17.833405Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270978, Sender [15:24:2071], Recipient [15:664:2568]: 
NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 1000 ReadStep# 1000 } 2025-05-29T15:24:17.833410Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3168: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-05-29T15:24:17.833417Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:17.836706Z node 15 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [15:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.836728Z node 15 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [15:740:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.836738Z node 15 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:17.837694Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:17.838694Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [15:683:2579], Recipient [15:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:17.838710Z node 15 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:17.861048Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [15:707:2595], Recipient [15:664:2568]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [15:707:2595] ServerId: [15:711:2599] } 2025-05-29T15:24:17.861075Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:24:18.012407Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [15:683:2579], Recipient [15:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:18.012432Z node 15 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:18.012922Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [15:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:18.047904Z node 15 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [15:815:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:18.092025Z node 15 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [15:825:2669], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:18.092675Z node 15 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=15&id=OTMwZTk1N2QtNzJhZjY2ZjgtNjA3NmI2NzAtNDAzYTYzMDg=, ActorId: [15:729:2611], ActorState: ExecuteState, TraceId: 01jwea9s1ccmat08mq5dnae3da, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13C95FEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E49919) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264C9A24) ??+0 (0x13ADB793) ??+0 (0x13A2F9D1) NKikimr::NTestSuiteDataShardReadIterator::TTestCaseShouldReceiveErrorAfterSplitWhenExhausted::Execute_(NUnitTest::TTestContext&)+34 (0x13A6A052) NKikimr::NTestSuiteDataShardReadIterator::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13AD4527) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E4B7CE) NKikimr::NTestSuiteDataShardReadIterator::TCurrentTest::Execute()+433 (0x13AD3D81) NUnitTest::TTestFactory::Execute()+803 (0x13E4BF43) NUnitTest::RunMain(int, char**)+3021 (0x13E5DAED) ??+0 (0x7FCBB099FD90) __libc_start_main+128 (0x7FCBB099FE40) _start+41 (0x12A96029) |65.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> TSchemeShardServerLessReboots::TestServerlessComputeResourcesModeWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:23:18.633595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:18.633623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:18.633630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:18.633636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:18.633648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:18.633653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:18.633664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:18.633679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:18.633786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:18.633868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:18.648508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:23:18.648536Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:18.648627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:23:18.651293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:18.651329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:18.651365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:18.657608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:18.657674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:18.657764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:18.658043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-05-29T15:23:18.658841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:18.658903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:18.659195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:18.659208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:18.659245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:18.659254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:18.659261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:18.659285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:23:18.660726Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:23:18.678479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:18.678558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.678620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:18.678670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:18.678681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.679610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:18.679658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , 
status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:18.679719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.679732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:18.679739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:18.679745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:18.680163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.680172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:18.680176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:18.680443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.680449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:18.680453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:18.680457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:18.680878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:18.681166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:18.681195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:18.681353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:18.681370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 
TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:18.681375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:18.681431Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 75186234409549 2025-05-29T15:24:21.162643Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [213:731:2632], at schemeshard: 72075186234409549, txId: 0, path id: 1 2025-05-29T15:24:21.162917Z node 213 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186234409549, msg: Owner: 72075186234409549 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409549, cookie: 0 2025-05-29T15:24:21.163013Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1007:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:4 msg type: 268697640 2025-05-29T15:24:21.163032Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1007, partId: 0, tablet: 72075186233409546 2025-05-29T15:24:21.163070Z node 213 :HIVE INFO: tablet_helpers.cpp:1453: [72075186233409546] TEvUpdateDomain, msg: DomainKey { SchemeShard: 72057594046678944 PathId: 4 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared TxId: 1007 2025-05-29T15:24:21.163082Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1007 2025-05-29T15:24:21.163090Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:24:21.163104Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5994: Update domain reply, message: Origin: 72075186233409546 TxId: 1007, at schemeshard: 72057594046678944 2025-05-29T15:24:21.163108Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1007, tablet: 72075186233409546, partId: 0 2025-05-29T15:24:21.163126Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1007:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1007 2025-05-29T15:24:21.163133Z node 213 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:822: [72057594046678944] TSyncHive, operationId 1007:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2025-05-29T15:24:21.163138Z node 213 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1007:0 138 -> 240 2025-05-29T15:24:21.163471Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1007:0, at schemeshard: 72057594046678944 2025-05-29T15:24:21.163494Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1007:0, at schemeshard: 72057594046678944 2025-05-29T15:24:21.163501Z node 213 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1007:0 ProgressState 2025-05-29T15:24:21.163514Z node 213 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: 
Part operation is done id#1007:0 progress is 1/1 2025-05-29T15:24:21.163518Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1007 ready parts: 1/1 2025-05-29T15:24:21.163524Z node 213 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1007:0 progress is 1/1 2025-05-29T15:24:21.163528Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1007 ready parts: 1/1 2025-05-29T15:24:21.163532Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1007, ready parts: 1/1, is published: true 2025-05-29T15:24:21.163538Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1007 ready parts: 1/1 2025-05-29T15:24:21.163543Z node 213 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1007:0 2025-05-29T15:24:21.163548Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1007:0 2025-05-29T15:24:21.163558Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 TestModificationResult got TxId: 1007, wait until txId: 1007 TestWaitNotification wait txId: 1007 2025-05-29T15:24:21.163913Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1007: send EvNotifyTxCompletion 2025-05-29T15:24:21.163922Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1007 2025-05-29T15:24:21.164003Z node 213 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1007, at schemeshard: 72057594046678944 2025-05-29T15:24:21.164018Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1007: got EvNotifyTxCompletionResult 2025-05-29T15:24:21.164022Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1007: satisfy waiter [213:875:2756] TestWaitNotification: OK eventTxId 1007 2025-05-29T15:24:21.164113Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:21.164140Z node 213 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 36us result status StatusSuccess 2025-05-29T15:24:21.164219Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:21.164285Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:21.164297Z node 213 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 13us result status StatusSuccess 2025-05-29T15:24:21.164336Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:21.164387Z node 213 :HIVE INFO: tablet_helpers.cpp:1470: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:4 2025-05-29T15:24:21.164440Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409549 2025-05-29T15:24:21.164453Z node 213 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186234409549 describe path "/MyRoot/ServerLess0" took 
14us result status StatusSuccess 2025-05-29T15:24:21.164493Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409549 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 4 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/ServerLess0" } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 1 PathOwnerId: 72075186234409549, at schemeshard: 72075186234409549 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangeChunk100 [FAIL] Test command err: 2025-05-29T15:24:00.475470Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:00.475495Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:00.475506Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00157b/r3tmp/tmpTZUlj5/pdisk_1.dat 2025-05-29T15:24:00.574372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.587011Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:00.589907Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532240153771 != 1748532240153775 2025-05-29T15:24:00.631266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:00.631297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:00.641746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:00.714345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.730381Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:00.730605Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:00.730726Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:00.730818Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:00.738029Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:00.738171Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:00.738203Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:00.738329Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:00.738335Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:00.738340Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:00.738378Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:00.738390Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:00.738399Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:00.748624Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:00.751552Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:00.751592Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:00.751613Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:00.751616Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:00.751619Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:00.751623Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:00.751663Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:00.751668Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:00.751719Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:00.751732Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:00.751742Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:00.751746Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:00.751751Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:24:00.751754Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:00.751759Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:00.751762Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:00.751765Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:00.751821Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.751825Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.751829Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:00.751835Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:24:00.751837Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:24:00.751848Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:00.751890Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:24:00.751895Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:00.751906Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:00.751910Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:24:00.751913Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:24:00.751917Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:24:00.751919Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:00.751947Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-05-29T15:24:00.751950Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-05-29T15:24:00.751952Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-05-29T15:24:00.751954Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:00.751960Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:00.751962Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-05-29T15:24:00.751965Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-05-29T15:24:00.751967Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-05-29T15:24:00.751971Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1832: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-05-29T15:24:00.752155Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [1:684:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:00.752161Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at 
datashard 72075186224037888 2025-05-29T15:24:00.762426Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:00.762454Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:00.762460Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:00.762470Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-05-29T15:24:00.762482Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:00.905259Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:699:2589], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.905279Z node 1 :TX_DATASHARD TRACE: datashard_impl. ... ion 2025-05-29T15:24:18.089384Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:18.089389Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompleteOperation 2025-05-29T15:24:18.089393Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000:281474976715657] at 72075186224037888 to execution unit CompletedOperations 2025-05-29T15:24:18.089397Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000:281474976715657] at 72075186224037888 on unit CompletedOperations 2025-05-29T15:24:18.089403Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-05-29T15:24:18.089407Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompletedOperations 2025-05-29T15:24:18.089412Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [1000:281474976715657] at 72075186224037888 has finished 2025-05-29T15:24:18.089417Z node 15 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:18.089421Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-05-29T15:24:18.089424Z node 15 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:18.089427Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:18.089872Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270976, Sender [15:24:2071], Recipient [15:664:2568]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 0} 2025-05-29T15:24:18.089887Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3167: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-05-29T15:24:18.089893Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 
72075186224037888 time 0 2025-05-29T15:24:18.089899Z node 15 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:18.090003Z node 15 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:18.090014Z node 15 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:18.090213Z node 15 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:18.090223Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CreateTable 2025-05-29T15:24:18.090229Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:18.090235Z node 15 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:18.090242Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CompleteOperation 2025-05-29T15:24:18.090256Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [15:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:18.090266Z node 15 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:18.090277Z node 15 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:18.090478Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [15:683:2579], Recipient [15:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:18.090488Z node 15 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:18.090686Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877760, Sender [15:707:2595], Recipient [15:664:2568]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [15:711:2599] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:24:18.090694Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3162: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:24:18.100299Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269552132, Sender [15:410:2404], Recipient [15:664:2568]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715657 2025-05-29T15:24:18.100327Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3133: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-05-29T15:24:18.100335Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:18.100347Z node 15 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:18.100478Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270978, Sender [15:24:2071], 
Recipient [15:664:2568]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 1000 ReadStep# 1000 } 2025-05-29T15:24:18.100484Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3168: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-05-29T15:24:18.100490Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:18.104352Z node 15 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [15:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:18.104382Z node 15 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [15:740:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:18.104393Z node 15 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:18.105532Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:18.107501Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [15:683:2579], Recipient [15:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:18.107529Z node 15 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:18.135362Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [15:707:2595], Recipient [15:664:2568]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [15:707:2595] ServerId: [15:711:2599] } 2025-05-29T15:24:18.135393Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:24:18.282656Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [15:683:2579], Recipient [15:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:18.282688Z node 15 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:18.283255Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [15:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:18.323846Z node 15 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [15:815:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:18.354023Z node 15 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [15:825:2669], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:18.354709Z node 15 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=15&id=MjhmNjI5YjMtYjBjOWI4ZjktZWM4MzlkZjMtZTlkYzcxZDA=, ActorId: [15:729:2611], ActorState: ExecuteState, TraceId: 01jwea9s9r6z4yc6xym0gpwmt1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13C95FEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E49919) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264C9A24) ??+0 (0x13ADB793) ??+0 (0x13A2F9D1) NKikimr::NTestSuiteDataShardReadIterator::TTestCaseShouldReadRangeChunk100::Execute_(NUnitTest::TTestContext&)+23 (0x13A60267) NKikimr::NTestSuiteDataShardReadIterator::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13AD4527) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E4B7CE) NKikimr::NTestSuiteDataShardReadIterator::TCurrentTest::Execute()+433 (0x13AD3D81) NUnitTest::TTestFactory::Execute()+803 (0x13E4BF43) NUnitTest::RunMain(int, char**)+3021 (0x13E5DAED) ??+0 (0x7F214DA2AD90) __libc_start_main+128 (0x7F214DA2AE40) _start+41 (0x12A96029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:23.894691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:23.894719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:23.894725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:23.894730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:23.894761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:23.894766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:23.894775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:23.894788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:23.894908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:23.894994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:23.906783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:23.906806Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:23.909604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:23.909801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:23.909851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:23.912033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:23.912206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:23.912357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:23.912421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:23.913112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:23.913174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:23.913534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:23.913549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:23.913574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:23.913584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:23.913589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:23.913621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:23.915005Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for 
TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:23.937018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:23.937121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:23.937204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:23.937261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:23.937274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:23.938217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:23.938248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:23.938307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:23.938319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:23.938326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:23.938331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:23.939009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:23.939028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:23.939035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:23.939447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:23.939460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:23.939466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-05-29T15:24:23.939474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:23.940219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:23.940611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:23.940653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:23.940826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:23.940846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:23.940852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:23.940913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:23.940918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:23.940946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:23.940956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:23.941278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:23.941284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:23.941322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
it: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-05-29T15:24:23.956573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "MyRoot" UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:23.956605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_user_attrs.cpp:26: TAlterUserAttrs Propose, path: /MyRoot, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:24:23.956626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:23.956657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:23.956664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:24:23.957043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:23.957070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: MyRoot 2025-05-29T15:24:23.957099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:24:23.957104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:97: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:24:23.957112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-05-29T15:24:23.957137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:23.957447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-05-29T15:24:23.957481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-05-29T15:24:23.957573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000004, 
transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:23.957592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:23.957598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:114: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2025-05-29T15:24:23.957656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:24:23.957660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:24:23.957666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:24:23.957670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:24:23.957680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:24:23.957690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-05-29T15:24:23.957699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:23.957703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:24:23.957707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:24:23.957712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:24:23.957721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:24:23.957726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-05-29T15:24:23.957731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-05-29T15:24:23.958159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:23.958171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:23.958202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:23.958206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: 
TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 103, path id: 1 FAKE_COORDINATOR: Erasing txId 103 2025-05-29T15:24:23.958297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:24:23.958304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:24:23.958307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:24:23.958311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-05-29T15:24:23.958314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:24:23.958327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-05-29T15:24:23.958796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-05-29T15:24:23.958855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:24:23.958863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:24:23.958939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:24:23.958954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:24:23.958959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:342:2332] TestWaitNotification: OK eventTxId 103 2025-05-29T15:24:23.959027Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:23.959052Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 34us result status StatusSuccess 2025-05-29T15:24:23.959193Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpPg::CreateTableIfNotExists_GenericQuery [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname >> TSchemeShardUserAttrsTest::VariousUse [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits |65.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest |65.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest |65.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnOnSingleTopic [GOOD] |65.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |65.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... 
results_accumulator.log} |65.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |65.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |65.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |65.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:24.123372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:24.123404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:24.123410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:24.123416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:24.123435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:24.123440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:24.123449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:24.123464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:24.123578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:24.123666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:24.141644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:24.141675Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:24.144796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:24.144896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:24.144931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:24.146437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:24.146606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:24.146719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:24.146795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:24.147291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:24.147340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:24.147608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:24.147618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:24.147639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:24.147649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:24.147655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:24.147694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:24.148871Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:24.172504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:24.172596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:24.172679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:24.172733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:24.172745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:24.173662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:24.173696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:24.173752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:24.173764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:24.173771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:24.173776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:24.174457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:24.174488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:24.174497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:24.175210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:24.175224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:24.175233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:24.175240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:24.175983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:24.176506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:24.176547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:24.176741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: 
TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:24.176768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:24.176774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:24.176857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:24.176865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:24.176899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:24.176911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:24.177323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:24.177332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:24.177407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:24.177412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-29T15:24:24.177498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:24.177508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-29T15:24:24.177534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:24:24.177543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:24:24.177548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:24:24.177551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:24:24.177555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-29T15:24:24.177560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:24:24.177565Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1:0 2025-05-29T15:24:24.177569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1:0 2025-05-29T15:24:24.177581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:24:24.177587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-29T15:24:24.177592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-29T15:24:24.177969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:24:24.177985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:24:24.177990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-05-29T15:24:24.177996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-05-29T15:24:24.178001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:24.178016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-05-29T15:24:24.178587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-05-29T15:24:24.178683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain [GOOD] >> TConsoleTests::TestSetDefaultStorageUnitsQuota >> TCmsTest::TestOutdatedState [GOOD] >> TCmsTest::TestSetResetMarkers >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink [FAIL] Test command err: 2025-05-29T15:24:00.076281Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:00.076319Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:00.076335Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0015bd/r3tmp/tmpUnqERK/pdisk_1.dat 2025-05-29T15:24:00.175953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.189583Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:00.192912Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532239759464 != 1748532239759468 2025-05-29T15:24:00.234403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:00.234430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:00.244876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:00.317800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.333254Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:00.333482Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:00.333569Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:00.333633Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:00.341094Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:00.341237Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:00.341259Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:00.341398Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:00.341404Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:00.341408Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:00.341450Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:00.341463Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:00.341473Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:00.351715Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:00.354765Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:00.354818Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:00.354846Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:00.354849Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:00.354853Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:00.354856Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:00.354903Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:00.354908Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:00.354972Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:00.354987Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:00.354997Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:00.355003Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:00.355009Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:24:00.355012Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:00.355017Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:00.355021Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:00.355024Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:00.355094Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.355098Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.355103Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:00.355109Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:24:00.355112Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:24:00.355126Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:00.355190Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:24:00.355200Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:00.355229Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:00.355236Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:24:00.355239Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:24:00.355243Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:24:00.355246Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:00.355280Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-05-29T15:24:00.355283Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-05-29T15:24:00.355286Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-05-29T15:24:00.355288Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:00.355295Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:00.355298Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-05-29T15:24:00.355300Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-05-29T15:24:00.355303Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-05-29T15:24:00.355308Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1832: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-05-29T15:24:00.355489Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [1:684:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:00.355497Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at 
datashard 72075186224037888 2025-05-29T15:24:00.365747Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:00.365769Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:00.365775Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:00.365784Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-05-29T15:24:00.365796Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:00.508421Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:699:2589], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:00.508441Z node 1 :TX_DATASHARD TRACE: datashard_impl. ... 9T15:24:19.637283Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:19.637288Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompleteOperation 2025-05-29T15:24:19.637292Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000:281474976715657] at 72075186224037888 to execution unit CompletedOperations 2025-05-29T15:24:19.637295Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000:281474976715657] at 72075186224037888 on unit CompletedOperations 2025-05-29T15:24:19.637301Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-05-29T15:24:19.637304Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompletedOperations 2025-05-29T15:24:19.637308Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [1000:281474976715657] at 72075186224037888 has finished 2025-05-29T15:24:19.637313Z node 16 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:19.637317Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-05-29T15:24:19.637320Z node 16 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:19.637323Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:19.637834Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270976, Sender [16:24:2071], Recipient [16:664:2568]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 0} 2025-05-29T15:24:19.637848Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3167: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-05-29T15:24:19.637852Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 
2025-05-29T15:24:19.637858Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:19.637996Z node 16 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:19.638008Z node 16 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:19.638208Z node 16 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:19.638217Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CreateTable 2025-05-29T15:24:19.638222Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:19.638229Z node 16 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:19.638236Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CompleteOperation 2025-05-29T15:24:19.638251Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [16:406:2400], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:19.638261Z node 16 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:19.638276Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:19.638565Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [16:683:2579], Recipient [16:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:19.638574Z node 16 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:19.638832Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877760, Sender [16:707:2595], Recipient [16:664:2568]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [16:712:2600] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:24:19.638841Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3162: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:24:19.639118Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269552132, Sender [16:406:2400], Recipient [16:664:2568]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715657 2025-05-29T15:24:19.639128Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3133: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-05-29T15:24:19.639134Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:19.639142Z node 16 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:19.639173Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270978, Sender [16:24:2071], Recipient [16:664:2568]: 
NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 1000 ReadStep# 1000 } 2025-05-29T15:24:19.639178Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3168: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-05-29T15:24:19.639184Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:19.643272Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:19.643316Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:741:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:19.643330Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:19.644433Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:19.645643Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [16:683:2579], Recipient [16:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:19.645661Z node 16 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:19.673186Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [16:707:2595], Recipient [16:664:2568]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [16:707:2595] ServerId: [16:712:2600] } 2025-05-29T15:24:19.673217Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:24:19.825669Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [16:683:2579], Recipient [16:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:19.825709Z node 16 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:19.826288Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:19.864048Z node 16 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [16:815:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:19.913258Z node 16 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [16:825:2669], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:19.914042Z node 16 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=16&id=OWFkNmQ4MGUtZjU5MDE4NTktOWQ3MTIzNTItZTI4NTg2MmE=, ActorId: [16:729:2611], ActorState: ExecuteState, TraceId: 01jwea9tst6vr2trvf325rs6k7, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13C95FEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E49919) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264C9A24) ??+0 (0x13ADB793) NKikimr::NTestSuiteDataShardReadIterator::TTestCaseShouldProperlyOrderConflictingTransactionsMvcc::Execute_(NUnitTest::TTestContext&)+580 (0x13AFA464) NKikimr::NTestSuiteDataShardReadIterator::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13AD4527) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E4B7CE) NKikimr::NTestSuiteDataShardReadIterator::TCurrentTest::Execute()+433 (0x13AD3D81) NUnitTest::TTestFactory::Execute()+803 (0x13E4BF43) NUnitTest::RunMain(int, char**)+3021 (0x13E5DAED) ??+0 (0x7F5704481D90) __libc_start_main+128 (0x7F5704481E40) _start+41 (0x12A96029) >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags >> TCmsTest::RequestReplaceDevices [GOOD] >> TCmsTest::RequestReplaceDevicePDisk >> TCmsTenatsTest::TestClusterRatioLimit [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:24.316522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:24.316548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:24.316554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:24.316559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:24.316572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:24.316577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:24.316585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:24.316600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:24.316707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:24.316780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:24.337326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:24.337352Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:24.340263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:24.340358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:24.340390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:24.341533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:24.341643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:24.341734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:24.341769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:24.342086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:24.342119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:24.342313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:24.342320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:24.342334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:24.342339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:24.342344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:24.342369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:24.343283Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:24.362401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:24.362484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:24.362553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:24.362609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:24.362621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:24.363458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:24.363487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:24.363536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:24.363547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:24.363553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:24.363559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:24.363960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:24.363973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:24.363979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:24.364358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:24:24.364370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:24.364375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:24.364383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:24.365090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:24.365595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:24.365637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:24.365830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:24.365856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:24.365866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:24.365936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:24.365943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:24.365980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:24.365993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:24.366413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:24.366422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-05-29T15:24:24.366466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... schemeshard__operation_side_effects.cpp:989: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-29T15:24:24.411720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-05-29T15:24:24.411996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-05-29T15:24:24.412276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:24.412285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:24.412313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:24:24.412321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:24:24.412341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:24.412346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-05-29T15:24:24.412350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 112, path id: 3 2025-05-29T15:24:24.412355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 112, path id: 4 FAKE_COORDINATOR: Erasing txId 112 2025-05-29T15:24:24.412478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 112 2025-05-29T15:24:24.412488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 112 2025-05-29T15:24:24.412492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 112 2025-05-29T15:24:24.412496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:24:24.412501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:24:24.412551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2025-05-29T15:24:24.412558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2025-05-29T15:24:24.412562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2025-05-29T15:24:24.412566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-05-29T15:24:24.412569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:24:24.412689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-05-29T15:24:24.412699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-05-29T15:24:24.412703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2025-05-29T15:24:24.412706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:24:24.412710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:24:24.412720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2025-05-29T15:24:24.412879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:24:24.412887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:24:24.412906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:24:24.413119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 
2025-05-29T15:24:24.413376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-05-29T15:24:24.413392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-05-29T15:24:24.413598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-05-29T15:24:24.413687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-05-29T15:24:24.413694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-05-29T15:24:24.413793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-05-29T15:24:24.413810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-05-29T15:24:24.413816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:498:2488] TestWaitNotification: OK eventTxId 112 2025-05-29T15:24:24.413931Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:24.413957Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 36us result status StatusSuccess 2025-05-29T15:24:24.414026Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000008 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 113 2025-05-29T15:24:24.414780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "DirB" } ApplyIf { PathId: 2 PathVersion: 8 } ApplyIf { PathId: 3 PathVersion: 7 } ApplyIf { PathId: 4 PathVersion: 3 } } TxId: 113 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:24.414818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:29: TRmDir Propose, path: /MyRoot/DirB, pathId: 0, opId: 113:0, at schemeshard: 72057594046678944 2025-05-29T15:24:24.414837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 113:1, propose status:StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:24:24.415291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 113, response: Status: StatusPreconditionFailed Reason: "fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4]" TxId: 113 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:24.415322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 113, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], operation: DROP DIRECTORY, path: /MyRoot/DirB TestModificationResult got TxId: 113, wait until txId: 113 >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits >> ExternalBlobsMultipleChannels::SingleChannel >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [FAIL] >> KqpPg::CheckPgAutoParams+useSink |65.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> TCmsTest::ManagePermissions [GOOD] >> TCmsTest::ManagePermissionWrongRequest >> TCmsTest::TestKeepAvailableMode [GOOD] >> TCmsTest::TestKeepAvailableModeDisconnects >> KqpPg::ExplainColumnsReorder [GOOD] >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns >> TCmsTenatsTest::TestTenantRatioLimit [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode |65.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest |65.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges >> ExternalBlobsMultipleChannels::Simple >> ExternalBlobsMultipleChannels::WithCompaction >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser [GOOD] >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates |65.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorBatchMode::ShouldHandleReadAck [FAIL] Test command err: 2025-05-29T15:24:00.734640Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] 
ActorId: [1:701:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:00.734724Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:00.734776Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:00.734819Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:698:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:00.734866Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:00.734872Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001574/r3tmp/tmpRSce5O/pdisk_1.dat 2025-05-29T15:24:00.822138Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:00.895060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:00.981683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:00.981712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:00.982707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:00.982724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:00.994126Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:24:00.994250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:00.994341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:01.254481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:01.287947Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [2:1252:2378], Recipient [2:1278:2390]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:01.290189Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [2:1252:2378], Recipient [2:1278:2390]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:01.290301Z node 2 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1278:2390] 2025-05-29T15:24:01.290359Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:01.291386Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [2:1252:2378], Recipient [2:1278:2390]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:01.299955Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:01.299981Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:01.300079Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 
2025-05-29T15:24:01.300085Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:01.300089Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:01.300125Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:01.300136Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:01.300147Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [2:1298:2390] in generation 1 2025-05-29T15:24:01.311183Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:01.314218Z node 2 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:01.314256Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:01.314279Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [2:1303:2406] 2025-05-29T15:24:01.314284Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:01.314287Z node 2 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:01.314291Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:01.314331Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [2:1278:2390], Recipient [2:1278:2390]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:01.314337Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:01.314390Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:01.314403Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:01.314414Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:01.314420Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:01.314428Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:24:01.314432Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:01.314437Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:01.314442Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:01.314448Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:01.365950Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [2:1307:2407], Recipient [2:1278:2390]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:01.365968Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:01.365976Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:1262:2772], serverId# [2:1307:2407], sessionId# [0:0:0] 2025-05-29T15:24:01.366026Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:840:2468], Recipient [2:1307:2407] 2025-05-29T15:24:01.366030Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:24:01.366051Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:01.366097Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:24:01.366106Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:01.366124Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:01.366130Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:24:01.366134Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:24:01.366138Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:24:01.366140Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:01.366177Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-05-29T15:24:01.366180Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-05-29T15:24:01.366183Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-05-29T15:24:01.366185Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:01.366193Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:01.366195Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-05-29T15:24:01.366198Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-05-29T15:24:01.366200Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-05-29T15:24:01.366204Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1832: Operation [0:281474976715657] at 72075186224037888 is not 
ready to execute on unit WaitForPlan 2025-05-29T15:24:01.366519Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [2:1308:2408], Recipient [2:1278:2390]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:01.366527Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard ... 4:19.891461Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:19.891466Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompleteOperation 2025-05-29T15:24:19.891470Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000:281474976715657] at 72075186224037888 to execution unit CompletedOperations 2025-05-29T15:24:19.891474Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [1000:281474976715657] at 72075186224037888 on unit CompletedOperations 2025-05-29T15:24:19.891479Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-05-29T15:24:19.891483Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit CompletedOperations 2025-05-29T15:24:19.891487Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [1000:281474976715657] at 72075186224037888 has finished 2025-05-29T15:24:19.891491Z node 16 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:19.891495Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-05-29T15:24:19.891499Z node 16 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:19.891502Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:19.891905Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270976, Sender [16:24:2071], Recipient [16:664:2568]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 0} 2025-05-29T15:24:19.891916Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3167: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-05-29T15:24:19.891922Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:19.891928Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:19.892065Z node 16 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:24:19.892077Z node 16 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:19.892279Z node 16 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:19.892288Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 
on unit CreateTable 2025-05-29T15:24:19.892294Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:19.892301Z node 16 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:19.892310Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [1000:281474976715657] at 72075186224037888 on unit CompleteOperation 2025-05-29T15:24:19.892325Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [16:406:2400], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:19.892335Z node 16 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:19.892345Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:19.892640Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [16:683:2579], Recipient [16:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:19.892650Z node 16 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:19.892831Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877760, Sender [16:707:2595], Recipient [16:664:2568]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [16:712:2600] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:24:19.892838Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3162: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:24:19.893072Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269552132, Sender [16:406:2400], Recipient [16:664:2568]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715657 2025-05-29T15:24:19.893079Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3133: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-05-29T15:24:19.893084Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:19.893091Z node 16 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:19.893122Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270978, Sender [16:24:2071], Recipient [16:664:2568]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 1000 ReadStep# 1000 } 2025-05-29T15:24:19.893127Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3168: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-05-29T15:24:19.893133Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:24:19.909350Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:19.909378Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:741:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:19.909390Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:19.910487Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:19.920106Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [16:683:2579], Recipient [16:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:19.920139Z node 16 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:19.942771Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [16:707:2595], Recipient [16:664:2568]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [16:707:2595] ServerId: [16:712:2600] } 2025-05-29T15:24:19.942803Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:24:20.083097Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [16:683:2579], Recipient [16:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:20.083128Z node 16 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:20.083666Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:20.115972Z node 16 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [16:815:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:20.171562Z node 16 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [16:825:2669], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:20.172427Z node 16 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=16&id=NWJkNmQyNS1jM2ZlNjlmOS04YzczMjEtZTI0NTIzNGE=, ActorId: [16:729:2611], ActorState: ExecuteState, TraceId: 01jwea9v25enr7kekftpvgqsdt, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13C95FEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E49919) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264C9A24) ??+0 (0x13ADB793) ??+0 (0x13A2F9D1) NKikimr::NTestSuiteDataShardReadIteratorBatchMode::TTestCaseShouldHandleReadAck::Execute_(NUnitTest::TTestContext&)+34 (0x13AC94F2) NKikimr::NTestSuiteDataShardReadIteratorBatchMode::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13B5A4A7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E4B7CE) NKikimr::NTestSuiteDataShardReadIteratorBatchMode::TCurrentTest::Execute()+481 (0x13B59E41) NUnitTest::TTestFactory::Execute()+803 (0x13E4BF43) NUnitTest::RunMain(int, char**)+3021 (0x13E5DAED) ??+0 (0x7F2FAD406D90) __libc_start_main+128 (0x7F2FAD406E40) _start+41 (0x12A96029) >> TCmsTenatsTest::TestTenantLimit [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy |65.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest |65.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest |65.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded [FAIL] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain [GOOD] >> TConsoleTests::TestRestartConsoleAndPools |65.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |65.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest |65.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |65.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |65.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges >> LocalPartitionReader::Booting >> LocalPartitionReader::Booting [GOOD] |65.2%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges [GOOD] >> TLogSettingsConfiguratorTests::TestAddComponentEntries >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope >> KqpPg::TableSelect-useSink [GOOD] >> KqpPg::TableInsert+useSink >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps >> TSchemeShardUserAttrsTest::UserConditionsAtAlter |65.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting [GOOD] >> TDowntimeTest::AddDowntime [GOOD] >> TDowntimeTest::HasUpcomingDowntime [GOOD] >> TDowntimeTest::CleanupOldSegments [GOOD] >> TSchemeShardUserAttrsTest::MkDir >> LocalPartitionReader::Simple [GOOD] >> TLogSettingsConfiguratorTests::TestAddComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries >> TCmsTest::TestSetResetMarkers [GOOD] >> TCmsTest::TestProcessingQueue ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnOnSingleTopic [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:23:39.148214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:39.148238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:39.148244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:39.148249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:39.148274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:39.148279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:39.148288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:39.148302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:39.148400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:39.148471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:39.162510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:23:39.162532Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:39.162630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:23:39.166056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:39.166090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:39.166122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:39.169351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:39.169444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:39.169575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:39.169716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:39.170252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:39.170292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:39.170454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:39.170461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:39.170489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:39.170496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:39.170500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:39.170514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:23:39.171492Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:23:39.192608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:39.192664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:39.192714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:39.192756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:39.192766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:39.193333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:39.193354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:39.193385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:39.193394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:39.193400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:39.193406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:39.193826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:39.193839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:39.193845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:39.194162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:39.194170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:39.194177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:39.194184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:39.194953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:39.195356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:39.195386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:39.195564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:39.195587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:39.195594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:39.195657Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-29T15:24:24.688140Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-29T15:24:24.688145Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-05-29T15:24:24.688205Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:24.688225Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 700079671404 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:24.688232Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:129: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000005, at schemeshard: 72057594046678944 2025-05-29T15:24:24.688254Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:180: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-05-29T15:24:24.688263Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-29T15:24:24.688268Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-29T15:24:24.688273Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-29T15:24:24.688300Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-29T15:24:24.688308Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:24:24.688317Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:24:24.688323Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-05-29T15:24:24.688329Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-29T15:24:24.688334Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710758:0 2025-05-29T15:24:24.688338Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710758:0 2025-05-29T15:24:24.688346Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:24:24.688351Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-05-29T15:24:24.688355Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-29T15:24:24.688359Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-29T15:24:24.688500Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-29T15:24:24.688518Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-29T15:24:24.688840Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:24.688851Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:24.688881Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:24:24.688903Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:24.688908Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [163:208:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-05-29T15:24:24.688913Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [163:208:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-05-29T15:24:24.689048Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-29T15:24:24.689061Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-29T15:24:24.689065Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-29T15:24:24.689070Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-29T15:24:24.689074Z node 163 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:24:24.689178Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-29T15:24:24.689187Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-29T15:24:24.689191Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-29T15:24:24.689195Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:24:24.689202Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:24:24.689213Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-05-29T15:24:24.689217Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [163:126:2151] 2025-05-29T15:24:24.689271Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:24:24.689278Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:24:24.689289Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:24:24.689671Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-29T15:24:24.689937Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-29T15:24:24.689963Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-05-29T15:24:24.689974Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710758 2025-05-29T15:24:24.689981Z node 163 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute 2025-05-29T15:24:24.689986Z node 163 :EXPORT DEBUG: schemeshard_export__create.cpp:1232: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-05-29T15:24:24.689990Z node 163 :EXPORT DEBUG: 
schemeshard_export__create.cpp:1263: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 1003, itemIdx# 4294967295 2025-05-29T15:24:24.690041Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:24:24.690289Z node 163 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-29T15:24:24.690339Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:24:24.690347Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:24:24.690418Z node 163 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:24:24.690434Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:24:24.690439Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [163:676:2603] TestWaitNotification: OK eventTxId 1003 >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [FAIL] >> ExternalBlobsMultipleChannels::SingleChannel [FAIL] >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] Test command err: 2025-05-29T15:24:12.859437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:12.859466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:12.859472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:12.859477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:12.859492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:12.859496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:12.859512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:12.859536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:12.859651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources 
configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:12.859723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:12.863887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:12.863911Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:12.865868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:12.865943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:12.865960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2025-05-29T15:24:12.866936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:12.867040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:12.867142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:12.867226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:24:12.867935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:12.867970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:12.868202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:24:12.868212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:12.868242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:12.868252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-05-29T15:24:12.868258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:12.868291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2025-05-29T15:24:12.907209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2025-05-29T15:24:12.907276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 
2025-05-29T15:24:12.907327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2025-05-29T15:24:12.907366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2025-05-29T15:24:12.907374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:12.908067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:12.908093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2025-05-29T15:24:12.908130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:12.908138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046578944 2025-05-29T15:24:12.908142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:12.908145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:12.908470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:12.908477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046578944 2025-05-29T15:24:12.908481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:12.908711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:12.908718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:12.908729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:12.908737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:12.909165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:12.909483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:12.909536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:24:12.909690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:12.909695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-05-29T15:24:12.909698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:13.156150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:13.156214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2025-05-29T15:24:13.156226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:13.156325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:13.156335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:13.156372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2025-05-29T15:24:13.156386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:24:13.159365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:24:13.159389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-05-29T15:24:13.159436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:13.159440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:261:2249], at schemeshard: 72057594046578944, txId: 1, path id: 1 202 ... 
025-05-29T15:24:25.963466Z node 24 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:24:25.963507Z node 24 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:24:25.963520Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:193: StateInit, received event# 273481728, Sender [24:404:2360], Recipient [24:403:2359]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2025-05-29T15:24:25.963525Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:202: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2025-05-29T15:24:25.963593Z node 24 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:24:25.963600Z node 24 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:24:25.963603Z node 24 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:24:25.963613Z node 24 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[24:413:2366] 2025-05-29T15:24:25.963634Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:193: StateInit, received event# 273481728, Sender [24:407:2358], Recipient [24:403:2359]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2025-05-29T15:24:25.963637Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:202: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2025-05-29T15:24:25.963933Z node 24 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:24:25.963937Z node 24 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [24:404:2360] 2025-05-29T15:24:25.964048Z node 24 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[24:413:2366]} 2025-05-29T15:24:25.964059Z node 24 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:24:25.964064Z node 24 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:24:25.964067Z node 24 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:24:25.965744Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:193: StateInit, received event# 273481728, Sender [24:439:2373], Recipient [24:403:2359]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2025-05-29T15:24:25.965758Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:202: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2025-05-29T15:24:25.978090Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:193: StateInit, received event# 273481728, Sender [24:462:2400], Recipient [24:403:2359]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2025-05-29T15:24:25.978116Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:202: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2025-05-29T15:24:25.991489Z node 24 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-05-29T15:24:26.001912Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:193: StateInit, received event# 273285146, Sender [24:409:2359], 
Recipient [24:403:2359]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false EnableScaleRecommender: true } } RawConsoleConfig { } } 2025-05-29T15:24:26.001937Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:199: StateInit, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-05-29T15:24:26.001983Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: AllowEditYamlInUiItem 2025-05-29T15:24:26.001998Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [24:439:2373]: Config { } ItemKinds: 75 Local: true 2025-05-29T15:24:26.002029Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: FeatureFlagsItem 2025-05-29T15:24:26.002043Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [24:462:2400]: Config { FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false EnableScaleRecommender: true } } ItemKinds: 26 Local: true 2025-05-29T15:24:26.002071Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: MonitoringConfigItem 2025-05-29T15:24:26.002082Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [24:404:2360]: Config { } ItemKinds: 10 Local: true 2025-05-29T15:24:26.002087Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: MonitoringConfigItem 2025-05-29T15:24:26.002095Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [24:407:2358]: Config { } ItemKinds: 10 Local: true 2025-05-29T15:24:26.003076Z node 24 :TENANT_POOL DEBUG: tenant_pool.cpp:486: TDomainTenantPool(dc-1) Got new monitoring config: 2025-05-29T15:24:26.003106Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [24:404:2360], Recipient [24:403:2359]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-05-29T15:24:26.003113Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-05-29T15:24:26.003156Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [24:407:2358], Recipient [24:403:2359]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-05-29T15:24:26.003160Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-05-29T15:24:26.003171Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [24:439:2373], Recipient [24:403:2359]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-05-29T15:24:26.003175Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-05-29T15:24:26.003189Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [24:462:2400], Recipient [24:403:2359]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-05-29T15:24:26.003197Z node 24 
:CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-05-29T15:24:26.024506Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273285146, Sender [24:409:2359], Recipient [24:403:2359]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { FeatureFlags { EnableExternalHive: false } Version { Items { Kind: 26 Id: 1 Generation: 1 } } } AffectedKinds: 26 RawConsoleConfig { FeatureFlags { EnableExternalHive: false } Version { Items { Kind: 26 Id: 1 Generation: 1 } } } } 2025-05-29T15:24:26.024527Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:221: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-05-29T15:24:26.024555Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: FeatureFlagsItem 2025-05-29T15:24:26.024571Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [24:462:2400]: Config { FeatureFlags { EnableExternalHive: false } } ItemKinds: 26 Local: true 2025-05-29T15:24:26.024604Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [24:462:2400], Recipient [24:403:2359]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-05-29T15:24:26.024610Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-05-29T15:24:26.046083Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273285146, Sender [24:409:2359], Recipient [24:403:2359]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { FeatureFlags { EnableExternalHive: false EnableDataShardVolatileTransactions: false } Version { Items { Kind: 26 Id: 1 Generation: 1 } Items { Kind: 26 Id: 2 Generation: 1 } } } AffectedKinds: 26 RawConsoleConfig { FeatureFlags { EnableExternalHive: false EnableDataShardVolatileTransactions: false } Version { Items { Kind: 26 Id: 1 Generation: 1 } Items { Kind: 26 Id: 1 Generation: 1 } Items { Kind: 26 Id: 2 Generation: 1 } } } } 2025-05-29T15:24:26.046121Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:221: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-05-29T15:24:26.046151Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: FeatureFlagsItem 2025-05-29T15:24:26.046169Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [24:462:2400]: Config { FeatureFlags { EnableExternalHive: false EnableDataShardVolatileTransactions: false } } ItemKinds: 26 Local: true 2025-05-29T15:24:26.046204Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [24:462:2400], Recipient [24:403:2359]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-05-29T15:24:26.046209Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-05-29T15:24:26.074196Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273285146, Sender [24:409:2359], Recipient [24:403:2359]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { 
FeatureFlags { EnableVolatileTransactionArbiters: false } Version { Items { Kind: 26 Id: 3 Generation: 1 } } } AffectedKinds: 26 RawConsoleConfig { FeatureFlags { EnableVolatileTransactionArbiters: false } Version { Items { Kind: 26 Id: 1 Generation: 1 } Items { Kind: 26 Id: 1 Generation: 1 } Items { Kind: 26 Id: 2 Generation: 1 } Items { Kind: 26 Id: 3 Generation: 1 } } } } 2025-05-29T15:24:26.074221Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:221: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-05-29T15:24:26.074251Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: FeatureFlagsItem 2025-05-29T15:24:26.074267Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [24:462:2400]: Config { FeatureFlags { EnableVolatileTransactionArbiters: false } } ItemKinds: 26 Local: true 2025-05-29T15:24:26.074304Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [24:462:2400], Recipient [24:403:2359]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-05-29T15:24:26.074310Z node 24 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse
>> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD]
>> ExternalBlobsMultipleChannels::WithCompaction [FAIL]
>> TCmsTest::ManageRequestsWrong
>> TSchemeShardUserAttrsTest::SpecialAttributes
>> TCmsTest::RequestReplaceDevicePDisk [GOOD]
>> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [FAIL]
>> TCmsTest::RequestReplaceDevicePDiskByPath
>> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet
>> TSchemeShardUserAttrsTest::MkDir [GOOD]
>> TExternalDataSourceTest::RemovingReferencesFromDataSources
>> TExternalDataSourceTest::ReadOnlyMode
>> TExternalDataSourceTest::SchemeErrors
>> ExternalBlobsMultipleChannels::Simple [FAIL]
>> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply
>> TLogSettingsConfiguratorTests::TestRemoveComponentEntries [GOOD]
>> TLogSettingsConfiguratorTests::TestChangeDefaults
>> KqpPg::CheckPgAutoParams+useSink [FAIL]
>> KqpPg::CheckPgAutoParams-useSink
|65.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TDowntimeTest::CleanupOldSegments [GOOD]
|65.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD]
>> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists
>> KqpPg::TableInsert+useSink [FAIL]
>> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD]
>> KqpPg::TableInsert-useSink
>> TCmsTest::ManagePermissionWrongRequest [GOOD]
>> TCmsTest::ManageRequests
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:24:26.640271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488:
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:26.640301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:26.640308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:26.640314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:26.640329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:26.640334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:26.640344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:26.640361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:26.640487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:26.640583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:26.655463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:26.655483Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:26.657890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:26.657987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:26.658024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:26.660703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:26.660882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:26.661019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:26.661066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:26.661625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:26.661677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:26.661951Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:26.661964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:26.661988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:26.661997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:26.662003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:26.662038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.663556Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:26.687600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:26.687701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.687791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:26.687847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:26.687860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.689103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:26.689142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:26.689204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.689217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:26.689223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2025-05-29T15:24:26.689230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:26.689774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.689788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:26.689794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:26.690124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.690135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.690140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:26.690147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:26.690860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:26.691241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:26.691287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:26.691473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:26.691500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:26.691507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:26.691576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:26.691584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:26.691621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:26.691634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:26.692008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:26.692017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:26.692065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 6678944, LocalPathId: 1] was 3 2025-05-29T15:24:26.722675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:24:26.722683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-05-29T15:24:26.722690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-05-29T15:24:26.722695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 105:0 2025-05-29T15:24:26.722703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 105:0 2025-05-29T15:24:26.722716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:24:26.722722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2025-05-29T15:24:26.722727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-29T15:24:26.722731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-29T15:24:26.722850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:24:26.722914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:24:26.723320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:26.723333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:26.723366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:24:26.723409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:26.723415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-05-29T15:24:26.723420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 105, path id: 4 FAKE_COORDINATOR: Erasing txId 105 2025-05-29T15:24:26.723548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:24:26.723561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:24:26.723566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:24:26.723570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-29T15:24:26.723576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:24:26.723633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:24:26.723645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:24:26.723648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:24:26.723653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:24:26.723657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:24:26.723668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-05-29T15:24:26.723699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 
72057594046678944 2025-05-29T15:24:26.723704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:24:26.723714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:24:26.724205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:24:26.724514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:24:26.724540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-05-29T15:24:26.724616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-05-29T15:24:26.724625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-05-29T15:24:26.724719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-05-29T15:24:26.724743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-29T15:24:26.724749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:404:2394] TestWaitNotification: OK eventTxId 105 2025-05-29T15:24:26.724846Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirC" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:26.724879Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirC" took 47us result status StatusPathDoesNotExist 2025-05-29T15:24:26.724932Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirC\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/DirC" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:24:26.725033Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:26.725058Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 30us result status StatusSuccess 2025-05-29T15:24:26.725151Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::ExplainColumnsReorder [GOOD]
Test command err:
Trying to start YDB, gRPC: 18132, MsgBus: 23409
2025-05-29T15:24:07.626407Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888817495366660:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:07.626436Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002703/r3tmp/tmpHs6n1b/pdisk_1.dat 2025-05-29T15:24:07.681329Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18132, node 1 2025-05-29T15:24:07.695004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:07.695020Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:24:07.695022Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:07.695082Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23409 2025-05-29T15:24:07.727556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:07.727595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:07.728561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23409 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:07.762094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:07.764800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:08.002646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888821790334596:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.002654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888821790334585:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.002673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:08.003586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:08.005893Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888821790334599:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:08.075486Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888821790334650:2325] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:08.083086Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888821790334667:2336], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:08.083167Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTI0OWIxMGMtM2UwNWYyMDktNGM5M2Y4NTAtN2I2NDlkMTY=, ActorId: [1:7509888817495367271:2326], ActorState: ExecuteState, TraceId: 01jwea9fe2bzwvs7e19sgnxn7r, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:1043, virtual void NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseEmptyQuery::Execute_(NUnitTest::TTestContext &) [useSink = true]: (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8
2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:1043: Execute_ @ 0x15D82EF7
3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686
4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D
5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0
6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2
7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C
8. ??:0: ?? @ 0x7FF23B89ED8F
9. ??:0: ?? @ 0x7FF23B89EE3F
10. ??:0: ?? @ 0x14D5E028
Trying to start YDB, gRPC: 5337, MsgBus: 6672
2025-05-29T15:24:10.167066Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888832462985587:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:10.167083Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002703/r3tmp/tmpFXICwP/pdisk_1.dat 2025-05-29T15:24:10.182167Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:10.182383Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509888832462985564:2079] 1748532250166984 != 1748532250166987 TServer::EnableGrpc on GrpcPort 5337, node 2 2025-05-29T15:24:10.194319Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:10.194332Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:10.194335Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:10.194389Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6672 TClient is connected to server localhost:6672 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-05-29T15:24:10.272174Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:10.272209Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:10.272581Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:10.273158Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:10.526631Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888832462986230:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:10.526648Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888832462986222:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:10.526663Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:10.527393Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:10.529138Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888832462986236:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:10.607094Z n ... reateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:23.407126Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:23.407164Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:23.407443Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:23.408062Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:23.409210Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:24:23.702787Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7509888886297070646:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:23.702821Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:23.705646Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:24:23.721668Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:24:23.733521Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7509888886297070819:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:23.733546Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:23.733552Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7509888886297070824:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:23.734140Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-05-29T15:24:23.737686Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7509888886297070826:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-05-29T15:24:23.818707Z node 5 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [5:7509888886297070877:2427] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:23.871372Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [5:7509888886297070886:2354], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:23.872519Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=5&id=YzJjODhmZjgtMTdmZDA4ZDctYTE0NjBmZDQtZDFkNTlhNTk=, ActorId: [5:7509888886297070817:2345], ActorState: ExecuteState, TraceId: 01jwea9ysm9mkh652zkxw4e9ne, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4161, virtual void NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseEquiJoin::Execute_(NUnitTest::TTestContext &) [useSink = false]: (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4161: Execute_ @ 0x15E168E0 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 8. ??:0: ?? @ 0x7F90C437FD8F 9. ??:0: ?? @ 0x7F90C437FE3F 10. ??:0: ?? @ 0x14D5E028 2025-05-29T15:24:24.677017Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:24.677086Z node 6 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:24.677108Z node 6 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002703/r3tmp/tmpxtH9cj/pdisk_1.dat 2025-05-29T15:24:24.783613Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:24.803479Z node 6 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:24.804038Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [6:32:2079] 1748532264253150 != 1748532264253154 2025-05-29T15:24:24.853749Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:24.853794Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:24.864624Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:24.957705Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:643:2551], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:24.957739Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:654:2556], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:24.957749Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:24.958850Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-05-29T15:24:25.065120Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:657:2559], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-05-29T15:24:25.121399Z node 6 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [6:726:2597] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } PreparedQuery: "3ae7cecd-c7a2e43d-c435ddf8-5f736a30" QueryAst: "(\n(let $1 (PgType \'int4))\n(let $2 \'(\'(\'\"_logical_id\" \'218) \'(\'\"_id\" \'\"83263c5c-862604d1-c8219c69-79e3ae64\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $3 (DqPhyStage \'() (lambda \'() (Iterator (AsList (AsStruct \'(\'\"x\" (PgConst \'1 $1)) \'(\'\"y\" (PgConst \'2 $1)))))) $2))\n(let $4 (DqCnResult (TDqOutput $3 \'\"0\") \'(\'\"y\" \'\"x\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($3) \'($4) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"x\" $1) \'(\'\"y\" $1))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" QueryPlan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{x: \\\"1\\\",y: \\\"2\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}" YdbResults { columns { name: "y" type { pg_type { oid: 23 } } } columns { name: "x" type { pg_type { oid: 23 } } } } QueryDiagnostics: "" ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:26.614392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:26.614423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:26.614429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:26.614434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:26.614469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:26.614475Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:26.614485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:26.614501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:26.614609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:26.614697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:26.631256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:26.631284Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:26.634167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:26.634303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:26.634348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:26.635774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:26.635929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:26.636051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:26.636101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:26.636565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:26.636615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:26.636887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:26.636897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:26.636920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:26.636929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:26.636935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:26.636971Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.638207Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:26.654017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:26.654104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.654188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:26.654241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:26.654252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.655075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:26.655107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:26.655161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.655172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:26.655178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:26.655183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:26.655570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.655581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:26.655586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:26.655924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.655934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-05-29T15:24:26.655939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:26.655944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:26.656357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:26.656653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:26.656684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:26.656817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:26.656834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:26.656839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:26.656889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:26.656894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:26.656919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:26.656928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:26.657228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:26.657235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:26.657271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-05-29T15:24:26.669315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "DirA" UserAttributes { Key: "AttrA2" Value: "ValA2" } } ApplyIf { PathId: 2 PathVersion: 4 } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:26.669331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_user_attrs.cpp:26: TAlterUserAttrs Propose, path: /MyRoot/DirA, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.669343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-05-29T15:24:26.669359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:26.669363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.669640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:26.669656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirA 2025-05-29T15:24:26.669674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.669678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:97: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.669683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-05-29T15:24:26.669699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:26.669906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-05-29T15:24:26.669928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation 
RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-05-29T15:24:26.669981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:26.669993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:26.669996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:114: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2025-05-29T15:24:26.670022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:24:26.670025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:24:26.670029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:24:26.670031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:24:26.670037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:24:26.670042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-05-29T15:24:26.670048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:24:26.670050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:24:26.670054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:24:26.670056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:24:26.670061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:26.670064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-05-29T15:24:26.670067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-05-29T15:24:26.670312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:26.670320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:24:26.670339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:26.670342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 103, path id: 2 FAKE_COORDINATOR: Erasing txId 103 2025-05-29T15:24:26.670413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:24:26.670420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:24:26.670423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:24:26.670427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-05-29T15:24:26.670430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:24:26.670439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-05-29T15:24:26.670643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-05-29T15:24:26.670678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:24:26.670681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:24:26.670720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:24:26.670730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:24:26.670733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:350:2340] TestWaitNotification: OK eventTxId 103 2025-05-29T15:24:26.670804Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:26.670823Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/DirA" took 23us result status StatusSuccess 2025-05-29T15:24:26.670877Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TExternalDataSourceTest::SchemeErrors [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::MkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:26.779645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:26.779678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:26.779685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:26.779690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:26.779708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:26.779712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:26.779724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:26.779741Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:26.779873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:26.779962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:26.792024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:26.792053Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:26.794709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:26.794855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:26.794901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:26.796455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:26.796591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:26.796702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:26.796746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:26.797145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:26.797190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:26.797432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:26.797440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:26.797458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:26.797465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:26.797471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:26.797504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.798757Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:26.818360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:26.818455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.818541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:26.818593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:26.818605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.819599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:26.819631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:26.819686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.819697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:26.819703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:26.819708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:26.820181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.820192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:26.820197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:26.820551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.820562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.820568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:26.820576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 
1/1 2025-05-29T15:24:26.821241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:26.821646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:26.821691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:26.821888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:26.821914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:26.821923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:26.821997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:26.822004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:26.822042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:26.822053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:26.822443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:26.822452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:26.822499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
ode 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:24:26.843562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:24:26.843566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:381:2371] 2025-05-29T15:24:26.843579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:24:26.843582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:381:2371] 2025-05-29T15:24:26.843593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:24:26.843605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:24:26.843608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:381:2371] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-05-29T15:24:26.843743Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:26.843778Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 49us result status StatusSuccess 2025-05-29T15:24:26.843926Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 
72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:26.844023Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:26.844043Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 22us result status StatusSuccess 2025-05-29T15:24:26.844089Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 } ChildrenExist: true } Children { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:26.844155Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:26.844172Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 18us result status StatusSuccess 2025-05-29T15:24:26.844207Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } UserAttributes { Key: "AttrB2" Value: "ValB2" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:26.844253Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:26.844268Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA" took 16us result status StatusSuccess 2025-05-29T15:24:26.844310Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA" PathDescription { Self { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAA1" Value: "ValAA1" } UserAttributes { Key: "AttrAA2" Value: "ValAA2" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:26.844357Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:26.844370Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA/DirB" took 16us result status StatusSuccess 2025-05-29T15:24:26.844403Z 
node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA/DirB" PathDescription { Self { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAB1" Value: "ValAB1" } UserAttributes { Key: "AttrAB2" Value: "ValAB2" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |65.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |65.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |65.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |65.3%| [TA] $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TLogSettingsConfiguratorTests::TestChangeDefaults [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestApplyValidatorsWithOldConfig [GOOD] >> TModificationsValidatorTests::TestChecksLimitError [GOOD] >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> TCmsTest::VDisksEviction >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties >> TExternalDataSourceTest::ReadOnlyMode [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:26.924679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:26.924715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:26.924720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:26.924725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:26.924740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:26.924744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:26.924752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:26.924767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:26.924870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:26.924951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:26.937788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:26.937818Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 
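The assertion failure recorded earlier in this log at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4161 — "(result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)" — has the standard failure shape of a status check in the library/cpp/testing/unittest framework. Below is a minimal self-contained sketch of that check; the enum, function names, and error printing here are simplified stand-ins (the real test uses the framework's UNIT_ASSERT* macros from library/cpp/testing/unittest/registar.h and the YDB SDK result types), so treat it as an illustration of the failure mechanics, not the actual test code.

    #include <cstdio>
    #include <cstdlib>
    #include <string>

    // Hypothetical stand-in for the SDK status enum checked by the test.
    enum class EStatus { SUCCESS, INTERNAL_ERROR };

    static const char* ToString(EStatus s) {
        return s == EStatus::SUCCESS ? "SUCCESS" : "INTERNAL_ERROR";
    }

    // Simplified equivalent of the check at kqp_pg_ut.cpp:4161:
    // (result.GetStatus() == EStatus::SUCCESS). On mismatch the unittest
    // framework prints both values plus a character-level diff, which is
    // where the "(INT|SUCC)E(RNAL_ERROR|SS)" fragment in the log comes from.
    static void AssertStatusSuccess(EStatus actual, const std::string& issues) {
        if (actual != EStatus::SUCCESS) {
            std::fprintf(stderr,
                         "assertion failed: (%s != SUCCESS), issues: %s\n",
                         ToString(actual), issues.c_str());
            std::abort();  // the real framework raises and records a backtrace instead
        }
    }

    int main() {
        // The compile error reported above ("yql_expr.h:1874: index out of
        // range") surfaces to the client as INTERNAL_ERROR, so the check fires.
        AssertStatusSuccess(
            EStatus::INTERNAL_ERROR,
            "yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1");
        return 0;
    }

In the failing run the query never reaches execution: compilation aborts inside the expression AST, the session actor replies ReplyQueryCompileError with status INTERNAL_ERROR, and the equality assertion converts that status into the test failure and the backtrace captured above.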
2025-05-29T15:24:26.940952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:26.941089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:26.941137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:26.943109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:26.943312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:26.943444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:26.943498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:26.944019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:26.944067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:26.944323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:26.944332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:26.944354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:26.944361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:26.944366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:26.944400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.945823Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:26.965972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:26.966068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.966165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:26.966217Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:26.966229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.967188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:26.967221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:26.967284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.967295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:26.967301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:26.967307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:26.967762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.967776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:26.967782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:26.968086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.968095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.968100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:26.968108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:26.968702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:26.969072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:26.969113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, 
tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:26.969297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:26.969322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:26.969329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:26.969395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:26.969401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:26.969435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:26.969447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:26.969832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:26.969842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:26.969891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
] was 2 2025-05-29T15:24:26.979318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:26.979328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:26.979332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:24:26.979336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-05-29T15:24:26.979339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:26.979349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-05-29T15:24:26.979884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-05-29T15:24:26.979916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 2025-05-29T15:24:26.980054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:26.980073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:26.980081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002, at schemeshard: 72057594046678944 2025-05-29T15:24:26.980106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:0 128 -> 240 2025-05-29T15:24:26.980140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:26.980148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:24:26.980388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:24:26.980460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-05-29T15:24:26.980875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:26.980884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:26.980916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:24:26.980928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:26.980932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-05-29T15:24:26.980936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-29T15:24:26.980989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.980996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-29T15:24:26.981011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:24:26.981016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:24:26.981021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:24:26.981024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:24:26.981028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-05-29T15:24:26.981033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:24:26.981037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:24:26.981042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:24:26.981056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:26.981062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-05-29T15:24:26.981067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, 
LocalPathId: 1], 5 2025-05-29T15:24:26.981070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-05-29T15:24:26.981179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:26.981188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:26.981191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:24:26.981196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:24:26.981199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:24:26.981278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:26.981284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:26.981287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:24:26.981289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-29T15:24:26.981291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:24:26.981297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-05-29T15:24:26.982134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:24:26.982214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2025-05-29T15:24:26.982885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "DirD" } AlterUserAttributes { UserAttributes { Key: "__extra_path_symbols_allowed" Value: "./_" } } } 
TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:26.982936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/DirD, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:24:26.982955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, at schemeshard: 72057594046678944 2025-05-29T15:24:26.984293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "UserAttributes: attribute \'__extra_path_symbols_allowed\' has invalid value \'./_\', forbidden symbols are found" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:26.984333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, operation: CREATE DIRECTORY, path: /MyRoot/DirD TestModificationResult got TxId: 103, wait until txId: 103 >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot [FAIL] >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns >> TCmsTest::TestKeepAvailableModeDisconnects [GOOD] >> TCmsTest::TestForceRestartModeScheduled ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:27.123912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:27.123936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:27.123941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:27.123947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:27.123953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:27.123957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:27.123966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:27.123980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:27.124084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:27.124151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:27.137534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:27.137562Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:27.137660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:27.140804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:27.140939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:27.140988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:27.143757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:27.143852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:27.143983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.144048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:27.144597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.144638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:27.144934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.144945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.144966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:27.144974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:27.144980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:27.145014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.146588Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] 
started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:27.168094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:27.168177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.168247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:27.168298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:27.168311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.170189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.170219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:27.170278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.170290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:27.170296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:27.170301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:27.170846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.170862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:27.170868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:27.171229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.171241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.171247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.171253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:27.171925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:27.172321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:27.172363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:27.172551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.172577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:27.172594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.172664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:27.172672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.172700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:27.172711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:27.173096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.173106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
7594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-05-29T15:24:27.193295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.193315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:27.193323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_external_data_source.cpp:40: [72057594046678944] TDropExternalDataSource TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000005 2025-05-29T15:24:27.193342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:27.193357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 104:0 128 -> 240 2025-05-29T15:24:27.193379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:27.193387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:24:27.193713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-05-29T15:24:27.193844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-05-29T15:24:27.194079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.194086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:27.194106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:24:27.194126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.194130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-05-29T15:24:27.194135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-05-29T15:24:27.194172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at 
schemeshard: 72057594046678944 2025-05-29T15:24:27.194179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 104:0 ProgressState 2025-05-29T15:24:27.194191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1 2025-05-29T15:24:27.194195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:24:27.194200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1 2025-05-29T15:24:27.194203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:24:27.194207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-05-29T15:24:27.194212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:24:27.194216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:0 2025-05-29T15:24:27.194220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:0 2025-05-29T15:24:27.194231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:27.194238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-05-29T15:24:27.194242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-29T15:24:27.194246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-05-29T15:24:27.194310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:24:27.194320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:24:27.194323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-05-29T15:24:27.194327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:24:27.194331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:24:27.194370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, 
at schemeshard: 72057594046678944 2025-05-29T15:24:27.194375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:24:27.194382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:24:27.194415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:24:27.194423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:24:27.194426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-05-29T15:24:27.194430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-29T15:24:27.194433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:27.194441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-05-29T15:24:27.195270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-05-29T15:24:27.195300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:24:27.195311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-05-29T15:24:27.195361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-05-29T15:24:27.195368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-05-29T15:24:27.195454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-05-29T15:24:27.195472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-29T15:24:27.195475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:391:2381] TestWaitNotification: OK eventTxId 104 2025-05-29T15:24:27.195530Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:27.195549Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 28us result status StatusPathDoesNotExist 2025-05-29T15:24:27.195575Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:27.101180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:27.101206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:27.101212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:27.101219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:27.101224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:27.101229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:27.101239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:27.101254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-05-29T15:24:27.101375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:27.101454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:27.116531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:27.116557Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:27.116666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:27.119552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:27.119681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:27.119728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:27.121664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:27.121732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:27.121853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.121909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:27.122416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.122451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:27.122693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.122707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.122725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:27.122732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:27.122758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:27.122789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.124244Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:27.147931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:27.148008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.148082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:27.148137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:27.148151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.149008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.149041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:27.149110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.149122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:27.149129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:27.149135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:27.149682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.149697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:27.149703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:27.150139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.150153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.150160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.150169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:27.150965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:27.151461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:27.151513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:27.151723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.151754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:27.151776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.151850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:27.151859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.151895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:27.151909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:27.152373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.152384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
ist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } 2025-05-29T15:24:27.165449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:232: [72057594046678944] TCreateExternalDataSource Propose: opId# 126:0, path# /MyRoot/DirA/MyExternalDataSource 2025-05-29T15:24:27.165474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Authorization method isn't specified, at schemeshard: 72057594046678944 2025-05-29T15:24:27.166047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Authorization method isn\'t specified" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:27.166070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Authorization method isn't specified, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-05-29T15:24:27.166666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:27.166711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:336: [72057594046678944] CreateNewExternalDataSource, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-05-29T15:24:27.166720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:232: [72057594046678944] TCreateExternalDataSource Propose: opId# 127:0, path# /MyRoot/DirA/MyExternalDataSource 2025-05-29T15:24:27.166761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-05-29T15:24:27.168674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Maximum length of location must be less or equal equal to 1000 but got 1001" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:27.168706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-05-29T15:24:27.169299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:27.169342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:336: [72057594046678944] CreateNewExternalDataSource, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-05-29T15:24:27.169354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:232: [72057594046678944] TCreateExternalDataSource Propose: opId# 128:0, path# /MyRoot/DirA/MyExternalDataSource 2025-05-29T15:24:27.169379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-05-29T15:24:27.170106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Maximum length of installation must be less or equal equal to 1000 but got 1001" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:27.170156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, 
subject: , status: StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-05-29T15:24:27.170769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:27.170804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:336: [72057594046678944] CreateNewExternalDataSource, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } 2025-05-29T15:24:27.170819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:232: [72057594046678944] TCreateExternalDataSource Propose: opId# 129:0, path# /MyRoot/DirA/ 2025-05-29T15:24:27.170835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:101, at schemeshard: 72057594046678944 2025-05-29T15:24:27.171536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/DirA/\', error: path part shouldn\'t be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:101" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:27.171564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:101, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/ TestModificationResult got TxId: 129, wait until txId: 129 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:27.051331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:27.051359Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:27.051366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:27.051372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:27.051379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:27.051383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:27.051392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:27.051408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:27.051537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:27.051616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:27.063529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:27.063555Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:27.063633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:27.067136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:27.067269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:27.067308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:27.071826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:27.071908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:27.072029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.072087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:27.072583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.072621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:27.072859Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.072870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.072886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:27.072896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:27.072903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:27.072934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.075395Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:27.094680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:27.094763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.094846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:27.094944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:27.094958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.095776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.095803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:27.095862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.095870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:27.095874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:27.095879Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:27.096379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.096392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:27.096398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:27.096798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.096808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.096813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.096818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:27.097318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:27.097716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:27.097752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:27.097908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.097929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:27.097947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.098003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:27.098008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.098033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:27.098042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:27.098413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.098419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:27.098456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.098461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-29T15:24:27.098516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.098521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-29T15:24:27.098532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:24:27.098536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:24:27.098539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:24:27.098542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:24:27.098545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-29T15:24:27.098549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:24:27.098552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1:0 2025-05-29T15:24:27.098555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1:0 2025-05-29T15:24:27.098564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:24:27.098569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-29T15:24:27.098572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-29T15:24:27.098882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:24:27.098901Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:24:27.098906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-05-29T15:24:27.098910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-05-29T15:24:27.098914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:27.098925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-05-29T15:24:27.099517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-05-29T15:24:27.099599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-05-29T15:24:27.099804Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [1:273:2263] Bootstrap 2025-05-29T15:24:27.101201Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [1:273:2263] Become StateWork (SchemeCache [1:278:2268]) 2025-05-29T15:24:27.101874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:27.101931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:336: [72057594046678944] CreateNewExternalDataSource, opId 101:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-05-29T15:24:27.101943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-05-29T15:24:27.101948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-05-29T15:24:27.102079Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:273:2263] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:24:27.102695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:27.102722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource 2025-05-29T15:24:27.102781Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-05-29T15:24:27.102829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:24:27.102836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-05-29T15:24:27.102879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:24:27.102893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:24:27.102896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:288:2278] TestWaitNotification: OK eventTxId 101 2025-05-29T15:24:27.102965Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:27.102992Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 33us result status StatusPathDoesNotExist 2025-05-29T15:24:27.103030Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TExternalDataSourceTest::CreateExternalDataSource >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled >> 
TCmsTenatsTest::TestScheduledPermissionWithNonePolicy [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartMode >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties [GOOD] >> TExternalDataSourceTest::DropExternalDataSource >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] Test command err: 2025-05-29T15:24:18.933115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:18.933146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:18.933152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:18.933157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:18.933174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:18.933179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:18.933195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:18.933219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:18.933330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:18.933403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:18.949114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:18.949145Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:18.968197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:18.968247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:18.968267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2025-05-29T15:24:18.969166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:18.969305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:18.969422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:18.969553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:24:18.970161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:18.970210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:18.970490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:24:18.970502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:18.970533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:18.970542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-05-29T15:24:18.970548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:18.970571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2025-05-29T15:24:19.014703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2025-05-29T15:24:19.014810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:19.014883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2025-05-29T15:24:19.014945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2025-05-29T15:24:19.014959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:19.019219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:19.019274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2025-05-29T15:24:19.019344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:19.019358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046578944 2025-05-29T15:24:19.019364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:19.019370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:19.023214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:19.023245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046578944 2025-05-29T15:24:19.023255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:19.027130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:19.027154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:19.027174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:19.027182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:19.027953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:19.030768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:19.030845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:24:19.031088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:19.031097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-05-29T15:24:19.031102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:19.299404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:19.299477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { 
RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2025-05-29T15:24:19.299491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:19.299601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:19.299615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:19.299656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2025-05-29T15:24:19.299678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:24:19.307656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:24:19.307682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-05-29T15:24:19.307737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:19.307743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:261:2249], at schemeshard: 72057594046578944, txId: 1, path id: 1 202 ... 
E to ALERT 2025-05-29T15:24:27.273332Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY_CACHE has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273334Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY_CACHE has been changed from 0 to 10 2025-05-29T15:24:27.273336Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component EXT_INDEX has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273339Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component EXT_INDEX has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273341Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component EXT_INDEX has been changed from 0 to 10 2025-05-29T15:24:27.273343Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TX_CONVEYOR has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273345Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TX_CONVEYOR has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273347Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TX_CONVEYOR has been changed from 0 to 10 2025-05-29T15:24:27.273350Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TX_LIMITER has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273352Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TX_LIMITER has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273354Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TX_LIMITER has been changed from 0 to 10 2025-05-29T15:24:27.273357Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component ARROW_HELPER has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273359Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component ARROW_HELPER has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273361Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component ARROW_HELPER has been changed from 0 to 10 2025-05-29T15:24:27.273363Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component SSA_GRAPH_EXECUTION has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273365Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component SSA_GRAPH_EXECUTION has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273367Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component SSA_GRAPH_EXECUTION has been changed from 0 to 10 2025-05-29T15:24:27.273369Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KAFKA_PROXY has been 
changed from NOTICE to ALERT 2025-05-29T15:24:27.273371Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KAFKA_PROXY has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273373Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component KAFKA_PROXY has been changed from 0 to 10 2025-05-29T15:24:27.273376Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component OBJECTS_MONITORING has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273378Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component OBJECTS_MONITORING has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273380Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component OBJECTS_MONITORING has been changed from 0 to 10 2025-05-29T15:24:27.273382Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component STATISTICS has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273384Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component STATISTICS has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273386Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component STATISTICS has been changed from 0 to 10 2025-05-29T15:24:27.273389Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_REQUEST_COST has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273391Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_REQUEST_COST has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273394Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_REQUEST_COST has been changed from 0 to 10 2025-05-29T15:24:27.273397Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_VDISK_BALANCING has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273399Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_VDISK_BALANCING has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273401Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_VDISK_BALANCING has been changed from 0 to 10 2025-05-29T15:24:27.273403Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PROXY_GETBLOCK has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273405Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_GETBLOCK has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273407Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_GETBLOCK has been changed from 0 to 10 2025-05-29T15:24:27.273409Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: 
TLogSettingsConfigurator: Priority for the component BS_SHRED has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273411Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_SHRED has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273413Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_SHRED has been changed from 0 to 10 2025-05-29T15:24:27.273416Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PROXY_CHECKINTEGRITY has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273418Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_CHECKINTEGRITY has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273420Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_CHECKINTEGRITY has been changed from 0 to 10 2025-05-29T15:24:27.273422Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component LDAP_AUTH_PROVIDER has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273424Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component LDAP_AUTH_PROVIDER has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273426Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component LDAP_AUTH_PROVIDER has been changed from 0 to 10 2025-05-29T15:24:27.273428Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component GROUPED_MEMORY_LIMITER has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273431Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component GROUPED_MEMORY_LIMITER has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273433Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component GROUPED_MEMORY_LIMITER has been changed from 0 to 10 2025-05-29T15:24:27.273435Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component DATA_INTEGRITY has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273439Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component DATA_INTEGRITY has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273441Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component DATA_INTEGRITY has been changed from 0 to 10 2025-05-29T15:24:27.273444Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TX_PRIORITIES_QUEUE has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273446Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TX_PRIORITIES_QUEUE has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273448Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TX_PRIORITIES_QUEUE has been changed from 
0 to 10 2025-05-29T15:24:27.273450Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BSCONFIG has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273452Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BSCONFIG has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273454Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BSCONFIG has been changed from 0 to 10 2025-05-29T15:24:27.273457Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component NAMESERVICE has been changed from NOTICE to ALERT 2025-05-29T15:24:27.273459Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component NAMESERVICE has been changed from DEBUG to ALERT 2025-05-29T15:24:27.273461Z node 14 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component NAMESERVICE has been changed from 0 to 10 2025-05-29T15:24:27.273486Z node 14 :CMS_CONFIGS TRACE: log_settings_configurator.cpp:100: TLogSettingsConfigurator: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { }
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReadOnlyMode [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:27.047384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:27.047412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:27.047417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:27.047423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:27.047430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:27.047435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:27.047445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:27.047460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:24:27.047584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:27.047665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:27.063805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:27.063829Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:27.063925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:27.066978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:27.067119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:27.067168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:27.070025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:27.070104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:27.070232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.070311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:27.071072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.071129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:27.071427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.071441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.071461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:27.071469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:27.071475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:27.071513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.074967Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:27.095293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:27.095353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.095404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:27.095446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:27.095457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.096124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.096144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:27.096189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.096198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:27.096203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:27.096209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:27.096619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.096629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:27.096635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:27.097445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.097456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.097461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.097468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:27.098180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:27.098566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:27.098599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:27.098774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.098800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:27.098814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.098876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:27.098884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.098908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:27.098920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:27.099247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.099253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
28:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:27.230072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 128 ready parts: 1/1 2025-05-29T15:24:27.230099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 128 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:27.230272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2025-05-29T15:24:27.230285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2025-05-29T15:24:27.230290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2025-05-29T15:24:27.230295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-05-29T15:24:27.230301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:24:27.230398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2025-05-29T15:24:27.230408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2025-05-29T15:24:27.230412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2025-05-29T15:24:27.230417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2025-05-29T15:24:27.230421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:24:27.230430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 128, ready parts: 0/1, is published: true 2025-05-29T15:24:27.231084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 128:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:128 msg type: 269090816 2025-05-29T15:24:27.231125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 128, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 128 at step: 
5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 128 at step: 5000004 2025-05-29T15:24:27.231315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2025-05-29T15:24:27.231577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.231613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 128 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:27.231624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 128:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2025-05-29T15:24:27.231652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 128:0 128 -> 240 2025-05-29T15:24:27.231684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:24:27.231693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:24:27.231792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2025-05-29T15:24:27.232156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.232166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:27.232199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:24:27.232212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.232219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:460:2416], at schemeshard: 72057594046678944, txId: 128, path id: 1 2025-05-29T15:24:27.232224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:460:2416], at schemeshard: 72057594046678944, txId: 128, path id: 4 FAKE_COORDINATOR: Erasing txId 128 2025-05-29T15:24:27.232307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.232314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 128:0 ProgressState 2025-05-29T15:24:27.232328Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:906: Part operation is done id#128:0 progress is 1/1 2025-05-29T15:24:27.232333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-05-29T15:24:27.232338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#128:0 progress is 1/1 2025-05-29T15:24:27.232342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-05-29T15:24:27.232347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 128, ready parts: 1/1, is published: false 2025-05-29T15:24:27.232358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-05-29T15:24:27.232364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 128:0 2025-05-29T15:24:27.232368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 128:0 2025-05-29T15:24:27.232382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:24:27.232389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 128, publications: 2, subscribers: 0 2025-05-29T15:24:27.232393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 128, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-05-29T15:24:27.232397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 128, [OwnerId: 72057594046678944, LocalPathId: 4], 3 2025-05-29T15:24:27.232476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128 2025-05-29T15:24:27.232487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128 2025-05-29T15:24:27.232491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 128 2025-05-29T15:24:27.232497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:24:27.232501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:24:27.232584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128 2025-05-29T15:24:27.232594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128 2025-05-29T15:24:27.232599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 128 2025-05-29T15:24:27.232603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-05-29T15:24:27.232607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:24:27.232615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 128, subscribers: 0 2025-05-29T15:24:27.233551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2025-05-29T15:24:27.233580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 TestModificationResult got TxId: 128, wait until txId: 128
|65.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log}
>> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists
>> TExternalDataSourceTest::CreateExternalDataSource [GOOD]
>> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:27.274333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:27.274365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:27.274372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:27.274378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:27.274385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:27.274389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:27.274400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout#
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:27.274414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:27.274531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:27.274611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:27.288485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:27.288511Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:27.291639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:27.291778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:27.291819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:27.293895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:27.294136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:27.294261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.294330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:27.294898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.294943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:27.295226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.295240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.295261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:27.295269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:27.295275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:27.295312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.296786Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] 
sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:27.319657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:27.319739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.319811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:27.319859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:27.319870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.320803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.320837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:27.320904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.320915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:27.320921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:27.320926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:27.321432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.321448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:27.321455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:27.323117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.323135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.323142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.323149Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:27.323925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:27.324479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:27.324525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:27.324685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.324709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:27.324723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.324783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:27.324790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.324817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:27.324825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:27.325273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.325283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:27.325344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
72057594046678944, txId: 101, path id: 2 2025-05-29T15:24:27.330623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.330629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState 2025-05-29T15:24:27.330638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:24:27.330641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:24:27.330645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:24:27.330647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:24:27.330650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-05-29T15:24:27.330653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:24:27.330656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:24:27.330658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:24:27.330667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:27.330670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-05-29T15:24:27.330673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-05-29T15:24:27.330675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-05-29T15:24:27.330785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:27.330798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:27.330804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:24:27.330808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-05-29T15:24:27.330812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:24:27.330898Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:27.330906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:27.330909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:24:27.330911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-05-29T15:24:27.330913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:24:27.330919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-05-29T15:24:27.331493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:24:27.331570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-05-29T15:24:27.331629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:24:27.331637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-05-29T15:24:27.331692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:24:27.331710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:24:27.331715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:302:2292] TestWaitNotification: OK eventTxId 101 2025-05-29T15:24:27.331780Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:27.331800Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/UniqueName" took 27us result status StatusSuccess 2025-05-29T15:24:27.331871Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-05-29T15:24:27.332504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:27.332534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:336: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-05-29T15:24:27.332543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_external_data_source.cpp:212: [72057594046678944] TAlterExternalDataSource Propose: opId# 102:0, path# /MyRoot/UniqueName 2025-05-29T15:24:27.332560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp:96, at schemeshard: 72057594046678944 2025-05-29T15:24:27.332899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp:96" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-05-29T15:24:27.332920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , 
status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp:96, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-05-29T15:24:27.332958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:24:27.332962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:24:27.332997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:24:27.333009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:24:27.333012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:310:2300] TestWaitNotification: OK eventTxId 102
>> KqpPg::TableInsert-useSink [FAIL]
>> KqpPg::TempTablesSessionsIsolation
>> TExternalDataSourceTest::DropTableTwice
>> TExternalDataSourceTest::ParallelCreateSameExternalDataSource
>> TExternalDataSourceTest::DropExternalDataSource [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD]
>> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD]
>> TConsoleTests::TestCreateServerlessTenant [GOOD]
>> TConsoleTests::TestCreateServerlessTenantWrongSharedDb
>> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD]
>> TExternalTableTest::ReplaceExternalTableIfNotExists
|65.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations
|65.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations
|65.3%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log}
|65.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log}
|65.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations
>> KqpErrors::ProposeError
>> TExternalDataSourceTest::DropTableTwice [GOOD]
>> TExternalDataSourceTest::ParallelCreateExternalDataSource
>> TExternalDataSourceTest::ParallelCreateSameExternalDataSource [GOOD]
>> TExternalDataSourceTest::PreventDeletionOfDependentDataSources
>> KqpErrors::ProposeResultLost_RwTx+UseSink
>> TCmsTest::TestProcessingQueue [GOOD]
>> KqpErrors::ResolveTableError
>> TExternalTableTest::SchemeErrors
>> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:27.598101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:27.598132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:27.598138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:27.598144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:27.598150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:27.598155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:27.598165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:27.598179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:27.598292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:27.598359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:27.612850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:27.612877Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:27.612974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619:
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:27.622317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:27.622474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:27.622513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:27.624392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:27.624464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:27.624589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.624648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:27.625123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.625162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:27.625435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.625445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.625465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:27.625472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:27.625479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:27.625529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.626831Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:27.647902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:27.647980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.648049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:27.648091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:27.648102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.648982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.649010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:27.649072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.649082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:27.649088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:27.649093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:27.649550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.649561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:27.649566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:27.649920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.649931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.649937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.649944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:27.650584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:27.650949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:27.650991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:27.651170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.651192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:27.651211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.651273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:27.651280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.651311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:27.651321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:27.651695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.651704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-05-29T15:24:27.987701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:24:27.987830Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.987849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 8589936745 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:27.987856Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_external_data_source.cpp:40: [72057594046678944] TDropExternalDataSource TPropose opId# 102:0 HandleReply TEvOperationPlan: step# 5000003 2025-05-29T15:24:27.987877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:27.987892Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:0 128 -> 240 2025-05-29T15:24:27.987914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:27.987921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:24:27.988027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-05-29T15:24:27.988318Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.988325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:27.988349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:24:27.988371Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.988376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-05-29T15:24:27.988381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-29T15:24:27.988428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 
72057594046678944 2025-05-29T15:24:27.988435Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-29T15:24:27.988448Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:24:27.988452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:24:27.988457Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:24:27.988460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:24:27.988464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-05-29T15:24:27.988469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:24:27.988474Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:24:27.988478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:24:27.988489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:27.988494Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-05-29T15:24:27.988498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-05-29T15:24:27.988501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-05-29T15:24:27.988561Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:27.988570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:27.988574Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:24:27.988577Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:24:27.988581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:24:27.988618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at 
schemeshard: 72057594046678944 2025-05-29T15:24:27.988623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:24:27.988633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:24:27.988658Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:27.988665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:27.988669Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:24:27.988673Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:24:27.988677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:27.988684Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-05-29T15:24:27.989335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:24:27.989363Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:24:27.989375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-05-29T15:24:27.989432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:24:27.989441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:24:27.989523Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:24:27.989543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:24:27.989548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:335:2325] TestWaitNotification: OK eventTxId 102 2025-05-29T15:24:27.989622Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:27.989648Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 36us result status StatusPathDoesNotExist 2025-05-29T15:24:27.989690Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD] Test command err: 2025-05-29T15:24:13.245150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:13.245176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:13.245183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:13.245188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:13.245202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:13.245207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:13.245223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:13.245246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:13.245360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:13.245428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: 
TxInitSchema.Execute 2025-05-29T15:24:13.249765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:13.249790Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:13.251714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:13.251755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:13.251771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2025-05-29T15:24:13.252645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:13.252756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:13.252867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:13.252971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:24:13.253572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:13.253612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:13.253859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:24:13.253869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:13.253896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:13.253905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-05-29T15:24:13.253910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:13.253939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2025-05-29T15:24:13.292639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2025-05-29T15:24:13.292729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:13.292794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 
72057594046578944, LocalPathId: 1] was 0 2025-05-29T15:24:13.292852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2025-05-29T15:24:13.292866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:13.293764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:13.293803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2025-05-29T15:24:13.293856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:13.293867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046578944 2025-05-29T15:24:13.293872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:13.293878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:13.294440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:13.294455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046578944 2025-05-29T15:24:13.294460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:13.294881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:13.294892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:13.294909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:13.294919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:13.295479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:13.295950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:13.295999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:24:13.296184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:13.296190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-05-29T15:24:13.296194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:13.521164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:13.521238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2025-05-29T15:24:13.521252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:13.521328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:13.521339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:13.521370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2025-05-29T15:24:13.521387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:24:13.521995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:24:13.522024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-05-29T15:24:13.522066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:13.522072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:261:2249], at schemeshard: 72057594046578944, txId: 1, path id: 1 202 ... 
:TTxTrimUnusedSlots 2025-05-29T15:24:27.730093Z node 24 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:24:27.730102Z node 24 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:24:27.758155Z node 24 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:24:27.758196Z node 24 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:24:27.769347Z node 24 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:24:27.769393Z node 24 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:24:27.769411Z node 24 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:24:27.769424Z node 24 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:24:27.769456Z node 24 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:24:27.769465Z node 24 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:24:27.769472Z node 24 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:24:27.769481Z node 24 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:24:27.783656Z node 24 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:24:27.783712Z node 24 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:24:27.795375Z node 24 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:24:27.795418Z node 24 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:24:27.795651Z node 24 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:24:27.795659Z node 24 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:24:27.795767Z node 24 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:24:27.795778Z node 24 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue 
processed 2025-05-29T15:24:27.796078Z node 24 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 24 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 24 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-05-29T15:24:27.796194Z node 24 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ciyv/000dfe/r3tmp/tmpfcdY8d/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2025-05-29T15:24:27.796252Z node 24 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 24:1 Path# /home/runner/.ya/build/build_root/ciyv/000dfe/r3tmp/tmpfcdY8d/pdisk_1.dat 2025-05-29T15:24:27.796510Z node 24 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 24 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-05-29T15:24:27.796540Z node 24 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 24 PDiskId: 1 VSlotId: 0 } } 2025-05-29T15:24:27.796558Z node 24 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 24 PDiskId: 1 VSlotId: 0 } 2025-05-29T15:24:27.796598Z node 24 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 24 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-05-29T15:24:27.796617Z node 24 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 24 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-05-29T15:24:27.796941Z node 24 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 24 PDiskId: 1 VSlotId: 0 } Success: true } 2025-05-29T15:24:27.796990Z node 24 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 24 PDiskId: 1 VSlotId: 0 } } 2025-05-29T15:24:27.811609Z node 24 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 24 Devices# [] 2025-05-29T15:24:27.811750Z node 24 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-05-29T15:24:27.811842Z node 24 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:24:27.811860Z node 24 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-05-29T15:24:27.811898Z node 24 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1/users/tenant-2 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:24:27.811907Z node 24 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1/users/tenant-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:24:27.811927Z node 24 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 
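The BS_CONTROLLER records above trace a VDisk reported via TEvControllerRegisterNode moving through Status: INIT_PENDING, then REPLICATING, then READY in successive "Updating disk status" records. A minimal sketch of that monotone status progression follows; every name in it (EVDiskStatus, TDiskStatusTracker, TVSlotKey) is invented for illustration and is not the real controller API, which only models the ordering the log shows.

#include <cstdio>
#include <map>
#include <tuple>

// Sketch only: the three states seen in the "Updating disk status" records.
enum class EVDiskStatus { InitPending, Replicating, Ready };

// A VSlot is addressed by (NodeId, PDiskId, VSlotId), as in the log records.
using TVSlotKey = std::tuple<unsigned, unsigned, unsigned>;

class TDiskStatusTracker {
public:
    // Returns false if the reported status would move backwards, which a
    // controller would treat as a stale or reordered event.
    bool Update(const TVSlotKey& slot, EVDiskStatus status) {
        auto it = Status.find(slot);
        if (it == Status.end()) {
            Status.emplace(slot, status);
            return true;
        }
        if (status < it->second)
            return false; // ignore regressions, e.g. READY -> INIT_PENDING
        it->second = status;
        return true;
    }

private:
    std::map<TVSlotKey, EVDiskStatus> Status;
};

int main() {
    TDiskStatusTracker tracker;
    TVSlotKey slot{24u, 1u, 0u}; // NodeId 24, PDiskId 1, VSlotId 0, as above
    tracker.Update(slot, EVDiskStatus::InitPending);
    tracker.Update(slot, EVDiskStatus::Replicating);
    bool ok = tracker.Update(slot, EVDiskStatus::Ready);
    std::printf("reached READY: %s\n", ok ? "yes" : "no");
    return 0;
}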
2025-05-29T15:24:27.812824Z node 24 :LOCAL DEBUG: local.cpp:1066: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2025-05-29T15:24:27.812974Z node 24 :LOCAL DEBUG: local.cpp:1207: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2025-05-29T15:24:27.812978Z node 24 :LOCAL DEBUG: local.cpp:1066: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2025-05-29T15:24:27.812983Z node 24 :LOCAL DEBUG: local.cpp:1066: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-2 to schemeshard 72057594046578944 2025-05-29T15:24:27.813054Z node 24 :LOCAL DEBUG: local.cpp:1234: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2025-05-29T15:24:27.813072Z node 24 :LOCAL DEBUG: local.cpp:1172: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:24:27.813146Z node 24 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:24:27.813151Z node 24 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:24:27.813164Z node 24 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[24:339:2306] 2025-05-29T15:24:27.813223Z node 24 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1/users/tenant-1 2025-05-29T15:24:27.813231Z node 24 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [24:329:2301] 2025-05-29T15:24:27.813304Z node 24 :LOCAL DEBUG: local.cpp:1234: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2025-05-29T15:24:27.813309Z node 24 :LOCAL DEBUG: local.cpp:1238: TDomainLocal(dc-1): Missing task for /dc-1/users/tenant-1 2025-05-29T15:24:27.813338Z node 24 :LOCAL DEBUG: local.cpp:1234: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-2" PathDescription { Self { Name: "/dc-1/users/tenant-2" PathId: 101 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 101 DomainKey { SchemeShard: 72057594046578944 PathId: 101 } } } 2025-05-29T15:24:27.813346Z node 24 :LOCAL DEBUG: local.cpp:1172: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-2 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:24:27.813403Z node 24 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:24:27.813408Z node 24 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:24:27.813416Z node 24 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[24:349:2308] 2025-05-29T15:24:27.813459Z node 24 :TENANT_POOL NOTICE: 
tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1/users/tenant-2 2025-05-29T15:24:27.813463Z node 24 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [24:329:2301] 2025-05-29T15:24:27.813641Z node 24 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[24:339:2306]} 2025-05-29T15:24:27.813665Z node 24 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:24:27.813674Z node 24 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:24:27.813677Z node 24 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:24:27.813687Z node 24 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[24:349:2308]} 2025-05-29T15:24:27.813709Z node 24 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:24:27.813713Z node 24 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:24:27.813716Z node 24 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:24:27.851065Z node 24 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:27.739484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:27.739525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:27.739531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:27.739537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:27.739544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:27.739548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:27.739558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:27.739573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-05-29T15:24:27.739683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:27.739755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:27.754865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:27.754895Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:27.755023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:27.759047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:27.759232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:27.759281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:27.765132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:27.765219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:27.765346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.765417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:27.765954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.766002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:27.766303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.766317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:27.766336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:27.766345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:27.766352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:27.766389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.768005Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:27.798262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:27.798349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.798427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:27.798477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:27.798488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.799486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.799527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:27.799604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.799616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:27.799622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:27.799628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:27.800150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.800164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:27.800170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:27.800529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.800540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:27.800547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.800556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:27.801265Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:27.801721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:27.801772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:27.801994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:27.802024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:27.802049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.802122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:27.802131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:27.802167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:27.802181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:27.802652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:27.802664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
8944, LocalPathId: 2], 2 2025-05-29T15:24:28.036684Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:28.036698Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:28.036704Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:24:28.036709Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:24:28.036716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:24:28.037018Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:28.037036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:28.037041Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:24:28.037047Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-05-29T15:24:28.037052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:28.037065Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-05-29T15:24:28.037459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:24:28.037751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-05-29T15:24:28.037811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:24:28.037820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-05-29T15:24:28.037890Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 
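The TEvUpdateAck records above (and in the DropExternalDataSource dump earlier) show how schemeshard counts down in-flight publications per transaction: each path carries an expected version ("Publication details: tx ..., [pathId], version"), an ack at or above that version retires the path and decrements "Publication in-flight, count: N", and once the count reaches zero the log prints "Publication complete, notify & remove". A minimal sketch of that bookkeeping, with invented names (TTxPublications is not the real schemeshard type) and values taken from the txId 102 drop above:

#include <cstdint>
#include <cstdio>
#include <map>
#include <utility>

// A path is addressed by (OwnerId, LocalPathId), as in the log records.
using TPathId = std::pair<uint64_t, uint64_t>;

class TTxPublications {
public:
    // "Publication details" record: the version we expect to see acked.
    void Expect(TPathId pathId, uint64_t version) {
        Expected[pathId] = version;
    }

    // Handle one TEvUpdateAck; returns true when the whole publication
    // for this transaction is complete ("notify & remove").
    bool Ack(TPathId pathId, uint64_t version) {
        auto it = Expected.find(pathId);
        if (it != Expected.end() && version >= it->second)
            Expected.erase(it); // this path is now published up to date
        std::printf("Publication in-flight, count: %zu\n", Expected.size());
        return Expected.empty();
    }

private:
    std::map<TPathId, uint64_t> Expected;
};

int main() {
    TTxPublications tx102;
    // Values from the txId 102 records in the drop test: path 1 at version 7,
    // path 2 at 18446744073709551615 (max version marks a dropped path).
    tx102.Expect({72057594046678944ULL, 1}, 7);
    tx102.Expect({72057594046678944ULL, 2}, 18446744073709551615ULL);
    tx102.Ack({72057594046678944ULL, 2}, 18446744073709551615ULL);
    if (tx102.Ack({72057594046678944ULL, 1}, 7))
        std::printf("Publication complete, notify & remove\n");
    return 0;
}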
2025-05-29T15:24:28.037913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:24:28.037919Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:307:2297] TestWaitNotification: OK eventTxId 101 2025-05-29T15:24:28.037993Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.038027Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 44us result status StatusSuccess 2025-05-29T15:24:28.038107Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-05-29T15:24:28.038856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.038916Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:336: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } 2025-05-29T15:24:28.038929Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_create_external_data_source.cpp:232: [72057594046678944] TCreateExternalDataSource Propose: opId# 102:0, path# /MyRoot/MyExternalDataSource 2025-05-29T15:24:28.038959Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:101, at schemeshard: 72057594046678944 2025-05-29T15:24:28.039459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:101" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-05-29T15:24:28.039493Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:101, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-05-29T15:24:28.039547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:24:28.039554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:24:28.039621Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:24:28.039639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:24:28.039645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:315:2305] TestWaitNotification: OK eventTxId 102 2025-05-29T15:24:28.039712Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.039741Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 34us result status StatusSuccess 2025-05-29T15:24:28.039807Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: 
"MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpPg::CheckPgAutoParams-useSink [FAIL] >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] >> TCmsTest::ManageRequestsWrong [GOOD] >> TCmsTest::ManageRequestsDry >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] >> TExternalTableTest::ParallelCreateExternalTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:28.001794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:28.001821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.001827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:28.001833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:28.001839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:28.001843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:28.001852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-05-29T15:24:28.001868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:28.001975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.002046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:28.015638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:28.015668Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:28.015771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.018604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:28.018751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:28.018795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:28.020339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:28.020398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:28.020502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.020567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:28.021003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.021045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:28.021275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.021283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.021298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:28.021303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:28.021307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:28.021330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.022456Z node 
1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:28.041433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.041488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.041556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:28.041599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:28.041610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.044080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.044127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:28.044207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.044222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:28.044228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:28.044235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:28.046109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.046135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:28.046144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:28.046747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.046763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.046771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.046779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:28.047272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:28.047579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:28.047611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:28.047755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.047773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.047790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.047839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:28.047846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.047882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:28.047895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:28.048186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.048192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
INATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-05-29T15:24:28.060611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.060632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.060641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_external_data_source.cpp:35: [72057594046678944] TAlterExternalDataSource TPropose, operationId: 102:0HandleReply TEvOperationPlan: step# 5000003 2025-05-29T15:24:28.060662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:0 128 -> 240 2025-05-29T15:24:28.060735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:28.060742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:28.061070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:24:28.061152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:24:28.061372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.061380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:28.061403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:24:28.061416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:24:28.061427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.061432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-05-29T15:24:28.061437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-29T15:24:28.061440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to 
populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-05-29T15:24:28.061486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.061494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-29T15:24:28.061506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:24:28.061524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:24:28.061530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:24:28.061533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:24:28.061537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-05-29T15:24:28.061542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:24:28.061548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:24:28.061552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:24:28.061563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:24:28.061568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-05-29T15:24:28.061574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-05-29T15:24:28.061577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-05-29T15:24:28.061689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:28.061699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:28.061706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:24:28.061710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:24:28.061714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:24:28.061799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:28.061807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:28.061811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:24:28.061815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-29T15:24:28.061819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:28.061826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-05-29T15:24:28.062369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:24:28.062388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-05-29T15:24:28.062428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:24:28.062435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:24:28.062504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:24:28.062521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:24:28.062525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:334:2324] TestWaitNotification: OK eventTxId 102 2025-05-29T15:24:28.062593Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.062617Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 33us result status StatusSuccess 2025-05-29T15:24:28.062687Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 
SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExternalTableTest::DropExternalTable >> TExternalTableTest::ParallelCreateSameExternalTable >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TCmsTest::RequestReplaceDevicePDiskByPath [GOOD] >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode >> TExternalTableTest::CreateExternalTable >> TConsoleTests::TestSetDefaultStorageUnitsQuota [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota >> TExternalTableTest::SchemeErrors [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::TestProcessingQueue [GOOD] Test command err: 2025-05-29T15:24:23.168837Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:23.169408Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:23.171008Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:23.171067Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:23.172209Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:23.172282Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:23.173287Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:23.173319Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:23.173355Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
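The ReplaceExternalDataSourceIfNotExists output above exercises the opposite setting: with the EnableReplaceIfExistsForExternalEntities feature flag enabled, txId 102 is driven through TAlterExternalDataSource instead of being rejected, and the final describe reports ExternalDataSourceVersion: 2 with the new my_new_bucket location. A hedged YQL sketch of the statement behind this flow (assuming the OR REPLACE form is what this feature flag gates):

    -- Replaces the existing definition in place; ExternalDataSourceVersion goes 1 -> 2
    CREATE OR REPLACE EXTERNAL DATA SOURCE MyExternalDataSource WITH (
        SOURCE_TYPE = "ObjectStorage",
        LOCATION = "https://s3.cloud.net/my_new_bucket",
        AUTH_METHOD = "NONE"
    );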
2025-05-29T15:24:23.173424Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:23.173598Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:23.173621Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:23.173637Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:23.173660Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:23.207922Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:23.219031Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:23.219134Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.220639Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.220729Z node 1 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:23.220736Z node 1 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:23.220746Z node 1 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:23.220750Z node 1 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:23.220778Z node 1 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:23.220807Z node 1 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-05-29T15:24:23.223885Z node 1 :CMS DEBUG: sentinel.cpp:530: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/1/pdisk-1.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/2/pdisk-2.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/3/pdisk-3.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/4/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/5/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/6/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/7/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/8/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { 
NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 
GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-05-29T15:24:23.234222Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.268492Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.268558Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:23.309454Z node 1 :CMS ERROR: info_collector.cpp:281: [InfoCollector] Couldn't get base config 2025-05-29T15:24:23.309604Z node 1 :CMS NOTICE: cms.cpp:1743: Couldn't collect cluster state. 2025-05-29T15:24:23.309649Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: ERROR_TEMP Reason: "Cannot collect cluster state" } } 2025-05-29T15:24:23.309680Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: ERROR_TEMP Reason: "Cannot collect cluster state" } } 2025-05-29T15:24:23.342842Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:23.407752Z node 1 :CMS ERROR: info_collector.cpp:281: [InfoCollector] Couldn't get base config 2025-05-29T15:24:23.407817Z node 1 :CMS NOTICE: cms.cpp:1743: Couldn't collect cluster state. 2025-05-29T15:24:23.407859Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ERROR_TEMP Reason: "Cannot collect cluster state" } } 2025-05-29T15:24:23.418917Z node 1 :CMS ERROR: info_collector.cpp:281: [InfoCollector] Couldn't get base config 2025-05-29T15:24:23.418999Z node 1 :CMS NOTICE: cms.cpp:1743: Couldn't collect cluster state. 
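The base config dumped above defines four storage groups (ids 0 through 3) with ErasureSpecies "block-4-2", one vslot per PDisk on each of the 8 nodes. block-4-2 is YDB's 4+2 erasure layout: every blob is encoded into 4 data parts plus 2 parity parts spread over 8 fail domains, so a group survives any 2 simultaneous domain failures while paying a storage overhead of (4 + 2) / 4 = 1.5x, versus 3x for triple mirroring.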
2025-05-29T15:24:23.419037Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvNotification { User: "user" Actions { Type: REPLACE_DEVICES Host: "1" Devices: "pdisk-2-2" Duration: 60000000 } Time: 720126512 }, response# NKikimr::NCms::TEvCms::TEvNotificationResponse { Status { Code: ERROR_TEMP Reason: "Cannot collect cluster state" } } 2025-05-29T15:24:23.506006Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:23.506059Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:23.506136Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:23.506323Z node 1 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: REPLACE_DEVICES Host: "1" Devices: "pdisk-1-1" Duration: 60000000 } Actions { Type: REPLACE_DEVICES Host: "1" Devices: "pdisk-2-2" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:23.506335Z node 1 :CMS DEBUG: cms.cpp:379: Checking action: Typ ... nknown PDisk 22:22 2025-05-29T15:24:26.958222Z node 17 :CMS ERROR: cluster_info.cpp:489: Cannot update state for unknown PDisk 23:23 2025-05-29T15:24:26.958225Z node 17 :CMS ERROR: cluster_info.cpp:489: Cannot update state for unknown PDisk 24:24 2025-05-29T15:24:26.958351Z node 17 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:26.958398Z node 17 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:26.958446Z node 17 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:26.958458Z node 17 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2025-05-29T15:24:26.958470Z node 17 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 17, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:26.958487Z node 17 :CMS DEBUG: cms.cpp:729: Ring: 0; State: Ok 2025-05-29T15:24:26.958491Z node 17 :CMS DEBUG: cms.cpp:729: Ring: 1; State: Ok 2025-05-29T15:24:26.958494Z node 17 :CMS DEBUG: cms.cpp:729: Ring: 2; State: Ok 2025-05-29T15:24:26.958498Z node 17 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:26.958546Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2025-05-29T15:24:26.958574Z node 17 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: 
MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:26.958581Z node 17 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2025-05-29T15:24:26.958586Z node 17 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:26.958590Z node 17 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:26.958613Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2025-05-29T15:24:26.958630Z node 17 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:26.958634Z node 17 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 2025-05-29T15:24:26.958637Z node 17 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:26.958639Z node 17 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:26.958652Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 19 InterconnectPort: 12003 } } } } 2025-05-29T15:24:26.958662Z node 17 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "20" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:26.958665Z node 17 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "20" Services: "storage" Duration: 60000000 2025-05-29T15:24:26.958668Z node 17 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 20, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:26.958670Z node 17 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:26.958682Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "20" Services: "storage" Duration: 60000000 } 
PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "20" Services: "storage" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 20 InterconnectPort: 12004 } } } } 2025-05-29T15:24:26.958691Z node 17 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:26.958695Z node 17 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 2025-05-29T15:24:26.958698Z node 17 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 21, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:26.958701Z node 17 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:26.958713Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 21 InterconnectPort: 12005 } } } } 2025-05-29T15:24:26.958722Z node 17 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:26.958726Z node 17 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 2025-05-29T15:24:26.958729Z node 17 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 22, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:26.958731Z node 17 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:26.958768Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 22 InterconnectPort: 12006 } } } } 2025-05-29T15:24:26.958784Z node 17 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART 
EvictVDisks: false 2025-05-29T15:24:26.958790Z node 17 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 2025-05-29T15:24:26.958794Z node 17 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 23, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:26.958798Z node 17 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:26.958818Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 23 InterconnectPort: 12007 } } } } 2025-05-29T15:24:26.958833Z node 17 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:26.958839Z node 17 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 2025-05-29T15:24:26.958844Z node 17 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 24, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:26.958848Z node 17 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:26.958868Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 24 InterconnectPort: 12008 } } } } >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists >> KqpPg::TempTablesSessionsIsolation [GOOD] >> KqpPg::TempTablesDrop >> TExternalTableTest::ParallelCreateExternalTable [GOOD] >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:28.232130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, 
InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:28.232162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.232168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:28.232173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:28.232179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:28.232183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:28.232192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.232206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:28.232316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.232384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:28.246283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:28.246311Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:28.246423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.249391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:28.249531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:28.249570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:28.251463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:28.251563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:28.251668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.251724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:28.252204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.252240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: 
[RootDataErasureManager] Stop 2025-05-29T15:24:28.252507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.252519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.252543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:28.252552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:28.252559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:28.252593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.254048Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:28.274796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.274872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.274945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:28.274987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:28.274997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.275836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.275862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:28.275920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.275930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:28.275936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 
ProgressState no shards to create, do next state 2025-05-29T15:24:28.275942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:28.276438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.276450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:28.276457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:28.276836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.276847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.276853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.276860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:28.277542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:28.277993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:28.278031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:28.278214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.278241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.278262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.278330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:28.278337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.278371Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:28.278383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:28.278890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.278899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... : 72057594046678944 2025-05-29T15:24:28.470017Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-05-29T15:24:28.470024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-05-29T15:24:28.470027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [2:346:2336] 2025-05-29T15:24:28.470039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-05-29T15:24:28.470042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [2:346:2336] TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 2025-05-29T15:24:28.470109Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.470137Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 37us result status StatusSuccess 2025-05-29T15:24:28.470221Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 
UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.470307Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.470321Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 15us result status StatusSuccess 2025-05-29T15:24:28.470357Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.470429Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.470440Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 12us result status StatusSuccess 2025-05-29T15:24:28.470501Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.470556Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.470569Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 15us result status StatusSuccess 2025-05-29T15:24:28.470603Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2025-05-29T15:24:28.470641Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.470652Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 12us result status StatusSuccess 2025-05-29T15:24:28.470699Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |65.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TExternalTableTest::DropExternalTable [GOOD] >> TExternalTableTest::Decimal >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TExternalTableTest::CreateExternalTable [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists >> TCmsTest::TestForceRestartModeScheduled [GOOD] >> TCmsTest::TestForceRestartModeScheduledDisconnects >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns [FAIL] >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:28.252011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:28.252037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.252042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:28.252047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:28.252052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:28.252056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:28.252065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.252078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:28.252189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.252257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:28.265007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:28.265034Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:28.265145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.268244Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:28.268386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:28.268433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:28.270541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:28.270632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:28.270780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.270854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:28.271399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.271442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:28.271744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.271757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.271777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:28.271787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:28.271793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:28.271828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.273325Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:28.296324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.296408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.296501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:28.296555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:28.296568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.297502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.297550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:28.297623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.297635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:28.297642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:28.297648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:28.298165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.298180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:28.298186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:28.298604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.298618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.298625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.298633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:28.299409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:28.299870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:28.299922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at 
step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:28.300152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.300181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.300206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.300279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:28.300288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.300325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:28.300340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:28.300840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.300851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
sg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:28.533195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:28.533200Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:24:28.533205Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:24:28.533209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:24:28.533565Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:28.533579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:28.533585Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:24:28.533590Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:24:28.533595Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:24:28.533731Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:28.533743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:28.533748Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:24:28.533755Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-05-29T15:24:28.533759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:28.533768Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 
101, subscribers: 0 2025-05-29T15:24:28.534084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:24:28.534411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:24:28.534426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-05-29T15:24:28.534471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:24:28.534478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-05-29T15:24:28.534551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:24:28.534570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:24:28.534575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:337:2327] TestWaitNotification: OK eventTxId 101 2025-05-29T15:24:28.534649Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.534678Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 38us result status StatusSuccess 2025-05-29T15:24:28.534791Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 
Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-05-29T15:24:28.535501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropExternalDataSource Drop { Name: "ExternalDataSource" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.535531Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_external_data_source.cpp:116: [72057594046678944] TDropExternalDataSource Propose: opId# 103:0, path# /MyRoot/ExternalDataSource 2025-05-29T15:24:28.535544Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, at schemeshard: 72057594046678944 2025-05-29T15:24:28.536026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.536055Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, operation: DROP EXTERNAL DATA SOURCE, path: /MyRoot/ExternalDataSource TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-05-29T15:24:28.536103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:24:28.536110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:24:28.536168Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:24:28.536180Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:24:28.536183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:345:2335] TestWaitNotification: OK eventTxId 103 2025-05-29T15:24:28.536230Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.536248Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 23us result status StatusSuccess 2025-05-29T15:24:28.536294Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true 
CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { References { Path: "/MyRoot/ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } } } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:28.522368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:28.522398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.522404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:28.522408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:28.522421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:28.522425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:28.522434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.522447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-05-29T15:24:28.522552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.522628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:28.539185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:28.539219Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:28.539325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.542422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:28.542523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:28.542560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:28.543915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:28.543976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:28.544064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.544114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:28.544476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.544515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:28.544723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.544730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.544744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:28.544749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:28.544753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:28.544777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.545838Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:28.560133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.560220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.560291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:28.560332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:28.560340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.561091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.561113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:28.561150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.561158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:28.561162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:28.561167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:28.561468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.561476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:28.561482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:28.561756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.561766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.561772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.561780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:28.562251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:28.562563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:28.562595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:28.562773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.562795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.562800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.562852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:28.562858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.562886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:28.562895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:28.563206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.563212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
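[Annotation] The records above trace a schemeshard suboperation through numbered states: "Change state for txid 1:0 2 -> 3", then "3 -> 128", and finally "128 -> 240" once the coordinator delivers TEvOperationPlan. The following is a minimal C++ sketch of that progression, for orientation only: the state numbers and transition order are transcribed from the log, while the type and function names are hypothetical, not YDB's actual code.

// Illustrative reconstruction of the 2 -> 3 -> 128 -> 240 chain seen above.
// Names are invented; only the numeric states and ordering come from the log.
#include <cstdint>
#include <iostream>
#include <stdexcept>

enum class ETxState : uint32_t {
    CreateParts    = 2,   // "TCreateParts ... ProgressState"
    ConfigureParts = 3,   // "NSubDomainState::TConfigureParts"
    Propose        = 128, // "NSubDomainState::TPropose", waits for the plan step
    Done           = 240, // "TDone ... ProgressState"
};

// Advance one step, mirroring the "Change state for txid" records.
ETxState Advance(ETxState s, bool planStepReceived) {
    switch (s) {
        case ETxState::CreateParts:    return ETxState::ConfigureParts;
        case ETxState::ConfigureParts: return ETxState::Propose;
        case ETxState::Propose:
            // 128 -> 240 happens only in HandleReply TEvOperationPlan,
            // i.e. after the (fake) coordinator sends the plan step.
            if (!planStepReceived) throw std::logic_error("waiting for plan step");
            return ETxState::Done;
        case ETxState::Done:           return ETxState::Done;
    }
    throw std::logic_error("unreachable");
}

int main() {
    ETxState s = ETxState::CreateParts;
    s = Advance(s, false);                          // 2 -> 3
    s = Advance(s, false);                          // 3 -> 128
    s = Advance(s, true);                           // 128 -> 240 after TEvOperationPlan
    std::cout << static_cast<uint32_t>(s) << "\n";  // prints 240
}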
005 2025-05-29T15:24:28.586563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.586584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.586592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_external_table.cpp:58: [72057594046678944] TAlterExternalTable TPropose, operationId: 104:0 HandleReply TEvOperationPlan: step# 5000005 2025-05-29T15:24:28.586614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 104:0 128 -> 240 2025-05-29T15:24:28.586640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:24:28.586649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:24:28.587043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-05-29T15:24:28.587191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-05-29T15:24:28.587590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.587599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:28.587635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:24:28.587649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:24:28.587663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.587669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-05-29T15:24:28.587675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-05-29T15:24:28.587680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-05-29T15:24:28.587753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: 
TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.587762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 104:0 ProgressState 2025-05-29T15:24:28.587776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1 2025-05-29T15:24:28.587781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:24:28.587787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1 2025-05-29T15:24:28.587791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:24:28.587797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-05-29T15:24:28.587802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:24:28.587808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:0 2025-05-29T15:24:28.587813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:0 2025-05-29T15:24:28.587827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:24:28.587832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:28.587838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-05-29T15:24:28.587843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-29T15:24:28.587847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 4 2025-05-29T15:24:28.588006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:24:28.588020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:24:28.588025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-05-29T15:24:28.588031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-29T15:24:28.588036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:24:28.588190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:24:28.588202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:24:28.588207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-05-29T15:24:28.588211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-05-29T15:24:28.588216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:24:28.588226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-05-29T15:24:28.588837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-05-29T15:24:28.589091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-05-29T15:24:28.589154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-05-29T15:24:28.589162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-05-29T15:24:28.589245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-05-29T15:24:28.589264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-29T15:24:28.589270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:393:2383] TestWaitNotification: OK eventTxId 104 2025-05-29T15:24:28.589372Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.589407Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 48us result status StatusSuccess 2025-05-29T15:24:28.589489Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 
72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 3 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 3 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/other_location" Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:28.662689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:28.662713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.662719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:28.662724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:28.662734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:28.662756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:28.662765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.662779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:28.662886Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.662963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:28.676435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:28.676457Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:28.676549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.679289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:28.679405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:28.679443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:28.681064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:28.681121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:28.681237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.681285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:28.681753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.681795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:28.682024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.682033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.682051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:28.682059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:28.682065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:28.682094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.683410Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:28.704806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.704894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.704954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:28.705007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:28.705019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.705829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.705860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:28.705912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.705923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:28.705928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:28.705934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:28.706403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.706415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:28.706423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:28.706869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.706881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.706888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.706896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:28.707522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: 
TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:28.708468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:28.708513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:28.708707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.708736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.708742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.708815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:28.708823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.708880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:28.708893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:28.709425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.709433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
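[Annotation] Earlier in this output (txId 104), the TEvUpdateAck records show per-transaction publication bookkeeping: "Publication in-flight, count: 2", then "count: 1", then "Publication complete, notify & remove". Below is a hedged sketch of that pattern; the path/version pairs (1, 11) and (3, 4) are taken from the "Publication details" records above, but the class and its interface are invented for illustration.

// Hypothetical sketch of the publication-ack tracking seen in the log:
// each txId publishes a set of (pathId, version) pairs, acks remove them,
// and subscribers are notified once nothing remains in flight.
#include <cstdint>
#include <functional>
#include <iostream>
#include <map>
#include <set>
#include <utility>

struct TPublication {
    std::set<std::pair<uint64_t, uint64_t>> Pending; // (localPathId, version)
    std::function<void()> OnComplete;                // "notify & remove"
};

class TPublicationTracker {
    std::map<uint64_t, TPublication> ByTxId;
public:
    void Start(uint64_t txId, std::set<std::pair<uint64_t, uint64_t>> paths,
               std::function<void()> onComplete) {
        ByTxId[txId] = TPublication{std::move(paths), std::move(onComplete)};
    }
    // Mirrors "AckPublish ... pathId ... version": report the in-flight count
    // before the decrement (as the log does), then complete at zero.
    void AckPublish(uint64_t txId, uint64_t pathId, uint64_t version) {
        auto it = ByTxId.find(txId);
        if (it == ByTxId.end()) return; // late ack; publication already removed
        std::cout << "Publication in-flight, count: "
                  << it->second.Pending.size() << ", txId: " << txId << "\n";
        it->second.Pending.erase({pathId, version});
        if (it->second.Pending.empty()) {
            it->second.OnComplete();    // "Publication complete, notify & remove"
            ByTxId.erase(it);
        }
    }
};

int main() {
    TPublicationTracker tracker;
    // txId 104 publishes pathId 1 at version 11 and pathId 3 at version 4.
    tracker.Start(104, {{1, 11}, {3, 4}},
                  [] { std::cout << "Publication complete, txId: 104\n"; });
    tracker.AckPublish(104, 1, 11); // count: 2
    tracker.AckPublish(104, 3, 4);  // count: 1, then complete
}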
eration.cpp:130: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, at schemeshard: 72057594046678944 2025-05-29T15:24:28.724367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Type \'BlaBlaType\' specified for column \'RowId\' is not supported by storage" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.724396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-05-29T15:24:28.725052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.725096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:427: [72057594046678944] CreateNewExternalTable, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } 2025-05-29T15:24:28.725105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:300: [72057594046678944] TCreateExternalTable Propose: opId# 127:0, path# /MyRoot/DirA/Table2 2025-05-29T15:24:28.725119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Columns cannot have an empty name, at schemeshard: 72057594046678944 2025-05-29T15:24:28.725605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Columns cannot have an empty name" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.725637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Columns cannot have an empty name, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-05-29T15:24:28.726248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.726293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:427: 
[72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } 2025-05-29T15:24:28.726303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:300: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/DirA/Table2 2025-05-29T15:24:28.726318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, at schemeshard: 72057594046678944 2025-05-29T15:24:28.726761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Cannot set TypeId for column \'RowId\', use Type" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.726785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-05-29T15:24:28.727288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.727326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:427: [72057594046678944] CreateNewExternalTable, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } 2025-05-29T15:24:28.727335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:300: [72057594046678944] TCreateExternalTable Propose: opId# 129:0, path# /MyRoot/DirA/Table2 2025-05-29T15:24:28.727347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Missing Type for column 'RowId', at schemeshard: 72057594046678944 2025-05-29T15:24:28.727805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Missing Type for column \'RowId\'" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.727834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Missing Type for column 'RowId', operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 129, wait until txId: 129 TestModificationResults wait txId: 130 2025-05-29T15:24:28.728381Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } } TxId: 130 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.728423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:427: [72057594046678944] CreateNewExternalTable, opId 130:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } 2025-05-29T15:24:28.728433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:300: [72057594046678944] TCreateExternalTable Propose: opId# 130:0, path# /MyRoot/DirA/Table2 2025-05-29T15:24:28.728464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 130:1, propose status:StatusSchemeError, reason: Duplicate column id: 2, at schemeshard: 72057594046678944 2025-05-29T15:24:28.728888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 130, response: Status: StatusSchemeError Reason: "Duplicate column id: 2" TxId: 130 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.728910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 130, database: /MyRoot, subject: , status: StatusSchemeError, reason: Duplicate column id: 2, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 130, wait until txId: 130 TestModificationResults wait txId: 131 2025-05-29T15:24:28.729426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } } TxId: 131 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.729469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:427: [72057594046678944] CreateNewExternalTable, opId 131:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } 2025-05-29T15:24:28.729478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:300: [72057594046678944] TCreateExternalTable Propose: opId# 131:0, path# /MyRoot/DirA/Table2 2025-05-29T15:24:28.729496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 131:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path 
hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:166, at schemeshard: 72057594046678944 2025-05-29T15:24:28.729894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 131, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:166" TxId: 131 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.729916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 131, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:166, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 131, wait until txId: 131 >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain >> TExternalTableTest::Decimal [GOOD] >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:28.790536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:28.790565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.790572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:28.790577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:28.790587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:28.790591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:28.790601Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.790614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:28.790732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.790852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:28.804064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:28.804090Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:28.804183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.807127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:28.807267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:28.807305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:28.811820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:28.811906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:28.812020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.812077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:28.812631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.812685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:28.812926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.812936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.812951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:28.812958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:28.812963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 
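[Annotation] The startup NOTICE records just above print the background-queue settings in effect for this test run (BorrowedCompactionQueue and BackgroundCleaningQueue both at Timeout# 15s, Rate# 0, WakeupInterval# 1s, InflightLimit# 10). The sketch below just restates those values in a self-contained struct; the field names and the struct are illustrative, with the defaults transcribed from the log, and it is not YDB's configuration code.

// Minimal sketch of the queue settings logged at schemeshard startup above.
#include <cstdint>
#include <iostream>

struct TQueueConfig {
    double   TimeoutSeconds;
    uint32_t Rate;
    double   WakeupIntervalSeconds;
    uint32_t InflightLimit;

    void Print(const char* name) const {
        std::cout << name << " configured: Timeout# " << TimeoutSeconds
                  << "s, Rate# " << Rate
                  << ", WakeupInterval# " << WakeupIntervalSeconds
                  << "s, InflightLimit# " << InflightLimit << "\n";
    }
};

int main() {
    // Values as logged for this run; both queues share the same defaults.
    TQueueConfig borrowedCompaction{15.0, 0, 1.0, 10};
    TQueueConfig backgroundCleaning{15.0, 0, 1.0, 10};
    borrowedCompaction.Print("BorrowedCompactionQueue");
    backgroundCleaning.Print("BackgroundCleaningQueue");
}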
2025-05-29T15:24:28.812994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.814328Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:28.830849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.830929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.830987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:28.831034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:28.831046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.832113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.832136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:28.832174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.832182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:28.832186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:28.832190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:28.832956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.832969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:28.832975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:28.835104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.835122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.835128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.835136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:28.835791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:28.836330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:28.836374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:28.836550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.836577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.836583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.836653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:28.836661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.836692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:28.836704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:28.837158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.837166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
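[Annotation] The TExternalTableTest::SchemeErrors output earlier in this section (txIds 126-131) drives CREATE EXTERNAL TABLE proposals into StatusSchemeError for several malformed column specs: an unsupported type, an empty column name, TypeId set instead of Type, a missing Type, and a duplicate column id. The following is a hypothetical re-implementation of those checks, for illustration only: the error strings are copied from the log, but the types, the check order, and the "supported" type subset are assumptions, not the actual schemeshard validation code.

// Hedged sketch of column validation in the spirit of the IgniteOperation
// "propose status:StatusSchemeError, reason: ..." records above.
#include <iostream>
#include <optional>
#include <set>
#include <string>
#include <vector>

struct TColumnSpec {
    std::string Name;
    std::string Type;          // e.g. "Uint64"; empty if the client omitted it
    std::optional<int> TypeId; // rejected: external tables require Type, not TypeId
    std::optional<int> Id;
};

// Returns an error message on failure, or nullopt when all columns pass.
std::optional<std::string> ValidateColumns(const std::vector<TColumnSpec>& columns) {
    // Illustrative subset; the real list of storage-supported types is larger.
    static const std::set<std::string> supported = {"Uint32", "Uint64", "Utf8"};
    std::set<int> seenIds;
    for (const auto& c : columns) {
        if (c.Name.empty())
            return "Columns cannot have an empty name";
        if (c.TypeId)
            return "Cannot set TypeId for column '" + c.Name + "', use Type";
        if (c.Type.empty())
            return "Missing Type for column '" + c.Name + "'";
        if (!supported.count(c.Type))
            return "Type '" + c.Type + "' specified for column '" + c.Name +
                   "' is not supported by storage";
        if (c.Id && !seenIds.insert(*c.Id).second)
            return "Duplicate column id: " + std::to_string(*c.Id);
    }
    return std::nullopt;
}

int main() {
    // Reproduces txId 130 above: two columns with the same explicit id.
    std::vector<TColumnSpec> cols{
        {"RowId",  "Uint64", std::nullopt, 2},
        {"RowId2", "Uint64", std::nullopt, 2},
    };
    if (auto err = ValidateColumns(cols))
        std::cout << "StatusSchemeError: " << *err << "\n"; // Duplicate column id: 2
}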
rd: 72057594046678944 2025-05-29T15:24:28.859101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-05-29T15:24:28.859105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:379:2369] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-05-29T15:24:28.859177Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.859211Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 44us result status StatusSuccess 2025-05-29T15:24:28.859286Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.859374Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.859388Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 15us result status StatusSuccess 2025-05-29T15:24:28.859430Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: 
EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.859513Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.859524Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 12us result status StatusSuccess 2025-05-29T15:24:28.859585Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 
72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.859643Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.859657Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 15us result status StatusSuccess 2025-05-29T15:24:28.859694Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.859771Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.859786Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 17us result status StatusSuccess 2025-05-29T15:24:28.859827Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:28.848160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:28.848194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.848202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:28.848208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:28.848219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:28.848223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:28.848233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.848246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-05-29T15:24:28.848352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.848425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:28.861724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:28.861761Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:28.861869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.867350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:28.867533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:28.867581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:28.869448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:28.869530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:28.869674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.869732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:28.870212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.870259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:28.870530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.870541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.870561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:28.870570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:28.870576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:28.870613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.872067Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:28.896899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.896998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.897068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:28.897122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:28.897133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.897996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.898025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:28.898071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.898082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:28.898088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:28.898093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:28.898510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.898520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:28.898529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:28.898905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.898915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.898921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.898928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:28.899660Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:28.902323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:28.902390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:28.902629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.902670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.902680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.902815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:28.902827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.902867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:28.902883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:28.904989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.905004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
e 2025-05-29T15:24:28.931520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:24:28.931526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:24:28.931531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:24:28.931557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:24:28.931564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-05-29T15:24:28.931569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-05-29T15:24:28.931574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-05-29T15:24:28.931810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:28.931826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:28.931831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:24:28.931838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:24:28.931845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:24:28.935043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:28.935085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:28.935091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:24:28.935098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-05-29T15:24:28.935105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 
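[editor's note] The DescribePath result that follows reports an ExternalDataSource with SourceType "ObjectStorage", Location "https://s3.cloud.net/my_bucket" and Auth { None { } }. For orientation, a minimal YQL sketch that would produce such an object — the statement shape is an assumption based on current YDB syntax and is not part of this test log; only the names and values are taken from the describe output below:

    CREATE EXTERNAL DATA SOURCE ExternalDataSource WITH (
        SOURCE_TYPE = "ObjectStorage",                -- matches SourceType in the describe result
        LOCATION = "https://s3.cloud.net/my_bucket",  -- matches Location
        AUTH_METHOD = "NONE"                          -- matches Auth { None { } }
    );
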
2025-05-29T15:24:28.935136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-05-29T15:24:28.939411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:24:28.939747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-05-29T15:24:28.939819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:24:28.939829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-05-29T15:24:28.939917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:24:28.939949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:24:28.939954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:307:2297] TestWaitNotification: OK eventTxId 101 2025-05-29T15:24:28.940055Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.940112Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 76us result status StatusSuccess 2025-05-29T15:24:28.940243Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { 
None { } } Properties { } References { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-05-29T15:24:28.941270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.941342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:427: [72057594046678944] CreateNewExternalTable, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-05-29T15:24:28.941357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 102:0, explain: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-05-29T15:24:28.941364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 102:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-05-29T15:24:28.941916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 102, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.941950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-05-29T15:24:28.942004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:24:28.942014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:24:28.942083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:24:28.942103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:24:28.942108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:315:2305] TestWaitNotification: OK eventTxId 102 2025-05-29T15:24:28.942173Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.942202Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 35us result status StatusPathDoesNotExist 2025-05-29T15:24:28.942242Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:28.780427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:28.780459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.780465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:28.780470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:28.780482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:28.780486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:28.780496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.780512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# 
false 2025-05-29T15:24:28.780624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.780713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:28.795424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:28.795457Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:28.795567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.798868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:28.799023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:28.799070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:28.800962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:28.801035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:28.801170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.801225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:28.801840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.801897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:28.802175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.802187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.802210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:28.802219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:28.802226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:28.802261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.803787Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:28.825840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.825935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.826014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:28.826072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:28.826084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.826942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.826976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:28.827030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.827042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:28.827049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:28.827055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:28.827551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.827565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:28.827571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:28.827950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.827962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.827969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.827977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:28.828701Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:28.829134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:28.829178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:28.829375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.829404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.829411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.829489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:28.829497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.829551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:28.829566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:28.830024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.830033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
alse ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.848659Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 55us result status StatusSuccess 2025-05-29T15:24:28.848752Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.848828Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.848848Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 22us result status StatusSuccess 2025-05-29T15:24:28.848896Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 
ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2025-05-29T15:24:28.848946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2025-05-29T15:24:28.848955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2025-05-29T15:24:28.848977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2025-05-29T15:24:28.848981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2025-05-29T15:24:28.848991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2025-05-29T15:24:28.848994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2025-05-29T15:24:28.849077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-05-29T15:24:28.849102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-05-29T15:24:28.849107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:347:2337] 2025-05-29T15:24:28.849131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-05-29T15:24:28.849142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-05-29T15:24:28.849153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-05-29T15:24:28.849157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:347:2337] 2025-05-29T15:24:28.849171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-05-29T15:24:28.849175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:347:2337] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-05-29T15:24:28.849247Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: 
false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:28.849266Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 23us result status StatusSuccess 2025-05-29T15:24:28.849317Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2025-05-29T15:24:28.850222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.850286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:427: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-05-29T15:24:28.850301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:300: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/NilNoviSubLuna 2025-05-29T15:24:28.850327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: 
EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132, at schemeshard: 72057594046678944 2025-05-29T15:24:28.856758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132" TxId: 128 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 125, at schemeshard: 72057594046678944 2025-05-29T15:24:28.856823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132, operation: CREATE EXTERNAL TABLE, path: /MyRoot/NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 >> TCmsTest::ManageRequests [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag >> TConsoleTests::TestRestartConsoleAndPools [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits |65.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:28.851074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:28.851097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.851103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:28.851107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:28.851117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:28.851121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:28.851132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.851145Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:28.851249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.851329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:28.866078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:28.866108Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:28.866211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.868874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:28.868987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:28.869024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:28.870755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:28.870820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:28.870934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.870985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:28.871431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.871466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:28.871668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.871675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:28.871688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:28.871693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:28.871697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:28.871719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.872770Z node 1 :HIVE INFO: 
tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:28.886282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:28.886352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.886401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:28.886439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:28.886446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.887143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.887165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:28.887202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.887209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:28.887212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:28.887216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:28.887528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.887535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:28.887541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:28.887850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.887860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:28.887863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.887869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:28.888302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:28.888621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:28.888652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:28.888781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:28.888798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:28.888802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.888849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:28.888853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:28.888879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:28.888890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:28.889204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:28.889210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
xId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:29.129781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:24:29.129796Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:24:29.129806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:24:29.129824Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:29.129829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-05-29T15:24:29.129834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-05-29T15:24:29.129838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-05-29T15:24:29.129845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-05-29T15:24:29.129893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.129901Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState 2025-05-29T15:24:29.129913Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:24:29.129917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:24:29.129923Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:24:29.129926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:24:29.129931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-05-29T15:24:29.129936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:24:29.129941Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:24:29.129945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:24:29.129956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:24:29.129961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:24:29.129966Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2025-05-29T15:24:29.129971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-05-29T15:24:29.129975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-05-29T15:24:29.129979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:24:29.130137Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:29.130149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:29.130154Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:24:29.130159Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:24:29.130166Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:24:29.130438Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:29.130457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:29.130462Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:24:29.130468Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:24:29.130473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:24:29.130613Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 
2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:29.130626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:24:29.130631Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:24:29.130636Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-05-29T15:24:29.130641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:29.130650Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-05-29T15:24:29.130989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:24:29.131337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:24:29.131358Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-05-29T15:24:29.131402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:24:29.131410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-05-29T15:24:29.131477Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:24:29.131499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:24:29.131504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:337:2327] TestWaitNotification: OK eventTxId 101 2025-05-29T15:24:29.131582Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:29.131611Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 39us result status StatusSuccess 2025-05-29T15:24:29.131694Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable 
CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Decimal(35,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExternalTableTest::DropTableTwice >> TCmsTest::VDisksEviction [GOOD] >> TExternalTableTest::ReadOnlyMode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:29.192443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:29.192469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:29.192475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:29.192480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:29.192490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:29.192494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:29.192505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:29.192519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:29.192623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:29.192700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:29.205789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:29.205813Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:29.205908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:29.208574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:29.208691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:29.208730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:29.210264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:29.210326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:29.210440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:29.210491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:29.210957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:29.211005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:29.211230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:29.211241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:29.211259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:29.211267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:29.211272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:29.211301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.212538Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: 
[1:15:2062] 2025-05-29T15:24:29.233524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:29.233602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.233660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:29.233711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:29.233721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.234363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:29.234389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:29.234433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.234443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:29.234449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:29.234455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:29.234890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.234902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:29.234908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:29.235270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.235281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.235287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:29.235294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:29.235912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:29.236290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:29.236333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:29.236505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:29.236530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:29.236537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:29.236602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:29.236608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:29.236637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:29.236648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:29.237038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:29.237047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.247914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-29T15:24:29.247926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:24:29.247931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:24:29.247937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:24:29.247941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:24:29.247945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-05-29T15:24:29.247951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:24:29.247956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:24:29.247960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:24:29.247971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:24:29.247977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-05-29T15:24:29.247981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-05-29T15:24:29.247987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:24:29.248115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:29.248128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:29.248133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:24:29.248138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-05-29T15:24:29.248142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:24:29.248520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 
PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:29.248546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:29.248553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:24:29.248558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:24:29.248564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:24:29.248581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-05-29T15:24:29.249102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:24:29.249390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-05-29T15:24:29.249439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:24:29.249448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:24:29.249524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:24:29.249544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:24:29.249549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:332:2322] TestWaitNotification: OK eventTxId 102 2025-05-29T15:24:29.249628Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:29.249656Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 39us result status StatusSuccess 2025-05-29T15:24:29.249734Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-05-29T15:24:29.250575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:29.250635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:427: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-05-29T15:24:29.250652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_external_table.cpp:312: [72057594046678944] TAlterExternalTable Propose: opId# 103:0, path# /MyRoot/UniqueName, ReplaceIfExists: 1 2025-05-29T15:24:29.250687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 103:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp:133, at schemeshard: 72057594046678944 2025-05-29T15:24:29.251176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 103, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp:133" TxId: 103 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-05-29T15:24:29.251219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: 
/MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp:133, operation: CREATE EXTERNAL TABLE, path: /MyRoot/UniqueName TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-05-29T15:24:29.251270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:24:29.251276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:24:29.251331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:24:29.251346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:24:29.251350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:340:2330] TestWaitNotification: OK eventTxId 103 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:28.985076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:28.985105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.985111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:28.985117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:28.985127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:28.985131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:28.985141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:28.985156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-05-29T15:24:28.985275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:28.985364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:29.001704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:29.001732Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:29.001849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:29.005140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:29.005269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:29.005310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:29.008120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:29.008198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:29.008349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:29.008404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:29.009019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:29.009070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:29.009328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:29.009339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:29.009358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:29.009366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:29.009372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:29.009404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.010786Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:29.033631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:29.033723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.033787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:29.033843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:29.033857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.037754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:29.037801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:29.037866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.037881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:29.037887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:29.037893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:29.042219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.042252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:29.042261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:29.047196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.047228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.047238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:29.047250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:29.048103Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:29.050932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:29.051000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:29.051225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:29.051268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:29.051280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:29.051366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:29.051378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:29.051414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:29.051428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:29.052053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:29.052065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
4046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:29.263746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:29.263751Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:24:29.263756Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:24:29.263760Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:24:29.263849Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:29.263856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:24:29.263859Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:24:29.263864Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-05-29T15:24:29.263866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:29.263872Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-05-29T15:24:29.264142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:24:29.264441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:24:29.264453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-05-29T15:24:29.264494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:24:29.264501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:24:29.264564Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 
2025-05-29T15:24:29.264579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:24:29.264582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:337:2327] TestWaitNotification: OK eventTxId 102 2025-05-29T15:24:29.264637Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:29.264657Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 29us result status StatusSuccess 2025-05-29T15:24:29.264713Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-05-29T15:24:29.265333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:29.265365Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:427: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: 
"value" Type: "Uint64" } } 2025-05-29T15:24:29.265373Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:300: [72057594046678944] TCreateExternalTable Propose: opId# 103:0, path# /MyRoot/ExternalTable 2025-05-29T15:24:29.265399Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132, at schemeshard: 72057594046678944 2025-05-29T15:24:29.265789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132" TxId: 103 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 102, at schemeshard: 72057594046678944 2025-05-29T15:24:29.265816Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-05-29T15:24:29.265859Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:24:29.265864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:24:29.265903Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:24:29.265913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:24:29.265916Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:345:2335] TestWaitNotification: OK eventTxId 103 2025-05-29T15:24:29.265956Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:29.265971Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 18us result status StatusSuccess 2025-05-29T15:24:29.266025Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" 
PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |65.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot >> TBackupCollectionTests::CreateAbsolutePath >> TBackupCollectionTests::HiddenByFeatureFlag |65.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot >> TCmsTenatsTest::TestTenantLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled >> KqpPg::TempTablesDrop [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled [GOOD] >> TExternalTableTest::DropTableTwice [GOOD] >> TClusterInfoTest::DeviceId [GOOD] >> TBackupCollectionTests::HiddenByFeatureFlag [GOOD] >> TCmsTest::ActionIssue >> TClusterInfoTest::FillInfo [GOOD] >> TBackupCollectionTests::DisallowedPath >> TCmsTenatsTest::CollectInfo >> KqpPg::TempTablesWithCache >> TCmsTenatsTest::TestClusterLimit >> TCmsTest::RequestRestartServicesOk >> TBackupCollectionTests::CreateAbsolutePath [GOOD] >> TExternalTableTest::ReadOnlyMode [GOOD] >> TCmsTest::CollectInfo >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction >> TBackupCollectionTests::DisallowedPath [GOOD] >> TBackupCollectionTests::Create >> TBackupCollectionTests::ParallelCreate >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] >> KqpPg::TempTablesWithCache [FAIL] >> KqpBatchDelete::SimplePartitions >> KqpBatchUpdate::Returning >> TBackupCollectionTests::Create [GOOD] >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb [GOOD] >> TCmsTest::ManageRequestsDry [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> KqpBatchUpdate::ManyPartitions_1 >> TBackupCollectionTests::ParallelCreate [GOOD] >> TBackupCollectionTests::CreateTwice >> TBackupCollectionTests::CreateTwice [GOOD] >> TBackupCollectionTests::BackupAbsentCollection >> TCmsTest::Notifications >> TBackupCollectionTests::Drop >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows [FAIL] >> KqpErrors::ResolveTableError [FAIL] >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem [GOOD] >> TCmsTenatsTest::CollectInfo [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] >> 
KqpBatchDelete::MultiStatement >> TConsoleTests::TestCreateTenantWrongName >> KqpPg::TableDeleteWhere+useSink >> TBackupCollectionTests::BackupAbsentCollection [GOOD] >> TBackupCollectionTests::Drop [GOOD] >> TBackupCollectionTests::DropTwice >> KqpPg::TableDeleteWhere+useSink [FAIL] >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable >> TCmsTenatsTest::RequestRestartServices >> TBackupCollectionTests::BackupDroppedCollection >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient >> KqpPg::TableDeleteWhere-useSink >> TBackupCollectionTests::DropTwice [GOOD] >> TBackupCollectionTests::BackupDroppedCollection [GOOD] >> TBackupCollectionTests::BackupAbsentDirs >> TBackupCollectionTests::TableWithSystemColumns >> TBackupCollectionTests::BackupAbsentDirs [GOOD] >> TBackupCollectionTests::TableWithSystemColumns [GOOD] >> KqpErrors::ProposeResultLost_RwTx+UseSink [FAIL] >> KqpErrors::ProposeError [FAIL] >> KqpErrors::ProposeErrorEvWrite >> TCmsTest::RequestRestartServicesOk [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] >> TBackupCollectionTests::BackupNonIncrementalCollection >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction [GOOD] >> TCmsTest::CollectInfo [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] >> KqpPg::TableDeleteWhere-useSink [FAIL] >> TCmsTenatsTest::TestClusterLimit [GOOD] >> KqpErrors::ProposeResultLost_RwTx-UseSink >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] >> TCmsTest::RequestRestartServicesReject >> TMaintenanceApiTest::CreateTime >> TCmsTest::DynamicConfig >> TCmsTenatsTest::RequestShutdownHost |65.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/test-results/unittest/{meta.json ... results_accumulator.log} |65.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest |65.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [FAIL] Test command err: 2025-05-29T15:24:25.829203Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:25.829233Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:25.829242Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002450/r3tmp/tmpYni7Sq/pdisk_1.dat 2025-05-29T15:24:25.928827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:25.942937Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:25.946787Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532265445358 != 1748532265445362 2025-05-29T15:24:25.988440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:25.988494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:25.999094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:26.072420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:26.294088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:736:2617], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:26.294134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:26.294150Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:26.295372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:26.465636Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:750:2625], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:26.515125Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:821:2665] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:26.592713Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:831:2674], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:26.593599Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTA4ZTZkYzMtNTg0MWI3NDMtMzAxNDc5ZDYtZDdlOTJmYzY=, ActorId: [1:734:2615], ActorState: ExecuteState, TraceId: 01jweaa19n8pgd4z4nj0besvb8, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13AB2F0C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C65E39) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x262F4B14) NKikimr::NTestSuiteExternalBlobsMultipleChannels::TTestCaseExtBlobsMultipleColumns::Execute_(NUnitTest::TTestContext&)+1032 (0x139A1518) NKikimr::NTestSuiteExternalBlobsMultipleChannels::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139A6417) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C67CEE) NKikimr::NTestSuiteExternalBlobsMultipleChannels::TCurrentTest::Execute()+481 (0x139A5D11) NUnitTest::TTestFactory::Execute()+803 (0x13C68463) NUnitTest::RunMain(int, char**)+3021 (0x13C7A00D) ??+0 (0x7F961D7CFD90) __libc_start_main+128 (0x7F961D7CFE40) _start+41 (0x129FC029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::SingleChannel [FAIL] Test command err: 2025-05-29T15:24:25.693099Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:25.693136Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:25.693150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002475/r3tmp/tmpkOhF1w/pdisk_1.dat 2025-05-29T15:24:25.793435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:25.806149Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:25.809120Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532265240140 != 1748532265240144 2025-05-29T15:24:25.850807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:25.850865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:25.861528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:25.934417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:26.146316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:736:2617], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:26.146346Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:26.146358Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:26.151145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:26.315084Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:750:2625], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:26.355493Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:821:2665] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:26.407849Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:831:2674], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:26.408799Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzMwYjAxNDYtY2YxMzg5Y2MtNWYzY2I4M2QtNzkzZTY4ZDE=, ActorId: [1:734:2615], ActorState: ExecuteState, TraceId: 01jweaa1510gktq7waq6mr8eay, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13AB2F0C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C65E39) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x262F4B14) NKikimr::NTestSuiteExternalBlobsMultipleChannels::TTestCaseSingleChannel::Execute_(NUnitTest::TTestContext&)+837 (0x139A2425) NKikimr::NTestSuiteExternalBlobsMultipleChannels::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139A6417) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C67CEE) NKikimr::NTestSuiteExternalBlobsMultipleChannels::TCurrentTest::Execute()+481 (0x139A5D11) NUnitTest::TTestFactory::Execute()+803 (0x13C68463) NUnitTest::RunMain(int, char**)+3021 (0x13C7A00D) ??+0 (0x7FCDD12F7D90) __libc_start_main+128 (0x7FCDD12F7E40) _start+41 (0x129FC029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::CheckPgAutoParams-useSink [FAIL] Test command err: Trying to start YDB, gRPC: 16050, MsgBus: 24310 2025-05-29T15:24:08.735658Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888823332781273:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:08.735696Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025bb/r3tmp/tmp8qwMNV/pdisk_1.dat 2025-05-29T15:24:08.781970Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16050, node 1 2025-05-29T15:24:08.799600Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:08.799613Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:08.799617Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:08.799664Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24310 TClient is connected to server localhost:24310 2025-05-29T15:24:08.836941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:08.836983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:08.838098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:08.862350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:09.097606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.165881Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2025-05-29T15:24:09.171342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.181154Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2025-05-29T15:24:09.188148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.201844Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} 
{"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2025-05-29T15:24:09.209063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.222534Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2025-05-29T15:24:09.230409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.243550Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill f f t t 2025-05-29T15:24:09.250759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.308212Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill f f t t 2025-05-29T15:24:09.321657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.334702Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2025-05-29T15:24:09.342234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.399881Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2025-05-29T15:24:09.412461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.425734Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-05-29T15:24:09.433271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.490819Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-05-29T15:24:09.502410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.516876Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-05-29T15:24:09.523285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.537231Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-05-29T15:24:09.544644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.602524Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-05-29T15:24:09.615554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.673073Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-05-29T15:24:09.685153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.698633Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-05-29T15:24:09.705691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.719632Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-05-29T15:24:09.726526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715722:0, at schemeshard: 72057594046644480 2025-05-29T15:24:09.739962Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetA ... 7 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:28.004197Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:28.005168Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:28.095692Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:28.097370Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:28.276109Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7509888908616378401:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:28.276126Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7509888908616378389:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:28.276133Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:28.277119Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:28.279540Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7509888908616378417:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:28.335248Z node 14 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [14:7509888908616378468:2326] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:28.340342Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:24:28.373176Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:28.418142Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:468: Get parsing result with error, self: [14:7509888908616378792:2390], owner: [14:7509888908616378377:2319], statement id: 0 2025-05-29T15:24:28.418186Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=14&id=MTQ4ZmIyNTMtNWYxNGM1NmUtZmUzYzM2MDktZGEwZTViNWU=, ActorId: [14:7509888908616378790:2389], ActorState: ExecuteState, TraceId: 01jweaa3c102qxs6m5wsnj33av, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-05-29T15:24:28.435348Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [14:7509888908616378817:2400], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple
: Error: At tuple
:1:1: Error: At function: PgWhere, At lambda
:2:55: Error: At function: PgOp
:2:55: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2025-05-29T15:24:28.435412Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=14&id=NGQyNjJhYmUtYWVmNGY2MTUtZTdkNDlmMTYtOTA4YmI2Mg==, ActorId: [14:7509888908616378814:2398], ActorState: ExecuteState, TraceId: 01jweaa3cg8cee5nnj7g60gqrg, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-05-29T15:24:28.439013Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [14:7509888908616378829:2406], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple
: Error: At tuple
:1:1: Error: At function: PgWhere, At lambda
:2:57: Error: At function: PgAnd
:2:67: Error: At function: PgOp
:2:67: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2025-05-29T15:24:28.439074Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=14&id=ZGJmZTgxYzYtYmMxZmU1MzEtZThmZDBmM2MtNGJhNWIzYWI=, ActorId: [14:7509888908616378826:2404], ActorState: ExecuteState, TraceId: 01jweaa3cmer8psj5drtj7adft, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-05-29T15:24:28.440688Z node 14 :KQP_EXECUTER CRIT: kqp_literal_executer.cpp:112: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jweaa3cq22zb06tr3pxg53a2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=MmE4Y2QwYmEtZGRhOTE1ZjAtZTdiM2ZlZTUtNDVhNWMwOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2025-05-29T15:24:28.440762Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2583: SessionId: ydb://session/3?node_id=14&id=MmE4Y2QwYmEtZGRhOTE1ZjAtZTdiM2ZlZTUtNDVhNWMwOWE=, ActorId: [14:7509888908616378838:2410], ActorState: ExecuteState, TraceId: 01jweaa3cq22zb06tr3pxg53a2, Create QueryResponse for error on request, msg: 2025-05-29T15:24:28.444711Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-05-29T15:24:28.455489Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-05-29T15:24:28.465410Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [14:7509888908616379006:2435], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: values have 3 columns, INSERT INTO expects: 2 2025-05-29T15:24:28.465466Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=14&id=MTNjMjIyYS04N2ZkZjQyOS1kNWMzZTNlMi1lMTEyYzQxYw==, ActorId: [14:7509888908616379003:2433], ActorState: ExecuteState, TraceId: 01jweaa3debdzpkcsyf8hwxf31, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-05-29T15:24:28.468197Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [14:7509888908616379018:2441], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Failed to convert type: List> to List>
:1:1: Error: Failed to convert 'id': pgunknown to Optional
:1:1: Error: Row type mismatch for table: db.[/Root/PgTable2] 2025-05-29T15:24:28.468250Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=14&id=NmYxMzIzYWItNTY1OGQyODUtNjNmYTNlMWUtNTAyMGIwMzM=, ActorId: [14:7509888908616379015:2439], ActorState: ExecuteState, TraceId: 01jweaa3djdb0c2fnzd52q5ms5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-05-29T15:24:28.486595Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [14:7509888908616379030:2447], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:28.486701Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=14&id=MjdiZjA3YzQtNDA3Y2Q4MjgtYmNhOWQ0NDktZmM4OGQ1NjQ=, ActorId: [14:7509888908616379027:2445], ActorState: ExecuteState, TraceId: 01jweaa3dm7b1z9xszke4x6nyp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4958, virtual void NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseCheckPgAutoParams::Execute_(NUnitTest::TTestContext &) [useSink = false]: (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (INTERNAL_ERROR != PRECONDITION_FAILED)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (I|PRECO)N(|DI)T(ER|IO)N(|_F)A(|I)L(_|)E(RROR|D) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:4958: Execute_ @ 0x15E7FB2E 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 8. ??:0: ?? @ 0x7FD23C39AD8F 9. ??:0: ?? @ 0x7FD23C39AE3F 10. ??:0: ?? @ 0x14D5E028 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple [FAIL] Test command err: 2025-05-29T15:24:25.922312Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:25.922348Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:25.922362Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002436/r3tmp/tmptNwFBv/pdisk_1.dat 2025-05-29T15:24:26.038083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:26.051748Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:26.055598Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532265499680 != 1748532265499684 2025-05-29T15:24:26.098132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:26.098170Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:26.109241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:26.187805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:26.397991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:736:2617], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:26.398023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:26.398036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:26.398942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:26.571715Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:750:2625], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:26.613149Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:821:2665] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:26.665883Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:831:2674], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:26.666438Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTk1OTRmZGUtZDdlNGU4YWQtZWIzMmYzNWYtM2Q0ZjU1YzA=, ActorId: [1:734:2615], ActorState: ExecuteState, TraceId: 01jweaa1cx1xsg9dv869sk4fw7, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13AB2F0C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C65E39) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x262F4B14) NKikimr::NTestSuiteExternalBlobsMultipleChannels::TTestCaseSimple::Execute_(NUnitTest::TTestContext&)+853 (0x13996B95) NKikimr::NTestSuiteExternalBlobsMultipleChannels::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139A6417) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C67CEE) NKikimr::NTestSuiteExternalBlobsMultipleChannels::TCurrentTest::Execute()+481 (0x139A5D11) NUnitTest::TTestFactory::Execute()+803 (0x13C68463) NUnitTest::RunMain(int, char**)+3021 (0x13C7A00D) ??+0 (0x7FE718F09D90) __libc_start_main+128 (0x7FE718F09E40) _start+41 (0x129FC029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithCompaction [FAIL] Test command err: 2025-05-29T15:24:25.879147Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:25.879180Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:25.879193Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00243b/r3tmp/tmpB5ajjs/pdisk_1.dat 2025-05-29T15:24:25.977453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:25.991457Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:25.995462Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532265558548 != 1748532265558552 2025-05-29T15:24:26.037044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:26.037076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:26.048520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:26.122519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:26.341926Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:736:2617], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:26.341958Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:26.341970Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:26.343049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:26.510855Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:750:2625], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:26.550874Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:821:2665] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:26.610854Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:831:2674], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:26.611422Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWFiNzU1MS0xOWUwNGY4Zi0yYmYyNzcxMi1kMTlkNjMwOA==, ActorId: [1:734:2615], ActorState: ExecuteState, TraceId: 01jweaa1b5ackjkhajg09c8e9t, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13AB2F0C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C65E39) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x262F4B14) NKikimr::NTestSuiteExternalBlobsMultipleChannels::TTestCaseWithCompaction::Execute_(NUnitTest::TTestContext&)+929 (0x13999D51) NKikimr::NTestSuiteExternalBlobsMultipleChannels::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139A6417) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C67CEE) NKikimr::NTestSuiteExternalBlobsMultipleChannels::TCurrentTest::Execute()+481 (0x139A5D11) NUnitTest::TTestFactory::Execute()+803 (0x13C68463) NUnitTest::RunMain(int, char**)+3021 (0x13C7A00D) ??+0 (0x7F7C59444D90) __libc_start_main+128 (0x7F7C59444E40) _start+41 (0x129FC029) |65.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConsoleTests::TestSetDefaultComputationalUnitsQuota [GOOD] >> TConsoleTests::TestTenantConfigConsistency >> TCmsTest::DynamicConfig [GOOD] >> TCmsTest::DisabledEvictVDisks |65.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::DropTableTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:29.786181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:29.786210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:29.786216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:29.786222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:29.786233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:29.786237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:29.786246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:29.786260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:29.786367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:29.786440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:29.799430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:29.799458Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:29.799547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:29.802301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:29.802426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:29.802468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:29.804146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:29.804205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:29.804323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:29.804377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:29.804830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:29.804879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:29.805118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:29.805127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:29.805146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:29.805154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:29.805159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:29.805191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.806512Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:29.825560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:29.825661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.825734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:29.825789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:29.825800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.826686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:29.826712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:29.826784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.826795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:29.826801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:29.826806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:29.827262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.827273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:29.827278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:29.827663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.827672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.827677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:29.827684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:29.828317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:29.828693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:29.828727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:29.828910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep 
Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:29.828933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:29.828940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:29.829011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:29.829018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:29.829066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:29.829078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:29.829489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:29.829497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
rd.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:29.847612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:29.847638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:24:29.847655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:24:29.847674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:29.847678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-05-29T15:24:29.847683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-05-29T15:24:29.847687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-05-29T15:24:29.847733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.847740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 103:0 ProgressState 2025-05-29T15:24:29.847754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:24:29.847758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:24:29.847764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:24:29.847767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:24:29.847772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-05-29T15:24:29.847777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:24:29.847782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:24:29.847787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:24:29.847799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:24:29.847803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:24:29.847808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2025-05-29T15:24:29.847813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-05-29T15:24:29.847817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-05-29T15:24:29.847821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-05-29T15:24:29.847891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:24:29.847901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:24:29.847907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:24:29.847911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:24:29.847915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:24:29.847969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:24:29.847975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:24:29.847984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:24:29.848076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:24:29.848085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:24:29.848089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:24:29.848093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 
72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:24:29.848096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:24:29.848298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:24:29.848310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:24:29.848314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:24:29.848318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-05-29T15:24:29.848322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:24:29.848334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-05-29T15:24:29.848856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:24:29.848880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:24:29.848998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:24:29.849066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-05-29T15:24:29.849121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:24:29.849130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:24:29.849198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:24:29.849215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:24:29.849220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:371:2361] TestWaitNotification: OK eventTxId 103 2025-05-29T15:24:29.849290Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: 
Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:29.849316Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 35us result status StatusPathDoesNotExist 2025-05-29T15:24:29.849355Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] Test command err: 2025-05-29T15:24:23.757664Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:23.758341Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:23.759885Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:23.759951Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:23.761176Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:23.761253Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:23.762331Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:23.762364Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:23.762409Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
2025-05-29T15:24:23.762505Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:23.762773Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:23.762810Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:23.762840Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:23.762881Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:23.793683Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:23.805276Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:23.805388Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.806436Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.806504Z node 1 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:23.806508Z node 1 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:23.806514Z node 1 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:23.806516Z node 1 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:23.806540Z node 1 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:23.806571Z node 1 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-05-29T15:24:23.808091Z node 1 :CMS DEBUG: sentinel.cpp:530: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/1/pdisk-1.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/2/pdisk-2.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/3/pdisk-3.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/4/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/5/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/6/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/7/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/8/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { 
NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 
GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-05-29T15:24:23.818436Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.849866Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.849928Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:23.891445Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:23.891499Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:23.891568Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:23.891906Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 
120026512 } Timestamp: 120026512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: ... -05-29T15:24:28.012723Z node 17 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:28.012828Z node 17 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:28.012857Z node 17 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user1-p-5, requestId# user1-r-4, owner# user1 2025-05-29T15:24:28.012865Z node 17 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12002 (18) (permission user1-p-5 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:28.012878Z node 17 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:28.012922Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user1-p-5, validity# 1970-01-01T00:03:00.855144Z, action# Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 2025-05-29T15:24:28.012933Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user1-r-4, owner# user1 2025-05-29T15:24:28.028272Z node 17 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:28.028357Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user1" RequestId: "user1-r-4" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user1-p-5" Action { Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 } Deadline: 180855144 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2025-05-29T15:24:28.028493Z node 17 :CMS INFO: cms.cpp:1366: Get all requests for user1 2025-05-29T15:24:28.028499Z node 17 :CMS DEBUG: cms.cpp:1392: Resulting status: OK 2025-05-29T15:24:28.028510Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user1" Command: LIST DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } } 2025-05-29T15:24:29.564272Z node 25 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:29.566985Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:29.569650Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:29.569738Z node 25 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:29.570111Z node 25 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:29.570155Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:29.570192Z node 25 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:29.570229Z node 25 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
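
For context on the storage layout dumped above: every group in the BaseConfig is ErasureSpecies "block-4-2" with eight VSlots, one per fail domain, and that erasure species keeps a group available while at most two domains are down. A small sketch of that availability rule (an assumption-level simplification for illustration, not BSC or CMS code):

    #include <array>
    #include <iostream>

    // A block-4-2 group keeps its data readable while at most two of the
    // eight fail domains are unavailable.
    static bool GroupAvailable(const std::array<bool, 8>& domainDown) {
        int down = 0;
        for (bool d : domainDown) {
            down += d ? 1 : 0;
        }
        return down <= 2;
    }

    int main() {
        std::array<bool, 8> state{};                  // all eight domains up
        state[0] = true;                              // restart one node
        std::cout << GroupAvailable(state) << "\n";   // 1: group still available
        state[1] = true;
        state[2] = true;                              // three domains down
        std::cout << GroupAvailable(state) << "\n";   // 0: degraded past parity
    }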
2025-05-29T15:24:29.570306Z node 25 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:29.570324Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:29.571687Z node 25 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:29.571746Z node 25 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:29.571786Z node 25 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:29.571811Z node 25 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:29.584093Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: false EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:29.618228Z node 25 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:29.618361Z node 25 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:29.618392Z node 25 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:29.618451Z node 25 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:29.618538Z node 25 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:29.618545Z node 25 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:29.618554Z node 25 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:29.618559Z node 25 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:29.618567Z node 25 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:29.618666Z node 25 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-05-29T15:24:29.618988Z node 25 :CMS DEBUG: sentinel.cpp:530: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 25 PDiskId: 25 Path: "/25/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 26 PDiskId: 26 Path: "/26/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 27 PDiskId: 27 Path: "/27/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 28 PDiskId: 28 Path: "/28/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 29 PDiskId: 29 Path: "/29/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 30 PDiskId: 30 Path: "/30/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 31 PDiskId: 31 Path: "/31/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 32 PDiskId: 32 Path: "/32/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1002 } GroupId: 2 GroupGeneration: 
1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1000 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1000 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1000 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1000 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1000 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1000 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1001 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1001 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1001 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1001 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1001 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1001 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 
ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1002 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1002 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1002 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1002 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1002 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1002 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1003 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1003 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1003 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1003 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1003 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1003 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1003 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1003 } } } } Success: true 2025-05-29T15:24:29.661058Z node 25 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:29.661133Z node 25 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:29.661353Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: WRONG_REQUEST Reason: "Unsupported: feature flag EnableCMSRequestPriorities is off" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] Test command err: 2025-05-29T15:24:23.153639Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:23.154069Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:23.156782Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:23.156854Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:23.157212Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:23.157256Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:23.157954Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:23.158026Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:23.158064Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
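
The WRONG_REQUEST response just above is the behavior TCmsTest::EnableCMSRequestPrioritiesFeatureFlag exercises: a permission request carrying a Priority (-80 here) is refused while the EnableCMSRequestPriorities feature flag is off. A minimal sketch of such a gate (struct and function names below are hypothetical, not the actual cms.cpp check):

    #include <iostream>
    #include <optional>
    #include <string>

    struct TPermissionRequest {
        std::optional<int> Priority;  // the logged request carries Priority: -80
    };

    struct TResult {
        std::string Code;
        std::string Reason;
    };

    // If the request sets a priority while the feature flag is off, refuse it.
    static TResult CheckPriorityGate(const TPermissionRequest& req,
                                     bool enableCmsRequestPriorities) {
        if (req.Priority.has_value() && !enableCmsRequestPriorities) {
            return {"WRONG_REQUEST",
                    "Unsupported: feature flag EnableCMSRequestPriorities is off"};
        }
        return {"ALLOW", ""};
    }

    int main() {
        TPermissionRequest req;
        req.Priority = -80;
        TResult r = CheckPriorityGate(req, /*enableCmsRequestPriorities=*/false);
        std::cout << r.Code << ": " << r.Reason << "\n";  // WRONG_REQUEST: Unsupported: ...
    }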
2025-05-29T15:24:23.158152Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:23.158632Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:23.158715Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:23.158763Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:23.158813Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:23.190376Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:23.204206Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:23.204314Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.205703Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.205793Z node 1 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:23.205799Z node 1 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:23.205807Z node 1 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:23.205811Z node 1 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:23.205818Z node 1 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:23.205845Z node 1 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-05-29T15:24:23.207542Z node 1 :CMS DEBUG: sentinel.cpp:530: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2025-05-29T15:24:23.217853Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.251127Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.251179Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:23.251408Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 } } } 2025-05-29T15:24:23.251461Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.294449Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:23.294584Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:23.294791Z node 
1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120026512 } Timestamp: 120026512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120026512 } Timestamp: 120026512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120026512 } Timestamp: 120026512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120026512 } Timestamp: 120026512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120026512 } Timestamp: 120026512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120026512 } Timestamp: 120026512 NodeId: 6 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120026512 } Timestamp: 120026512 NodeId: 7 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120026512 } Timestamp: 120026512 NodeId: 8 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120026512 } } 2025-05-29T15:24:23.330653Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:23.373100Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.373216Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 10 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 } 2025-05-29T15:24:23.373234Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:23.407312Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:23.407344Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:23.407370Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:23.407424Z 
node 1 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:23.407433Z node 1 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2025-05-29T15:24:23.407446Z node 1 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 10, locked nodes: 0, down nodes: 0 2025-05-29T15:24:23.407454Z node 1 :CMS DEBUG: cms.cpp:729: Ring: 0; State: Ok 2025-05-29T15:24:23.407457Z node 1 :CMS DEBUG: cms.cpp:729: Ring: 1; State: Ok 2025-05-29T15:24:23.407460Z node 1 :CMS DEBUG: cms.cpp:729: Ring: 2; State: Ok 2025-05-29T15:24:23.407463Z node 1 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:23.407478Z node 1 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-05-29T15:24:23.407485Z node 1 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:23.407495Z node 1 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:23.407547Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.127512Z, action# Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2025-05-29T15:24:23.418482Z node 1 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:23.418554Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } Deadline: 180127512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2025-05-29T15:24:23.418562Z node 1 :CMS DEBUG: cms.cpp:1064: Schedule cleanup at 1970-01-01T00:05:00.127512Z 2025-05-29T15:24:23.441441Z node 1 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:23.441530Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:23.441552Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:23.441567Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:23.441628Z node 1 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:23.441638Z node 1 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2025-05-29T15:24:23.441651Z node 1 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 0, with ratio limit: 10, locked nodes: 1, down nodes: 0 2025-05-29T15:24:23.441665Z node 1 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: 
Cannot lock node '2': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 10%) 2025-05-29T15:24:23.441679Z node 1 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:23.455143Z node 1 :CMS DEBU ... ed: 1, down: 0, total: 8, limit: 20%" } 2025-05-29T15:24:30.046324Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 0, down nodes: 0 2025-05-29T15:24:30.046328Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:30.046333Z node 25 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:30.046343Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 20%" } 2025-05-29T15:24:30.046346Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 1, down nodes: 0 2025-05-29T15:24:30.046355Z node 25 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Cannot lock node '27': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 20%) 2025-05-29T15:24:30.046371Z node 25 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-2, requestId# user-r-1, owner# user 2025-05-29T15:24:30.046377Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:30.046387Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:30.046421Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.537560Z, action# Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 2025-05-29T15:24:30.046439Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. 
Locked: 1, down: 0, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:30.057580Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:30.057699Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_FORCE_RESTART }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 } Deadline: 180537560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2025-05-29T15:24:30.057867Z node 25 :CMS INFO: cms.cpp:1326: User user is done with permissions user-p-2 2025-05-29T15:24:30.057879Z node 25 :CMS DEBUG: cms.cpp:1349: Resulting status: OK 2025-05-29T15:24:30.057897Z node 25 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2025-05-29T15:24:30.057931Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2025-05-29T15:24:30.068892Z node 25 :CMS DEBUG: cms_tx_remove_permissions.cpp:79: TTxRemovePermissions Complete 2025-05-29T15:24:30.068972Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-05-29T15:24:30.091866Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:30.091963Z node 25 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:30.092022Z node 25 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:30.092033Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 20%" } 2025-05-29T15:24:30.092049Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 0, down nodes: 1 2025-05-29T15:24:30.092060Z node 25 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Cannot lock node '27': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%) 2025-05-29T15:24:30.092080Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:30.092114Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. 
Locked: 0, down: 1, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:30.107141Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:30.107171Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:30.107235Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } RequestId: "user-r-1" Deadline: 420640584 } 2025-05-29T15:24:30.191775Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:30.191871Z node 25 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:30.191932Z node 25 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-05-29T15:24:30.191945Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } 2025-05-29T15:24:30.191956Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 0, down nodes: 1 2025-05-29T15:24:30.191968Z node 25 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Cannot lock node '27': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%) 2025-05-29T15:24:30.191988Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:30.192020Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-05-29T15:24:30.203959Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:30.203988Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:30.204044Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'27\': too many unavailable nodes. 
Locked: 0, down: 1, total: 8, limit: 20%" } RequestId: "user-r-1" Deadline: 420742096 } 2025-05-29T15:24:30.229658Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:30.229738Z node 25 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:30.229792Z node 25 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:30.229805Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } 2025-05-29T15:24:30.229816Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 0, down nodes: 1 2025-05-29T15:24:30.229821Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-05-29T15:24:30.229827Z node 25 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:30.229847Z node 25 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-3, requestId# user-r-1, owner# user 2025-05-29T15:24:30.229856Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12003 (27) (permission user-p-3 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:30.229867Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:30.229891Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.843608Z, action# Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 2025-05-29T15:24:30.229901Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2025-05-29T15:24:30.243531Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:30.243557Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:30.243629Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_FORCE_RESTART }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-3" Action { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 } Deadline: 180843608 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 27 InterconnectPort: 12003 } } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:29.968450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:29.968477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:29.968519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:29.968525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:29.968531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:29.968535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:29.968545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:29.968559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:29.968671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:29.968745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:29.979097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:29.979118Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:29.982111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:29.982222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:29.982258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:29.983543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:29.983910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:29.984037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:29.984109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:29.984793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:29.984840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:29.985131Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:29.985148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:29.985172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:29.985183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:29.985189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:29.985231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.986889Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:30.010211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:30.010307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:30.010380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:30.010428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:30.010441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:30.011428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:30.011462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:30.011528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:30.011553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:30.011559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
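
[Aside] The entries that follow trace the alter-subdomain suboperation stepping through its numbered internal states ("Change state for txid 1:0 2 -> 3", "3 -> 128", "128 -> 240"): parts are created, parts are configured, the proposal waits for a coordinator plan step, and the operation completes. As a loose illustration of that progression only — the state numbers and stage names are taken from the log; every identifier below is hypothetical and is not YDB's real code:

```cpp
// Hypothetical sketch of the stage progression visible in the trace;
// only the numeric states and stage names come from the log itself.
#include <cstdio>

enum class EStage : int {
    CreateParts    = 2,   // TCreateParts ProgressState ("no shards to create")
    ConfigureParts = 3,   // NSubDomainState::TConfigureParts
    Propose        = 128, // NSubDomainState::TPropose, waits for the plan step
    Done           = 240, // operation complete, side effects applied
};

static EStage Next(EStage s) {
    switch (s) {
        case EStage::CreateParts:    return EStage::ConfigureParts;
        case EStage::ConfigureParts: return EStage::Propose;
        case EStage::Propose:        return EStage::Done;
        default:                     return EStage::Done;
    }
}

int main() {
    // Reproduces the sequence of "Change state" lines in the log above.
    for (EStage s = EStage::CreateParts; s != EStage::Done; s = Next(s))
        std::printf("Change state %d -> %d\n", (int)s, (int)Next(s));
    return 0;
}
```
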
2025-05-29T15:24:30.011565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:30.012092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:30.012108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:30.012115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:30.012509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:30.012521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:30.012528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:30.012535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:30.013151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:30.013596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:30.013643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:30.013855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:30.013886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:30.013894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:30.014035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:30.014047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:30.014083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:30.014100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:30.014567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:30.014577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:30.014621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... BUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:301:2291] message: TxId: 105 2025-05-29T15:24:31.824888Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-05-29T15:24:31.824895Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 105:0 2025-05-29T15:24:31.824900Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 105:0 2025-05-29T15:24:31.824914Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-29T15:24:31.824919Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 105:1 2025-05-29T15:24:31.824923Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 105:1 2025-05-29T15:24:31.824944Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-05-29T15:24:31.824948Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:24:31.825254Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:24:31.825275Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [7:301:2291] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 105 at schemeshard: 72057594046678944 2025-05-29T15:24:31.825313Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-29T15:24:31.825320Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:529:2489] 2025-05-29T15:24:31.825356Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [7:531:2491], Recipient [7:129:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:24:31.825362Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event 
TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:24:31.825366Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-05-29T15:24:31.825472Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [7:597:2555], Recipient [7:129:2153]: {TEvModifySchemeTransaction txid# 106 TabletId# 72057594046678944} 2025-05-29T15:24:31.825477Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:24:31.826281Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpBackupIncrementalBackupCollection BackupIncrementalBackupCollection { Name: ".backups/collections/MyCollection1" } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:31.826364Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental, operationId: 106:0, at schemeshard: 72057594046678944 2025-05-29T15:24:31.826396Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 4], parent name: MyCollection1, child name: 19700101000000Z_incremental, child id: [OwnerId: 72057594046678944, LocalPathId: 8], at schemeshard: 72057594046678944 2025-05-29T15:24:31.826410Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2025-05-29T15:24:31.826424Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:31.826436Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 106:1, explain: Incremental backup is disabled on this collection, at schemeshard: 72057594046678944 2025-05-29T15:24:31.826442Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 106:2, propose status:StatusInvalidParameter, reason: Incremental backup is disabled on this collection, at schemeshard: 72057594046678944 2025-05-29T15:24:31.826992Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:151: Abort operation: IgniteOperation fail to propose a part, opId: 106:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusInvalidParameter, with reason: Incremental backup is disabled on this collection, tx message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpBackupIncrementalBackupCollection BackupIncrementalBackupCollection { Name: ".backups/collections/MyCollection1" } } TxId: 106 TabletId: 72057594046678944 2025-05-29T15:24:31.827022Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:275: MkDir AbortPropose, opId: 106:0, at schemeshard: 72057594046678944 2025-05-29T15:24:31.827056Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:24:31.827480Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: 
TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Incremental backup is disabled on this collection" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:31.827508Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Incremental backup is disabled on this collection, operation: BACKUP INCREMENTAL, path: /MyRoot/.backups/collections/MyCollection1 2025-05-29T15:24:31.827514Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-05-29T15:24:31.827581Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-05-29T15:24:31.827588Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-05-29T15:24:31.827649Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [7:603:2561], Recipient [7:129:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:31.827655Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:31.827659Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:24:31.827681Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [7:301:2291], Recipient [7:129:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2025-05-29T15:24:31.827686Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:24:31.827697Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-05-29T15:24:31.827716Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-05-29T15:24:31.827721Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:601:2559] 2025-05-29T15:24:31.827744Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [7:603:2561], Recipient [7:129:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:24:31.827748Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:24:31.827753Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 2025-05-29T15:24:31.827812Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [7:604:2562], Recipient [7:129:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-05-29T15:24:31.827816Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event 
TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:24:31.827827Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:31.827860Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 32us result status StatusSuccess 2025-05-29T15:24:31.827966Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1" PathDescription { Self { Name: "MyCollection1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeBackupCollection CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 BackupCollectionVersion: 0 } ChildrenExist: true } Children { Name: "19700101000000Z_full" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } BackupCollectionDescription { Name: "MyCollection1" ExplicitEntryList { Entries { Type: ETypeTable Path: "/MyRoot/Table1" } } Cluster { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCmsTest::Notifications [GOOD] >> TCmsTest::Mirror3dcPermissions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [FAIL] Test command err: 2025-05-29T15:24:25.383147Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:25.383196Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:25.383214Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00248c/r3tmp/tmpxtnob2/pdisk_1.dat 2025-05-29T15:24:25.503424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:25.518502Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:25.524194Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532264925986 != 1748532264925990 2025-05-29T15:24:25.566849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:25.566904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:25.577653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:25.651969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:25.861615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2025-05-29T15:24:26.092156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:808:2666], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:26.092198Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:818:2671], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:26.092216Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:26.093336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:26.231060Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:822:2674], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:26.277196Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:878:2711] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:26.332621Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:888:2720], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:26.333607Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2FkY2E0YzgtZDQ3MDMyMzUtOTA1OWU4MDktY2NiZGU2NjQ=, ActorId: [1:806:2664], ActorState: ExecuteState, TraceId: 01jweaa13bf08mxg3b0pssfxx3, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13AB2F0C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C65E39) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x262F4B14) NKikimr::NTestSuiteExternalBlobsMultipleChannels::TTestCaseWithNewColumnFamilyAndCompaction::Execute_(NUnitTest::TTestContext&)+7431 (0x1399DBD7) NKikimr::NTestSuiteExternalBlobsMultipleChannels::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139A6417) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C67CEE) NKikimr::NTestSuiteExternalBlobsMultipleChannels::TCurrentTest::Execute()+481 (0x139A5D11) NUnitTest::TTestFactory::Execute()+803 (0x13C68463) NUnitTest::RunMain(int, char**)+3021 (0x13C7A00D) ??+0 (0x7FDDD14E2D90) __libc_start_main+128 (0x7FDDD14E2E40) _start+41 (0x129FC029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] Test command err: 2025-05-29T15:24:22.662918Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:22.663434Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:22.666318Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:22.666397Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:22.666789Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:22.666830Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:22.667525Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:22.667604Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:22.667652Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
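
[Aside] The "too many unavailable nodes" verdicts scattered through the CMS traces earlier in this output all follow one counting rule: a host can be locked only while locked-plus-down hosts stay within the configured ratio of the cluster, with at least one host always lockable (which is why the first SHUTDOWN_HOST is allowed even though 1 of 8 nodes already exceeds a 10% limit), and a ratio of 0 meaning the check is skipped (the MODE_FORCE_RESTART path re-checks with "ratio limit: 0" and allows). This is a minimal sketch of that arithmetic, inferred purely from the logged counters rather than taken from node_checkers.cpp:

```cpp
// Sketch of the ratio check implied by log lines such as
// "Checking Node: 2 ... ratio limit: 10, locked nodes: 1, down nodes: 0"
// followed by "Locked: 1, down: 0, total: 8, limit: 10%".
// ASSUMPTIONS (inferred, not confirmed): a floor of one lockable host,
// and ratio 0 treated as "check disabled".
#include <algorithm>
#include <cstdio>

bool CanLockNode(int locked, int down, int total, int ratioLimitPercent) {
    if (ratioLimitPercent == 0)   // MODE_FORCE_RESTART re-checks with 0 -> ALLOW
        return true;
    int allowed = std::max(1, total * ratioLimitPercent / 100);
    return locked + down + 1 <= allowed;
}

int main() {
    std::printf("%d\n", CanLockNode(0, 0, 8, 10)); // 1: user-p-1 ALLOW
    std::printf("%d\n", CanLockNode(1, 0, 8, 10)); // 0: node 2 DISALLOW_TEMP
    std::printf("%d\n", CanLockNode(1, 0, 8, 20)); // 0: node 27 DISALLOW_TEMP
    std::printf("%d\n", CanLockNode(0, 1, 8, 20)); // 0: node 27 still blocked
    return 0;
}
```
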
2025-05-29T15:24:22.667746Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:22.668233Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:22.668334Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:22.668371Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:22.668407Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:22.700044Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:22.716961Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:22.717192Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:22.724102Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:22.724314Z node 1 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:22.724325Z node 1 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:22.724336Z node 1 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:22.724341Z node 1 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:22.724351Z node 1 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:22.724397Z node 1 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-05-29T15:24:22.736034Z node 1 :CMS DEBUG: sentinel.cpp:530: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/1/pdisk-1.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/2/pdisk-2.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/3/pdisk-3.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/4/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/5/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/6/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/7/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/8/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { 
NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 
GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-05-29T15:24:22.746623Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:22.795621Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:22.795695Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:22.840016Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:22.840063Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:22.840146Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:22.840502Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 
120026512 } Timestamp: 120026512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: ... skId: 107 CreateTime: 0 ChangeTime: 0 Path: "/35/pdisk-107.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120126 2025-05-29T15:24:29.375867Z node 25 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 36, response# PDiskStateInfo { PDiskId: 108 CreateTime: 0 ChangeTime: 0 Path: "/36/pdisk-108.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 109 CreateTime: 0 ChangeTime: 0 Path: "/36/pdisk-109.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 110 CreateTime: 0 ChangeTime: 0 Path: "/36/pdisk-110.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120126 2025-05-29T15:24:29.375879Z node 25 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 37, response# PDiskStateInfo { PDiskId: 111 CreateTime: 0 ChangeTime: 0 Path: "/37/pdisk-111.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 112 CreateTime: 0 ChangeTime: 0 Path: "/37/pdisk-112.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 113 CreateTime: 0 ChangeTime: 0 Path: "/37/pdisk-113.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120126 2025-05-29T15:24:29.375893Z node 25 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 38, response# PDiskStateInfo { PDiskId: 114 CreateTime: 0 ChangeTime: 0 Path: "/38/pdisk-114.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 115 CreateTime: 0 ChangeTime: 0 Path: "/38/pdisk-115.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 116 CreateTime: 0 ChangeTime: 0 Path: "/38/pdisk-116.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120126 2025-05-29T15:24:29.375906Z node 25 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 39, response# PDiskStateInfo { PDiskId: 117 CreateTime: 0 ChangeTime: 0 Path: "/39/pdisk-117.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 118 CreateTime: 0 ChangeTime: 0 Path: "/39/pdisk-118.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 119 CreateTime: 0 ChangeTime: 0 Path: "/39/pdisk-119.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120126 2025-05-29T15:24:29.375919Z node 25 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 33, response# PDiskStateInfo { PDiskId: 99 CreateTime: 0 ChangeTime: 0 Path: "/33/pdisk-99.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 100 CreateTime: 0 ChangeTime: 0 Path: "/33/pdisk-100.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo 
{ PDiskId: 101 CreateTime: 0 ChangeTime: 0 Path: "/33/pdisk-101.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120126 2025-05-29T15:24:29.375931Z node 25 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 78 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-78.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 79 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-79.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 80 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-80.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120126 2025-05-29T15:24:29.375943Z node 25 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 81 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-81.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 82 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-82.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 83 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-83.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120126 2025-05-29T15:24:29.375955Z node 25 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 84 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-84.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 85 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-85.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 86 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-86.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120126 2025-05-29T15:24:29.375969Z node 25 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 87 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-87.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 88 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-88.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 89 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-89.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120126 2025-05-29T15:24:29.375981Z node 25 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 90 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-90.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 91 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-91.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 92 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-92.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120126 2025-05-29T15:24:29.375993Z node 25 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle 
TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 93 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-93.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 94 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-94.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 95 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-95.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120126 2025-05-29T15:24:29.376005Z node 25 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 96 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-96.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 97 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-97.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 98 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-98.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120126 2025-05-29T15:24:29.376013Z node 25 :CMS DEBUG: sentinel.cpp:960: [Sentinel] [Main] State was updated in 0.000000s 2025-05-29T15:24:29.389031Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:29.389159Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: REPLACE_DEVICES Host: "25" Devices: "pdisk-25-75" Devices: "pdisk-25-76" Devices: "pdisk-25-77" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: REPLACE_DEVICES Host: "25" Devices: "pdisk-25-75" Devices: "pdisk-25-76" Devices: "pdisk-25-77" Duration: 60000000 } Deadline: 180126512 } } 2025-05-29T15:24:29.389175Z node 25 :CMS DEBUG: cms.cpp:1064: Schedule cleanup at 1970-01-01T00:05:00.126512Z 2025-05-29T15:24:29.424171Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for PDisk 25:76 (::1:/25/pdisk-76.data) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:29.424207Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for PDisk 25:75 (::1:/25/pdisk-75.data) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:29.424215Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for PDisk 25:77 (::1:/25/pdisk-77.data) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:29.424526Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:29.424557Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:29.424574Z node 25 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:29.425408Z node 25 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:29.425425Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 2025-05-29T15:24:29.425446Z node 25 :CMS DEBUG: 
node_checkers.cpp:101: [Nodes Counter] Checking Node: 34, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:29.425588Z node 25 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:29.425622Z node 25 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-05-29T15:24:29.425633Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12010 (34) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:29.425653Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:29.425711Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.228024Z, action# Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 2025-05-29T15:24:29.437495Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:29.437636Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Deadline: 180228024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 34 InterconnectPort: 12010 } } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] Test command err: 2025-05-29T15:24:23.410892Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:23.412858Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:23.413707Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:23.413739Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:23.414141Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:23.414289Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:23.415495Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:23.415521Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:23.415554Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
2025-05-29T15:24:23.415628Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:23.416424Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:23.416542Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:23.416576Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:23.416606Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:23.448745Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:23.460873Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:23.460994Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.462472Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.462558Z node 1 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:23.462565Z node 1 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:23.462573Z node 1 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:23.462577Z node 1 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:23.462609Z node 1 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:23.462640Z node 1 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-05-29T15:24:23.464735Z node 1 :CMS DEBUG: sentinel.cpp:530: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/1/pdisk-1.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/2/pdisk-2.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/3/pdisk-3.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/4/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/5/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/6/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/7/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/8/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { 
NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 
GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-05-29T15:24:23.475708Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.509338Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.509411Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:23.549926Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:23.549961Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:23.550022Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:05:00Z 2025-05-29T15:24:23.550250Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300025512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 300025512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 300025512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 300025512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 300025512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 300025512 } Timestamp: 300025512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300025512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 300025512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 300025512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 300025512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 300025512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 300025512 } Timestamp: 300025512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300025512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 300025512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 300025512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 300025512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 300025512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 300025512 } Timestamp: 300025512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300025512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 300025512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 300025512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 300025512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 300025512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 
300025512 } Timestamp: 300025512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: ... 1-1-0-7-0" State: UP Timestamp: 300026512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 300026512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 300026512 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 300026512 } Timestamp: 300026512 NodeId: 32 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 300026512 } } 2025-05-29T15:24:29.326144Z node 25 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "25" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:29.326154Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "25" Duration: 60000000 2025-05-29T15:24:29.326163Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 25, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:29.326200Z node 25 :CMS DEBUG: cms.cpp:729: Ring: 0; State: Ok 2025-05-29T15:24:29.326204Z node 25 :CMS DEBUG: cms.cpp:729: Ring: 1; State: Ok 2025-05-29T15:24:29.326207Z node 25 :CMS DEBUG: cms.cpp:729: Ring: 2; State: Ok 2025-05-29T15:24:29.326211Z node 25 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:29.326220Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 2025-05-29T15:24:29.326225Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:29.326242Z node 25 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: You cannot get two or more disks from the same group at the same time in partial permissions allowed mode) 2025-05-29T15:24:29.326249Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 2025-05-29T15:24:29.326253Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:29.326263Z node 25 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: You cannot get two or more disks from the same group at the same time in partial permissions allowed mode) 2025-05-29T15:24:29.326278Z node 25 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-05-29T15:24:29.326286Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:06:00Z) 2025-05-29T15:24:29.326297Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:29.326332Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:06:00.026512Z, action# Type: SHUTDOWN_HOST Host: "25" Duration: 60000000 2025-05-29T15:24:29.326359Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 
Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:29.370846Z node 25 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:29.416353Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:29.416453Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "25" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "25" Duration: 60000000 } Deadline: 360026512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12001 } } } } 2025-05-29T15:24:29.416465Z node 25 :CMS DEBUG: cms.cpp:1064: Schedule cleanup at 1970-01-01T00:08:00.026512Z 2025-05-29T15:24:29.443386Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:06:00Z) 2025-05-29T15:24:29.443530Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:29.443623Z node 25 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:05:00Z 2025-05-29T15:24:29.443826Z node 25 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:29.443839Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } 2025-05-29T15:24:29.443853Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:29.443905Z node 25 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:29.443921Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } 2025-05-29T15:24:29.443926Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, 
down nodes: 0 2025-05-29T15:24:29.443945Z node 25 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: You cannot get two or more disks from the same group at the same time in partial permissions allowed mode) 2025-05-29T15:24:29.443970Z node 25 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-2, requestId# user-r-1, owner# user 2025-05-29T15:24:29.443978Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:06:00Z) 2025-05-29T15:24:29.443990Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:29.444041Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:06:00.129024Z, action# Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 2025-05-29T15:24:29.444067Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:29.455258Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:29.455301Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:29.455391Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_FORCE_RESTART }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 } Deadline: 360129024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2025-05-29T15:24:29.477403Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:06:00Z) 2025-05-29T15:24:29.477436Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:06:00Z) 2025-05-29T15:24:29.477547Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:29.477643Z node 25 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:05:00Z 2025-05-29T15:24:29.477815Z node 25 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:29.477826Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } 2025-05-29T15:24:29.477842Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2025-05-29T15:24:29.477895Z node 25 
:CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:29.477920Z node 25 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-3, requestId# user-r-1, owner# user 2025-05-29T15:24:29.477928Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12003 (27) (permission user-p-3 until 1970-01-01T00:06:00Z) 2025-05-29T15:24:29.477939Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:29.477968Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:06:00.230536Z, action# Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 2025-05-29T15:24:29.477980Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2025-05-29T15:24:29.499143Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:29.499181Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:29.499283Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_FORCE_RESTART }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-3" Action { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 } Deadline: 360230536 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 27 InterconnectPort: 12003 } } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEviction [GOOD] Test command err: 2025-05-29T15:24:23.214956Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:23.217045Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:23.218176Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:23.218217Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:23.218777Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:23.218964Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:23.220195Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:23.220219Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:23.220251Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
2025-05-29T15:24:23.220320Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:23.221128Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:23.221251Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:23.221277Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:23.221305Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:23.251527Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:23.263258Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:23.263375Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.264751Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.264832Z node 1 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:23.264839Z node 1 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:23.264847Z node 1 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:23.264851Z node 1 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:23.264881Z node 1 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:23.264910Z node 1 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-05-29T15:24:23.267004Z node 1 :CMS DEBUG: sentinel.cpp:530: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/1/pdisk-1.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/2/pdisk-2.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/3/pdisk-3.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/4/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/5/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/6/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/7/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/8/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { 
NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 
GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-05-29T15:24:23.277563Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.310103Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.310161Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:23.352295Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:23.352349Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:23.352439Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:23.352821Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120025512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120025512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120025512 } Timestamp: 120025512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120025512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120025512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120025512 } Timestamp: 120025512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120025512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120025512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120025512 } Timestamp: 120025512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120025512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120025512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 
120025512 } Timestamp: 120025512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: ... "storage" State: UP Version: "-1" Timestamp: 120542048 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120542048 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120542048 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120542048 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120542048 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120542048 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 23 InterconnectPort: 
12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120542048 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120542048 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120542048 } 2025-05-29T15:24:28.067873Z node 18 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-05-29T15:24:28.067880Z node 18 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2025-05-29T15:24:28.067887Z node 18 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2025-05-29T15:24:28.067910Z node 18 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:28.067967Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-3, owner# user, order# 3, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-05-29T15:24:28.067973Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2025-05-29T15:24:28.068026Z node 18 :CMS DEBUG: sentinel.cpp:944: [Sentinel] [Main] Config was updated in 0.100000s 2025-05-29T15:24:28.068033Z node 18 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start StateUpdater 2025-05-29T15:24:28.068048Z node 18 :CMS DEBUG: sentinel.cpp:683: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-05-29T15:24:28.068053Z node 18 :CMS DEBUG: sentinel.cpp:683: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-05-29T15:24:28.068056Z node 18 :CMS DEBUG: sentinel.cpp:683: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-05-29T15:24:28.068059Z node 18 :CMS DEBUG: sentinel.cpp:683: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-05-29T15:24:28.068062Z node 18 :CMS DEBUG: sentinel.cpp:683: [Sentinel] [StateUpdater] Request 
pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-05-29T15:24:28.068065Z node 18 :CMS DEBUG: sentinel.cpp:683: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 23, wbId# [23:8388350642965737326:1634689637] 2025-05-29T15:24:28.068067Z node 18 :CMS DEBUG: sentinel.cpp:683: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 24, wbId# [24:8388350642965737326:1634689637] 2025-05-29T15:24:28.068070Z node 18 :CMS DEBUG: sentinel.cpp:683: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2025-05-29T15:24:28.068149Z node 18 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 18 CreateTime: 120443560 ChangeTime: 120443560 Path: "/18/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120542 2025-05-29T15:24:28.068204Z node 18 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 20 CreateTime: 120443560 ChangeTime: 120443560 Path: "/20/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120542 2025-05-29T15:24:28.068214Z node 18 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 21 CreateTime: 120443560 ChangeTime: 120443560 Path: "/21/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120542 2025-05-29T15:24:28.068221Z node 18 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 22, response# PDiskStateInfo { PDiskId: 22 CreateTime: 120443560 ChangeTime: 120443560 Path: "/22/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120542 2025-05-29T15:24:28.068229Z node 18 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 23, response# PDiskStateInfo { PDiskId: 23 CreateTime: 120443560 ChangeTime: 120443560 Path: "/23/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120542 2025-05-29T15:24:28.068236Z node 18 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 24 CreateTime: 120443560 ChangeTime: 120443560 Path: "/24/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120542 2025-05-29T15:24:28.068244Z node 18 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 120443560 ChangeTime: 120443560 Path: "/25/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120542 2025-05-29T15:24:28.068251Z node 18 :CMS DEBUG: sentinel.cpp:693: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 19 CreateTime: 120443560 ChangeTime: 120443560 Path: "/19/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120542 2025-05-29T15:24:28.068257Z node 18 :CMS DEBUG: sentinel.cpp:960: [Sentinel] [Main] State was updated in 0.000000s 2025-05-29T15:24:28.079413Z 
node 18 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:28.079519Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "VDisks eviction from host 18 has not yet been completed" } RequestId: "user-r-3" Deadline: 0 } 2025-05-29T15:24:28.079697Z node 18 :CMS INFO: cms.cpp:1404: User user removes request user-r-3 2025-05-29T15:24:28.079735Z node 18 :CMS DEBUG: cms.cpp:1427: Resulting status: OK 2025-05-29T15:24:28.079760Z node 18 :CMS DEBUG: cms_tx_remove_request.cpp:21: TTxRemoveRequest Execute 2025-05-29T15:24:28.079768Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reset host markers: host# 18 2025-05-29T15:24:28.079810Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-3, reason# explicit remove 2025-05-29T15:24:28.090876Z node 18 :CMS DEBUG: cms_tx_remove_request.cpp:45: TTxRemoveRequest Complete 2025-05-29T15:24:28.090959Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: REJECT RequestId: "user-r-3" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] Test command err: 2025-05-29T15:24:23.658605Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:23.659644Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:23.661183Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:23.661217Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:23.661262Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:23.661671Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:23.661716Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
2025-05-29T15:24:23.661796Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:23.661826Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:23.662931Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:23.664238Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:23.664281Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:23.664312Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:23.664399Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:23.695347Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:23.706456Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:23.706576Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.708162Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.708302Z node 1 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:23.708313Z node 1 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:23.708324Z node 1 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:23.708328Z node 1 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:23.708337Z node 1 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:23.708379Z node 1 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-05-29T15:24:23.710443Z node 1 :CMS DEBUG: sentinel.cpp:530: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2025-05-29T15:24:23.720797Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.752316Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.752389Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:23.752683Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 } } } 
2025-05-29T15:24:23.752750Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.792193Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:23.792295Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:23.792423Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120025512 } Timestamp: 120025512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120025512 } Timestamp: 120025512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120025512 } Timestamp: 120025512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120025512 } Timestamp: 120025512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120025512 } Timestamp: 120025512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120025512 } Timestamp: 120025512 NodeId: 6 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120025512 } Timestamp: 120025512 NodeId: 7 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120025512 } Timestamp: 120025512 NodeId: 8 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120025512 } } 2025-05-29T15:24:23.823027Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:23.865327Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.865413Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 2 DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 } 2025-05-29T15:24:23.865426Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 
2025-05-29T15:24:23.903089Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:23.903132Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:23.903148Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:23.903208Z node 1 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:23.903219Z node 1 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2025-05-29T15:24:23.903229Z node 1 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:23.903235Z node 1 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 2, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:23.903242Z node 1 :CMS DEBUG: cms.cpp:729: Ring: 0; State: Ok 2025-05-29T15:24:23.903246Z node 1 :CMS DEBUG: cms.cpp:729: Ring: 1; State: Ok 2025-05-29T15:24:23.903249Z node 1 :CMS DEBUG: cms.cpp:729: Ring: 2; State: Ok 2025-05-29T15:24:23.903253Z node 1 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:23.903269Z node 1 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-05-29T15:24:23.903277Z node 1 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:23.903288Z node 1 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:23.903326Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.126512Z, action# Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2025-05-29T15:24:23.914379Z node 1 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:23.914484Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } Deadline: 180126512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2025-05-29T15:24:23.914496Z node 1 :CMS DEBUG: cms.cpp:1064: Schedule cleanup at 1970-01-01T00:05:00.126512Z 2025-05-29T15:24:23.937151Z node 1 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:23.937228Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:23.937256Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:23.937272Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:23.937336Z node 1 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: 
false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:23.937345Z node 1 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2025-05-29T15:24:23.937359Z node 1 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:23.937365Z node 1 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 2, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:23.9 ... nodes. Locked: 1, down: 0, limit: 1" } 2025-05-29T15:24:30.526417Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:30.526421Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:30.526434Z node 25 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Cannot lock node '27' of tenant 'user0': too many unavailable nodes. Locked: 1, down: 0, limit: 1) 2025-05-29T15:24:30.526455Z node 25 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-2, requestId# user-r-1, owner# user 2025-05-29T15:24:30.526464Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:30.526475Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:30.526520Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.535072Z, action# Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 2025-05-29T15:24:30.526546Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. 
Locked: 1, down: 0, limit: 1" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:30.541866Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:30.541977Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_FORCE_RESTART }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 } Deadline: 180535072 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2025-05-29T15:24:30.542141Z node 25 :CMS INFO: cms.cpp:1326: User user is done with permissions user-p-2 2025-05-29T15:24:30.542153Z node 25 :CMS DEBUG: cms.cpp:1349: Resulting status: OK 2025-05-29T15:24:30.542172Z node 25 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2025-05-29T15:24:30.542204Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2025-05-29T15:24:30.557039Z node 25 :CMS DEBUG: cms_tx_remove_permissions.cpp:79: TTxRemovePermissions Complete 2025-05-29T15:24:30.557123Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-05-29T15:24:30.584396Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:30.584492Z node 25 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:30.584557Z node 25 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 1, down: 0, limit: 1" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:30.584572Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 1, down: 0, limit: 1" } 2025-05-29T15:24:30.584585Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-05-29T15:24:30.584591Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-05-29T15:24:30.584604Z node 25 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Cannot lock node '27' of tenant 'user0': too many unavailable nodes. 
Locked: 0, down: 1, limit: 1) 2025-05-29T15:24:30.584628Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:30.584670Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 0, down: 1, limit: 1" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:30.599501Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:30.599531Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:30.599599Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 0, down: 1, limit: 1" } RequestId: "user-r-1" Deadline: 420638096 } 2025-05-29T15:24:30.676505Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:30.676613Z node 25 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:30.676682Z node 25 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 0, down: 1, limit: 1" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-05-29T15:24:30.676698Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 0, down: 1, limit: 1" } 2025-05-29T15:24:30.676712Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-05-29T15:24:30.676717Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-05-29T15:24:30.676729Z node 25 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Cannot lock node '27' of tenant 'user0': too many unavailable nodes. Locked: 0, down: 1, limit: 1) 2025-05-29T15:24:30.676756Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:30.676796Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. 
Locked: 0, down: 1, limit: 1" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-05-29T15:24:30.691242Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:30.691279Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:30.691356Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 0, down: 1, limit: 1" } RequestId: "user-r-1" Deadline: 420739608 } 2025-05-29T15:24:30.715944Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:30.716045Z node 25 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:30.716113Z node 25 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 0, down: 1, limit: 1" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:30.716127Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. 
Locked: 0, down: 1, limit: 1" } 2025-05-29T15:24:30.716141Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-05-29T15:24:30.716147Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-05-29T15:24:30.716157Z node 25 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:30.716178Z node 25 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-3, requestId# user-r-1, owner# user 2025-05-29T15:24:30.716185Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12003 (27) (permission user-p-3 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:30.716197Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:30.716226Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.841120Z, action# Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 2025-05-29T15:24:30.716238Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2025-05-29T15:24:30.735029Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:30.735063Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:30.735147Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_FORCE_RESTART }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-3" Action { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 } Deadline: 180841120 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 27 InterconnectPort: 12003 } } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::TableWithSystemColumns [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:29.973348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:29.973374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:29.973381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:29.973386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:29.973391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:29.973395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:29.973405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:29.973418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:29.973549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:29.973629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:29.988468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:29.988486Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:29.991093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:29.991206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:29.991252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:29.992757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:29.992906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:29.993032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:29.993094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:29.993584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:29.993627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:29.993898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:29.993910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:29.993933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:29.993954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:29.993961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:29.993999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.995651Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:30.014194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:30.014259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:30.014307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:30.014338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:30.014345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:30.014871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:30.014894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:30.014941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:30.014962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:30.014968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:30.014973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:30.015433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:30.015443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:30.015447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:30.015850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:30.015863Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:24:30.015869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:30.015877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:30.016601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:30.017205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:30.017238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:30.017388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:30.017408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:30.017413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:30.017464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:30.017469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:30.017493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:30.017502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:30.017822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:30.017828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:30.017862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 8] Version: 3 } 2025-05-29T15:24:31.617922Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:24:31.617930Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-05-29T15:24:31.617939Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-05-29T15:24:31.617943Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-05-29T15:24:31.617947Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 3 2025-05-29T15:24:31.617952Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 4 2025-05-29T15:24:31.617963Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/2, is published: true 2025-05-29T15:24:31.617967Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:24:31.618279Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [6:658:2605], Recipient [6:123:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:31.618319Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:31.618324Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:24:31.618369Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269551620, Sender [6:593:2549], Recipient [6:123:2148]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 593 RawX2: 25769806325 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-05-29T15:24:31.618379Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4909: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-05-29T15:24:31.618389Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 593 RawX2: 25769806325 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-05-29T15:24:31.618394Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409548, partId: 1 2025-05-29T15:24:31.618410Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 106:1, at schemeshard: 72057594046678944, message: Source { RawX1: 593 RawX2: 25769806325 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-05-29T15:24:31.618416Z node 6 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:24:31.618425Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 593 RawX2: 25769806325 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-05-29T15:24:31.618437Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:1, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:31.618441Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 106:1, at schemeshard: 72057594046678944 2025-05-29T15:24:31.618447Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 106:1, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-05-29T15:24:31.618454Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 106:1 129 -> 240 2025-05-29T15:24:31.618479Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:24:31.618641Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:24:31.618665Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:24:31.619516Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-05-29T15:24:31.619538Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:24:31.619572Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-05-29T15:24:31.619576Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:24:31.619597Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2025-05-29T15:24:31.619604Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:24:31.619627Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-05-29T15:24:31.619631Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:24:31.619646Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2025-05-29T15:24:31.619650Z node 6 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:24:31.619655Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 106:1 2025-05-29T15:24:31.619682Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [6:593:2549] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 106 at schemeshard: 72057594046678944 2025-05-29T15:24:31.619773Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [6:123:2148], Recipient [6:123:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:24:31.619780Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:24:31.619789Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 106:1, at schemeshard: 72057594046678944 2025-05-29T15:24:31.619797Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 106:1 ProgressState 2025-05-29T15:24:31.619813Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:24:31.619818Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#106:1 progress is 2/2 2025-05-29T15:24:31.619823Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-05-29T15:24:31.619828Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#106:1 progress is 2/2 2025-05-29T15:24:31.619832Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-05-29T15:24:31.619838Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 106, ready parts: 2/2, is published: true 2025-05-29T15:24:31.619849Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:306:2296] message: TxId: 106 2025-05-29T15:24:31.619857Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-05-29T15:24:31.619863Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 106:0 2025-05-29T15:24:31.619869Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 106:0 2025-05-29T15:24:31.619886Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-05-29T15:24:31.619891Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 106:1 2025-05-29T15:24:31.619896Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 106:1 2025-05-29T15:24:31.619915Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2025-05-29T15:24:31.620456Z node 6 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:24:31.620477Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [6:306:2296] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 106 at schemeshard: 72057594046678944 2025-05-29T15:24:31.620513Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-05-29T15:24:31.620519Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [6:623:2571] 2025-05-29T15:24:31.620557Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [6:625:2573], Recipient [6:123:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:24:31.620563Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:24:31.620567Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:117:2058] recipient: [1:112:2142] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:133:2058] recipient: [1:112:2142] 2025-05-29T15:24:29.833203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:29.833229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:29.833234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:29.833238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:29.833247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:29.833250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:29.833259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:29.833272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:29.833364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2025-05-29T15:24:29.833440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:29.846037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:29.846061Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:29.846161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:29.848786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:29.848902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:29.848939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:29.850362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:29.850421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:29.850537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:29.850584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:29.851015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:29.851062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:29.851282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:29.851290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:29.851306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:29.851314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:29.851319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:29.851347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.852472Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:243:2058] recipient: [1:15:2062] 2025-05-29T15:24:29.872724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:29.872809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.872867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:29.872915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:29.872924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.877642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:29.877684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:29.877742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.877753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:29.877759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:29.877765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:29.879078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.879099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:29.879105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:29.880561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.880591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:29.880600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:29.880611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:29.881592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet 
{ TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:29.887281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:29.887364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:29.887619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:29.887669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:29.887679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:29.887766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:29.887775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:29.887811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:29.887839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:29.889286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:29.889300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594 ... 
ogressState, at schemeshard: 72057594046678944 2025-05-29T15:24:30.046343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2025-05-29T15:24:30.046372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:30.046517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-05-29T15:24:30.046530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-05-29T15:24:30.046535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-05-29T15:24:30.046541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-05-29T15:24:30.046548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-05-29T15:24:30.046731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-05-29T15:24:30.046764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-05-29T15:24:30.046770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-05-29T15:24:30.046775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:24:30.046780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:24:30.046795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2025-05-29T15:24:30.047210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-05-29T15:24:30.047250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000005 
FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000005 2025-05-29T15:24:30.047862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:30.047893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 125 RawX2: 4294969446 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:30.047904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2025-05-29T15:24:30.047938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 129:0 128 -> 240 2025-05-29T15:24:30.047970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:24:30.047980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:24:30.048069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-05-29T15:24:30.048088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Erasing txId 129 2025-05-29T15:24:30.048456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:30.048465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:30.048509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:24:30.048524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:30.048529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:487:2443], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-05-29T15:24:30.048536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:487:2443], at schemeshard: 72057594046678944, txId: 129, path id: 5 2025-05-29T15:24:30.048593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-05-29T15:24:30.048602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 129:0 ProgressState 2025-05-29T15:24:30.048615Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:906: Part operation is done id#129:0 progress is 1/1 2025-05-29T15:24:30.048621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-05-29T15:24:30.048626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#129:0 progress is 1/1 2025-05-29T15:24:30.048632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-05-29T15:24:30.048638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-05-29T15:24:30.048644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-05-29T15:24:30.048649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 129:0 2025-05-29T15:24:30.048654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 129:0 2025-05-29T15:24:30.048668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:24:30.048675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-05-29T15:24:30.048680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-29T15:24:30.048684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 5], 3 2025-05-29T15:24:30.048822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-05-29T15:24:30.048872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-05-29T15:24:30.048878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-05-29T15:24:30.048884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-29T15:24:30.048889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-05-29T15:24:30.049069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-05-29T15:24:30.049082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-05-29T15:24:30.049087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-05-29T15:24:30.049091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-05-29T15:24:30.049096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:24:30.049107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-05-29T15:24:30.049709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-05-29T15:24:30.049731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable [GOOD] >> ReadIteratorExternalBlobs::NotExtBlobs |65.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |65.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |65.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |65.6%| [TA] $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning |65.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |65.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} |65.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |65.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |65.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |65.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} |65.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConsoleTests::TestCreateTenantWrongName [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain >> KqpBatchUpdate::HasTxControl |65.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |65.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |65.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} |65.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless >> KqpBatchUpdate::Large_1 >> KqpBatchUpdate::ManyPartitions_2 >> DataShardSnapshots::VolatileSnapshotSplit |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink >> TCmsTest::RequestRestartServicesReject [GOOD] >> TCmsTest::RequestRestartServicesPartial >> KqpBatchDelete::ManyPartitions_2 >> DataShardSnapshots::MvccSnapshotAndSplit >> KqpBatchUpdate::UpdateOn >> KqpBatchDelete::Returning >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink >> KqpBatchDelete::HasTxControl >> DataShardSnapshots::MvccSnapshotTailCleanup |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> TCmsTest::ActionIssue [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits [GOOD] >> TConsoleTests::TestListTenants >> KqpBatchUpdate::SimpleOnePartition >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink >> TMaintenanceApiTest::CreateTime [GOOD] >> TMaintenanceApiTest::LastRefreshTime ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::ActionIssue [GOOD] Test command err: 2025-05-29T15:24:23.178926Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:23.179278Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:23.181737Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:23.181799Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:23.182149Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:23.182184Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:23.182874Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:23.182948Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:23.182987Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
2025-05-29T15:24:23.183067Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:23.183548Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:23.183631Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:23.183659Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:23.183689Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:23.216625Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:23.231138Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:23.231261Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.232777Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.232877Z node 1 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:23.232884Z node 1 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:23.232892Z node 1 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:23.232897Z node 1 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:23.232904Z node 1 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:23.232931Z node 1 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-05-29T15:24:23.234803Z node 1 :CMS DEBUG: sentinel.cpp:530: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2025-05-29T15:24:23.245580Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.283151Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.283203Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:23.283385Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 } } } 2025-05-29T15:24:23.283421Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:23.323398Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:23.323485Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:23.323627Z node 
1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120026512 } Timestamp: 120026512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120026512 } Timestamp: 120026512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120026512 } Timestamp: 120026512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120026512 } Timestamp: 120026512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120026512 } Timestamp: 120026512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120026512 } Timestamp: 120026512 NodeId: 6 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120026512 } Timestamp: 120026512 NodeId: 7 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120026512 } Timestamp: 120026512 NodeId: 8 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120026512 } } 2025-05-29T15:24:23.357000Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:23.398020Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:23.398090Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 10 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 } 2025-05-29T15:24:23.398102Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:23.433743Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:23.433775Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:23.433789Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:23.433834Z 
node 1 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:23.433842Z node 1 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2025-05-29T15:24:23.433852Z node 1 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:23.433856Z node 1 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 10, locked nodes: 0, down nodes: 0 2025-05-29T15:24:23.433863Z node 1 :CMS DEBUG: cms.cpp:729: Ring: 0; State: Ok 2025-05-29T15:24:23.433866Z node 1 :CMS DEBUG: cms.cpp:729: Ring: 1; State: Ok 2025-05-29T15:24:23.433869Z node 1 :CMS DEBUG: cms.cpp:729: Ring: 2; State: Ok 2025-05-29T15:24:23.433874Z node 1 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:23.433887Z node 1 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-05-29T15:24:23.433894Z node 1 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:23.433902Z node 1 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:23.433934Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.127512Z, action# Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2025-05-29T15:24:23.444949Z node 1 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:23.445044Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } Deadline: 180127512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2025-05-29T15:24:23.445054Z node 1 :CMS DEBUG: cms.cpp:1064: Schedule cleanup at 1970-01-01T00:05:00.127512Z 2025-05-29T15:24:23.470438Z node 1 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:23.470485Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:23.470500Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:23.470510Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:23.470553Z node 1 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:23.470561Z node 1 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2025-05-29T15:24:23.470570Z node 1 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 0, with ratio limit: 
0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:23.470574Z node 1 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 0, with ratio limit: 10, locked nodes: 1, down nodes: 0 2025-05-29T15:24:2 ... { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/26/pdisk-26.data) is locked by this request. Down: " } RequestId: "user-r-2" Deadline: 420130512 } 2025-05-29T15:24:30.594262Z node 25 :CMS INFO: cms.cpp:1366: Get selected requests for user 2025-05-29T15:24:30.594282Z node 25 :CMS DEBUG: cms.cpp:1392: Resulting status: OK 2025-05-29T15:24:30.594323Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: GET RequestId: "user-r-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } Requests { RequestId: "user-r-2" Owner: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/26/pdisk-26.data) is locked by this request. Down: " } } PartialPermissionAllowed: false Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY Priority: 0 } } 2025-05-29T15:24:30.642871Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:30.643022Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:30.643047Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:30.643062Z node 25 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:30.643381Z node 25 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/26/pdisk-26.data) is locked by this request. Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:30.643391Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 2025-05-29T15:24:30.643403Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 34, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:30.643444Z node 25 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:30.643461Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/26/pdisk-26.data) is locked by this request. 
Down: " } 2025-05-29T15:24:30.643466Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2025-05-29T15:24:30.643487Z node 25 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/26/pdisk-26.data) is locked by this request. Down: ) 2025-05-29T15:24:30.643519Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:30.643645Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/26/pdisk-26.data) is locked by this request. Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:30.655249Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:30.655343Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/26/pdisk-26.data) is locked by this request. Down: " } RequestId: "user-r-2" Deadline: 420232024 } 2025-05-29T15:24:30.655498Z node 25 :CMS INFO: cms.cpp:1366: Get selected requests for user 2025-05-29T15:24:30.655516Z node 25 :CMS DEBUG: cms.cpp:1392: Resulting status: OK 2025-05-29T15:24:30.655560Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: GET RequestId: "user-r-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } Requests { RequestId: "user-r-2" Owner: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/26/pdisk-26.data) is locked by this request. 
Down: " } } PartialPermissionAllowed: false Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY Priority: 0 } } 2025-05-29T15:24:30.655635Z node 25 :CMS INFO: cms.cpp:1326: User user is done with permissions user-p-1 2025-05-29T15:24:30.655641Z node 25 :CMS DEBUG: cms.cpp:1349: Resulting status: OK 2025-05-29T15:24:30.655659Z node 25 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2025-05-29T15:24:30.655691Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-05-29T15:24:30.669235Z node 25 :CMS DEBUG: cms_tx_remove_permissions.cpp:79: TTxRemovePermissions Complete 2025-05-29T15:24:30.669323Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-05-29T15:24:30.763748Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:30.763796Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:30.763812Z node 25 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:30.764114Z node 25 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/26/pdisk-26.data) is locked by this request. Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:30.764124Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 2025-05-29T15:24:30.764135Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 34, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:30.764174Z node 25 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:30.764190Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/26/pdisk-26.data) is locked by this request. 
Down: " } 2025-05-29T15:24:30.764196Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:30.764225Z node 25 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:30.764246Z node 25 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-05-29T15:24:30.764254Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12010 (34) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:30.764260Z node 25 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-3, requestId# user-r-2, owner# user 2025-05-29T15:24:30.764265Z node 25 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12002 (26) (permission user-p-3 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:30.764274Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:30.764314Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.335048Z, action# Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 2025-05-29T15:24:30.764323Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.335048Z, action# Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 2025-05-29T15:24:30.764333Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-2, owner# user 2025-05-29T15:24:30.775970Z node 25 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:30.776097Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Deadline: 180335048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 34 InterconnectPort: 12010 } } } Permissions { Id: "user-p-3" Action { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 } Deadline: 180335048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2025-05-29T15:24:30.776316Z node 25 :CMS INFO: cms.cpp:1366: Get selected requests for user 2025-05-29T15:24:30.776330Z node 25 :CMS DEBUG: cms.cpp:1392: Resulting status: WRONG_REQUEST Unknown request user-r-2 2025-05-29T15:24:30.776352Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: GET RequestId: "user-r-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: WRONG_REQUEST Reason: "Unknown request user-r-2" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::TableDeleteWhere-useSink [FAIL] Test command err: Trying to start YDB, gRPC: 30279, MsgBus: 21257 2025-05-29T15:24:07.633890Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888820302623941:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:07.633921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025c3/r3tmp/tmpVUSqWV/pdisk_1.dat 2025-05-29T15:24:07.701363Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30279, node 1 2025-05-29T15:24:07.720319Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:07.720334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:07.720335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:07.720377Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:24:07.734552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:07.734580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:07.735637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21257 TClient is connected to server localhost:21257 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:07.798817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:07.801396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:24:08.052538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-05-29T15:24:08.125599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-05-29T15:24:08.138304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 abcd 2025-05-29T15:24:08.159437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 {abcd,abcd} 2025-05-29T15:24:08.229073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 abcd 2025-05-29T15:24:08.257539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 {"abcd ","abcd "} 2025-05-29T15:24:08.285547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2) 2025-05-29T15:24:08.346009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2) 2025-05-29T15:24:08.408097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 abcd 2025-05-29T15:24:08.433332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 {abcd,abcd} 2025-05-29T15:24:08.460535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 abcd 2025-05-29T15:24:08.488820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 {abcd,abcd} 2025-05-29T15:24:08.515977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbit_17472595041006102391_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2025-05-29T15:24:08.576394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_17472595041006102391_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2025-05-29T15:24:08.587676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 1111 2025-05-29T15:24:08.615043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 {1111,1111} 2025-05-29T15:24:08.642586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbit_10103374131519304989_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(6) 2025-05-29T15:24:08.658393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_10103374131519304989_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(6) 2025-05-29T15:24:08.672048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarbit_17472595041006102391_5866627432374416336' Unable to coerce value for pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string too long for type bit varying(2) 2025-05-29T15:24:08.685956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarbit_17472595041006102391_11087201080355820517' Unable to coerce value for _pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string too long for type bit varying(2) 2025-05-29T15:24:08.699633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 1111 2025-05-29T15:24:08.768114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 {1111,1111} 2025-05-29T15:24:08.795752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation ... 97 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:31.183885Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.241955Z node 10 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg DELETE FROM Pg17_b WHERE key = 'bytea 1'::bytea 2025-05-29T15:24:31.243281Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7509888922230297662:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.243283Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7509888922230297670:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.243298Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.243838Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:24:31.248071Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7509888922230297676:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:24:31.321497Z node 10 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [10:7509888922230297727:2385] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:31.336400Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [10:7509888922230297743:2346], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:31.336540Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=10&id=YjU0NDIxY2YtZjIyNzU0NWUtOGVlNTNlYjUtMjA3YWM5MmU=, ActorId: [10:7509888922230297657:2335], ActorState: ExecuteState, TraceId: 01jweaa64a0x48dmhfpavt81vj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:3499, auto NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseTableDeleteWhere::Execute_(NUnitTest::TTestContext &)::(anonymous class)::operator()(TPgTypeTestSpec) const [useSink = true]: (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:3499: operator() @ 0x15DF0926 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:3527: operator() @ 0x15DEF9E6 4. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:3531: Execute_ @ 0x15DEF3F8 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 7. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 8. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 9. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 10. ??:0: ?? @ 0x7FC2924CFD8F 11. ??:0: ?? @ 0x7FC2924CFE3F 12. ??:0: ?? @ 0x14D5E028 Trying to start YDB, gRPC: 14630, MsgBus: 22035 2025-05-29T15:24:31.503556Z node 11 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7509888922493233109:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:31.503600Z node 11 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025c3/r3tmp/tmprp3Rsy/pdisk_1.dat 2025-05-29T15:24:31.517898Z node 11 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14630, node 11 2025-05-29T15:24:31.532227Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:31.532243Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:31.532245Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:31.532288Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22035 TClient is connected to server localhost:22035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
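For reference, the statement that node 10's compiler rejected with the index-out-of-range error (and which node 11 retries below) has this shape; only the DELETE and the --!syntax_pg pragma are verbatim from the log, the CREATE TABLE is a hypothetical reconstruction:

    --!syntax_pg
    -- Hypothetical reconstruction; the test's actual table layout is not
    -- shown in the log. Only the DELETE statement appears there verbatim.
    CREATE TABLE Pg17_b (key bytea PRIMARY KEY, value bytea);
    DELETE FROM Pg17_b WHERE key = 'bytea 1'::bytea;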
2025-05-29T15:24:31.603867Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:31.603897Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:31.605112Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:31.606805Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.787539Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.796999Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg DELETE FROM Pg17_b WHERE key = 'bytea 1'::bytea 2025-05-29T15:24:31.798228Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7509888922493233831:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.798229Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7509888922493233822:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.798237Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.798692Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:24:31.801165Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7509888922493233836:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:24:31.864644Z node 11 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [11:7509888922493233887:2385] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:31.935775Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [11:7509888922493233903:2346], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:31.935879Z node 11 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=11&id=MzRjMzgzZGQtMzlkNDFhNjktZWFmYTg5MGEtNWI5ZWIwZTI=, ActorId: [11:7509888922493233817:2335], ActorState: ExecuteState, TraceId: 01jweaa6nn9s43m7kykj955syg, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:3499, auto NKikimr::NKqp::NTestSuiteKqpPg::TTestCaseTableDeleteWhere::Execute_(NUnitTest::TTestContext &)::(anonymous class)::operator()(TPgTypeTestSpec) const [useSink = false]: (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x15FBD15B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x16174FC8 2. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:3499: operator() @ 0x15DF2CE6 3. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:3527: operator() @ 0x15DF1DA6 4. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:3531: Execute_ @ 0x15DF17B8 5. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: operator() @ 0x15D71686 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x16176E7D 7. /tmp//-S/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp:198: Execute @ 0x15D70EE0 8. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x161775F2 9. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1618919C 10. ??:0: ?? @ 0x7FC2924CFD8F 11. ??:0: ?? @ 0x7FC2924CFE3F 12. ??:0: ?? @ 0x14D5E028 >> ReadIteratorExternalBlobs::NotExtBlobs [FAIL] |65.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |65.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |65.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |65.7%| [TA] $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |65.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |65.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService [FAIL] Test command err: 2025-05-29T15:22:53.398477Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:22:53.398520Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:22:53.398535Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0017a8/r3tmp/tmpjErBvT/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9053, node 1 TClient is connected to server localhost:15232 2025-05-29T15:22:53.539560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:22:53.554861Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:53.555511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:53.555520Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:53.555523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:53.555608Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:53.555663Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532173011744 != 1748532173011748 2025-05-29T15:22:53.597306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:53.597340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:53.608131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-05-29T15:23:05.322697Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:806:2674], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:05.322733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:821:2683], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:05.322767Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:05.323857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-05-29T15:23:05.327348Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:824:2686], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-05-29T15:23:05.347061Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:875:2718] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:05.391289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-05-29T15:23:05.644318Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:1026:2827], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:05.645141Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDNkMDNjNWYtZDRiZjg3YmQtY2IzNTU2MmYtZTRkMDQ0Yzg=, ActorId: [1:987:2795], ActorState: ExecuteState, TraceId: 01jwea7jgxcyqfwtrkfscy8gra, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea7jgdbp9a6zsxck11c94m 2025-05-29T15:23:05.646306Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NDNkMDNjNWYtZDRiZjg3YmQtY2IzNTU2MmYtZTRkMDQ0Yzg=" tx_control { tx_id: "01jwea7jgdbp9a6zsxck11c94m" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 10 } } } } } ; 2025-05-29T15:23:05.646530Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:06.869216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:23:06.869238Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: GRpc error: (1): Cancelled on the server side ;EXPECTATION=1 GRpc shutdown warning: left infly: 1, spent: 3.121259 sec GRpc shutdown warning: left infly: 1, spent: 6.415799 sec GRpc shutdown warning: left infly: 1, spent: 9.679686 sec GRpc shutdown warning: left infly: 1, spent: 12.915717 sec GRpc shutdown warning: left infly: 1, spent: 16.102806 sec GRpc shutdown warning: left infly: 1, spent: 19.277397 sec GRpc shutdown warning: left infly: 1, spent: 22.329482 sec GRpc shutdown warning: left infly: 1, spent: 25.35162 sec GRpc shutdown warning: left infly: 1, spent: 28.37399 sec GRpc shutdown warning: failed to shutdown all connections, left infly: 1, spent: 30.006089 sec assertion failed at ydb/core/testlib/common_helper.cpp:191, void NKikimr::Tests::NCommon::THelper::StartSchemaRequestQueryServiceImpl(const TString &, const bool, const bool) const: (*rrPtr) TBackTrace::Capture()+28 (0x137BDC8C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13971019) NKikimr::Tests::NCommon::THelper::StartSchemaRequestQueryServiceImpl(TBasicString> const&, bool, bool) const+3541 (0x1F028F05) NKikimr::NTestSuiteSecret::SimpleImpl(bool)+1425 (0x136B2DE1) NKikimr::NTestSuiteSecret::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136BAD87) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13972ECE) NKikimr::NTestSuiteSecret::TCurrentTest::Execute()+421 (0x136BA5E5) NUnitTest::TTestFactory::Execute()+803 (0x13973643) NUnitTest::RunMain(int, char**)+3021 (0x139851ED) ??+0 (0x7FE3F0E58D90) __libc_start_main+128 (0x7FE3F0E58E40) _start+41 (0x12810029) >> KqpBatchDelete::ManyPartitions_1 >> KqpBatchUpdate::Large_2 >> TCmsTenatsTest::RequestRestartServices [GOOD] >> KqpErrors::ProposeResultLost_RwTx-UseSink [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService [FAIL] Test command err: 2025-05-29T15:22:54.138204Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:22:54.138251Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:22:54.138271Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001760/r3tmp/tmpzrKJqN/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22532, node 1 TClient is connected to server localhost:10888 2025-05-29T15:22:54.284226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:22:54.300235Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:54.301127Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:54.301143Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:54.301148Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:54.301268Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:54.301353Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532173700196 != 1748532173700200 2025-05-29T15:22:54.342719Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:54.342777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:54.353363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-05-29T15:23:05.970174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:05.970205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:761:2635], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:05.970214Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:05.971223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-05-29T15:23:05.973795Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:764:2638], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-05-29T15:23:05.990891Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:815:2670] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:06.000649Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:826:2680], status: GENERIC_ERROR, issues:
:1:20: Error: mismatched input '-' expecting '(' 2025-05-29T15:23:06.001284Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWE2Yzg0OTctYjM3ZWRlMWItMzM1YjIzM2EtODYwZWM1MzU=, ActorId: [1:744:2624], ActorState: ExecuteState, TraceId: 01jwea7jvf4s0qy8krva6ky4js, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2025-05-29T15:23:16.198599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-05-29T15:23:16.365177Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:983:2794], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:16.365952Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzM2MjQ1YmMtN2E4OWI4YTEtMWU2N2U0MTAtYjkzZTMxNjg=, ActorId: [1:948:2766], ActorState: ExecuteState, TraceId: 01jwea7wzx5btgsxtckn40wb0v, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea7wz367wv6zg3gdf2fggx 2025-05-29T15:23:16.367266Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NzM2MjQ1YmMtN2E4OWI4YTEtMWU2N2U0MTAtYjkzZTMxNjg=" tx_control { tx_id: "01jwea7wz367wv6zg3gdf2fggx" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 11 } } } } } ; 2025-05-29T15:23:16.367500Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:17.424066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:23:17.424088Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: GRpc error: (1): Cancelled on the server side ;EXPECTATION=0 GRpc shutdown warning: left infly: 1, spent: 3.132829 sec GRpc shutdown warning: left infly: 1, spent: 6.327693 sec GRpc shutdown warning: left infly: 1, spent: 9.48209 sec GRpc shutdown warning: left infly: 1, spent: 12.513027 sec GRpc shutdown warning: left infly: 1, spent: 15.535049 sec GRpc shutdown warning: left infly: 1, spent: 18.557574 sec GRpc shutdown warning: left infly: 1, spent: 21.579539 sec GRpc shutdown warning: left infly: 1, spent: 24.603118 sec GRpc shutdown warning: left infly: 1, spent: 27.625168 sec GRpc shutdown warning: failed to shutdown all connections, left infly: 1, spent: 30.004341 sec assertion failed at ydb/core/testlib/common_helper.cpp:191, void NKikimr::Tests::NCommon::THelper::StartSchemaRequestQueryServiceImpl(const TString &, const bool, const bool) const: (*rrPtr) TBackTrace::Capture()+28 (0x137BDC8C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13971019) NKikimr::Tests::NCommon::THelper::StartSchemaRequestQueryServiceImpl(TBasicString> const&, bool, bool) const+3541 (0x1F028F05) NKikimr::NTestSuiteSecret::ValidationImpl(bool)+1270 (0x136B6226) NKikimr::NTestSuiteSecret::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136BAD87) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13972ECE) NKikimr::NTestSuiteSecret::TCurrentTest::Execute()+421 (0x136BA5E5) NUnitTest::TTestFactory::Execute()+803 (0x13973643) NUnitTest::RunMain(int, char**)+3021 (0x139851ED) ??+0 (0x7F80E13ADD90) __libc_start_main+128 (0x7F80E13ADE40) _start+41 (0x12810029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::Validation [FAIL] Test command err: 2025-05-29T15:22:54.777153Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:22:54.777181Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:22:54.777191Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001720/r3tmp/tmpSc0kSd/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4296, node 1 TClient is connected to server localhost:2157 2025-05-29T15:22:54.890522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:22:54.906061Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:22:54.906968Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:22:54.906984Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:22:54.906988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:22:54.907105Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:22:54.907181Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532174414356 != 1748532174414360 2025-05-29T15:22:54.948902Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:22:54.948941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:22:54.959498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-05-29T15:23:06.595095Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2631], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:06.595127Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2025-05-29T15:23:16.758329Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2649], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.758415Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.760077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-05-29T15:23:16.821914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:892:2726], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.821960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.822004Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:897:2731], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:16.822968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-05-29T15:23:16.897613Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:899:2733], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:23:16.983860Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:992:2797] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:17.050110Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:1024:2821], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:23:17.050532Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjQyODNiMjUtYzMyMjE3NzUtOGMyYzFlZTgtMTYxNWJhY2M=, ActorId: [1:887:2721], ActorState: ExecuteState, TraceId: 01jwea7xnecppaq27g4zv4r0hs, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jwea7xn28ef7jd5vtxpjzepf 2025-05-29T15:23:17.051363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=YjQyODNiMjUtYzMyMjE3NzUtOGMyYzFlZTgtMTYxNWJhY2M=" tx_control { tx_id: "01jwea7xn28ef7jd5vtxpjzepf" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 11 } } } } } ; 2025-05-29T15:23:17.051512Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:23:17.904244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:23:17.904271Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: GRpc error: (1): Cancelled on the server side
: Error: Grpc error response on endpoint localhost:4296 ;EXPECTATION=0 GRpc shutdown warning: left infly: 1, spent: 3.141772 sec GRpc shutdown warning: left infly: 1, spent: 6.379588 sec GRpc shutdown warning: left infly: 1, spent: 9.479972 sec GRpc shutdown warning: left infly: 1, spent: 12.510436 sec GRpc shutdown warning: left infly: 1, spent: 15.533775 sec GRpc shutdown warning: left infly: 1, spent: 18.556306 sec GRpc shutdown warning: left infly: 1, spent: 21.578527 sec GRpc shutdown warning: left infly: 1, spent: 24.602321 sec GRpc shutdown warning: left infly: 1, spent: 27.623507 sec GRpc shutdown warning: failed to shutdown all connections, left infly: 1, spent: 30.002947 sec assertion failed at ydb/core/testlib/common_helper.cpp:167, void NKikimr::Tests::NCommon::THelper::StartSchemaRequestTableServiceImpl(const TString &, const bool, const bool) const: (*rrPtr) TBackTrace::Capture()+28 (0x137BDC8C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13971019) NKikimr::Tests::NCommon::THelper::StartSchemaRequestTableServiceImpl(TBasicString> const&, bool, bool) const+2592 (0x1F027D50) NKikimr::NTestSuiteSecret::ValidationImpl(bool)+1270 (0x136B6226) NKikimr::NTestSuiteSecret::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136BAD87) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13972ECE) NKikimr::NTestSuiteSecret::TCurrentTest::Execute()+421 (0x136BA5E5) NUnitTest::TTestFactory::Execute()+803 (0x13973643) NUnitTest::RunMain(int, char**)+3021 (0x139851ED) ??+0 (0x7F5507F18D90) __libc_start_main+128 (0x7F5507F18E40) _start+41 (0x12810029) |65.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest |65.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink [FAIL] >> DataShardSnapshots::ShardRestartWholeShardLockBasic >> KqpErrors::ProposeErrorEvWrite [FAIL] |65.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... 
results_accumulator.log} |65.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::SimplePartitions Test command err: Trying to start YDB, gRPC: 9883, MsgBus: 28777 2025-05-29T15:24:30.797710Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888919671310450:2109];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:30.797890Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027a2/r3tmp/tmp4R7wxG/pdisk_1.dat 2025-05-29T15:24:30.871361Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:30.872943Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888919671310379:2079] 1748532270797049 != 1748532270797052 TServer::EnableGrpc on GrpcPort 9883, node 1 2025-05-29T15:24:30.892242Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:30.892258Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:30.892259Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:30.892304Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28777 2025-05-29T15:24:30.935906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:30.935935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:30.936968Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28777 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:30.957941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:24:30.962342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.024758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:31.085413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.097786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.179734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888923966279328:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.179757Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.212151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.217885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.228253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.242149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.249077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.263262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.270501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.286137Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888923966279980:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.286157Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.286166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888923966279985:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.286914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:31.290496Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888923966279987:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:24:31.378298Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888923966280038:3398] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:31.481741Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888923966280054:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:31.481835Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODVhNjQ4MWItNGFiN2RlMDctZWQzNTI3NzctZjFkMzU4NDY=, ActorId: [1:7509888923966279310:2401], ActorState: ExecuteState, TraceId: 01jweaa65n0gd8bymr29xfn5x0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:24:31.482349Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7F361A82EAC2 14. ??:0: ?? @ 0x7F361A8C084F >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink [FAIL] >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink [FAIL] >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink >> TConsoleTests::TestTenantConfigConsistency [GOOD] >> TConsoleTests::TestSetConfig ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Returning Test command err: Trying to start YDB, gRPC: 2399, MsgBus: 13787 2025-05-29T15:24:30.912225Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888916231003176:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:30.912477Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00287e/r3tmp/tmpIdVGcd/pdisk_1.dat 2025-05-29T15:24:30.975619Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:30.975706Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888916231003150:2079] 1748532270911573 != 1748532270911576 TServer::EnableGrpc on GrpcPort 2399, node 1 2025-05-29T15:24:30.986565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:30.986583Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:30.986585Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:30.986634Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13787 TClient is connected to server localhost:13787 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:31.047018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:31.047049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:31.048078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:31.048933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.053397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.124910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.146285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.158407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.311897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888920525972079:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.311927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.344005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.398846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.410417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.424750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.438971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.452287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.466698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.482101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888920525972734:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.482118Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.482124Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888920525972739:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.482761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:31.486556Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888920525972741:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:24:31.546293Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888920525972792:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:31.624772Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888920525972808:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:31.624862Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDQ3NDgxZWYtNmY4N2UwYmUtZTU2YTBmODgtNzM2OWY0YTU=, ActorId: [1:7509888920525972076:2401], ActorState: ExecuteState, TraceId: 01jweaa6bs9k961a1pdt5zmp9f, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:24:31.625521Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7F1388C7AAC2 14. ??:0: ?? @ 0x7F1388D0C84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_1 Test command err: Trying to start YDB, gRPC: 9180, MsgBus: 12280 2025-05-29T15:24:31.035050Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888921216751247:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:31.035074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00284a/r3tmp/tmpphgTd3/pdisk_1.dat 2025-05-29T15:24:31.087225Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888921216751223:2079] 1748532271034806 != 1748532271034809 2025-05-29T15:24:31.090434Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9180, node 1 2025-05-29T15:24:31.102434Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:31.102450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:31.102452Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:31.102520Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12280 2025-05-29T15:24:31.137602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:31.137635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:31.138675Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12280 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:31.172067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.180248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.194823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.210782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.221953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.360994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888921216752855:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.361017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.402842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.413925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.425045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.439077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.453065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.466849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.481011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.497226Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888921216753508:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.497255Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.497264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888921216753513:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.498042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:31.500404Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888921216753515:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:24:31.590170Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888921216753566:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:31.666768Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888921216753582:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:31.666902Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjIxM2Y1YWQtN2ViNjdjZWItOTU1ZWMyY2QtODllNzYzY2I=, ActorId: [1:7509888921216752837:2401], ActorState: ExecuteState, TraceId: 01jweaa6c809gvwy3qv2x22cae, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:24:31.667540Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7F280E848AC2 14. ??:0: ?? @ 0x7F280E8DA84F >> DataShardSnapshots::MvccSnapshotAndSplit [FAIL] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestRestartServices [GOOD] Test command err: 2025-05-29T15:24:30.111831Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:30.112966Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:30.116430Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:30.116559Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:30.116965Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:30.117103Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:30.117135Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:30.117186Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:30.117231Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
2025-05-29T15:24:30.117338Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:30.120023Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:30.120061Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:30.120104Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:30.120151Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:30.157327Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:30.191392Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:30.191531Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:30.193327Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:30.193452Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:30.193583Z node 1 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:30.193592Z node 1 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:30.193603Z node 1 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:30.193609Z node 1 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:30.193620Z node 1 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:30.193660Z node 1 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-05-29T15:24:30.196245Z node 1 :CMS DEBUG: sentinel.cpp:530: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/1/pdisk-1.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/2/pdisk-2.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/3/pdisk-3.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/4/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/5/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/6/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/7/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/8/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { 
NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { 
NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-05-29T15:24:30.249577Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:30.249657Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:31.831419Z node 9 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:31.832239Z node 9 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:31.833732Z node 9 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:31.833769Z node 9 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:31.833794Z node 9 :CMS DEBUG: console__load_state.cpp:50: Using default config. 2025-05-29T15:24:31.833840Z node 9 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:31.834237Z node 9 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:31.834282Z node 9 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:31.834512Z node 9 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:31.834607Z node 9 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:31.835737Z node 9 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:31.835766Z node 9 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:31.835797Z node 9 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:31.835814Z node 9 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:31.847522Z node 9 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:31.879485Z node 9 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:31.879581Z node 9 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:31.879605Z node 9 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:31.879680Z node 9 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:31.879684Z node 9 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:31.879690Z node 9 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:31.879693Z node 9 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:31.879703Z node 9 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 
2025-05-29T15:24:31.879759Z node 9 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:31.879775Z node 9 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attem ... "dynnode" Duration: 60000000 2025-05-29T15:24:32.234670Z node 9 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 24, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 7, down nodes: 0 2025-05-29T15:24:32.234673Z node 9 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 24, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 7, down nodes: 0 2025-05-29T15:24:32.234676Z node 9 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:32.234686Z node 9 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-5, requestId# user-r-4, owner# user 2025-05-29T15:24:32.234690Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12013 (21) (permission user-p-5 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.234698Z node 9 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-6, requestId# user-r-4, owner# user 2025-05-29T15:24:32.234701Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12014 (22) (permission user-p-6 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.234704Z node 9 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-7, requestId# user-r-4, owner# user 2025-05-29T15:24:32.234707Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12015 (23) (permission user-p-7 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.234711Z node 9 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-8, requestId# user-r-4, owner# user 2025-05-29T15:24:32.234714Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12016 (24) (permission user-p-8 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.234721Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:32.234777Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-5, validity# 1970-01-01T00:03:00.337048Z, action# Type: RESTART_SERVICES Host: "21" Services: "dynnode" Duration: 60000000 2025-05-29T15:24:32.234785Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-6, validity# 1970-01-01T00:03:00.337048Z, action# Type: RESTART_SERVICES Host: "22" Services: "dynnode" Duration: 60000000 2025-05-29T15:24:32.234791Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-7, validity# 1970-01-01T00:03:00.337048Z, action# Type: RESTART_SERVICES Host: "23" Services: "dynnode" Duration: 60000000 2025-05-29T15:24:32.234796Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-8, validity# 1970-01-01T00:03:00.337048Z, action# Type: RESTART_SERVICES Host: "24" Services: "dynnode" Duration: 60000000 2025-05-29T15:24:32.245504Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:32.245621Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "21" Services: "dynnode" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "22" Services: "dynnode" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "23" Services: "dynnode" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "24" Services: "dynnode" Duration: 
60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-4" Permissions { Id: "user-p-5" Action { Type: RESTART_SERVICES Host: "21" Services: "dynnode" Duration: 60000000 } Deadline: 180337048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 21 InterconnectPort: 12013 } } } Permissions { Id: "user-p-6" Action { Type: RESTART_SERVICES Host: "22" Services: "dynnode" Duration: 60000000 } Deadline: 180337048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 22 InterconnectPort: 12014 } } } Permissions { Id: "user-p-7" Action { Type: RESTART_SERVICES Host: "23" Services: "dynnode" Duration: 60000000 } Deadline: 180337048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 23 InterconnectPort: 12015 } } } Permissions { Id: "user-p-8" Action { Type: RESTART_SERVICES Host: "24" Services: "dynnode" Duration: 60000000 } Deadline: 180337048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 24 InterconnectPort: 12016 } } } } 2025-05-29T15:24:32.257264Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12013 (21) (permission user-p-5 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.257287Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12009 (17) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.257294Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12014 (22) (permission user-p-6 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.257298Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12010 (18) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.257302Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12012 (20) (permission user-p-4 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.257306Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12011 (19) (permission user-p-3 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.257309Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12016 (24) (permission user-p-8 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.257313Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12015 (23) (permission user-p-7 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.257365Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:32.257381Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:32.257391Z node 9 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:32.257495Z node 9 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:32.257501Z node 9 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 60000000 2025-05-29T15:24:32.257524Z node 9 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 9, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 8, down nodes: 0 2025-05-29T15:24:32.257557Z node 9 :CMS DEBUG: cms.cpp:729: Ring: 0; State: Ok 2025-05-29T15:24:32.257560Z node 9 :CMS DEBUG: cms.cpp:729: Ring: 1; State: Ok 2025-05-29T15:24:32.257562Z node 9 :CMS DEBUG: cms.cpp:729: 
Ring: 2; State: Ok 2025-05-29T15:24:32.257565Z node 9 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:32.257578Z node 9 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-9, requestId# user-r-5, owner# user 2025-05-29T15:24:32.257582Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (9) (permission user-p-9 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.257589Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:32.257623Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-9, validity# 1970-01-01T00:03:00.438560Z, action# Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 60000000 2025-05-29T15:24:32.268623Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:32.268739Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-5" Permissions { Id: "user-p-9" Action { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 60000000 } Deadline: 180438560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 9 InterconnectPort: 12001 } } } } 2025-05-29T15:24:32.280847Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12013 (21) (permission user-p-5 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.280880Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12009 (17) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.280888Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (9) (permission user-p-9 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.280894Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12014 (22) (permission user-p-6 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.280901Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12010 (18) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.280907Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12012 (20) (permission user-p-4 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.280913Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12011 (19) (permission user-p-3 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.280920Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12016 (24) (permission user-p-8 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.280925Z node 9 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12015 (23) (permission user-p-7 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:32.281011Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:32.281033Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:32.281047Z node 9 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:32.281207Z node 9 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:32.281217Z 
node 9 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 60000000 2025-05-29T15:24:32.281228Z node 9 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 10, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 9, down nodes: 0 2025-05-29T15:24:32.281259Z node 9 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (9) has planned shutdown (permission user-p-9 owned by user), VDisk [0:1:0:1:0] (::1:/10/pdisk-10.data) is locked by this request. Down: ) 2025-05-29T15:24:32.281275Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:32.292112Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:32.292196Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (9) has planned shutdown (permission user-p-9 owned by user), VDisk [0:1:0:1:0] (::1:/10/pdisk-10.data) is locked by this request. Down: " } RequestId: "user-r-6" Deadline: 420540072 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ResolveTableError [FAIL] Test command err: 2025-05-29T15:24:29.461868Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:29.462136Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00195a/r3tmp/tmpNswbi9/pdisk_1.dat 2025-05-29T15:24:29.605525Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:29.696182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:29.763182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:29.763223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:29.764693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:29.764738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:29.776775Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:24:29.776942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2025-05-29T15:24:29.777042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:30.051341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:30.836403Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:1527:2934], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:30.836979Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDRmYWJiMTItMWU2NGM5ODUtYjdiYWYyMzctY2RlNGIxYzM=, ActorId: [1:1525:2932], ActorState: ExecuteState, TraceId: 01jweaa5nybbt9gbrh049tcb0s, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13AAFD2C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C63AA9) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x262F1694) NKikimr::NKqp::TLocalFixture::TLocalFixture(bool, std::__y1::optional)+1557 (0x13996425) NKikimr::NKqp::NTestSuiteKqpErrors::TTestCaseResolveTableError::Execute_(NUnitTest::TTestContext&)+36 (0x13995044) NKikimr::NKqp::NTestSuiteKqpErrors::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1399D867) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C6595E) NKikimr::NKqp::NTestSuiteKqpErrors::TCurrentTest::Execute()+424 (0x1399D0C8) NUnitTest::TTestFactory::Execute()+803 (0x13C660D3) NUnitTest::RunMain(int, char**)+3021 (0x13C77C7D) ??+0 (0x7FFB5CD7FD90) __libc_start_main+128 (0x7FFB5CD7FE40) _start+41 (0x129FD029) >> DataShardSnapshots::MvccSnapshotTailCleanup [FAIL] >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue >> DataShardSnapshots::VolatileSnapshotSplit [FAIL] >> DataShardSnapshots::VolatileSnapshotMerge |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> TCmsTest::RequestRestartServicesPartial [GOOD] >> TCmsTest::RequestRestartServicesNoUser |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::MultiStatement Test command err: Trying to start YDB, gRPC: 27640, MsgBus: 24174 2025-05-29T15:24:31.152146Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888921364259159:2059];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:31.152165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00283c/r3tmp/tmprGE1Ag/pdisk_1.dat 2025-05-29T15:24:31.204187Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888921364259142:2079] 1748532271151865 != 1748532271151868 2025-05-29T15:24:31.205897Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27640, node 1 2025-05-29T15:24:31.215935Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-05-29T15:24:31.215946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:31.215947Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:31.215986Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24174 TClient is connected to server localhost:24174 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:31.285169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:31.285191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:31.286259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:31.286693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.292612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.354251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.370456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.383663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:31.534949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888921364260794:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.534990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.566342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.621579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.676422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.690459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.704302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.718212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.732535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.747997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888921364261450:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.748022Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888921364261455:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.748027Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.748579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:31.752283Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888921364261457:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:24:31.824158Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888921364261508:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:31.908498Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888921364261524:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:31.908587Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTk4MDUzMTAtNWEzMWVkMmUtOTU2NzA1YTItZTQwMWQ1OQ==, ActorId: [1:7509888921364260776:2401], ActorState: ExecuteState, TraceId: 01jweaa6m30ss40jrg1362p4z2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:24:31.909124Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7F7770095AC2 14. ??:0: ?? @ 0x7F777012784F >> TMaintenanceApiTest::LastRefreshTime [GOOD] >> TCmsTenatsTest::RequestShutdownHost [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongPool >> DataShardSnapshots::ShardRestartWholeShardLockBasic [FAIL] >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert >> TSchemeShardServerLess::Fake [GOOD] >> TQuorumTrackerTests::Erasure4Plus2BlockNotIncludingMyFailDomain_8_2 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] >> KqpBatchUpdate::Large_3 >> TSyncBrokerTests::ShouldProcessAfterRelease [GOOD] >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::LastRefreshTime [GOOD] Test command err: 2025-05-29T15:24:30.602464Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:30.604877Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:30.606255Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:30.606303Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:30.606975Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:30.607231Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:30.612350Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:30.612398Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:30.612447Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
2025-05-29T15:24:30.612550Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:30.614776Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:30.615000Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:30.615055Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:30.615098Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:30.648648Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:30.665589Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:30.665717Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:30.667297Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:30.667398Z node 1 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:30.667405Z node 1 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:30.667413Z node 1 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:30.667419Z node 1 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:30.667452Z node 1 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:30.667485Z node 1 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-05-29T15:24:30.669877Z node 1 :CMS DEBUG: sentinel.cpp:530: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/1/pdisk-1.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/2/pdisk-2.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/3/pdisk-3.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/4/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/5/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/6/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/7/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/8/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { 
NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 
GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-05-29T15:24:30.682462Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:30.716116Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:30.716177Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:30.762548Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:30.762591Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:30.762666Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:30.763006Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120025512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120025512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120025512 } Timestamp: 120025512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120025512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120025512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120025512 } Timestamp: 120025512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120025512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120025512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120025512 } Timestamp: 120025512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120025512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120025512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 
120025512 } Timestamp: 120025512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: ... erconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 125528000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 125528000 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 125528000 } Timestamp: 125528000 NodeId: 19 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 125528000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 125528000 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 125528000 } Timestamp: 125528000 NodeId: 20 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 125528000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 125528000 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 125528000 } Timestamp: 125528000 NodeId: 21 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 125528000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 125528000 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 125528000 } Timestamp: 125528000 NodeId: 22 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 125528000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 125528000 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 125528000 } Timestamp: 125528000 NodeId: 23 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 125528000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 125528000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 125528000 } Devices { Name: 
"vdisk-3-1-0-7-0" State: UP Timestamp: 125528000 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 125528000 } Timestamp: 125528000 NodeId: 24 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 125528000 } } 2025-05-29T15:24:34.527429Z node 17 :CMS INFO: cms.cpp:347: Check request: User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "17" Duration: 600000000 } Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 } PartialPermissionAllowed: true Schedule: true DryRun: false Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY MaintenanceTaskId: "task-1" 2025-05-29T15:24:34.527438Z node 17 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "17" Duration: 600000000 2025-05-29T15:24:34.527449Z node 17 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 17, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:34.527486Z node 17 :CMS DEBUG: cms.cpp:729: Ring: 0; State: Ok 2025-05-29T15:24:34.527490Z node 17 :CMS DEBUG: cms.cpp:729: Ring: 1; State: Ok 2025-05-29T15:24:34.527493Z node 17 :CMS DEBUG: cms.cpp:729: Ring: 2; State: Ok 2025-05-29T15:24:34.527497Z node 17 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:34.527507Z node 17 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 2025-05-29T15:24:34.527511Z node 17 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:34.527528Z node 17 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. Down: ) 2025-05-29T15:24:34.527544Z node 17 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# test-user-p-1, requestId# test-user-r-1, owner# test-user 2025-05-29T15:24:34.527551Z node 17 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (17) (permission test-user-p-1 until 1970-01-01T00:12:05Z) 2025-05-29T15:24:34.527564Z node 17 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:34.527619Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# test-user-p-1, validity# 1970-01-01T00:12:05.528000Z, action# Type: SHUTDOWN_HOST Host: "17" Duration: 600000000 2025-05-29T15:24:34.527648Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# test-user-r-1, owner# test-user, order# 1, priority# 0, body# User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. 
Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:34.568462Z node 17 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:34.610061Z node 17 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:34.610145Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "17" Duration: 600000000 } Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 } PartialPermissionAllowed: true Schedule: true DryRun: false Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY MaintenanceTaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "test-user-r-1" Permissions { Id: "test-user-p-1" Action { Type: SHUTDOWN_HOST Host: "17" Duration: 600000000 } Deadline: 725528000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2025-05-29T15:24:34.610154Z node 17 :CMS DEBUG: cms.cpp:1064: Schedule cleanup at 1970-01-01T00:32:05.528000Z 2025-05-29T15:24:34.611514Z node 17 :CMS INFO: cms.cpp:1366: Get selected requests for test-user 2025-05-29T15:24:34.611535Z node 17 :CMS DEBUG: cms.cpp:1392: Resulting status: OK 2025-05-29T15:24:34.611589Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "test-user" Command: GET RequestId: "test-user-r-1" }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } Requests { RequestId: "test-user-r-1" Owner: "test-user" Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY Priority: 0 } } 2025-05-29T15:24:34.716194Z node 17 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (17) (permission test-user-p-1 until 1970-01-01T00:12:05Z) 2025-05-29T15:24:34.716277Z node 17 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:34.716293Z node 17 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:34.716309Z node 17 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:11Z 2025-05-29T15:24:34.716410Z node 17 :CMS INFO: cms.cpp:347: Check request: User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:34.716419Z node 17 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. 
Down: " } 2025-05-29T15:24:34.716427Z node 17 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:34.716447Z node 17 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission test-user-p-1 owned by test-user), VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. Down: ) 2025-05-29T15:24:34.716463Z node 17 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:34.716506Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# test-user-r-1, owner# test-user, order# 1, priority# 0, body# User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission test-user-p-1 owned by test-user), VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:34.727344Z node 17 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:34.727402Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "test-user" RequestId: "test-user-r-1" AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission test-user-p-1 owned by test-user), VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. 
Down: " } RequestId: "test-user-r-1" Deadline: 431130512 } >> KqpOlap::BulkUpsertUpdate [GOOD] |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> TSchemeShardServerLess::StorageBilling >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink [FAIL] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] >> TCmsTest::Mirror3dcPermissions [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink [FAIL] >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink >> TVPatchTests::FullPatchTest [GOOD] >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink [FAIL] >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] Test command err: 2025-05-29T15:24:36.470880Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:64: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-05-29T15:24:36.470955Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:90: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [1:6:2053], enqueued, active: 1, waiting: 1 2025-05-29T15:24:36.470964Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:123: TEvReleaseSyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token released, active: 1, waiting: 1 2025-05-29T15:24:36.470971Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:105: ProcessQueue(), VDisk actor id: [0:1:2], actor id: [1:6:2053], token sent, active: 0, waiting: 1 2025-05-29T15:24:36.500301Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:64: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-05-29T15:24:36.500365Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:90: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], enqueued, active: 1, waiting: 1 2025-05-29T15:24:36.500373Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:146: TEvReleaseSyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], removed from queue, active: 1, waiting: 0 |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TVPatchTests::FindingPartsWhenSeveralPartsExist >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [FAIL] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] >> TVPatchTests::PatchPartOk [GOOD] >> TVPatchTests::FindingPartsWhenSeveralPartsExist [GOOD] >> TVPatchTests::FindingPartsWithTimeout |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::Mirror3dcPermissions [GOOD] 
Test command err: 2025-05-29T15:24:26.882657Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:26.884516Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:26.885398Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:26.885433Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:26.885871Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:26.886019Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:26.887009Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:26.887029Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:26.887059Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 2025-05-29T15:24:26.887123Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:26.887855Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:26.887996Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:26.888016Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:26.888060Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:26.920481Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:26.932087Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:26.932258Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:26.933977Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:26.934092Z node 1 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:26.934099Z node 1 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:26.934110Z node 1 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:26.934115Z node 1 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:26.934156Z node 1 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:26.934199Z node 1 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-05-29T15:24:26.936602Z node 1 :CMS DEBUG: sentinel.cpp:530: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/1/pdisk-1.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/2/pdisk-2.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/3/pdisk-3.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/4/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/5/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/6/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { 
NodeId: 7 PDiskId: 7 Path: "/7/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/8/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 
4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-05-29T15:24:26.947047Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:26.979048Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:26.979119Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:27.023515Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:27.023559Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:27.023639Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:27.023967Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120025512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120025512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120025512 } Timestamp: 120025512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120025512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120025512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120025512 } Timestamp: 
120025512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120025512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120025512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120025512 } Timestamp: 120025512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120025512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120025512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120025512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120025512 } Timestamp: 120025512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: ... me.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:33.358860Z node 25 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:33.358961Z node 25 :CMS DEBUG: cms_tx_load_state.cpp:69: Loaded config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:33.358990Z node 25 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:33.359089Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:33.359150Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 } 2025-05-29T15:24:33.387293Z node 25 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:33.459095Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:33.459198Z node 25 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:33.460713Z node 25 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-05-29T15:24:33.460730Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 2025-05-29T15:24:33.460744Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 30, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 3 2025-05-29T15:24:33.460792Z node 25 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Number of data centers with unavailable vdisks: 3. Locked: VDisk [0:1:1:5:0] (::1:/30/pdisk-270.data) is locked by this request. 
Down: Host ::1:12008 (32) is down, Host ::1:12005 (29) is down, Host ::1:12002 (26) is down) 2025-05-29T15:24:33.460849Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Number of data centers with unavailable vdisks: 3. Locked: VDisk [0:1:1:5:0] (::1:/30/pdisk-270.data) is locked by this request. Down: Host ::1:12008 (32) is down, Host ::1:12005 (29) is down, Host ::1:12002 (26) is down" } Deadline: 420243000 } 2025-05-29T15:24:33.461022Z node 25 :CMS INFO: cms.cpp:104: OnTabletDead: 72057594037936128 2025-05-29T15:24:33.461031Z node 25 :CMS DEBUG: cms.cpp:1209: TCms::Cleanup 2025-05-29T15:24:33.462954Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:33.463682Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:33.463727Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:33.464081Z node 25 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:33.464159Z node 25 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:33.464272Z node 25 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:33.464336Z node 25 :CMS DEBUG: cms_tx_load_state.cpp:69: Loaded config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:33.464356Z node 25 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:33.464415Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:33.464443Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 } 2025-05-29T15:24:33.486628Z node 25 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:33.563515Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:33.563597Z node 25 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:33.564871Z node 25 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-05-29T15:24:33.564881Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 2025-05-29T15:24:33.564891Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 32, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 2 2025-05-29T15:24:33.564930Z node 25 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Issue in 
affected group with id '0': too many unavailable vdisks. Number of data centers with unavailable vdisks: 3. Locked: VDisk [0:1:2:7:0] (::1:/32/pdisk-288.data) is locked by this request. Down: Host ::1:12005 (29) is down, Host ::1:12002 (26) is down) 2025-05-29T15:24:33.564983Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Number of data centers with unavailable vdisks: 3. Locked: VDisk [0:1:2:7:0] (::1:/32/pdisk-288.data) is locked by this request. Down: Host ::1:12005 (29) is down, Host ::1:12002 (26) is down" } Deadline: 420349000 } 2025-05-29T15:24:33.575928Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:33.590715Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:33.590829Z node 25 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:33.592004Z node 25 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-05-29T15:24:33.592013Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 2025-05-29T15:24:33.592027Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 30, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 2 2025-05-29T15:24:33.592242Z node 25 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:33.592313Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } Deadline: 180449000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 30 InterconnectPort: 12006 } } } } 2025-05-29T15:24:33.592482Z node 25 :CMS INFO: cms.cpp:104: OnTabletDead: 72057594037936128 2025-05-29T15:24:33.592487Z node 25 :CMS DEBUG: cms.cpp:1209: TCms::Cleanup 2025-05-29T15:24:33.594593Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:33.595289Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:33.595343Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:33.595758Z node 25 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:33.595830Z node 25 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:33.595929Z node 25 :CMS DEBUG: cms_tx_load_state.cpp:33: 
TTxLoadState Execute 2025-05-29T15:24:33.595993Z node 25 :CMS DEBUG: cms_tx_load_state.cpp:69: Loaded config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:33.596015Z node 25 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:33.596056Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:33.596094Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 } 2025-05-29T15:24:33.618021Z node 25 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:33.645190Z node 25 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:33.645316Z node 25 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:33.648106Z node 25 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "27" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-05-29T15:24:33.648138Z node 25 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "27" Services: "storage" Duration: 60000000 2025-05-29T15:24:33.648155Z node 25 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 3 2025-05-29T15:24:33.648227Z node 25 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/27/pdisk-243.data) is locked by this request. Down: Host ::1:12006 (30) is down, Host ::1:12005 (29) is down, Host ::1:12002 (26) is down) 2025-05-29T15:24:33.648298Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "27" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/27/pdisk-243.data) is locked by this request. 
Down: Host ::1:12006 (30) is down, Host ::1:12005 (29) is down, Host ::1:12002 (26) is down" } Deadline: 420555000 } >> TCmsTest::RequestRestartServicesNoUser [GOOD] >> DataShardSnapshots::VolatileSnapshotMerge [FAIL] >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert [FAIL] >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate >> DataShardSnapshots::ShardRestartLockBrokenByConflict >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] >> TVPatchTests::FindingPartsWithTimeout [GOOD] >> TConsoleTests::TestListTenants [GOOD] >> TConsoleTests::TestListTenantsExtSubdomain |65.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |65.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |65.8%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:36.974995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:36.975026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:36.975033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:36.975038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:36.975045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:36.975049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:36.975058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:36.975073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:36.975193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:36.975274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:36.991230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to 
console configs 2025-05-29T15:24:36.991265Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:36.995915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:36.996070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:36.996119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:37.000633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:37.000863Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:37.001026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:37.001114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:37.001723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:37.001772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:37.002084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:37.002097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:37.002121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:37.002131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:37.002138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:37.002179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.006728Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:37.029210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:37.029303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.029378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:37.029424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:37.029436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.030281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:37.030314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:37.030383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.030396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:37.030402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:37.030408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:37.030926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.030942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:37.030949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:37.031329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.031341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.031348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:37.031357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:37.032100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:37.032576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:37.032627Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:37.032835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:37.032866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:37.032874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:37.032948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:37.032956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:37.032993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:37.033007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:37.033452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:37.033463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:37.033529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.092739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.092745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.092752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 104:0, at tablet# 72057594046678944 2025-05-29T15:24:37.092760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-05-29T15:24:37.092796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:37.093209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-05-29T15:24:37.093238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-05-29T15:24:37.093309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:37.093330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:37.093339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-05-29T15:24:37.093408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 104:0 128 -> 240 2025-05-29T15:24:37.093417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-05-29T15:24:37.093446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:24:37.093470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:567: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:613:2540], EffectiveACLVersion: 0, SubdomainVersion: 
2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-05-29T15:24:37.093868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:37.093878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:24:37.093920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:37.093925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-05-29T15:24:37.093989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.093997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:787: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2025-05-29T15:24:37.094002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 104:0 240 -> 240 2025-05-29T15:24:37.094094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:24:37.094107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-05-29T15:24:37.094112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-05-29T15:24:37.094116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-05-29T15:24:37.094125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-05-29T15:24:37.094140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-05-29T15:24:37.094600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.094613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 104:0 ProgressState 2025-05-29T15:24:37.094628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part 
operation is done id#104:0 progress is 1/1 2025-05-29T15:24:37.094633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:24:37.094638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 1/1 2025-05-29T15:24:37.094641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:24:37.094646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-05-29T15:24:37.094651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-05-29T15:24:37.094656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:0 2025-05-29T15:24:37.094661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:0 2025-05-29T15:24:37.094693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:24:37.094803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-05-29T15:24:37.095151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-05-29T15:24:37.095161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-05-29T15:24:37.095243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-05-29T15:24:37.095260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-29T15:24:37.095266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:762:2643] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-05-29T15:24:37.096100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:37.096140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1102: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } 2025-05-29T15:24:37.096148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1108: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/ServerLess0 2025-05-29T15:24:37.096186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2025-05-29T15:24:37.096194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2025-05-29T15:24:37.096708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:37.096742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 105, wait until txId: 105 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartOk [GOOD] Test command err: Recv 65537 2025-05-29T15:24:37.167795Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-05-29T15:24:37.168275Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-05-29T15:24:37.168306Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-05-29T15:24:37.168353Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-05-29T15:24:37.168365Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-05-29T15:24:37.168396Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2025-05-29T15:24:37.168416Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2025-05-29T15:24:37.168428Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# 
[1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2025-05-29T15:24:37.168460Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK 2025-05-29T15:24:37.168467Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-05-29T15:24:37.168480Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm
>> TVPatchTests::PatchPartFastXorDiffBeyoundBlob
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesNoUser [GOOD]
Test command err: 2025-05-29T15:24:30.516607Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:30.517713Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:30.520047Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:30.520114Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:30.520504Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:30.520528Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:30.520565Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:30.520601Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config.
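The PatchPartOk trace a few entries back records the complete vpatch happy path as a strict Send/Recv sequence: fetch the parts index, report found parts, receive the diff, pull the original part data, write the patched part with a TEvVPut, and return an OK TEvVPatchResult. A minimal Python sketch built only from the message names visible in the trace (the regex and helper are hypothetical tooling, not anything in YDB):

```python
import re

# The Send/Recv exchange recorded in the PatchPartOk trace, in order.
HAPPY_PATH = [
    ("Send", "TEvVGet"), ("Recv", "TEvVGetResult"),            # fetch parts index
    ("Send", "TEvVPatchFoundParts"), ("Recv", "TEvVPatchDiff"),
    ("Send", "TEvVGet"), ("Recv", "TEvVGetResult"),            # pull the part data
    ("Send", "TEvVPut"), ("Recv", "TEvVPutResult"),            # write patched part
    ("Send", "TEvVPatchResult"),                               # final Status# OK
]

# Pull (direction, message) pairs such as 'Send NKikimr::TEvBlobStorage::TEvVPut'
# out of raw trace text; non-blobstorage events (TEvWakeup, dying handshake) are
# deliberately excluded.
EXCHANGE = re.compile(r"\b(Send|Recv) NKikimr::TEvBlobStorage::(\w+)")

def followed_happy_path(trace_text: str) -> bool:
    return EXCHANGE.findall(trace_text) == HAPPY_PATH
```

Run against the FindingPartsWithTimeout trace further down, this returns False: that actor sends the first TEvVGet, then receives only a TEvWakeup at the deadline and reports FoundParts# [] with Status# ERROR before dying.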
2025-05-29T15:24:30.520693Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:30.520707Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:30.522012Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:30.522066Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:30.522094Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:30.522147Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:30.557740Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:30.602215Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:30.602338Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:30.603531Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:30.603643Z node 1 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:30.603648Z node 1 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:30.603653Z node 1 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:30.603656Z node 1 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:30.603668Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:30.603710Z node 1 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:30.603737Z node 1 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-05-29T15:24:30.605595Z node 1 :CMS DEBUG: sentinel.cpp:530: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/1/pdisk-1.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/2/pdisk-2.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/3/pdisk-3.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/4/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/5/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/6/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/7/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/8/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 
VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 
PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-05-29T15:24:30.653140Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:30.653202Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:30.717266Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:30.717300Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:30.717373Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:30.717708Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029000 } 
Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: ... ge" Duration: 60000000 2025-05-29T15:24:34.192644Z node 17 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:34.192664Z node 17 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. Down: ) 2025-05-29T15:24:34.192677Z node 17 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-05-29T15:24:34.192686Z node 17 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (17) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:34.192698Z node 17 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:34.192738Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.026512Z, action# Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2025-05-29T15:24:34.246858Z node 17 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:34.293054Z node 17 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:34.293167Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Deadline: 180026512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2025-05-29T15:24:34.293180Z node 17 :CMS DEBUG: cms.cpp:1064: Schedule cleanup at 1970-01-01T00:05:00.026512Z 2025-05-29T15:24:35.914343Z node 25 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:35.916641Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:35.919034Z node 25 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:35.919073Z node 25 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:35.919108Z node 25 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
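The node 17 portion of the trace above captures the MODE_MAX_AVAILABILITY arithmetic: restarting host ::1:12001 (17) is granted, but host 18 is refused with DISALLOW_TEMP because its vdisks sit in the same block-4-2 groups that already hold the temporary lock for host 17, so the overall answer is ALLOW_PARTIAL. A toy version of that check follows; the group layout and the one-lock-per-group budget are assumptions read off this trace for illustration, not CMS's actual policy engine:

```python
from collections import Counter

def check_restart_requests(requests, vdisk_groups, max_locked_per_group=1):
    """requests: host ids in request order; vdisk_groups: host -> group ids.

    Grants a lock only while every group the host touches stays under the
    assumed per-group lock budget; otherwise answers DISALLOW_TEMP (retry
    once earlier locks expire), mirroring the verdicts in the trace.
    """
    locked = Counter()  # group id -> vdisks currently locked in that group
    verdicts = {}
    for host in requests:
        groups = vdisk_groups[host]
        if all(locked[g] < max_locked_per_group for g in groups):
            verdicts[host] = "ALLOW"
            for g in groups:
                locked[g] += 1
        else:
            verdicts[host] = "DISALLOW_TEMP"
    return verdicts

# Hosts 17 and 18 each carry one vdisk in groups 0..3, as in the BaseConfig dump.
groups = {17: [0, 1, 2, 3], 18: [0, 1, 2, 3]}
print(check_restart_requests([17, 18], groups))
# {17: 'ALLOW', 18: 'DISALLOW_TEMP'}  -> overall ALLOW_PARTIAL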
2025-05-29T15:24:35.919177Z node 25 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:35.919325Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:35.919391Z node 25 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:35.919745Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:35.919769Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:35.921429Z node 25 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:35.921600Z node 25 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:35.921645Z node 25 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:35.921666Z node 25 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:35.953570Z node 25 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:35.985726Z node 25 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:35.985839Z node 25 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:35.985879Z node 25 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:35.985985Z node 25 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:35.985992Z node 25 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:35.986012Z node 25 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:35.986016Z node 25 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:35.986029Z node 25 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:35.986079Z node 25 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:35.986093Z node 25 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-05-29T15:24:35.986415Z node 25 :CMS DEBUG: sentinel.cpp:530: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 25 PDiskId: 25 Path: "/25/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 26 PDiskId: 26 Path: "/26/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 27 PDiskId: 27 Path: "/27/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 28 PDiskId: 28 Path: "/28/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 29 PDiskId: 29 Path: "/29/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 30 PDiskId: 30 Path: "/30/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 31 PDiskId: 31 Path: "/31/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 32 PDiskId: 32 Path: "/32/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } 
VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1000 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1000 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1000 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1000 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1000 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1000 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } VSlotId { 
NodeId: 26 PDiskId: 26 VSlotId: 1001 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1001 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1001 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1001 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1001 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1001 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1002 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1002 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1002 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1002 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1002 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1002 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1003 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1003 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1003 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1003 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1003 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1003 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1003 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1003 } } } } Success: true 2025-05-29T15:24:36.028782Z node 25 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:36.028851Z node 25 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:36.029036Z node 25 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "" Actions { Type: RESTART_SERVICES Host: "::1" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: WRONG_REQUEST Reason: "Missing user in request" } } >> TVPatchTests::FindingPartsWhenPartsAreDontExist ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWithTimeout [GOOD] Test command err: Recv 65537 2025-05-29T15:24:37.034577Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-05-29T15:24:37.035006Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-05-29T15:24:37.035031Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1 2] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-05-29T15:24:37.035069Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2025-05-29T15:24:37.035079Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] 
PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-05-29T15:24:37.035091Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2025-05-29T15:24:37.247511Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NActors::TEvents::TEvWakeup 2025-05-29T15:24:37.257724Z node 2 :BS_VDISK_PATCH ERROR: {BSVSP11@skeleton_vpatch_actor.cpp:734} [0:1:0:0:0] TEvVPatch: the vpatch actor died due to a deadline, before receiving diff; 2025-05-29T15:24:37.257757Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-05-29T15:24:37.257784Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:36.975749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:36.975779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:36.975785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:36.975790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:36.975796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:36.975800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:36.975809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:36.975823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:36.975935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:36.976017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:36.989361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:36.989388Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:36.992703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:36.992830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:36.992869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:36.995938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:36.996106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:36.996240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:36.996336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:36.997288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:36.997334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:36.997654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:36.997666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:36.997689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:36.997698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:36.997704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:36.997743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:36.999608Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:37.020818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
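The feature-flag trace earlier in this section already showed the suboperation lifecycle, and the same "Change state for txid" progression repeats in the entries below: TCreateParts runs at state 2 and falls through when there are no shards to create, TConfigureParts advances 3 -> 128, TPropose waits for the coordinator's plan step and jumps 128 -> 240, and TDone reports progress 1/1. A small sketch of that progression; the numeric codes and phase names are read directly off the trace, not taken from schemeshard sources:

```python
# State codes observed in the trace, with the phase the log associates to each.
PHASES = {
    2:   "TCreateParts",     # create shards; 'no shards to create, do next state'
    3:   "TConfigureParts",  # configure the created parts
    128: "TPropose",         # wait for the coordinator plan step (FAKE_COORDINATOR)
    240: "TDone",            # publish to scheme board, mark progress 1/1
}
TRANSITIONS = [(2, 3), (3, 128), (128, 240)]

def run_operation(op_id="1:0"):
    state = 2
    for src, dst in TRANSITIONS:
        assert state == src
        print(f"Change state for txid {op_id} {src} -> {dst}  ({PHASES[src]} done)")
        state = dst
    print(f"Part operation is done id#{op_id} progress is 1/1  ({PHASES[state]})")

run_operation()
```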
2025-05-29T15:24:37.020903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.020966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:37.021008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:37.021018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.021875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:37.021900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:37.021950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.021959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:37.021963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:37.021966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:37.022363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.022375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:37.022380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:37.022693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.022703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:37.022707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:37.022713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:37.023207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2025-05-29T15:24:37.023466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:37.023495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:37.023626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:37.023644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:37.023649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:37.023700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:37.023705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:37.023728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:37.023736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:37.023995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:37.024001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:37.024034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
blet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186234409549 2025-05-29T15:24:37.215472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:37.216477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-05-29T15:24:37.216534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 Forgetting tablet 72075186234409549 2025-05-29T15:24:37.216759Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186234409551 2025-05-29T15:24:37.216923Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186234409550 2025-05-29T15:24:37.216973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-05-29T15:24:37.217010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186234409551 2025-05-29T15:24:37.217278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-05-29T15:24:37.217311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186234409550 2025-05-29T15:24:37.217579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:24:37.217588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:24:37.217610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:24:37.217661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-05-29T15:24:37.217711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:24:37.217717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:24:37.217728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 
1] was 2 2025-05-29T15:24:37.218042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:5 2025-05-29T15:24:37.218055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409549 2025-05-29T15:24:37.218656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:7 2025-05-29T15:24:37.218666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409551 2025-05-29T15:24:37.218697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:6 2025-05-29T15:24:37.218700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409550 2025-05-29T15:24:37.218727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:24:37.218735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-05-29T15:24:37.219368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-05-29T15:24:37.219383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-05-29T15:24:37.219477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-05-29T15:24:37.219502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-05-29T15:24:37.219508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:940:2802] TestWaitNotification: OK eventTxId 106 2025-05-29T15:24:37.219615Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:37.219654Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 50us result status StatusPathDoesNotExist 2025-05-29T15:24:37.219700Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { 
Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:24:37.219765Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:37.219781Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 18us result status StatusPathDoesNotExist 2025-05-29T15:24:37.219797Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:24:37.219838Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:37.219861Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 24us result status StatusSuccess 2025-05-29T15:24:37.219932Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186234409549 is deleted wait until 72075186234409550 is deleted wait until 72075186234409551 is deleted wait until 72075186234409552 is deleted 2025-05-29T15:24:37.220012Z node 1 :HIVE INFO: tablet_helpers.cpp:1476: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409549 2025-05-29T15:24:37.220031Z node 1 :HIVE INFO: tablet_helpers.cpp:1476: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409550 2025-05-29T15:24:37.220040Z node 1 :HIVE INFO: tablet_helpers.cpp:1476: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409551 2025-05-29T15:24:37.220048Z node 1 :HIVE INFO: tablet_helpers.cpp:1476: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409552 Deleted tabletId 72075186234409549 Deleted tabletId 72075186234409550 Deleted tabletId 72075186234409551 Deleted tabletId 72075186234409552 >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob [GOOD] >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] >> TVPatchTests::PatchPartFastXorDiffDisorder >> TVPatchTests::PatchPartPutError >> TVPatchTests::FindingPartsWhenPartsAreDontExist [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::BulkUpsertUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 26094, MsgBus: 3510 2025-05-29T15:21:33.889974Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888159372090046:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.890061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026f1/r3tmp/tmpXI3RDa/pdisk_1.dat 2025-05-29T15:21:33.964384Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.965320Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888159372089874:2079] 1748532093886294 != 1748532093886297 TServer::EnableGrpc on GrpcPort 26094, node 1 2025-05-29T15:21:33.990938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.990951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.990953Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.991007Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3510 2025-05-29T15:21:34.031109Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:34.031134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:34.032148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3510 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.122495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:34.125921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.128626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.161001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163667057857:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.161121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163667057857:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.161213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163667057857:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.161248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163667057857:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.161279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888163667057857:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.161308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163667057857:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.161347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163667057857:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.161376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163667057857:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.161405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163667057857:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.161434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163667057857:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.161465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163667057857:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.161499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163667057857:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.174680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163667057858:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.180932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163667057858:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.181028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163667057858:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.181054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163667057858:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.181082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509888163667057858:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.181107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163667057858:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.181129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163667057858:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.181151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163667057858:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.181176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163667057858:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.181198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163667057858:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.181220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163667057858:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.181242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509888163667057858:2315];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.186157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163667057861:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.186176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163667057861:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.186234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163667057861:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.186254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163667057861:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.186273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163667057861:2316];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline ... 
log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888680609536744:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:23:35.862142Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888680609536744:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:23:35.862163Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888680609536744:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:23:35.862185Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888680609536744:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:23:35.862203Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888680609536744:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:23:35.862228Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888680609536744:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:23:35.862248Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888680609536744:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:23:35.862268Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888680609536744:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:23:35.862288Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888680609536744:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:23:35.862306Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509888680609536744:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:23:35.866912Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:23:35.866930Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:23:35.866944Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:23:35.866950Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:23:35.866968Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:23:35.866974Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:23:35.866984Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:23:35.866989Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:23:35.867003Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:23:35.867009Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:23:35.867016Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:23:35.867021Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:23:35.867043Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:23:35.867050Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:23:35.867071Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:23:35.867076Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:23:35.867089Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:23:35.867094Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:23:35.867102Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:23:35.867108Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:23:35.867114Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:23:35.867235Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:23:35.867243Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:23:35.908531Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=208;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=208;columns=2; 2025-05-29T15:23:35.924261Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888680609536826:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:35.924284Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:35.924429Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509888680609536831:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:23:35.925352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:23:35.927273Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509888680609536833:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:23:36.004704Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509888684904504180:2388] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:36.066065Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532216000, txId: 18446744073709551615] shutting down FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=208;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=208;columns=2; 2025-05-29T15:23:36.151107Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532216088, txId: 18446744073709551615] shutting down 2025-05-29T15:23:50.423582Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:23:50.423596Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:36.326413Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532276000, txId: 18446744073709551615] shutting down >> TVPatchTests::PatchPartGetError >> TOosLogicTests::RenderHtml [GOOD] >> TVPatchTests::FindingPartsWhenError |65.9%| [TA] $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |65.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink [FAIL] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] >> TVPatchTests::PatchPartPutError [GOOD] |65.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink [FAIL] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] >> TVPatchTests::PatchPartGetError [GOOD] >> TVPatchTests::FindingPartsWhenError [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink [FAIL] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink [FAIL] >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] Test command err: Recv 65537 2025-05-29T15:24:37.745326Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-05-29T15:24:37.745671Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-05-29T15:24:37.745690Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-05-29T15:24:37.745721Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2025-05-29T15:24:37.745733Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-05-29T15:24:37.745749Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# The diff at index 0 went beyound the blob part; DiffStart# 100 DiffEnd# 96 BlobPartSize# 32 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm |65.9%| [TA] {RESULT} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |65.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> TOlapReboots::CreateStandaloneTable >> TOlapReboots::DropMultipleStandaloneTables >> DataShardSnapshots::ShardRestartLockBrokenByConflict [FAIL] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] Test command err: Recv 65537 2025-05-29T15:24:37.946933Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-05-29T15:24:37.947324Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-05-29T15:24:37.947348Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-05-29T15:24:37.947388Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2025-05-29T15:24:37.947405Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-05-29T15:24:37.947428Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# [XorDiff from datapart] the start of the diff at index 0 righter than the start of the diff at index 1; PrevDiffStart# 2 DiffStart# 0 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartPutError [GOOD] Test command err: Recv 65537 2025-05-29T15:24:37.941832Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-05-29T15:24:37.942279Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-05-29T15:24:37.942307Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-05-29T15:24:37.942350Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no 
ForceEnd# no 2025-05-29T15:24:37.942362Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-05-29T15:24:37.942395Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2025-05-29T15:24:37.942417Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2025-05-29T15:24:37.942429Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2025-05-29T15:24:37.942460Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR 2025-05-29T15:24:37.942470Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VPutResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-05-29T15:24:37.942482Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_1 Test command err: Trying to start YDB, gRPC: 22433, MsgBus: 17198 2025-05-29T15:24:33.790028Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888932558025297:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:33.790386Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002809/r3tmp/tmpVEXUhE/pdisk_1.dat 2025-05-29T15:24:33.842220Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:33.842380Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888932558025275:2079] 1748532273789829 != 1748532273789832 TServer::EnableGrpc on GrpcPort 22433, node 1 2025-05-29T15:24:33.859543Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:33.859557Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:33.859560Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize 
from file: (empty maybe) 2025-05-29T15:24:33.859606Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17198 TClient is connected to server localhost:17198 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:24:33.918557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:33.918581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:33.919763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:33.927448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:33.930126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:33.934251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:33.968029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:33.996587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.012279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.203031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888936852994218:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.203072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.273702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.283044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.296069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.308643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.333002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.356287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.417064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.475424Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888936852994878:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.475449Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888936852994883:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.475456Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.476254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:34.483007Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888936852994885:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:24:34.578829Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888936852994936:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:34.667250Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888936852994952:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:34.667347Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTFjOGFlYTUtYjE0ZjcyMzAtZTJmMzhlNmUtMjlmNmUxMTY=, ActorId: [1:7509888936852994200:2401], ActorState: ExecuteState, TraceId: 01jweaa99bab8ak5xrb838r576, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:24:34.668056Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7F83972FCAC2 14. ??:0: ?? @ 0x7F839738E84F >> TConsoleTests::TestCreateTenantWrongPool [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] Test command err: Recv 65537 2025-05-29T15:24:37.785596Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-05-29T15:24:37.785956Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-05-29T15:24:37.785979Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-05-29T15:24:37.785993Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2025-05-29T15:24:37.996092Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-05-29T15:24:37.996154Z node 2 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-05-29T15:24:37.996163Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-05-29T15:24:37.996200Z node 2 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2025-05-29T15:24:37.996210Z node 2 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: 
received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-05-29T15:24:37.996222Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartGetError [GOOD] Test command err: Recv 65537 2025-05-29T15:24:38.058133Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-05-29T15:24:38.058589Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-05-29T15:24:38.058617Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-05-29T15:24:38.058659Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-05-29T15:24:38.058671Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-05-29T15:24:38.058704Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VGetResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-05-29T15:24:38.058717Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TOlapReboots::DropMultipleTables ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenError [GOOD] Test command err: Recv 65537 2025-05-29T15:24:38.151322Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-05-29T15:24:38.151702Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# ERROR ResultSize# 1 2025-05-29T15:24:38.151716Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-05-29T15:24:38.151731Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} 
[0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] Test command err: Recv 65537 2025-05-29T15:24:38.194320Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-05-29T15:24:38.194801Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-05-29T15:24:38.194830Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-05-29T15:24:38.194876Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-05-29T15:24:38.194906Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 5 PatchedPartId# 5 XorReceiver# yes ParityPart# yes ForceEnd# no 2025-05-29T15:24:38.194916Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:100:0] PullingPart# 5 Send NKikimr::TEvBlobStorage::TEvVGet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::HasTxControl Test command err: Trying to start YDB, gRPC: 24434, MsgBus: 3054 2025-05-29T15:24:33.751421Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888931474515935:2151];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00282e/r3tmp/tmppQe3Xr/pdisk_1.dat 2025-05-29T15:24:33.794466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:24:33.811456Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888931474515800:2079] 1748532273741285 != 1748532273741288 2025-05-29T15:24:33.813705Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24434, node 1 2025-05-29T15:24:33.826936Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:33.826950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:33.826952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:33.826998Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3054 2025-05-29T15:24:33.886367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:33.886419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:33.887523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3054 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:33.920343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:33.935304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:33.943904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.020636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.064040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.088100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.236196Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888935769484738:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.236218Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.296303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.304852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.316812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.330042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.345149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.358010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.420690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.438279Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888935769485390:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.438310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.438408Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888935769485395:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.439247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:34.441701Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888935769485397:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:24:34.507934Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888935769485448:3393] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:34.592703Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888935769485464:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:34.592863Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjY1ODJhMGYtZDUxZTRlMjAtNzViMGY4OWUtYzVmN2M2Mzg=, ActorId: [1:7509888935769484710:2399], ActorState: ExecuteState, TraceId: 01jweaa9859rwrwr3q1ycj03k5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:24:34.593584Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7F439CEA1AC2 14. ??:0: ?? @ 0x7F439CF3384F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::NotExtBlobs [FAIL] Test command err: 2025-05-29T15:24:10.164325Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:10.164363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:10.164380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001568/r3tmp/tmpuN4uOl/pdisk_1.dat 2025-05-29T15:24:10.274302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:10.288710Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:10.293205Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532249823315 != 1748532249823319 2025-05-29T15:24:10.335068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:10.335106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:10.345736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:10.419033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:10.434441Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:10.434624Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:10.434702Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:10.434764Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:10.442041Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:10.442221Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:10.442255Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:10.442396Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:10.442403Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:10.442408Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:10.442454Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:10.442476Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:10.442489Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:10.452765Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:10.456357Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:10.456417Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:10.456443Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:10.456447Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:10.456452Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:10.456458Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:10.456516Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:10.456522Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:10.456595Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:10.456613Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:10.456625Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:10.456630Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:10.456637Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:24:10.456641Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:10.456644Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:10.456647Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:10.456651Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:10.456724Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:10.456729Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:10.456733Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:10.456739Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:24:10.456742Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:24:10.456755Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:10.456806Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:24:10.456813Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:10.456825Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:10.456831Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:24:10.456834Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:24:10.456838Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:24:10.456841Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:10.456876Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-05-29T15:24:10.456880Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-05-29T15:24:10.456882Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-05-29T15:24:10.456884Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:10.456892Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:10.456894Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-05-29T15:24:10.456898Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-05-29T15:24:10.456901Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-05-29T15:24:10.456905Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1832: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-05-29T15:24:10.457092Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [1:684:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:10.457098Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at 
datashard 72075186224037888 2025-05-29T15:24:10.467400Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:24:10.467431Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:10.467439Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:10.467450Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-05-29T15:24:10.467475Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:24:10.610527Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:699:2589], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:10.610555Z node 1 :TX_DATASHARD TRACE: datashard_impl. ... } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13C95FEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E49919) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264C9A24) NKikimr::NTestSuiteReadIteratorExternalBlobs::TTestCaseExtBlobsWithCompactingMiddleRows::Execute_(NUnitTest::TTestContext&)+2971 (0x13B8F1CB) NKikimr::NTestSuiteReadIteratorExternalBlobs::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13B99417) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E4B7CE) NKikimr::NTestSuiteReadIteratorExternalBlobs::TCurrentTest::Execute()+481 (0x13B98DB1) NUnitTest::TTestFactory::Execute()+803 (0x13E4BF43) NUnitTest::RunMain(int, char**)+3021 (0x13E5DAED) ??+0 (0x7FAE9640ED90) __libc_start_main+128 (0x7FAE9640EE40) _start+41 (0x12A96029) 2025-05-29T15:24:31.472032Z node 14 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:31.472054Z node 14 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:31.472101Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001568/r3tmp/tmpHe014w/pdisk_1.dat 2025-05-29T15:24:31.559247Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.576310Z node 14 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:31.576976Z node 14 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [14:32:2079] 1748532271188357 != 1748532271188361 2025-05-29T15:24:31.619052Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:31.619106Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:31.629808Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:31.703131Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:31.893798Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:738:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.893823Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:749:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.893831Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:31.894540Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:32.041161Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:752:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:32.072942Z node 14 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [14:822:2666] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:32.494453Z node 14 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976715660. Ctx: { TraceId: 01jweaa6rn88a9yhn5jhxenwtv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=YzJhYmFhMjMtZGQyMjFkZDItNmQ2NmZjM2YtNWQyY2FjNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:24:33.332489Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [15:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:33.332547Z node 15 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:33.332555Z node 15 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001568/r3tmp/tmpt2Emrw/pdisk_1.dat 2025-05-29T15:24:33.447638Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:33.464990Z node 15 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:33.465594Z node 15 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [15:32:2079] 1748532272927664 != 1748532272927668 2025-05-29T15:24:33.508036Z node 15 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:33.508099Z node 15 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:33.518825Z node 15 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:33.592639Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:33.787248Z node 15 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [15:734:2616], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:33.787274Z node 15 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [15:743:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:33.787281Z node 15 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:33.788006Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:33.944884Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [15:748:2624], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:33.977478Z node 15 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [15:818:2663] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:34.268232Z node 15 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [15:827:2671], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:34.269545Z node 15 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=15&id=N2VmMjE5My01ZWM0NjM2OS0xZmI5YjcyMy0yNTVjOTljMQ==, ActorId: [15:732:2614], ActorState: ExecuteState, TraceId: 01jweaa8kv8npy6yy97vwdnq8s, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13C95FEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E49919) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264C9A24) NKikimr::NTestSuiteReadIteratorExternalBlobs::TTestCaseNotExtBlobs::Execute_(NUnitTest::TTestContext&)+891 (0x13B9437B) NKikimr::NTestSuiteReadIteratorExternalBlobs::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13B99417) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E4B7CE) NKikimr::NTestSuiteReadIteratorExternalBlobs::TCurrentTest::Execute()+481 (0x13B98DB1) NUnitTest::TTestFactory::Execute()+803 (0x13E4BF43) NUnitTest::RunMain(int, char**)+3021 (0x13E5DAED) ??+0 (0x7FAE9640ED90) __libc_start_main+128 (0x7FAE9640EE40) _start+41 (0x12A96029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimpleOnePartition Test command err: Trying to start YDB, gRPC: 20082, MsgBus: 24650 2025-05-29T15:24:34.158898Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888935448666239:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:34.159922Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00279e/r3tmp/tmp6jkm2Y/pdisk_1.dat 2025-05-29T15:24:34.256958Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:34.257407Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888935448666079:2079] 1748532274154222 != 1748532274154225 TServer::EnableGrpc on GrpcPort 20082, node 1 2025-05-29T15:24:34.293317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:34.293330Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:34.293333Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:34.293374Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:24:34.303034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-05-29T15:24:34.303062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:34.304823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24650 TClient is connected to server localhost:24650 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:24:34.363481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.370884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:34.387825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:34.412554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.438148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.450583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.578435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888935448667708:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.578470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.613414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.620950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.630173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.644403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.658331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.672616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.686642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.702992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888935448668360:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.703025Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.703031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888935448668365:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.703772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:34.706388Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888935448668367:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:24:34.769357Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888935448668418:3393] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:34.846617Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888935448668434:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:34.846710Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTNiOWMwYTktNTkyZjcyYzEtNjBjNTgxY2YtMWNiYzc5NTg=, ActorId: [1:7509888935448667705:2401], ActorState: ExecuteState, TraceId: 01jweaa9ge4bx4r1qrfyw7k215, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:24:34.847427Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7FC3A4462AC2 14. ??:0: ?? @ 0x7FC3A44F484F >> TOlapReboots::CreateMultipleStandaloneTables >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [FAIL] >> DataShardSnapshots::VolatileSnapshotReadTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Returning Test command err: Trying to start YDB, gRPC: 5388, MsgBus: 17985 2025-05-29T15:24:33.971779Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888930623058353:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:33.977795Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027a6/r3tmp/tmpXZWTng/pdisk_1.dat 2025-05-29T15:24:34.061708Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5388, node 1 2025-05-29T15:24:34.087522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:34.087536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:34.087538Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:34.087585Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17985 2025-05-29T15:24:34.135037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:34.135072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:34.137776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17985 WaitRootIsUp 'Root'... 
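Every KQP suite in this run fails with the same signature: query compilation aborts with yql/essentials/ast/yql_expr.h:1874: index out of range (issue code 1, wrapped in Execution, code: 1060), the session replies INTERNAL_ERROR, and the test harness then dies inside NKikimr::NKqp::AssertSuccessResult (kqp_ut_common.h:375) because result.IsSuccess() is false. The unittest diff (INT|SUCC)E(RNAL_ERROR|SS) in the ExecSQL reports is the framework's character-merged rendering of INTERNAL_ERROR (actual) against SUCCESS (expected), as the explicit "(INTERNAL_ERROR != SUCCESS)" line shows. A minimal sketch of such a status-asserting helper, assuming a hypothetical TStatus stand-in for NYdb::TStatus (not YDB's actual code):

// Minimal sketch (not YDB's actual code) of a status-asserting test helper
// like the AssertSuccessResult seen at kqp_ut_common.h:375 in the traces above.
#include <cstdio>
#include <cstdlib>
#include <string>

struct TStatus {                      // hypothetical stand-in for NYdb::TStatus
    bool Success;
    std::string Issues;               // flattened issue tree, e.g. ": Fatal: ..."
    bool IsSuccess() const { return Success; }
    const std::string& GetIssues() const { return Issues; }
};

// On failure this aborts the whole test binary rather than failing one test.
void AssertSuccessResult(const TStatus& result) {
    if (!result.IsSuccess()) {
        std::fprintf(stderr, "assertion failed: (result.IsSuccess())\n%s\n",
                     result.GetIssues().c_str());
        std::abort();
    }
}

int main() {
    AssertSuccessResult({true, {}});                                  // passes silently
    AssertSuccessResult({false, ": Fatal: Execution, code: 1060"});   // aborts
}

Because the helper runs on a thread-pool thread (note the pool.h/async.h frames in the backtraces), the unittest framework cannot fail just the current test; it panics the whole binary, which is the "VERIFY failed ... assertion failed in non-unittest thread" seen above.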
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:34.193611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.197743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.207892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.250249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.311350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.332608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.492071Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888934918027099:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.492092Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.547120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.601656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.657872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.666193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.721611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.735687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.749920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.766026Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888934918027757:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.766064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.766093Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888934918027762:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.766926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:34.769458Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888934918027764:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:24:34.864427Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888934918027815:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:34.940912Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888934918027831:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:34.941037Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWZiODc3MDMtMTAzMjE2Ni0zNWM2NjgwMS00OTRlZDZmZg==, ActorId: [1:7509888934918027081:2401], ActorState: ExecuteState, TraceId: 01jweaa9jd9dq8x082q0qdxtkj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:24:34.941773Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC
8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609
10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C
13. ??:0: ?? @ 0x7F9235130AC2
14. ??:0: ?? @ 0x7F92351C284F
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::UpdateOn
Test command err: Trying to start YDB, gRPC: 13192, MsgBus: 3429
2025-05-29T15:24:33.873577Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888931576861752:2063];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:24:33.874131Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027e9/r3tmp/tmpIFX62d/pdisk_1.dat
2025-05-29T15:24:33.950831Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888931576861730:2079] 1748532273873057 != 1748532273873060
2025-05-29T15:24:33.951536Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 13192, node 1
2025-05-29T15:24:33.973376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:24:33.973386Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:24:33.973388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:24:33.973427Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:3429
2025-05-29T15:24:34.017316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:34.017345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:34.020288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:3429
WaitRootIsUp 'Root'...
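The phrase "assertion failed in non-unittest thread" is why one compile bug kills whole suites: the sample-table setup runs on a worker pool (frames 5-12), where a failed check cannot be attributed to the current test, so the framework panics the entire process. A plain-C++ illustration of the mechanism (std primitives only, not YDB's util classes; assumes assertions are enabled, i.e. built without NDEBUG):

#include <cassert>
#include <thread>

int main() {
    std::thread worker([] {
        const bool resultIsSuccess = false;         // stand-in for result.IsSuccess()
        assert(resultIsSuccess && "query failed");  // abort() tears down the whole process
    });
    worker.join();  // never completes normally here: the worker's abort kills the process
    return 0;
}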
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:34.079813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.087273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:34.095769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.172138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.199666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.222038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.336639Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888935871830662:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.336697Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.378259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.387564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.403949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.414028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.471174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.484005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.497753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.515987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888935871831316:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.516017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.516033Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888935871831321:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.516839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:34.524564Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888935871831323:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:24:34.624586Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888935871831374:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:34.732476Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888935871831387:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:34.732585Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjhlOTUyYmYtMjAyMzJkMjYtODE5YTZiODctMWFhNzVkNTg=, ActorId: [1:7509888935871830644:2401], ActorState: ExecuteState, TraceId: 01jweaa9ak4q8ff4ptyztkrkmf, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:24:34.733276Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC
8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609
10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C
13. ??:0: ?? @ 0x7F9BD4827AC2
14. ??:0: ?? @ 0x7F9BD48B984F
>> TOlapReboots::CreateDropStandaloneTable
>> TOlapReboots::CreateTable
>> TOlapReboots::CreateStore
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::HasTxControl
Test command err: Trying to start YDB, gRPC: 6680, MsgBus: 20509
2025-05-29T15:24:33.975216Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888931038816052:2081];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:24:33.975480Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027ae/r3tmp/tmp9xxr7A/pdisk_1.dat
2025-05-29T15:24:34.093799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:34.093829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:34.095006Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:24:34.095318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 6680, node 1
2025-05-29T15:24:34.114946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:24:34.117811Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:24:34.117836Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:24:34.117906Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:20509
TClient is connected to server localhost:20509
WaitRootIsUp 'Root'...
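In client terms, what the harness does right before the VERIFY is simply "run a query, check the status". A standalone sketch against the public C++ SDK (endpoint and database are placeholders taken from this block's log; include paths vary between SDK layouts): with the compiler bug above, the result would be INTERNAL_ERROR and GetIssues() would carry the two "Fatal" issues (Execution, code 1060; index out of range, code 1).

#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>   // in-tree layout; may differ
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>
#include <iostream>

int main() {
    NYdb::TDriver driver(NYdb::TDriverConfig()
        .SetEndpoint("localhost:6680")   // gRPC port from the log above
        .SetDatabase("/Root"));
    NYdb::NTable::TTableClient client(driver);

    auto session = client.CreateSession().GetValueSync().GetSession();
    auto result = session.ExecuteDataQuery(
        "SELECT 1;", NYdb::NTable::TTxControl::BeginTx().CommitTx()).GetValueSync();

    if (!result.IsSuccess()) {
        // For this run: INTERNAL_ERROR plus the issue chain seen in the log
        std::cerr << result.GetIssues().ToString().c_str();
    }
    driver.Stop(true);
    return 0;
}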
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:34.223621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.226804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:34.238410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.303242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:34.330656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.344879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.494478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888935333784919:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.494538Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.547243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.554410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.567143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.581415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.595151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.609471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.623400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.639538Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888935333785574:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.639563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.639572Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888935333785579:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.640360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:34.643422Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888935333785581:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:24:34.716822Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888935333785632:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:34.811493Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888935333785648:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:34.811603Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzcwZjM3YmUtM2Y1MTA5NjEtYzI1MTA0M2YtOTI3Y2U1YTM=, ActorId: [1:7509888935333784892:2400], ActorState: ExecuteState, TraceId: 01jweaa9effhgvvg0gbghwe5zn, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:24:34.812316Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC
8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609
10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C
13. ??:0: ?? @ 0x7FF94340FAC2
14. ??:0: ?? @ 0x7FF9434A184F
>> TOlapReboots::CreateMultipleTables
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_2
Test command err: Trying to start YDB, gRPC: 28947, MsgBus: 10002
2025-05-29T15:24:33.874221Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888931646984511:2063];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:24:33.874409Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002816/r3tmp/tmprNNoNS/pdisk_1.dat
2025-05-29T15:24:33.995858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:33.995884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:34.002951Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:24:34.005948Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888931646984489:2079] 1748532273874012 != 1748532273874015
2025-05-29T15:24:34.007537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 28947, node 1
2025-05-29T15:24:34.030952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:24:34.030963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:24:34.030965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:24:34.031010Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:10002
TClient is connected to server localhost:10002
WaitRootIsUp 'Root'...
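The "Resource pool default not found" warnings and the later "path exist, request accepts it" error in each block are a separate, benign pattern: the first-use race on /Root/.metadata/workload_manager/pools/default, where several actors try to create the default pool, one wins, and the rest retry and then observe it. None of that is the compile failure. The generic shape of such a wait-until-visible loop, as a sketch (WaitForPath is a hypothetical helper, not the workload service's actual code):

#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>   // layout may differ
#include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>
#include <chrono>
#include <thread>

// Polls until a scheme path (e.g. the default resource pool) becomes visible.
bool WaitForPath(NYdb::NScheme::TSchemeClient& scheme, const TString& path) {
    for (int attempt = 0; attempt < 10; ++attempt) {
        if (scheme.DescribePath(path).GetValueSync().IsSuccess()) {
            return true;  // created, possibly by a concurrent actor
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(50 << attempt));  // backoff
    }
    return false;
}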
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:24:34.138250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.141375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.151930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.240419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.284353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.297472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.384335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888935941953418:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.384363Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.432575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.442250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.455308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.469591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.484107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.497696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.512349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.527465Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888935941954072:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.527494Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888935941954077:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.527508Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.528319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:34.531629Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888935941954079:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:24:34.600184Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888935941954130:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:34.690386Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888935941954146:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:34.690499Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODY3NDgzZjYtZjUxOWJhNzItNGYxMWNkNTUtYmE5MjFhOWE=, ActorId: [1:7509888935941953390:2399], ActorState: ExecuteState, TraceId: 01jweaa9az14wmcd3vhxrj5cw8, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:24:34.691287Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC
8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609
10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C
13. ??:0: ?? @ 0x7F0CAA258AC2
14. ??:0: ?? @ 0x7F0CAA2EA84F
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_2
Test command err: Trying to start YDB, gRPC: 7638, MsgBus: 22754
2025-05-29T15:24:33.919098Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888932699231062:2211];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:24:33.921327Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0027da/r3tmp/tmpYpuJm1/pdisk_1.dat
2025-05-29T15:24:33.986193Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888932699230877:2079] 1748532273869276 != 1748532273869279
2025-05-29T15:24:33.994860Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 7638, node 1
2025-05-29T15:24:34.026270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:34.026302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:34.026921Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:24:34.034984Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:24:34.035002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:24:34.035004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:24:34.035053Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:22754
TClient is connected to server localhost:22754
WaitRootIsUp 'Root'...
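The root issue in every one of these failures is the same: "yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1", i.e. some compile stage asks an expression node for a child it does not have, and a bounds check turns that into the code-1 fatal issue instead of memory corruption. The accessor presumably has this general shape (illustrative sketch, not the verbatim YQL source):

#include <util/generic/vector.h>
#include <util/generic/yexception.h>

struct TExprNode {
    TVector<TExprNode*> Children_;

    TExprNode* Child(size_t index) const {
        // An out-of-range access becomes a thrown yexception, which compilation
        // reports as the ": Fatal: ... index out of range, code: 1" issue above.
        Y_ENSURE(index < Children_.size(), "index out of range");
        return Children_[index];
    }
};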
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:34.224302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.227501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:34.235942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.277766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.303038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.318624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:34.405077Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888936994199803:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.405110Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.450253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.457260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.469592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.484068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.539174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.553614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.567599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.584048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888936994200460:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.584043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888936994200455:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.584074Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.584768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:34.587259Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888936994200462:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:24:34.654174Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888936994200513:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:34.781106Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888936994200529:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:34.781224Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmJhYjIxM2QtZTBmZDgxNmUtOGRjNzU5NjctYWQ0ZGNkOGM=, ActorId: [1:7509888936994199785:2401], ActorState: ExecuteState, TraceId: 01jweaa9cqe43zvpyxg65rgjrn, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:24:34.781994Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC
8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609
10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C
13. ??:0: ?? @ 0x7F3304368AC2
14. ??:0: ?? @ 0x7F33043FA84F
|66.0%| [TA] $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TConsoleTests::TestSetConfig [GOOD]
>> TConsoleTests::TestTenantGeneration
>> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink [FAIL]
>> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink
>> TOlapReboots::CreateDropTable
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD]
Test command err: Starting YDB, grpc: 7506, msgbus: 6756
2025-05-29T15:23:12.068144Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888583458144479:2075];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:23:12.068165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b38/r3tmp/tmp2H3vI3/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 7506, node 1
2025-05-29T15:23:12.125338Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:12.127446Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:23:12.127456Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:23:12.127457Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:23:12.127488Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:6756
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-05-29T15:23:12.142403Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509888583458144706:2139] Handle TEvNavigate describe path dc-1 2025-05-29T15:23:12.144108Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509888583458145149:2423] HANDLE EvNavigateScheme dc-1 2025-05-29T15:23:12.144370Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:7509888583458145149:2423] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.152080Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:7509888583458145149:2423] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-05-29T15:23:12.153922Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:7509888583458145149:2423] Handle TEvDescribeSchemeResult Forward to# [1:7509888583458145128:2406] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-29T15:23:12.156841Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888583458144706:2139] Handle TEvProposeTransaction 2025-05-29T15:23:12.156852Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:327: actor# [1:7509888583458144706:2139] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-05-29T15:23:12.168443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:12.168481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:12.170093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:12.193920Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:7509888583458144706:2139] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:23:12.194809Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:23:12.194825Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888583458144706:2139] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:23:12.194868Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888583458144706:2139] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7509888583458145177:2442] 2025-05-29T15:23:12.205743Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888583458145177:2442] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:12.205793Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888583458145177:2442] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-05-29T15:23:12.205799Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888583458145177:2442] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:12.205813Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888583458145177:2442] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:12.206006Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888583458145177:2442] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.206110Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888583458145177:2442] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-05-29T15:23:12.206152Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888583458145177:2442] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:23:12.206219Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888583458145177:2442] txid# 
281474976715657 HANDLE EvClientConnected 2025-05-29T15:23:12.206457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.207208Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888583458145177:2442] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:23:12.207229Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509888583458145177:2442] txid# 281474976715657 SEND to# [1:7509888583458145165:2431] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-05-29T15:23:12.211128Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509888583458144706:2139] Handle TEvProposeTransaction 2025-05-29T15:23:12.211138Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509888583458144706:2139] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:23:12.211148Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509888583458144706:2139] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509888583458145217:2478] 2025-05-29T15:23:12.212052Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509888583458145217:2478] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-05-29T15:23:12.212079Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509888583458145217:2478] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-05-29T15:23:12.212085Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509888583458145217:2478] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:12.212104Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509888583458145217:2478] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:12.212235Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509888583458145217:2478] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:12.212272Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509888583458145217:2478] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:12.212284Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509888583458145217:2478] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:23:12.212351Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509888583458145217:2478] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:23:12.212435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:23:12.212996Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509888583458145217:2478] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResul ... _PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888726286685262:2546] txid# 281474976715660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:23:45.924991Z node 59 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [59:7509888726286685262:2546] txid# 281474976715660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:23:45.924994Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888726286685262:2546] txid# 281474976715660 SEND to# [59:7509888726286685186:2337] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 48} 2025-05-29T15:23:45.927943Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888726286684570:2114] Handle TEvProposeTransaction 2025-05-29T15:23:45.927955Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888726286684570:2114] TxId# 281474976715661 ProcessProposeTransaction 2025-05-29T15:23:45.927968Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888726286684570:2114] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7509888726286685286:2558] 2025-05-29T15:23:45.928688Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888726286685286:2558] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:35908" 2025-05-29T15:23:45.928704Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888726286685286:2558] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:45.928708Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888726286685286:2558] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:45.928715Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888726286685286:2558] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:45.928816Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888726286685286:2558] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:45.928849Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888726286685286:2558] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 
DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:45.928866Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888726286685286:2558] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-05-29T15:23:45.928916Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888726286685286:2558] txid# 281474976715661 HANDLE EvClientConnected 2025-05-29T15:23:45.931679Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888726286685286:2558] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-05-29T15:23:45.931699Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888726286685286:2558] txid# 281474976715661 SEND to# [59:7509888726286685285:2330] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-05-29T15:23:46.093198Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888726286684570:2114] Handle TEvProposeTransaction 2025-05-29T15:23:46.093211Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888726286684570:2114] TxId# 281474976715662 ProcessProposeTransaction 2025-05-29T15:23:46.093228Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888726286684570:2114] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7509888730581652603:2573] 2025-05-29T15:23:46.093867Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888730581652603:2573] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:35908" 2025-05-29T15:23:46.093882Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888730581652603:2573] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:46.093884Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888730581652603:2573] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-05-29T15:23:46.093898Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888730581652603:2573] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:46.094009Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888730581652603:2573] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:46.094063Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [59:7509888730581652603:2573] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:23:46.094082Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7509888730581652603:2573] txid# 281474976715662 SEND to# 72057594046644480 
shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-05-29T15:23:46.094131Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [59:7509888730581652603:2573] txid# 281474976715662 HANDLE EvClientConnected 2025-05-29T15:23:46.094252Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:23:46.095075Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [59:7509888730581652603:2573] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-05-29T15:23:46.095088Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888730581652603:2573] txid# 281474976715662 SEND to# [59:7509888730581652602:2343] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-05-29T15:23:46.104683Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [59:7509888726286684570:2114] Handle TEvProposeTransaction 2025-05-29T15:23:46.104701Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [59:7509888726286684570:2114] TxId# 281474976715663 ProcessProposeTransaction 2025-05-29T15:23:46.104720Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [59:7509888726286684570:2114] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7509888730581652642:2595] 2025-05-29T15:23:46.105335Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [59:7509888730581652642:2595] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0ODU3NTQyNiwiaWF0IjoxNzQ4NTMyMjI2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.G8DrBUr2N_jd6GFwoYw4jBmWEayPVQUL20pVXDeHc7sglXHxMshFP_yxyec8xEgj3Z0wgqTtkOf6pI5LWcNRB8cA2aAAnfLZld70BnTyvF9XU-C_iweIK4k26AP09PzmrTOphuyuanueJmB2h2JTT_T0zy-4pkCdOJlAB6GPAnRfmVmoL0oWT5DM_BQkb6ZJ7LYjB_O9XBKTZFQc_WcHsMJoz-kvokA6DYAD5OSgIj3jJf_HWRuA2xA7CN0FngF5F1Aa0U0HE05_xQqT37ya-Al-9gHKgK3u_AuK7zip3WPHusB89qS3cVy2uijEuqHoaxfGjjle_3VCzeiNPBvOSA\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0ODU3NTQyNiwiaWF0IjoxNzQ4NTMyMjI2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:35908" 2025-05-29T15:23:46.105353Z node 59 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [59:7509888730581652642:2595] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-05-29T15:23:46.105356Z node 59 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [59:7509888730581652642:2595] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-05-29T15:23:46.105404Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1401: Actor# [59:7509888730581652642:2595] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-05-29T15:23:46.105416Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1436: Actor# [59:7509888730581652642:2595] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 
2025-05-29T15:23:46.105426Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7509888730581652642:2595] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:23:46.105491Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [59:7509888730581652642:2595] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:23:46.105500Z node 59 :TX_PROXY ERROR: schemereq.cpp:1079: Actor# [59:7509888730581652642:2595] txid# 281474976715663, Access denied for ordinaryuser, attempt to manage user 2025-05-29T15:23:46.105519Z node 59 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [59:7509888730581652642:2595] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-05-29T15:23:46.105538Z node 59 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [59:7509888730581652642:2595] txid# 281474976715663 SEND to# [59:7509888730581652641:2348] Source {TEvProposeTransactionStatus Status# 5} 2025-05-29T15:23:46.105633Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:2583: SessionId: ydb://session/3?node_id=59&id=NzUwNWIzMDctNjAyYzAxNDAtMWU2YTUwZjgtZWVlODEwNzM=, ActorId: [59:7509888730581652627:2348], ActorState: ExecuteState, TraceId: 01jwea8t1n80g6wwey4zw5pdq4, Create QueryResponse for error on request, msg: 2025-05-29T15:23:46.105734Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:353: actor# [59:7509888726286684570:2114] Handle TEvExecuteKqpTransaction 2025-05-29T15:23:46.105745Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:342: actor# [59:7509888726286684570:2114] TxId# 281474976715664 ProcessProposeKqpTransaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeResultLost_RwTx-UseSink [FAIL] Test command err: 2025-05-29T15:24:29.243005Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:701:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:29.243162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:29.243200Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:29.243249Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:698:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:29.243309Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:29.243316Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001946/r3tmp/tmpQSWcUO/pdisk_1.dat 2025-05-29T15:24:29.356163Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:29.443248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:29.542280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:29.542320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:29.543758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:29.543783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:29.555543Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:24:29.555696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:29.555813Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:29.842146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:30.563402Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1582:2952], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:30.563452Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1592:2957], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:30.563469Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:30.564963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:31.145179Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1596:2960], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-05-29T15:24:31.317569Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:1734:3038] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:24:31.363766Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:1746:3049], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:31.364647Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Yzk1ZjZkYzQtNjQ5NWZlYzMtMzNmY2UwYmEtNGE1ZWI1NmM=, ActorId: [1:1580:2950], ActorState: ExecuteState, TraceId: 01jweaa5exaje62te369p8np6b, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS)
Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1
, with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13AAFD2C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C63AA9)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x262F1694)
NKikimr::NKqp::TLocalFixture::TLocalFixture(bool, std::__y1::optional)+1557 (0x13996425)
NKikimr::NKqp::NTestSuiteKqpErrors::TTestCaseProposeResultLost_RwTx::Execute_(NUnitTest::TTestContext&)+42 (0x139A644A)
NKikimr::NKqp::NTestSuiteKqpErrors::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1399D867)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C6595E)
NKikimr::NKqp::NTestSuiteKqpErrors::TCurrentTest::Execute()+424 (0x1399D0C8)
NUnitTest::TTestFactory::Execute()+803 (0x13C660D3)
NUnitTest::RunMain(int, char**)+3021 (0x13C77C7D)
??+0 (0x7FB0E87C9D90)
__libc_start_main+128 (0x7FB0E87C9E40)
_start+41 (0x129FD029)
2025-05-29T15:24:32.671065Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:701:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:32.671194Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:32.671214Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:32.671623Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:698:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:32.671693Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:32.671743Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001946/r3tmp/tmpigED5r/pdisk_1.dat 2025-05-29T15:24:32.782454Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:32.933584Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:33.024419Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:33.024466Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:33.025458Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:33.025482Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:33.041803Z node 3 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-05-29T15:24:33.041988Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:33.042106Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:33.364991Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:33.888285Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1580:2951], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:33.888318Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1591:2956], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:33.888329Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:33.889611Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:34.440086Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1594:2959], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-05-29T15:24:34.587225Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:1734:3039] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:24:34.597446Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:1745:3049], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:34.598018Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=Y2NlNjg2ZC05YzhjMDZhYS0xMWE5MWM4My1mNjQxYThkNg==, ActorId: [3:1578:2949], ActorState: ExecuteState, TraceId: 01jweaa8pz0zg819bcb90dgpqd, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS)
Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1
, with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13AAFD2C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C63AA9)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x262F1694)
NKikimr::NKqp::TLocalFixture::TLocalFixture(bool, std::__y1::optional)+1557 (0x13996425)
NKikimr::NKqp::NTestSuiteKqpErrors::TTestCaseProposeResultLost_RwTx::Execute_(NUnitTest::TTestContext&)+42 (0x139B1BAA)
NKikimr::NKqp::NTestSuiteKqpErrors::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1399D867)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C6595E)
NKikimr::NKqp::NTestSuiteKqpErrors::TCurrentTest::Execute()+424 (0x1399D0C8)
NUnitTest::TTestFactory::Execute()+803 (0x13C660D3)
NUnitTest::RunMain(int, char**)+3021 (0x13C77C7D)
??+0 (0x7FB0E87C9D90)
__libc_start_main+128 (0x7FB0E87C9E40)
_start+41 (0x129FD029)
>> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy [GOOD]
>> TCmsTenatsTest::TestClusterLimitForceRestartMode
>> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink [FAIL]
>> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink
>> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert [FAIL]
>> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink
>> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink [FAIL]
>> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink
>> TSchemeShardServerLess::StorageBillingLabels
>> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink [FAIL]
>> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeResultLost_RwTx-UseSink [FAIL]
Test command err:
2025-05-29T15:24:29.336904Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:701:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:29.337035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:29.337074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:29.337123Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:698:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:29.337190Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:29.337198Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00194e/r3tmp/tmpEUeHac/pdisk_1.dat 2025-05-29T15:24:29.482131Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:29.576491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:29.674726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:29.674784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:29.676130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:29.676155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:29.687790Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:24:29.687935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:29.688031Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:29.957844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:30.625598Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1582:2952], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:30.625631Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1592:2957], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:30.625644Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:30.626953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:31.200637Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1596:2960], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-05-29T15:24:31.370410Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:1734:3038] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:24:31.413686Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:1746:3049], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:31.414300Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWE1NmJkMWMtNDk4ZDgyZTMtMTljZWVmOWItMmZlNzQxNTQ=, ActorId: [1:1580:2950], ActorState: ExecuteState, TraceId: 01jweaa5h007pd7s42gsq7j7jn, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS)
Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1
, with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13AAFD2C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C63AA9)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x262F1694)
NKikimr::NKqp::TLocalFixture::TLocalFixture(bool, std::__y1::optional)+1557 (0x13996425)
NKikimr::NKqp::NTestSuiteKqpErrors::TTestCaseProposeError::Execute_(NUnitTest::TTestContext&)+39 (0x13996F97)
NKikimr::NKqp::NTestSuiteKqpErrors::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1399D867)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C6595E)
NKikimr::NKqp::NTestSuiteKqpErrors::TCurrentTest::Execute()+424 (0x1399D0C8)
NUnitTest::TTestFactory::Execute()+803 (0x13C660D3)
NUnitTest::RunMain(int, char**)+3021 (0x13C77C7D)
??+0 (0x7FDE8B89FD90)
__libc_start_main+128 (0x7FDE8B89FE40)
_start+41 (0x129FD029)
2025-05-29T15:24:32.603749Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:701:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:32.603838Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:32.603854Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:32.604217Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:698:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:32.604272Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:32.604313Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00194e/r3tmp/tmpV0mT3c/pdisk_1.dat 2025-05-29T15:24:32.750531Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:32.901937Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:33.004600Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:33.004656Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:33.005591Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:33.005617Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:33.019003Z node 3 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-05-29T15:24:33.019168Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:33.019332Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:33.338277Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.008923Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1574:2947], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.008954Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1584:2952], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.008966Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.010170Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:34.584817Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1588:2955], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-05-29T15:24:34.730970Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:1726:3034] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:24:34.738423Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:1738:3045], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:34.738805Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=YTA5NWQzM2EtYzVlMDU5YzAtZmY1M2U4MWUtNGU5NzA0ODU=, ActorId: [3:1572:2945], ActorState: ExecuteState, TraceId: 01jweaa8trbqnr3g5rssjb07tj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS)
Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1
, with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13AAFD2C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C63AA9)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x262F1694)
NKikimr::NKqp::TLocalFixture::TLocalFixture(bool, std::__y1::optional)+1557 (0x13996425)
NKikimr::NKqp::NTestSuiteKqpErrors::TTestCaseProposeErrorEvWrite::Execute_(NUnitTest::TTestContext&)+39 (0x139986D7)
NKikimr::NKqp::NTestSuiteKqpErrors::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1399D867)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C6595E)
NKikimr::NKqp::NTestSuiteKqpErrors::TCurrentTest::Execute()+424 (0x1399D0C8)
NUnitTest::TTestFactory::Execute()+803 (0x13C660D3)
NUnitTest::RunMain(int, char**)+3021 (0x13C77C7D)
??+0 (0x7FDE8B89FD90)
__libc_start_main+128 (0x7FDE8B89FE40)
_start+41 (0x129FD029)
>> TSchemeShardServerLess::TestServerlessComputeResourcesMode
>> TestKinesisHttpProxy::CreateStreamInIncorrectDb
>> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl
>> TestKinesisHttpProxy::DifferentContentTypes
>> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest
>> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams
>> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx1
>> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx4
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_2
Test command err:
Trying to start YDB, gRPC: 6987, MsgBus: 23321
2025-05-29T15:24:35.059365Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888940827978394:2063];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:24:35.059384Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00279c/r3tmp/tmp2g8yD2/pdisk_1.dat
2025-05-29T15:24:35.138038Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:24:35.138516Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888940827978373:2079] 1748532275059239 != 1748532275059242
TServer::EnableGrpc on GrpcPort 6987, node 1
2025-05-29T15:24:35.159009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:24:35.159031Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:24:35.159033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:24:35.159090Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
2025-05-29T15:24:35.161962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:35.161991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:35.163011Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:23321
TClient is connected to server localhost:23321
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
2025-05-29T15:24:35.232616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-05-29T15:24:35.241486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:24:35.262354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:24:35.284861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:24:35.297202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:24:35.539306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888940827980027:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:35.539331Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:35.587169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:35.597554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:35.619426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:35.628495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:35.640422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:35.704625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:35.718402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:35.743400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888940827980682:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:35.743425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:35.743600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888940827980687:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:35.744635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:35.748542Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888940827980689:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:24:35.797658Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888940827980740:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:35.900366Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888940827980756:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:35.900517Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzRhYTE5YTktMzQxYmQzYWQtYTdhMmMxODYtZmNmYjRlZTI=, ActorId: [1:7509888940827980009:2401], ActorState: ExecuteState, TraceId: 01jweaaagy9bc60zm3a2andmxh, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:24:35.901921Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7FE9D6914AC2 14. ??:0: ?? @ 0x7FE9D69A684F >> TestYmqHttpProxy::TestCreateQueue >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3dc ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_1 Test command err: Trying to start YDB, gRPC: 11070, MsgBus: 4951 2025-05-29T15:24:35.032607Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888937716080040:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:35.032625Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00279d/r3tmp/tmp8iC1l1/pdisk_1.dat 2025-05-29T15:24:35.104909Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:35.105020Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888937716080016:2079] 1748532275032406 != 1748532275032409 TServer::EnableGrpc on GrpcPort 11070, node 1 2025-05-29T15:24:35.122560Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:35.122571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:35.122573Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:35.122623Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4951 2025-05-29T15:24:35.173908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:35.173945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:35.175030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4951 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:35.196583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:35.199836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:24:35.206191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:35.268779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:35.292034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:24:35.359914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:35.448600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888937716081648:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:35.448636Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:35.505904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:35.519821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:35.531219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:35.590214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:35.615531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:35.630913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:35.639934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:35.710976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888937716082308:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:35.711013Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:35.711120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888937716082313:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:35.712150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:35.714681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:24:35.714766Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888937716082315:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:24:35.798773Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888937716082367:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:35.907117Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888937716082383:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:35.907220Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWQxODEyNmMtNjY3YTU3MTctNDY4YTA4YzItNDkzNmQ2NDg=, ActorId: [1:7509888937716081620:2399], ActorState: ExecuteState, TraceId: 01jweaaaft31476600vhrfjmzq, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:24:35.910806Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7F5C7ACE7AC2 14. ??:0: ?? @ 0x7F5C7AD7984F |66.0%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} >> TOlapReboots::DropTableThenStore >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx4 >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3dc >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasureMirror3of4 >> DataShardSnapshots::VolatileSnapshotReadTable [FAIL] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx5 >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx5 >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink [FAIL] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer [GOOD] >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:40.122138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:40.122161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:40.122165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:40.122169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default 
configuration 2025-05-29T15:24:40.122173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:40.122176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:40.122183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:40.122194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:40.122293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:40.122360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:40.135381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:40.135409Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:40.138458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:40.138604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:40.138654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:40.140443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:40.140615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:40.140761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:40.140840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:40.141351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:40.141393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:40.141699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:40.141710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:40.141730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:40.141742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-05-29T15:24:40.141749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:40.141789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.143220Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:40.162093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:40.162175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.162237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:40.162274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:40.162283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.163093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:40.163129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:40.163213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.163226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:40.163233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:40.163239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:40.163708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.163723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:40.163729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:40.164115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 
1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.164126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.164140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:40.164148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:40.164875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:40.165288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:40.165334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:40.165537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:40.165563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:40.165571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:40.165648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:40.165655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:40.165690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:40.165703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:40.166099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:40.166108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-05-29T15:24:40.166169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... MESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409549 2025-05-29T15:24:40.263501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:705:2606], at schemeshard: 72075186234409549, txId: 0, path id: 1 2025-05-29T15:24:40.263565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:3 msg type: 268697640 2025-05-29T15:24:40.263586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2025-05-29T15:24:40.263682Z node 1 :HIVE INFO: tablet_helpers.cpp:1453: [72075186233409546] TEvUpdateDomain, msg: DomainKey { SchemeShard: 72057594046678944 PathId: 3 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared TxId: 106 2025-05-29T15:24:40.263736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5994: Update domain reply, message: Origin: 72075186233409546 TxId: 106, at schemeshard: 72057594046678944 2025-05-29T15:24:40.263742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-05-29T15:24:40.263756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2025-05-29T15:24:40.263763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:822: [72057594046678944] TSyncHive, operationId 106:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2025-05-29T15:24:40.263769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 106:0 138 -> 240 2025-05-29T15:24:40.263841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186234409549, msg: Owner: 72075186234409549 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409549, cookie: 0 2025-05-29T15:24:40.264071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-05-29T15:24:40.264085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:24:40.264289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.264314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.264321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 106:0 ProgressState 2025-05-29T15:24:40.264335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#106:0 progress is 1/1 
2025-05-29T15:24:40.264341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-05-29T15:24:40.264347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#106:0 progress is 1/1 2025-05-29T15:24:40.264351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-05-29T15:24:40.264357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-05-29T15:24:40.264363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-05-29T15:24:40.264369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 106:0 2025-05-29T15:24:40.264374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 106:0 2025-05-29T15:24:40.264386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-05-29T15:24:40.264756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-05-29T15:24:40.264766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-05-29T15:24:40.264859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-05-29T15:24:40.264875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-05-29T15:24:40.264881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:852:2733] TestWaitNotification: OK eventTxId 106 2025-05-29T15:24:40.264979Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:40.265008Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 38us result status StatusSuccess 2025-05-29T15:24:40.265096Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 
TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:40.265170Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409549 2025-05-29T15:24:40.265185Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186234409549 describe path "/MyRoot/ServerLess0" took 16us result status StatusSuccess 2025-05-29T15:24:40.265231Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409549 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/ServerLess0" } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 1 PathOwnerId: 72075186234409549, at schemeshard: 72075186234409549 2025-05-29T15:24:40.265296Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:24:40.265309Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 16us result status StatusSuccess 2025-05-29T15:24:40.265350Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" 
PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:40.265423Z node 1 :HIVE INFO: tablet_helpers.cpp:1470: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:3 >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink [FAIL] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart >> TPQTest::TestSourceIdDropBySourceIdCount [GOOD] >> TPQTest::TestTimeRetention >> TConsoleTests::TestListTenantsExtSubdomain [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags [GOOD] >> TConsoleTests::TestModifyUsedZoneKind >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3of4 >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3dc [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3of4 [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink [FAIL] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink [FAIL] >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasure4Plus2Block >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasureMirror3dc |66.0%| [TA] $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_3 Test command err: Trying to start YDB, gRPC: 21207, MsgBus: 17132 2025-05-29T15:24:36.426637Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888945660970377:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:36.426898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002795/r3tmp/tmpMGjlAB/pdisk_1.dat 2025-05-29T15:24:36.503746Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:36.503900Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888945660970217:2079] 1748532276425699 != 1748532276425702 TServer::EnableGrpc on GrpcPort 21207, node 1 2025-05-29T15:24:36.520829Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:36.520845Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:36.520847Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:36.520898Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17132 TClient is connected to server localhost:17132 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:36.572432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:36.572456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:36.573405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:36.578879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:24:36.592090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:36.612397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:36.643989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:36.710112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:36.888125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888945660971855:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:36.888168Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:36.937477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:24:36.956238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:24:37.016782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:24:37.026527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:24:37.039780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:24:37.055242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:24:37.069189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:24:37.091886Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888949955939805:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:37.091918Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:37.092048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888949955939810:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:37.096404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:24:37.099978Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888949955939812:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:24:37.195998Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888949955939863:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:37.315693Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888949955939872:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:37.318129Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2RhZWE4Ny1lOWQ2MmE5OS04MWU2MjI0LTI2YmM1YmI2, ActorId: [1:7509888945660971828:2400], ActorState: ExecuteState, TraceId: 01jweaabv3bvaq1v9zjkr6td57, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:24:37.319445Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7F527A4BCAC2 14. ??:0: ?? @ 0x7F527A54E84F >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] |66.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3of4 [GOOD] |66.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3dc [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink [FAIL] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] Test command err: 2025-05-29T15:24:30.283443Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:30.284660Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:30.287210Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:30.287252Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:30.287301Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
2025-05-29T15:24:30.287393Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:30.287778Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:30.287848Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:30.288280Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:30.288345Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:30.289792Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:30.289817Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:30.289851Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:30.289928Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:30.330032Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:30.353928Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:30.354077Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:30.355769Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:30.355936Z node 1 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:30.355944Z node 1 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:30.355953Z node 1 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:30.355958Z node 1 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:30.355976Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:30.356031Z node 1 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:30.356071Z node 1 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-05-29T15:24:30.358397Z node 1 :CMS DEBUG: sentinel.cpp:530: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2025-05-29T15:24:30.394013Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:30.394088Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:30.394456Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 
RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 } } } 2025-05-29T15:24:30.394519Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:30.451163Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:30.451291Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:30.451464Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028512 } Timestamp: 120028512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028512 } Timestamp: 120028512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028512 } Timestamp: 120028512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028512 } Timestamp: 120028512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028512 } Timestamp: 120028512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028512 } Timestamp: 120028512 NodeId: 6 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028512 } Timestamp: 120028512 NodeId: 7 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028512 } Timestamp: 120028512 NodeId: 8 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120028512 } } 2025-05-29T15:24:30.487537Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:30.538425Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:30.538518Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesLimit: 2 DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 
RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 } 2025-05-29T15:24:30.538534Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:30.579710Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:30.579739Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:30.579752Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:30.579794Z node 1 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:30.579803Z node 1 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2025-05-29T15:24:30.579811Z node 1 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 2, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:30.579820Z node 1 :CMS DEBUG: cms.cpp:729: Ring: 0; State: Ok 2025-05-29T15:24:30.579823Z node 1 :CMS DEBUG: cms.cpp:729: Ring: 1; State: Ok 2025-05-29T15:24:30.579826Z node 1 :CMS DEBUG: cms.cpp:729: Ring: 2; State: Ok 2025-05-29T15:24:30.579831Z node 1 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:30.579846Z node 1 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-05-29T15:24:30.579855Z node 1 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:30.579864Z node 1 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:30.579892Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.129512Z, action# Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2025-05-29T15:24:30.596818Z node 1 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:30.596937Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } Deadline: 180129512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2025-05-29T15:24:30.596950Z node 1 :CMS DEBUG: cms.cpp:1064: Schedule cleanup at 1970-01-01T00:05:00.129512Z 2025-05-29T15:24:30.627863Z node 1 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:30.627933Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:30.627956Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:30.627971Z node 1 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:30.628039Z node 1 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 
60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:30.628048Z node 1 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2025-05-29T15:24:30.628062Z node 1 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 2, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:30.628071Z node 1 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:30.628089Z node 1 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-05-29T15:24:30.628097Z node 1 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12002 (2) (permission user-p-2 until 1970 ... 15:24:40.067806Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:40.079337Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:40.079437Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'42\': too many unavailable nodes. Locked: 1, down: 0, limit: 1" } RequestId: "user-r-2" Deadline: 420230512 } 2025-05-29T15:24:40.165592Z node 41 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (41) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:40.165657Z node 41 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:40.165678Z node 41 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:40.165691Z node 41 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:40.165746Z node 41 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-05-29T15:24:40.165755Z node 41 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2025-05-29T15:24:40.165765Z node 41 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:40.165775Z node 41 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Cannot lock node '42': too many unavailable nodes. Locked: 1, down: 0, limit: 1) 2025-05-29T15:24:40.165787Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:40.183224Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:40.183324Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'42\': too many unavailable nodes. 
Locked: 1, down: 0, limit: 1" } RequestId: "user-r-3" Deadline: 420332024 } 2025-05-29T15:24:40.194822Z node 41 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12001 (41) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:40.194890Z node 41 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:40.194914Z node 41 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:40.194927Z node 41 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:40.194979Z node 41 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:40.194988Z node 41 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2025-05-29T15:24:40.194998Z node 41 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:40.195008Z node 41 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Cannot lock node '42': too many unavailable nodes. Locked: 1, down: 0, limit: 1) 2025-05-29T15:24:40.195021Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:40.207107Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:40.207210Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'42\': too many unavailable nodes. 
Locked: 1, down: 0, limit: 1" } RequestId: "user-r-4" Deadline: 420433536 } 2025-05-29T15:24:40.207347Z node 41 :CMS INFO: cms.cpp:1326: User user is done with permissions user-p-1 2025-05-29T15:24:40.207357Z node 41 :CMS DEBUG: cms.cpp:1349: Resulting status: OK 2025-05-29T15:24:40.207372Z node 41 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2025-05-29T15:24:40.207398Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-05-29T15:24:40.221363Z node 41 :CMS DEBUG: cms_tx_remove_permissions.cpp:79: TTxRemovePermissions Complete 2025-05-29T15:24:40.221437Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-05-29T15:24:40.232814Z node 41 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:40.232931Z node 41 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:40.233006Z node 41 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-05-29T15:24:40.233022Z node 41 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2025-05-29T15:24:40.233048Z node 41 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-05-29T15:24:40.233063Z node 41 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Cannot lock node '42': too many unavailable nodes. Locked: 0, down: 1, limit: 1) 2025-05-29T15:24:40.233083Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:40.251065Z node 41 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:40.251096Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:40.251166Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'42\': too many unavailable nodes. 
Locked: 0, down: 1, limit: 1" } RequestId: "user-r-5" Deadline: 420536560 } 2025-05-29T15:24:40.274281Z node 41 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:40.274374Z node 41 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:40.274431Z node 41 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-05-29T15:24:40.274442Z node 41 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2025-05-29T15:24:40.274454Z node 41 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-05-29T15:24:40.274466Z node 41 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Cannot lock node '42': too many unavailable nodes. Locked: 0, down: 1, limit: 1) 2025-05-29T15:24:40.274480Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:40.291625Z node 41 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:40.291655Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:40.291722Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'42\': too many unavailable nodes. 
Locked: 0, down: 1, limit: 1" } RequestId: "user-r-6" Deadline: 420638072 } 2025-05-29T15:24:40.373205Z node 41 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:40.373296Z node 41 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:40.373349Z node 41 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-05-29T15:24:40.373360Z node 41 :CMS DEBUG: cms.cpp:379: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2025-05-29T15:24:40.373371Z node 41 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-05-29T15:24:40.373376Z node 41 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-05-29T15:24:40.373382Z node 41 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:40.373398Z node 41 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-2, requestId# user-r-7, owner# user 2025-05-29T15:24:40.373405Z node 41 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12002 (42) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:40.373415Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:40.373442Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.739584Z, action# Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2025-05-29T15:24:40.387214Z node 41 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:40.387247Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:40.387347Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-7" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } Deadline: 180739584 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 42 InterconnectPort: 12002 } } } }
>> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx3
>> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink [FAIL]
>> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink
>> DataShardSnapshots::VolatileSnapshotRefreshDiscard [FAIL]
>> DataShardSnapshots::VolatileSnapshotTimeout
>> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain [GOOD]
>> TConsoleTests::TestAlterUnknownTenant
>> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency [GOOD]
>> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries
>> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [FAIL]
>> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite
>> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3of4
>> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx2
>> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3of4
>> TConsoleTests::TestTenantGeneration [GOOD]
>> TConsoleTests::TestTenantGenerationExtSubdomain
>> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink [FAIL]
>> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink
>> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink [FAIL]
>> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink
>> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasure4Plus2Block
>> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink [FAIL]
>> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink
>> TestKinesisHttpProxy::TestRequestWithWrongRegion
>> TestKinesisHttpProxy::CreateStreamWithInvalidName
>> TestYmqHttpProxy::TestSendMessageFifoQueue
>> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams
>> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasure4Plus2Block [GOOD]
>> TestKinesisHttpProxy::GoodRequestPutRecords
>> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink [FAIL]
>> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink
>> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3of4 [GOOD]
>> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [FAIL]
>> DataShardSnapshots::RepeatableReadAfterSplitRace
>> TestYmqHttpProxy::TestCreateQueueWithBadQueueName
|66.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasure4Plus2Block [GOOD]
>> DataShardSnapshots::VolatileSnapshotTimeout [FAIL]
>> DataShardSnapshots::VolatileSnapshotTimeoutRefresh
|66.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3of4 [GOOD]
>> TConsoleTests::TestModifyUsedZoneKind [GOOD]
>> TConsoleTests::TestMergeConfig
>> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink [FAIL]
>> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink
>> TConsoleTests::TestCreateTenantAlreadyExists [GOOD]
|66.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut
|66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut
|66.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|66.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log}
>> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink [FAIL]
>> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx1
>> TConsoleTests::TestAlterUnknownTenant [GOOD]
>> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink [FAIL]
>> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx0
>> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3of4 [GOOD]
>> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink [FAIL]
>> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain
>> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts
>> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink
>> TConsoleTests::TestAlterUnknownTenantExtSubdomain
>> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink [FAIL]
>> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [FAIL]
|66.1%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log}
>> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink
>> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink
>> DataShardSnapshots::VolatileSnapshotCleanupOnReboot
>> DataShardSnapshots::RepeatableReadAfterSplitRace [FAIL]
>> TestKinesisHttpProxy::DoubleCreateStream
>> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [FAIL]
>> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink [FAIL]
>> TConsoleTests::TestTenantGenerationExtSubdomain [GOOD]
>> TConsoleTests::TestSchemeShardErrorForwarding
>> DataShardSnapshots::PostMergeNotCompactedTooEarly
>> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink
>> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink
>> TestKinesisHttpProxy::TestRequestWithIAM
|66.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3of4 [GOOD]
>> TConsoleTests::TestMergeConfig [GOOD]
>> TConsoleTests::TestRemoveTenant
>> TestYmqHttpProxy::TestSendMessageWithAttributes
>> TestYmqHttpProxy::TestCreateQueueWithWrongBody
>> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink [FAIL]
>> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink [FAIL]
>> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleShardTable [GOOD]
>> DataShardSnapshots::PostMergeNotCompactedTooEarly [FAIL]
>> TestYmqHttpProxy::TestCreateQueueWithEmptyName
>> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions
>> TConsoleTests::TestAlterUnknownTenantExtSubdomain [GOOD]
>> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink [FAIL]
>> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink [FAIL]
>> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain [GOOD]
>> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [FAIL]
>> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink
>> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx0
>> DataShardSnapshots::PipelineAndMediatorRestoreRace
>> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleTable [GOOD]
>> DataShardSnapshots::DelayedWriteReadableAfterSplit
>> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink
>> TConsoleTests::TestCreateSubSubDomain
>> DataShardSnapshots::VolatileSnapshotCleanupOnFinish
>> TConsoleTests::TestAlterBorrowedStorage
>> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink
>> TCmsTest::DisabledEvictVDisks [GOOD]
>> DataShardSnapshots::PipelineAndMediatorRestoreRace [FAIL]
>> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink [FAIL]
>> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink [FAIL]
>> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink
>> TCmsTest::EmergencyDuringRollingRestart
>> DataShardSnapshots::ShardRestartLockBasic
>> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink [FAIL]
>> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink
>> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink
>> DataShardSnapshots::ShardRestartLockBasic [FAIL]
>> DataShardSnapshots::ShardRestartAfterDropTable
|66.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... results_accumulator.log}
>> TConsoleTests::TestSchemeShardErrorForwarding [GOOD]
|66.1%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut
>> TConsoleTests::TestScaleRecommenderPolicies
>> TestKinesisHttpProxy::GoodRequestGetRecords
>> DataShardSnapshots::VolatileSnapshotCleanupOnFinish [FAIL]
>> DataShardSnapshots::VolatileSnapshotRenameTimeout
>> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [FAIL]
>> TestYmqHttpProxy::TestSetQueueAttributes
>> DataShardSnapshots::ShardRestartAfterDropTable [FAIL]
>> TestKinesisHttpProxy::CreateDeleteStream
>> DataShardSnapshots::ShardRestartAfterDropTableAndAbort
>> DataShardSnapshots::DelayedWriteReadableAfterSplit [FAIL]
>> TCmsTest::EmergencyDuringRollingRestart [GOOD]
>> DataShardSnapshots::DelayedWriteReplyAfterSplit
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleTable [GOOD]
Test command err:
==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:23:32.266817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:32.266848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:32.266854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:32.266860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: 
OperationsProcessing config: using default configuration 2025-05-29T15:23:32.266872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:32.266906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:32.266917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:32.266932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:32.267048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:32.267140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:32.283274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:23:32.283308Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:32.283425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:23:32.286936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:32.286976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:32.287014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:32.289876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:32.289957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:32.290092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:32.290260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:32.290921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:32.290973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:32.291237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:32.291249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:32.291290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:32.291298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:32.291304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:32.291325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:23:32.292702Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:23:32.314542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:32.314623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:32.314692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:32.314762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:32.314775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:32.321846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:32.321888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:32.321940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:32.321951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:32.321957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:32.321963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 
1:0 2 -> 3 2025-05-29T15:23:32.327181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:32.327214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:32.327224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:32.327792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:32.327806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:32.327813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:32.327820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:32.328554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:32.329006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:32.329047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:32.329252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:32.329278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:32.329287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:32.329358Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
rd__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2025-05-29T15:24:47.880384Z node 165 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:47.880408Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 708669605996 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:47.880417Z node 165 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:129: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2025-05-29T15:24:47.880448Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:180: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-29T15:24:47.880460Z node 165 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-29T15:24:47.880464Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:24:47.880470Z node 165 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-29T15:24:47.880474Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:24:47.880486Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:24:47.880496Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:24:47.880501Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-29T15:24:47.880508Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:24:47.880512Z node 165 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-29T15:24:47.880516Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710761:0 2025-05-29T15:24:47.880526Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:24:47.880531Z node 165 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-29T15:24:47.880536Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication 
details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-05-29T15:24:47.880540Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-29T15:24:47.880932Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:24:47.880950Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:24:47.881007Z node 165 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:24:47.881021Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:47.881379Z node 165 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:47.881392Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:47.881446Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:24:47.881471Z node 165 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:47.881477Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [165:208:2209], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-29T15:24:47.881482Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [165:208:2209], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-29T15:24:47.881648Z node 165 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:47.881662Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:47.881667Z node 165 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-29T15:24:47.881672Z node 165 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-29T15:24:47.881677Z node 165 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:24:47.881810Z node 165 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:47.881844Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:47.881849Z node 165 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-29T15:24:47.881853Z node 165 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:24:47.881857Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:24:47.881868Z node 165 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-29T15:24:47.881874Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [165:126:2151] 2025-05-29T15:24:47.881921Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:24:47.881927Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:24:47.881937Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:24:47.882320Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:47.882581Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:24:47.882631Z node 165 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:24:47.882641Z node 165 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-29T15:24:47.882653Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710761 2025-05-29T15:24:47.882661Z node 165 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute 
2025-05-29T15:24:47.882666Z node 165 :EXPORT DEBUG: schemeshard_export__create.cpp:1232: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-29T15:24:47.882671Z node 165 :EXPORT DEBUG: schemeshard_export__create.cpp:1263: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1003, itemIdx# 4294967295 2025-05-29T15:24:47.899105Z node 165 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-29T15:24:47.899208Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:24:47.899218Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:24:47.899320Z node 165 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:24:47.899349Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:24:47.899361Z node 165 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [165:734:2692] TestWaitNotification: OK eventTxId 1003
>> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute
>> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink [FAIL]
>> TestKinesisHttpProxy::TestRequestNoAuthorization
>> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink
>> DataShardSnapshots::VolatileSnapshotRenameTimeout [FAIL]
>> TConsoleTests::TestAlterBorrowedStorage [GOOD]
>> TConsoleTests::TestCreateSubSubDomain [GOOD]
>> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasure4Plus2Block
>> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [FAIL]
>> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [FAIL]
>> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink [FAIL]
>> DataShardSnapshots::DelayedWriteReplyAfterSplit [FAIL]
>> TestYmqHttpProxy::TestCreateQueueWithAllAttributes
>> DataShardSnapshots::UncommittedWriteRestartDuringCommit
|66.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow
|66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow
>> TConsoleTests::TestCreateSubSubDomainExtSubdomain
>> DataShardSnapshots::UncommittedWriteRestartDuringCommit [FAIL]
>> TConsoleTests::TestAlterStorageUnitsOfSharedTenant
>> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink
>> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot
>> TestYmqHttpProxy::TestTagQueue
>> TestKinesisHttpProxy::GoodRequestGetRecordsCbor
>> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer
>> TestYmqHttpProxy::TestCreateQueueWithTags
>> TConsoleTests::TestScaleRecommenderPolicies [GOOD]
>> TConsoleTests::TestRemoveTenant [GOOD]
>> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase
>> TConsoleTests::TestScaleRecommenderPoliciesValidation
>> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink [FAIL]
>> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase [FAIL]
>> TConsoleTests::TestRemoveTenantExtSubdomain
>> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink
>> DataShardSnapshots::UncommittedChangesRenameTable-UseSink
|66.1%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::EmergencyDuringRollingRestart [GOOD]
Test command err:
2025-05-29T15:24:30.547545Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:30.548015Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:30.557044Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:30.557131Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:30.557503Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:30.557554Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-05-29T15:24:30.558225Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:30.558291Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:30.558333Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 2025-05-29T15:24:30.558425Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-05-29T15:24:30.558934Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-05-29T15:24:30.559015Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-05-29T15:24:30.559040Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-05-29T15:24:30.559088Z node 1 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:30.592447Z node 1 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-05-29T15:24:30.607532Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-05-29T15:24:30.607641Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:30.609137Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:30.609256Z node 1 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig 2025-05-29T15:24:30.609263Z node 1 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater 2025-05-29T15:24:30.609271Z node 1 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState 2025-05-29T15:24:30.609276Z node 1 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed 2025-05-29T15:24:30.609284Z node 1 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-05-29T15:24:30.609315Z node 1 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-05-29T15:24:30.611541Z node 1 :CMS DEBUG: sentinel.cpp:530: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/1/pdisk-1.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/2/pdisk-2.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/3/pdisk-3.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/4/pdisk-4.data" Guid: 1 
DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/5/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/6/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/7/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/8/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 
ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-05-29T15:24:30.622062Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-05-29T15:24:30.663268Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-05-29T15:24:30.663334Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-05-29T15:24:32.232047Z node 9 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-05-29T15:24:32.232938Z node 9 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:32.234902Z node 9 :CMS DEBUG: cms_impl.h:185: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:32.234963Z node 9 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-05-29T15:24:32.235312Z node 9 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-05-29T15:24:32.235330Z node 9 :CMS DEBUG: cms_impl.h:185: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:32.235339Z node 9 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-05-29T15:24:32.235377Z node 9 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
2025-05-29T15:24:32.235450Z node 9 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete
2025-05-29T15:24:32.235466Z node 9 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse
2025-05-29T15:24:32.236876Z node 9 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete
2025-05-29T15:24:32.236920Z node 9 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute
2025-05-29T15:24:32.236951Z node 9 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config
2025-05-29T15:24:32.236972Z node 9 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks
2025-05-29T15:24:32.248214Z node 9 :CMS DEBUG: cms_impl.h:185: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true }
2025-05-29T15:24:32.269214Z node 9 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete
2025-05-29T15:24:32.269355Z node 9 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute
2025-05-29T15:24:32.269377Z node 9 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete
2025-05-29T15:24:32.269426Z node 9 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute
2025-05-29T15:24:32.269493Z node 9 :CMS DEBUG: sentinel.cpp:939: [Sentinel] [Main] UpdateConfig
2025-05-29T15:24:32.269499Z node 9 :CMS DEBUG: sentinel.cpp:884: [Sentinel] [Main] Start ConfigUpdater
2025-05-29T15:24:32.269522Z node 9 :CMS DEBUG: sentinel.cpp:955: [Sentinel] [Main] UpdateState
2025-05-29T15:24:32.269527Z node 9 :CMS INFO: sentinel.cpp:879: [Sentinel] [Main] StateUpdater was delayed
2025-05-29T15:24:32.269535Z node 9 :CMS DEBUG: sentinel.cpp:464: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0
2025-05-29T15:24:32.269549Z node 9 :CMS DEBUG: sentinel.cpp:477: [Sentinel] [ConfigUpdater] Request CMS cluster state: attem ... de 18 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute
2025-05-29T15:24:49.866585Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.026512Z, action# Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000
2025-05-29T15:24:49.866609Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (18) has temporary lock, VDisk [0:1:0:1:0] (::1:/19/pdisk-19.data) is locked by this request.
Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2025-05-29T15:24:49.902870Z node 18 :CMS DEBUG: cms.cpp:1147: Running CleanupWalleTasks 2025-05-29T15:24:49.978970Z node 18 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:49.979062Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Deadline: 180026512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12001 } } } } 2025-05-29T15:24:49.979073Z node 18 :CMS DEBUG: cms.cpp:1064: Schedule cleanup at 1970-01-01T00:05:00.026512Z 2025-05-29T15:24:49.987184Z node 18 :CMS INFO: cms.cpp:1326: User user is done with permissions user-p-1 2025-05-29T15:24:49.987209Z node 18 :CMS DEBUG: cms.cpp:1349: Resulting status: OK 2025-05-29T15:24:49.987220Z node 18 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2025-05-29T15:24:49.987235Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-05-29T15:24:50.019334Z node 18 :CMS DEBUG: cms_tx_remove_permissions.cpp:79: TTxRemovePermissions Complete 2025-05-29T15:24:50.019393Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-05-29T15:24:50.032069Z node 18 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:50.032107Z node 18 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:50.032123Z node 18 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:50.032278Z node 18 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -100 2025-05-29T15:24:50.032288Z node 18 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 2025-05-29T15:24:50.032300Z node 18 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:50.032340Z node 18 :CMS DEBUG: cms.cpp:387: Result: ALLOW 2025-05-29T15:24:50.032355Z node 18 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-05-29T15:24:50.032363Z node 18 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12002 (19) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:50.032374Z node 18 :CMS DEBUG: 
cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:50.032408Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.131024Z, action# Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 2025-05-29T15:24:50.046959Z node 18 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:50.047052Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -100 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } Deadline: 180131024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 19 InterconnectPort: 12002 } } } } 2025-05-29T15:24:50.075787Z node 18 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12002 (19) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-05-29T15:24:50.075860Z node 18 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:50.075876Z node 18 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:50.075887Z node 18 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:50.076024Z node 18 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (18) has temporary lock, VDisk [0:1:0:1:0] (::1:/19/pdisk-19.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2025-05-29T15:24:50.076034Z node 18 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (18) has temporary lock, VDisk [0:1:0:1:0] (::1:/19/pdisk-19.data) is locked by this request. 
Down: " } 2025-05-29T15:24:50.076042Z node 18 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 19, with state: Locked, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-05-29T15:24:50.076050Z node 18 :CMS DEBUG: cms.cpp:398: Result: DISALLOW_TEMP (reason: Cannot lock node '19': node state: 'Locked') 2025-05-29T15:24:50.076068Z node 18 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-05-29T15:24:50.076106Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: GENERIC Message: "Cannot lock node \'19\': node state: \'Locked\'" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2025-05-29T15:24:50.087498Z node 18 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-05-29T15:24:50.087568Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'19\': node state: \'Locked\'" } RequestId: "user-r-1" Deadline: 420232536 } 2025-05-29T15:24:50.087688Z node 18 :CMS INFO: cms.cpp:1326: User user is done with permissions user-p-2 2025-05-29T15:24:50.087699Z node 18 :CMS DEBUG: cms.cpp:1349: Resulting status: OK 2025-05-29T15:24:50.087712Z node 18 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2025-05-29T15:24:50.087734Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2025-05-29T15:24:50.108990Z node 18 :CMS DEBUG: cms_tx_remove_permissions.cpp:79: TTxRemovePermissions Complete 2025-05-29T15:24:50.109052Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-05-29T15:24:50.184064Z node 18 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-05-29T15:24:50.184101Z node 18 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-05-29T15:24:50.184116Z node 18 :CMS DEBUG: cluster_info.cpp:968: Timestamp: 1970-01-01T00:02:00Z 2025-05-29T15:24:50.184284Z node 18 :CMS INFO: cms.cpp:347: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: GENERIC Message: "Cannot lock node \'19\': node state: \'Locked\'" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2025-05-29T15:24:50.184298Z node 18 :CMS DEBUG: cms.cpp:379: Checking action: Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: GENERIC Message: "Cannot lock node \'19\': node state: \'Locked\'" } 2025-05-29T15:24:50.184308Z node 18 :CMS DEBUG: node_checkers.cpp:101: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-05-29T15:24:50.184348Z node 18 :CMS DEBUG: 
cms.cpp:387: Result: ALLOW
2025-05-29T15:24:50.184369Z node 18 :CMS DEBUG: cms.cpp:1036: Accepting permission: id# user-p-3, requestId# user-r-1, owner# user
2025-05-29T15:24:50.184378Z node 18 :CMS INFO: cluster_info.cpp:777: Adding lock for Host ::1:12002 (19) (permission user-p-3 until 1970-01-01T00:03:00Z)
2025-05-29T15:24:50.184389Z node 18 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute
2025-05-29T15:24:50.184427Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.335560Z, action# Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000
2025-05-29T15:24:50.184437Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user
2025-05-29T15:24:50.195495Z node 18 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete
2025-05-29T15:24:50.195580Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-3" Action { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } Deadline: 180335560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 19 InterconnectPort: 12002 } } } }
>> TConsoleTests::TestAlterStorageUnitsOfSharedTenant [GOOD]
>> TConsoleTests::TestAlterServerlessTenant
>> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [FAIL]
>> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot [FAIL]
>> DataShardSnapshots::BrokenLockChangesDontLeak
>> TestKinesisHttpProxy::TestUnauthorizedPutRecords
>> TestYmqHttpProxy::BillingRecordsForJsonApi
>> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3dc
>> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName
>> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD]
>> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly [GOOD]
>> TConsoleTxProcessorTests::TestTxProcessorSingle
>> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags
>> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasure4Plus2Block [GOOD]
>> DataShardSnapshots::BrokenLockChangesDontLeak [FAIL]
>> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink [FAIL]
>> TConsoleTests::TestCreateSubSubDomainExtSubdomain [GOOD]
>> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink
>> TConsoleTests::TestAuthorization
>> TestYmqHttpProxy::TestUntagQueue
>> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag
>> TestYmqHttpProxy::TestDeleteMessage
>> TestYmqHttpProxy::TestChangeMessageVisibility
>> TConsoleTxProcessorTests::TestTxProcessorSingle [GOOD]
>> TConsoleTxProcessorTests::TestTxProcessorSubProcessor
>> TestKinesisHttpProxy::TestWrongStream
|66.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasure4Plus2Block [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [FAIL]
Test command err:
2025-05-29T15:24:34.573342Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:34.573375Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:34.573386Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00126b/r3tmp/tmpiidcBR/pdisk_1.dat 2025-05-29T15:24:34.679119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.692392Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:34.695412Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532274123048 != 1748532274123052 2025-05-29T15:24:34.730951Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:223: SessionId: ydb://session/3?node_id=1&id=Y2VlMjdiM2ItYjQzOGVhOTctMzlhZDJhOWEtY2JlYmJkOGI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id Y2VlMjdiM2ItYjQzOGVhOTctMzlhZDJhOWEtY2JlYmJkOGI= 2025-05-29T15:24:34.731090Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=Y2VlMjdiM2ItYjQzOGVhOTctMzlhZDJhOWEtY2JlYmJkOGI=, ActorId: [1:618:2540], ActorState: unknown state, session actor bootstrapped 2025-05-29T15:24:34.731196Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=1&id=Y2VlMjdiM2ItYjQzOGVhOTctMzlhZDJhOWEtY2JlYmJkOGI=, ActorId: [1:618:2540], ActorState: ReadyState, TraceId: 01jweaa9hbbh395apby3gerx3g, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE TABLE `/Root/table1` (key int, value int, PRIMARY KEY (key)); rpcActor: [0:0:0] database: databaseId: /Root pool id: default 2025-05-29T15:24:34.753621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:621:2543], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.753657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:34.776524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:34.776570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:34.777236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.789420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:34.802346Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:685:2577], Recipient [1:690:2580]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:34.802583Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:685:2577], Recipient [1:690:2580]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:34.802684Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:690:2580] 2025-05-29T15:24:34.802781Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:34.811643Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:685:2577], Recipient [1:690:2580]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:34.811838Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:34.811867Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:34.812037Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:34.812048Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:34.812055Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:34.812118Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:34.812147Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:34.812162Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:704:2580] in generation 1 2025-05-29T15:24:34.812242Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:34.823405Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:34.823468Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:34.823500Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:706:2589] 2025-05-29T15:24:34.823506Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:34.823512Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:34.823517Z node 1 
:TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:34.823579Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:690:2580], Recipient [1:690:2580]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:34.823586Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:34.823646Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:34.823665Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:34.823674Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:34.823681Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:34.823687Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:24:34.823693Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:34.823697Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:34.823702Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:34.823708Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:34.854698Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:709:2591], Recipient [1:690:2580]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:34.854726Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:34.854753Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:681:2575], serverId# [1:709:2591], sessionId# [0:0:0] 2025-05-29T15:24:34.854784Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:709:2591] 2025-05-29T15:24:34.854790Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:24:34.854830Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:24:34.854886Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:24:34.854897Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:34.854926Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:34.854934Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:24:34.854940Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:24:34.854946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:24:34.854951Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-05-29T15:24:34.855034Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-05-29T15:24:34.855040Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-05-29T15:24:34.855045Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-05-29T15:24:34.855050Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-05-29T15:24:34.855062Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:34.855066Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-05-29T15:24:34.855070Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-05-29T15:24:34.855074Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-05-29T15:24:34.855079Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1832 ... 
037888 on unit CompleteOperation 2025-05-29T15:24:50.629026Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [300:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:50.629030Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [300:281474976715657] at 72075186224037888 executing on unit CompleteOperation 2025-05-29T15:24:50.629034Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [300:281474976715657] at 72075186224037888 to execution unit CompletedOperations 2025-05-29T15:24:50.629037Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [300:281474976715657] at 72075186224037888 on unit CompletedOperations 2025-05-29T15:24:50.629043Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [300:281474976715657] at 72075186224037888 is Executed 2025-05-29T15:24:50.629046Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [300:281474976715657] at 72075186224037888 executing on unit CompletedOperations 2025-05-29T15:24:50.629049Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [300:281474976715657] at 72075186224037888 has finished 2025-05-29T15:24:50.629053Z node 13 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:50.629056Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-05-29T15:24:50.629059Z node 13 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:50.629062Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:50.629466Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270976, Sender [13:24:2071], Recipient [13:689:2579]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 0} 2025-05-29T15:24:50.629476Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3167: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-05-29T15:24:50.629480Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:50.629484Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:50.629581Z node 13 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 300} 2025-05-29T15:24:50.629716Z node 13 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:50.629850Z node 13 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:50.629857Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [300:281474976715657] at 72075186224037888 on unit CreateTable 2025-05-29T15:24:50.629862Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:50.629867Z node 13 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:50.629874Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for 
[300:281474976715657] at 72075186224037888 on unit CompleteOperation 2025-05-29T15:24:50.629887Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [300 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [13:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:50.629896Z node 13 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:50.629905Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:50.630133Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:709:2591], Recipient [13:689:2579]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:50.630142Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:50.630265Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877760, Sender [13:733:2607], Recipient [13:689:2579]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [13:737:2611] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:24:50.630271Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3162: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:24:50.630403Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270978, Sender [13:24:2071], Recipient [13:689:2579]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 300 ReadStep# 300 } 2025-05-29T15:24:50.630408Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3168: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-05-29T15:24:50.630413Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 300 2025-05-29T15:24:50.630439Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269552132, Sender [13:410:2404], Recipient [13:689:2579]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715657 2025-05-29T15:24:50.630444Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3133: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-05-29T15:24:50.630449Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:50.630454Z node 13 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:50.630603Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [13:733:2607], Recipient [13:689:2579]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [13:733:2607] ServerId: [13:737:2611] } 2025-05-29T15:24:50.630608Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:24:50.645030Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:749:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, 
NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:50.645064Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:50.645116Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:754:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:50.646163Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:50.664319Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:709:2591], Recipient [13:689:2579]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:50.664347Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:50.697929Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:709:2591], Recipient [13:689:2579]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:50.697963Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:50.698533Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:756:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:50.753876Z node 13 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [13:807:2662] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:50.763834Z node 13 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [13:817:2671], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:50.764344Z node 13 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=13&id=MzE1NWFmNjEtYWJmMzAwZi03MDFiMWZlNi1hOGJkYjk2Nw==, ActorId: [13:747:2621], ActorState: ExecuteState, TraceId: 01jweaas2m0fk50nwvzg3jgc5d, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13CAD19C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E60AC9)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264DE504)
NKikimr::NTestSuiteDataShardSnapshots::TTestCaseShardRestartLockBrokenByUncommittedAfterRead::Execute_(NUnitTest::TTestContext&)+3387 (0x13B9EA7B)
NKikimr::NTestSuiteDataShardSnapshots::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13AAB867)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E6297E)
NKikimr::NTestSuiteDataShardSnapshots::TCurrentTest::Execute()+425 (0x13AAB0C9)
NUnitTest::TTestFactory::Execute()+803 (0x13E630F3)
NUnitTest::RunMain(int, char**)+3021 (0x13E74C9D)
??+0 (0x7F159087BD90)
__libc_start_main+128 (0x7F159087BE40)
_start+41 (0x12A82029)
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [FAIL]
Test command err:
2025-05-29T15:24:34.387121Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:34.387163Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:34.387179Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012a6/r3tmp/tmpsziQt6/pdisk_1.dat 2025-05-29T15:24:34.503673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.517735Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:34.521063Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532273888708 != 1748532273888712 2025-05-29T15:24:34.562591Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:24:34.562888Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:24:34.562974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:34.562992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:34.573651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:34.646843Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:59:2106] Handle TEvProposeTransaction 2025-05-29T15:24:34.646871Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:24:34.646911Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-05-29T15:24:34.661681Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-05-29T15:24:34.661732Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:24:34.661928Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:24:34.661940Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:24:34.661997Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:640:2548] txid# 
281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:24:34.662049Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:24:34.662063Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:24:34.662133Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:24:34.662459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.662694Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:24:34.662705Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-05-29T15:24:34.677485Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:34.677845Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:34.677963Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:34.678043Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:34.684332Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:34.684480Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:34.684507Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:34.684625Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:34.684632Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:34.684636Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:34.684677Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:34.684689Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:34.684698Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:34.684749Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:34.687655Z node 1 
:TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:34.687707Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:34.687739Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:34.687743Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:34.687747Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:34.687751Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:34.687804Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:34.687810Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:34.687881Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:34.687898Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:34.687910Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:34.687915Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:34.687921Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:24:34.687926Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:34.687929Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:34.687933Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:34.687937Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:34.688028Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:34.688032Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:34.688038Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:34.688049Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:24:34.688053Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:24:34.688075Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-05-29T15:24:34.688123Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:24:34.688133Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:34.688148Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:34.688155Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:24:34.688158Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:24:34.688161Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:24:34.688164Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 7 ... pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:24:52.964253Z node 13 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [13:836:2683] txid# 281474976715659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:24:52.964259Z node 13 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [13:836:2683] txid# 281474976715659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:24:52.964747Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [13:836:2683] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:24:52.964762Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [13:836:2683] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:24:52.964827Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [13:836:2683] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:24:52.964859Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [13:836:2683] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:24:52.964873Z node 13 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [13:836:2683] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 
281474976715659 TabletId# 72057594046644480} 2025-05-29T15:24:52.970334Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:24:52.970492Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [13:836:2683] txid# 281474976715659 HANDLE EvClientConnected 2025-05-29T15:24:52.973692Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [13:836:2683] txid# 281474976715659 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-05-29T15:24:52.973716Z node 13 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [13:836:2683] txid# 281474976715659 SEND to# [13:835:2682] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 53} 2025-05-29T15:24:52.974554Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:683:2579], Recipient [13:750:2629]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:52.974572Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:52.974773Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:683:2579], Recipient [13:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:52.974784Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:53.103431Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:683:2579], Recipient [13:750:2629]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:53.103460Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:24:53.103496Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:683:2579], Recipient [13:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:53.103503Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:53.104089Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:835:2682], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:24:53.137932Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [13:59:2106] Handle TEvProposeTransaction 2025-05-29T15:24:53.137954Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [13:59:2106] TxId# 281474976715660 ProcessProposeTransaction 2025-05-29T15:24:53.137975Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [13:59:2106] Cookie# 0 userReqId# "" txid# 281474976715660 SEND to# [13:890:2718] 2025-05-29T15:24:53.138725Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [13:890:2718] txid# 281474976715660 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:24:53.138753Z node 13 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [13:890:2718] txid# 281474976715660 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:24:53.138760Z node 13 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [13:890:2718] txid# 281474976715660 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:24:53.139011Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [13:890:2718] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:24:53.139024Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [13:890:2718] txid# 281474976715660 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:24:53.139088Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [13:890:2718] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:24:53.139116Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [13:890:2718] HANDLE EvNavigateKeySetResult, txid# 281474976715660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:24:53.139130Z node 13 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [13:890:2718] txid# 281474976715660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-05-29T15:24:53.139366Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [13:890:2718] txid# 281474976715660 HANDLE EvClientConnected 2025-05-29T15:24:53.139464Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [13:890:2718] txid# 281474976715660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: 
'/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:24:53.139495Z node 13 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [13:890:2718] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:53.139504Z node 13 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [13:890:2718] txid# 281474976715660 SEND to# [13:835:2682] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 48} 2025-05-29T15:24:53.180621Z node 13 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [13:900:2727], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:53.181136Z node 13 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=13&id=NDU5MzA3Y2QtZjdmNjM0MDItMTUyN2YyYzItYTJkZWRjN2M=, ActorId: [13:819:2672], ActorState: ExecuteState, TraceId: 01jweaavb2fdd59a1wk9y04b6s, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13CAD19C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E60AC9) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264DE504) NKikimr::NTestSuiteDataShardSnapshots::TTestCaseLockedWriteWithPendingVolatileCommit::Execute_(NUnitTest::TTestContext&)+4222 (0x13B7150E) NKikimr::NTestSuiteDataShardSnapshots::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13AAB867) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E6297E) NKikimr::NTestSuiteDataShardSnapshots::TCurrentTest::Execute()+425 (0x13AAB0C9) NUnitTest::TTestFactory::Execute()+803 (0x13E630F3) NUnitTest::RunMain(int, char**)+3021 (0x13E74C9D) ??+0 (0x7FECC8803D90) __libc_start_main+128 (0x7FECC8803E40) _start+41 (0x12A82029) |66.2%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [FAIL] Test command err: 2025-05-29T15:24:34.557288Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:34.557329Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:34.557346Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001246/r3tmp/tmpM5Gtip/pdisk_1.dat 2025-05-29T15:24:34.657455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.670434Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:34.673964Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532274038146 != 1748532274038150 2025-05-29T15:24:34.715409Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:24:34.715712Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:24:34.715786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:34.715804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:34.726327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:34.798891Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:59:2106] Handle TEvProposeTransaction 2025-05-29T15:24:34.798916Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:24:34.798968Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-05-29T15:24:34.818297Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-05-29T15:24:34.818341Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:24:34.818521Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:24:34.818533Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:24:34.818594Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:640:2548] txid# 
281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:24:34.818641Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:24:34.818656Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:24:34.818715Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:24:34.819098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.819370Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:24:34.819383Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-05-29T15:24:34.833016Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:34.833239Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:34.833328Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:34.833391Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:34.840729Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:34.840867Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:34.840892Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:34.841027Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:34.841034Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:34.841038Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:34.841082Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:34.841103Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:34.841114Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:34.841170Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:34.844764Z node 1 
:TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:34.844817Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:34.844844Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:34.844848Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:34.844851Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:34.844855Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:34.844906Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:34.844912Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:34.844978Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:34.844996Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:34.845006Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:34.845012Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:34.845018Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:24:34.845022Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:34.845025Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:34.845028Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:34.845032Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:34.845107Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:34.845112Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:34.845118Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:34.845124Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:24:34.845128Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:24:34.845143Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-05-29T15:24:34.845185Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:24:34.845193Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:34.845205Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:34.845214Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:24:34.845218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:24:34.845221Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:24:34.845224Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 7 ... 7] at 72075186224037888 on unit CompleteOperation 2025-05-29T15:24:52.136727Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [300:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:52.136732Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [300:281474976715657] at 72075186224037888 executing on unit CompleteOperation 2025-05-29T15:24:52.136736Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [300:281474976715657] at 72075186224037888 to execution unit CompletedOperations 2025-05-29T15:24:52.136740Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [300:281474976715657] at 72075186224037888 on unit CompletedOperations 2025-05-29T15:24:52.136746Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [300:281474976715657] at 72075186224037888 is Executed 2025-05-29T15:24:52.136750Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [300:281474976715657] at 72075186224037888 executing on unit CompletedOperations 2025-05-29T15:24:52.136755Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [300:281474976715657] at 72075186224037888 has finished 2025-05-29T15:24:52.136759Z node 14 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:52.136763Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-05-29T15:24:52.136767Z node 14 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:52.136772Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:52.137127Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270976, Sender [14:24:2071], Recipient [14:689:2579]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 0} 2025-05-29T15:24:52.137141Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3167: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-05-29T15:24:52.137146Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got 
TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:52.137152Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:52.137279Z node 14 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 300} 2025-05-29T15:24:52.137376Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:52.137648Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [14:707:2590], Recipient [14:689:2579]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:52.137659Z node 14 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:52.137722Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:52.137729Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [300:281474976715657] at 72075186224037888 on unit CreateTable 2025-05-29T15:24:52.137738Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:52.137744Z node 14 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:52.137749Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [300:281474976715657] at 72075186224037888 on unit CompleteOperation 2025-05-29T15:24:52.137764Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [300 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [14:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:52.137775Z node 14 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:52.137786Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:52.138109Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877760, Sender [14:731:2606], Recipient [14:689:2579]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [14:735:2610] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:24:52.138120Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3162: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:24:52.138346Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269552132, Sender [14:410:2404], Recipient [14:689:2579]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715657 2025-05-29T15:24:52.138356Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3133: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-05-29T15:24:52.138362Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:52.138370Z node 14 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:52.138415Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, 
received event# 270270978, Sender [14:24:2071], Recipient [14:689:2579]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 300 ReadStep# 300 } 2025-05-29T15:24:52.138421Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3168: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-05-29T15:24:52.138426Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 300 2025-05-29T15:24:52.138639Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [14:731:2606], Recipient [14:689:2579]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [14:731:2606] ServerId: [14:735:2610] } 2025-05-29T15:24:52.138648Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:24:52.142286Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:747:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:52.142316Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:52.142362Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:752:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:52.143420Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:52.144505Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [14:707:2590], Recipient [14:689:2579]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:52.144522Z node 14 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:52.177411Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [14:707:2590], Recipient [14:689:2579]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:52.177437Z node 14 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:52.177842Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:754:2629], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:52.221059Z node 14 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [14:805:2661] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:52.228690Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [14:815:2670], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:52.229160Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=14&id=YWNhMzNkYTEtNzgzYzYxNjUtNDE2YzkxMDYtOGRkMTRhNzU=, ActorId: [14:745:2620], ActorState: ExecuteState, TraceId: 01jweaathed8r8rh61q7c10zqz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13CAD19C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E60AC9) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264DE504) NKikimr::NTestSuiteDataShardSnapshots::TTestCaseShardRestartAfterDropTableAndAbort::Execute_(NUnitTest::TTestContext&)+3285 (0x13AA1A15) NKikimr::NTestSuiteDataShardSnapshots::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13AAB867) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E6297E) NKikimr::NTestSuiteDataShardSnapshots::TCurrentTest::Execute()+425 (0x13AAB0C9) NUnitTest::TTestFactory::Execute()+803 (0x13E630F3) NUnitTest::RunMain(int, char**)+3021 (0x13E74C9D) ??+0 (0x7FEED5A9FD90) __libc_start_main+128 (0x7FEED5A9FE40) _start+41 (0x12A82029) >> TConsoleTests::TestRemoveTenantExtSubdomain [GOOD] >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [FAIL] >> TestKinesisHttpProxy::ErroneousRequestGetRecords >> TConsoleTests::TestAlterServerlessTenant [GOOD] >> TConsoleTests::TestAttributes |66.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |66.2%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |66.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |66.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [FAIL] Test command err: 2025-05-29T15:24:34.334195Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:34.334227Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:34.334238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00129e/r3tmp/tmp8cEiza/pdisk_1.dat 2025-05-29T15:24:34.470529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.484550Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:34.489397Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532273878452 != 1748532273878456 2025-05-29T15:24:34.530862Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:24:34.531144Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:24:34.531202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:34.531217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:34.541713Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:34.614189Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:59:2106] Handle TEvProposeTransaction 2025-05-29T15:24:34.614218Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:24:34.614255Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-05-29T15:24:34.629865Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-05-29T15:24:34.629900Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:24:34.630063Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:24:34.630074Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:24:34.630118Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:640:2548] txid# 
281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:24:34.630156Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:24:34.630169Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:24:34.630224Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:24:34.630494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.630678Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:24:34.630687Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-05-29T15:24:34.644691Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:34.644953Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:34.645045Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:34.645113Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:34.652212Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:34.652347Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:34.652367Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:34.652479Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:34.652485Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:34.652490Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:34.652529Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:34.652539Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:34.652548Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:34.662804Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:34.667139Z node 1 
:TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:34.667198Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:34.667229Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:34.667235Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:34.667240Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:34.667246Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:34.667295Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:34.667303Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:34.667370Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:34.667391Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:34.667406Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:34.667415Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:34.667422Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:24:34.667428Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:34.667433Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:34.667438Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:34.667443Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:34.667548Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:34.667556Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:34.667563Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:34.667574Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:24:34.667579Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:24:34.667600Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-05-29T15:24:34.667650Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:24:34.667661Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:34.667678Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:34.667687Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:24:34.667692Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:24:34.667698Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:24:34.667702Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 7 ... bdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:55.947023Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877760, Sender [13:733:2607], Recipient [13:689:2579]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [13:737:2611] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:24:55.947030Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3162: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:24:55.947175Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270978, Sender [13:24:2071], Recipient [13:689:2579]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 300 ReadStep# 300 } 2025-05-29T15:24:55.947187Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3168: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-05-29T15:24:55.947192Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 300 2025-05-29T15:24:55.947223Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269552132, Sender [13:410:2404], Recipient [13:689:2579]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715657 2025-05-29T15:24:55.947228Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3133: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-05-29T15:24:55.947232Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:55.947239Z node 13 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:55.947429Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [13:733:2607], Recipient [13:689:2579]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [13:733:2607] ServerId: [13:737:2611] } 2025-05-29T15:24:55.947437Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 
2025-05-29T15:24:55.947753Z node 13 :KQP_SESSION INFO: kqp_session_actor.cpp:2478: SessionId: ydb://session/3?node_id=13&id=MjYxMTQxNTgtMTk3N2RhNDEtMWUxODdmMTQtNmUyMzkzMTI=, ActorId: [13:617:2539], ActorState: ExecuteState, TraceId: 01jweaay3t4r3ajtrwasav2xr6, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [13:620:2539] WorkloadServiceCleanup: 0 2025-05-29T15:24:55.948194Z node 13 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2539: SessionId: ydb://session/3?node_id=13&id=MjYxMTQxNTgtMTk3N2RhNDEtMWUxODdmMTQtNmUyMzkzMTI=, ActorId: [13:617:2539], ActorState: CleanupState, TraceId: 01jweaay3t4r3ajtrwasav2xr6, EndCleanup, isFinal: 0 2025-05-29T15:24:55.948212Z node 13 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2275: SessionId: ydb://session/3?node_id=13&id=MjYxMTQxNTgtMTk3N2RhNDEtMWUxODdmMTQtNmUyMzkzMTI=, ActorId: [13:617:2539], ActorState: CleanupState, TraceId: 01jweaay3t4r3ajtrwasav2xr6, Sent query response back to proxy, proxyRequestId: 3, proxyId: [13:57:2104] 2025-05-29T15:24:55.950816Z node 13 :KQP_SESSION DEBUG: kqp_session_actor.cpp:223: SessionId: ydb://session/3?node_id=13&id=ZWY2ZWFkMzYtY2Q4ZWI5YWEtZmI1ODFjNjAtYzExNjE0YzE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZWY2ZWFkMzYtY2Q4ZWI5YWEtZmI1ODFjNjAtYzExNjE0YzE= 2025-05-29T15:24:55.950925Z node 13 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=13&id=ZWY2ZWFkMzYtY2Q4ZWI5YWEtZmI1ODFjNjAtYzExNjE0YzE=, ActorId: [13:743:2617], ActorState: unknown state, session actor bootstrapped 2025-05-29T15:24:55.950985Z node 13 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=13&id=ZWY2ZWFkMzYtY2Q4ZWI5YWEtZmI1ODFjNjAtYzExNjE0YzE=, ActorId: [13:743:2617], ActorState: ReadyState, TraceId: 01jweaay8e6wyras6n5ctvfbkf, received request, proxyRequestId: 4 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: UPSERT INTO `/Root/table1` (key, value) VALUES (2, 22); rpcActor: [0:0:0] database: databaseId: /Root pool id: default 2025-05-29T15:24:55.951104Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:745:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:55.951121Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:55.951169Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:750:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:55.952233Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:55.953222Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:709:2591], Recipient [13:689:2579]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:55.953239Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:55.988037Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:709:2591], Recipient [13:689:2579]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:55.988065Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:55.988571Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:752:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:56.030578Z node 13 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [13:803:2658] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:56.030789Z node 13 :KQP_SESSION DEBUG: kqp_session_actor.cpp:527: SessionId: ydb://session/3?node_id=13&id=ZWY2ZWFkMzYtY2Q4ZWI5YWEtZmI1ODFjNjAtYzExNjE0YzE=, ActorId: [13:743:2617], ActorState: ExecuteState, TraceId: 01jweaay8e6wyras6n5ctvfbkf, continue request, pool id: default 2025-05-29T15:24:56.030808Z node 13 :KQP_SESSION DEBUG: kqp_session_actor.cpp:575: SessionId: ydb://session/3?node_id=13&id=ZWY2ZWFkMzYtY2Q4ZWI5YWEtZmI1ODFjNjAtYzExNjE0YzE=, ActorId: [13:743:2617], ActorState: ExecuteState, TraceId: 01jweaay8e6wyras6n5ctvfbkf, Sending CompileQuery request 2025-05-29T15:24:56.039686Z node 13 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [13:813:2667], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:56.040097Z node 13 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=13&id=ZWY2ZWFkMzYtY2Q4ZWI5YWEtZmI1ODFjNjAtYzExNjE0YzE=, ActorId: [13:743:2617], ActorState: ExecuteState, TraceId: 01jweaay8e6wyras6n5ctvfbkf, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
2025-05-29T15:24:56.040119Z node 13 :KQP_SESSION INFO: kqp_session_actor.cpp:2478: SessionId: ydb://session/3?node_id=13&id=ZWY2ZWFkMzYtY2Q4ZWI5YWEtZmI1ODFjNjAtYzExNjE0YzE=, ActorId: [13:743:2617], ActorState: ExecuteState, TraceId: 01jweaay8e6wyras6n5ctvfbkf, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1
2025-05-29T15:24:56.040148Z node 13 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2539: SessionId: ydb://session/3?node_id=13&id=ZWY2ZWFkMzYtY2Q4ZWI5YWEtZmI1ODFjNjAtYzExNjE0YzE=, ActorId: [13:743:2617], ActorState: CleanupState, TraceId: 01jweaay8e6wyras6n5ctvfbkf, EndCleanup, isFinal: 1
2025-05-29T15:24:56.040183Z node 13 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2275: SessionId: ydb://session/3?node_id=13&id=ZWY2ZWFkMzYtY2Q4ZWI5YWEtZmI1ODFjNjAtYzExNjE0YzE=, ActorId: [13:743:2617], ActorState: CleanupState, TraceId: 01jweaay8e6wyras6n5ctvfbkf, Sent query response back to proxy, proxyRequestId: 4, proxyId: [13:57:2104]
2025-05-29T15:24:56.040189Z node 13 :KQP_SESSION NOTICE: kqp_session_actor.cpp:2278: SessionId: ydb://session/3?node_id=13&id=ZWY2ZWFkMzYtY2Q4ZWI5YWEtZmI1ODFjNjAtYzExNjE0YzE=, ActorId: [13:743:2617], ActorState: CleanupState, TraceId: 01jweaay8e6wyras6n5ctvfbkf, SessionActor destroyed due to INTERNAL_ERROR
2025-05-29T15:24:56.040196Z node 13 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2551: SessionId: ydb://session/3?node_id=13&id=ZWY2ZWFkMzYtY2Q4ZWI5YWEtZmI1ODFjNjAtYzExNjE0YzE=, ActorId: [13:743:2617], ActorState: unknown state, TraceId: 01jweaay8e6wyras6n5ctvfbkf, Cleanup temp tables: 0
2025-05-29T15:24:56.040243Z node 13 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2642: SessionId: ydb://session/3?node_id=13&id=ZWY2ZWFkMzYtY2Q4ZWI5YWEtZmI1ODFjNjAtYzExNjE0YzE=, ActorId: [13:743:2617], ActorState: unknown state, TraceId: 01jweaay8e6wyras6n5ctvfbkf, Session actor destroyed
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS)
Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1
, with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13CAD19C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E60AC9)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264DE504)
NKikimr::NTestSuiteDataShardSnapshots::TTestCaseUncommittedChangesRenameTable::Execute_(NUnitTest::TTestContext&)+2221 (0x13B81BBD)
NKikimr::NTestSuiteDataShardSnapshots::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13AAB867)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E6297E)
NKikimr::NTestSuiteDataShardSnapshots::TCurrentTest::Execute()+425 (0x13AAB0C9)
NUnitTest::TTestFactory::Execute()+803 (0x13E630F3)
NUnitTest::RunMain(int, char**)+3021 (0x13E74C9D)
??+0 (0x7FA363C1FD90)
__libc_start_main+128 (0x7FA363C1FE40)
_start+41 (0x12A82029)
|66.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage
|66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage
|66.2%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage
>> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx3
>> TestKinesisHttpProxy::MissingAction
|66.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut
|66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut
|66.2%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut
>> TConsoleTxProcessorTests::TestTxProcessorSubProcessor [GOOD]
>> TConsoleTxProcessorTests::TestTxProcessorTemporary
>> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx2
>> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasure4Plus2Block
|66.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat
|66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat
|66.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat
>> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasure4Plus2Block [GOOD]
>> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight
>> TestYmqHttpProxy::TestSendMessage
|66.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasure4Plus2Block [GOOD]
>> TestKinesisHttpProxy::BadRequestUnknownMethod
>> TConsoleTests::TestAuthorization [GOOD]
>> TConsoleTests::TestAuthorizationExtSubdomain
>> TestYmqHttpProxy::TestDeleteMessageBatch
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::BrokenLockChangesDontLeak [FAIL]
Test command err:
2025-05-29T15:24:34.496770Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:34.496808Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:34.496823Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00124c/r3tmp/tmpyReviL/pdisk_1.dat 2025-05-29T15:24:34.616714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.629733Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:34.632792Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532273985295 != 1748532273985299 2025-05-29T15:24:34.674374Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:24:34.674666Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:24:34.674774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:34.674799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:34.685422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:34.758364Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:59:2106] Handle TEvProposeTransaction 2025-05-29T15:24:34.758392Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:24:34.758431Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-05-29T15:24:34.779469Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-05-29T15:24:34.779520Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:24:34.779741Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:24:34.779759Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:24:34.779825Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:640:2548] txid# 
281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:24:34.779872Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:24:34.779889Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:24:34.779963Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:24:34.780366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.780629Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:24:34.780642Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-05-29T15:24:34.795382Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:34.795672Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:34.795771Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:34.795832Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:34.806603Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:34.806805Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:34.806840Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:34.807021Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:34.807031Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:34.807038Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:34.807103Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:34.807122Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:34.807135Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:34.807208Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:34.812415Z node 1 
:TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:34.812489Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:34.812526Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:34.812533Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:34.812538Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:34.812545Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:34.812606Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:34.812615Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:34.812710Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:34.812732Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:34.812746Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:34.812755Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:34.812763Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:24:34.812769Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:34.812774Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:34.812781Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:34.812786Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:34.812903Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:34.812911Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:34.812919Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:34.812929Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:24:34.812934Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:24:34.812956Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 
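Note on the failure reports in this log: the unit-test runner prints the expected/actual mismatch both verbatim ("(INTERNAL_ERROR != SUCCESS)") and as a compact inline diff ("with diff: (INT|SUCC)E(RNAL_ERROR|SS)"), where runs shared by both strings appear once and differing runs appear as "(left|right)". The following standalone C++ is only an illustrative sketch of that notation, built on a longest-common-subsequence diff; it is an assumption about the rendering, not the actual NUnitTest source.

    // Sketch of the "(INT|SUCC)E(RNAL_ERROR|SS)" notation: characters on a
    // longest common subsequence are kept verbatim, differing runs become
    // "(left|right)". Illustration only, not the NUnitTest implementation.
    #include <algorithm>
    #include <iostream>
    #include <string>
    #include <vector>

    std::string InlineDiff(const std::string& a, const std::string& b) {
        const size_t n = a.size(), m = b.size();
        // dp[i][j] = LCS length of the suffixes a[i:] and b[j:].
        std::vector<std::vector<size_t>> dp(n + 1, std::vector<size_t>(m + 1, 0));
        for (size_t i = n; i-- > 0; )
            for (size_t j = m; j-- > 0; )
                dp[i][j] = a[i] == b[j] ? dp[i + 1][j + 1] + 1
                                        : std::max(dp[i + 1][j], dp[i][j + 1]);
        std::string out, da, db;
        auto flush = [&] {
            if (!da.empty() || !db.empty()) out += "(" + da + "|" + db + ")";
            da.clear(); db.clear();
        };
        size_t i = 0, j = 0;
        while (i < n && j < m) {
            if (a[i] == b[j]) { flush(); out += a[i]; ++i; ++j; }  // shared char
            else if (dp[i + 1][j] > dp[i][j + 1]) da += a[i++];    // only in a
            else db += b[j++];                                     // only in b
        }
        da += a.substr(i); db += b.substr(j);
        flush();
        return out;
    }

    int main() {
        // Prints (INT|SUCC)E(RNAL_ERROR|SS), matching the reports in this log.
        std::cout << InlineDiff("INTERNAL_ERROR", "SUCCESS") << "\n";
    }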
2025-05-29T15:24:34.813009Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:24:34.813020Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:34.813037Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:34.813046Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:24:34.813052Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:24:34.813058Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:24:34.813063Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 7 ... 4976715657] at 72075186224037888 on unit CompleteOperation 2025-05-29T15:24:57.470191Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [300:281474976715657] at 72075186224037888 is DelayComplete 2025-05-29T15:24:57.470196Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [300:281474976715657] at 72075186224037888 executing on unit CompleteOperation 2025-05-29T15:24:57.470201Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [300:281474976715657] at 72075186224037888 to execution unit CompletedOperations 2025-05-29T15:24:57.470205Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [300:281474976715657] at 72075186224037888 on unit CompletedOperations 2025-05-29T15:24:57.470211Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [300:281474976715657] at 72075186224037888 is Executed 2025-05-29T15:24:57.470216Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [300:281474976715657] at 72075186224037888 executing on unit CompletedOperations 2025-05-29T15:24:57.470221Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [300:281474976715657] at 72075186224037888 has finished 2025-05-29T15:24:57.470226Z node 16 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:57.470231Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-05-29T15:24:57.470235Z node 16 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:57.470238Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:57.470728Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 270270976, Sender [16:24:2071], Recipient [16:689:2579]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 0} 2025-05-29T15:24:57.470757Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3167: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-05-29T15:24:57.470762Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got 
TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:24:57.470769Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:57.470863Z node 16 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 300} 2025-05-29T15:24:57.470989Z node 16 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:57.472623Z node 16 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:57.472643Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [300:281474976715657] at 72075186224037888 on unit CreateTable 2025-05-29T15:24:57.472651Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:57.472658Z node 16 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:24:57.472667Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [300:281474976715657] at 72075186224037888 on unit CompleteOperation 2025-05-29T15:24:57.472684Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [300 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [16:406:2400], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:24:57.472697Z node 16 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:24:57.472723Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:57.473018Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [16:710:2592], Recipient [16:689:2579]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:57.473030Z node 16 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:57.473197Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877760, Sender [16:732:2606], Recipient [16:689:2579]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [16:736:2610] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:24:57.473206Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3162: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:24:57.473964Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269552132, Sender [16:406:2400], Recipient [16:689:2579]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715657 2025-05-29T15:24:57.473977Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3133: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-05-29T15:24:57.473984Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:24:57.473994Z node 16 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:24:57.474224Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, 
received event# 270270978, Sender [16:24:2071], Recipient [16:689:2579]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 300 ReadStep# 300 } 2025-05-29T15:24:57.474233Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3168: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-05-29T15:24:57.474239Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 300 2025-05-29T15:24:57.474423Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [16:732:2606], Recipient [16:689:2579]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [16:732:2606] ServerId: [16:736:2610] } 2025-05-29T15:24:57.474433Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:24:57.476413Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:745:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:57.476440Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:57.476497Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:750:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:57.477589Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:24:57.479491Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [16:710:2592], Recipient [16:689:2579]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:57.479509Z node 16 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:57.517493Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [16:710:2592], Recipient [16:689:2579]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:24:57.517534Z node 16 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:24:57.518113Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:752:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:24:57.561799Z node 16 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [16:803:2658] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:57.569596Z node 16 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [16:813:2667], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:57.570103Z node 16 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=16&id=M2M1ZDY4ZjctNmFkY2U0ZGEtNWQwNGZjNC0zZjI1ODM4Mg==, ActorId: [16:743:2617], ActorState: ExecuteState, TraceId: 01jweaazr46cf49fdzdpjhzqjr, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS)
Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1
, with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13CAD19C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E60AC9)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264DE504)
NKikimr::NTestSuiteDataShardSnapshots::TTestCaseBrokenLockChangesDontLeak::Execute_(NUnitTest::TTestContext&)+1961 (0x13AA4CB9)
NKikimr::NTestSuiteDataShardSnapshots::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13AAB867)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E6297E)
NKikimr::NTestSuiteDataShardSnapshots::TCurrentTest::Execute()+425 (0x13AAB0C9)
NUnitTest::TTestFactory::Execute()+803 (0x13E630F3)
NUnitTest::RunMain(int, char**)+3021 (0x13E74C9D)
??+0 (0x7F6E72E15D90)
__libc_start_main+128 (0x7F6E72E15E40)
_start+41 (0x12A82029)
|66.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator
|66.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator
|66.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator
>> TestKinesisHttpProxy::TestWrongStream2
>> TestYmqHttpProxy::TestChangeMessageVisibilityBatch
>> TConsoleTxProcessorTests::TestTxProcessorTemporary [GOOD]
>> TConsoleTxProcessorTests::TestTxProcessorRandom
>> TestYmqHttpProxy::TestGetQueueUrl
>> TestKinesisHttpProxy::TestPing
|66.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots
|66.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots
|66.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots
>> TestKinesisHttpProxy::GoodRequestCreateStream
>> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants [GOOD]
>> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [FAIL]
Test command err:
2025-05-29T15:24:34.350493Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:24:34.350531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:24:34.350545Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00125d/r3tmp/tmpsTFruX/pdisk_1.dat 2025-05-29T15:24:34.461351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.475897Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:34.479163Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532273866831 != 1748532273866835 2025-05-29T15:24:34.521044Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:24:34.521383Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:24:34.521454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:34.521473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:34.532070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:34.604606Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:59:2106] Handle TEvProposeTransaction 2025-05-29T15:24:34.604635Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:24:34.604675Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-05-29T15:24:34.621478Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-05-29T15:24:34.621544Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:24:34.621779Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:24:34.621795Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:24:34.621863Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:640:2548] txid# 
281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:24:34.621913Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:24:34.621930Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:24:34.622004Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:24:34.622404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:24:34.622654Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:24:34.622667Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-05-29T15:24:34.637286Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:24:34.637636Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:24:34.637757Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:24:34.637830Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:24:34.646517Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:24:34.646644Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:24:34.646665Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:24:34.646801Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:24:34.646809Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:24:34.646814Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:24:34.646855Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:24:34.646868Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:24:34.646877Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:24:34.646938Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:24:34.650029Z node 1 
:TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:24:34.650087Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:24:34.650115Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:24:34.650119Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:24:34.650123Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:24:34.650126Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:24:34.650177Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:34.650184Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:24:34.650251Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:24:34.650268Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:24:34.650280Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:24:34.650287Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:24:34.650293Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:24:34.650297Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:24:34.650300Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:24:34.650304Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:24:34.650308Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:24:34.650397Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:34.650402Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:24:34.650407Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:24:34.650413Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:24:34.650417Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:24:34.650435Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-05-29T15:24:34.650478Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:24:34.650489Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:24:34.650505Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:24:34.650514Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:24:34.650519Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:24:34.650523Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:24:34.650525Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 7 ... 94046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:25:01.082182Z node 13 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [13:1241:3015] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-05-29T15:25:01.082462Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-05-29T15:25:01.082635Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [13:1241:3015] txid# 281474976715661 HANDLE EvClientConnected 2025-05-29T15:25:01.082871Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [13:1241:3015] txid# 281474976715661 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715661} 2025-05-29T15:25:01.082884Z node 13 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [13:1241:3015] txid# 281474976715661 SEND to# [13:1240:3014] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 53} 2025-05-29T15:25:01.083546Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:683:2579], Recipient [13:787:2655]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:01.083560Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:25:01.083594Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:683:2579], Recipient [13:965:2795]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:01.083598Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-05-29T15:25:01.083615Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:683:2579], Recipient [13:1058:2872]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 
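The TRACE lines above ("Trying to execute ... on unit CheckSchemeTx", "Advance execution plan ...", "Execution plan ... has finished") come from the datashard driving each operation through an ordered plan of named execution units, where every unit reports a status that advances or defers the plan. A minimal sketch of that control loop, with hypothetical types rather than YDB's actual datashard_pipeline.cpp:

    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    // Hypothetical stand-ins for the pipeline pieces named in the trace.
    enum class EStatus { Executed, ExecutedNoMoreRestarts, DelayComplete };

    const char* ToString(EStatus st) {
        switch (st) {
            case EStatus::Executed:               return "Executed";
            case EStatus::ExecutedNoMoreRestarts: return "ExecutedNoMoreRestarts";
            case EStatus::DelayComplete:          return "DelayComplete";
        }
        return "";
    }

    struct TUnit {
        std::string Name;
        std::function<EStatus()> Run;
    };

    int main() {
        const std::string op = "[0:281474976715657]";
        // The plan mirrors the unit names in the trace; the bodies are dummies.
        std::vector<TUnit> plan = {
            {"CheckSchemeTx", [] { return EStatus::ExecutedNoMoreRestarts; }},
            {"StoreSchemeTx", [] { return EStatus::Executed; }},
            {"CompleteOperation", [] { return EStatus::DelayComplete; }},
        };
        for (const auto& unit : plan) {
            std::cout << "Trying to execute " << op << " on unit " << unit.Name << "\n";
            std::cout << "Execution status for " << op << " is " << ToString(unit.Run()) << "\n";
            std::cout << "Advance execution plan for " << op << " executing on unit " << unit.Name << "\n";
        }
        std::cout << "Execution plan for " << op << " has finished\n";
    }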
2025-05-29T15:25:01.083618Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-05-29T15:25:01.083640Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:683:2579], Recipient [13:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:01.083643Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:25:01.095010Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:664:2568]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:25:01.175487Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:683:2579], Recipient [13:787:2655]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:01.175514Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:25:01.175569Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:683:2579], Recipient [13:965:2795]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:01.175574Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-05-29T15:25:01.175604Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:683:2579], Recipient [13:1058:2872]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:01.175608Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-05-29T15:25:01.175631Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [13:683:2579], Recipient [13:664:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:01.175636Z node 13 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:25:01.176089Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:1240:3014], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-05-29T15:25:01.197037Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [13:59:2106] Handle TEvProposeTransaction 2025-05-29T15:25:01.197063Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [13:59:2106] TxId# 281474976715662 ProcessProposeTransaction 2025-05-29T15:25:01.197085Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [13:59:2106] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [13:1295:3050] 2025-05-29T15:25:01.197784Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [13:1295:3050] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:25:01.197801Z node 13 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [13:1295:3050] txid# 281474976715662 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:25:01.197808Z node 13 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [13:1295:3050] txid# 281474976715662 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:25:01.198080Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [13:1295:3050] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:25:01.198095Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [13:1295:3050] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:25:01.198165Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [13:1295:3050] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:25:01.198195Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [13:1295:3050] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:25:01.198211Z node 13 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [13:1295:3050] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-05-29T15:25:01.198556Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [13:1295:3050] txid# 281474976715662 HANDLE EvClientConnected 2025-05-29T15:25:01.198703Z node 13 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [13:1295:3050] txid# 281474976715662 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715662 Reason# Check failed: path: 
'/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:25:01.198732Z node 13 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [13:1295:3050] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:01.198795Z node 13 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [13:1295:3050] txid# 281474976715662 SEND to# [13:1240:3014] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-05-29T15:25:01.233205Z node 13 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [13:1305:3059], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:25:01.233712Z node 13 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=13&id=MWY3MzllYmQtNTM3OWFhMjItMjRhY2JlODQtNGYzMGU3N2U=, ActorId: [13:1224:3004], ActorState: ExecuteState, TraceId: 01jweab38r02s592z529aan9zs, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS)
Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1
, with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13CAD19C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E60AC9)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x264DE504)
NKikimr::NTestSuiteDataShardSnapshots::TTestCaseLockedWriteWithAsyncIndexAndVolatileCommit::Execute_(NUnitTest::TTestContext&)+3309 (0x13B657DD)
NKikimr::NTestSuiteDataShardSnapshots::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13AAB867)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E6297E)
NKikimr::NTestSuiteDataShardSnapshots::TCurrentTest::Execute()+425 (0x13AAB0C9)
NUnitTest::TTestFactory::Execute()+803 (0x13E630F3)
NUnitTest::RunMain(int, char**)+3021 (0x13E74C9D)
??+0 (0x7F420316CD90)
__libc_start_main+128 (0x7F420316CE40)
_start+41 (0x12A82029)
>> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey
>> TConsoleTests::TestAttributes [GOOD]
>> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning
>> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasureMirror3dc [GOOD]
|66.3%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log}
|66.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut
|66.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut
|66.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasureMirror3dc [GOOD]
|66.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer
|66.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer
|66.3%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut
|66.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer
>> TestYmqHttpProxy::TestReceiveMessage
|66.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ...
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight Test command err: 2025-05-29T15:24:40.008114Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888962587456085:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:40.008425Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d0b/r3tmp/tmpdXme7m/pdisk_1.dat 2025-05-29T15:24:40.075782Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:40.075988Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888962587456065:2079] 1748532280007943 != 1748532280007946 TServer::EnableGrpc on GrpcPort 15121, node 1 2025-05-29T15:24:40.086063Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:40.086077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:40.086080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:40.086122Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:24:40.110670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:40.110702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:40.111772Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26581 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:40.125592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
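The "WaitRootIsUp 'Root'... waiting... success" sequence above is a plain readiness poll: the test client repeats an Ls/describe request until the path answers with a usable status. A generic sketch of the pattern, with a hypothetical probe standing in for the real TClient RPC:

    #include <chrono>
    #include <iostream>
    #include <thread>

    // Hypothetical probe in place of the TClient::Ls/describe request;
    // here it simply reports ready on the third attempt.
    bool PathIsUp(int attempt) { return attempt >= 3; }

    bool WaitPathIsUp(int maxAttempts = 10) {
        for (int attempt = 0; attempt < maxAttempts; ++attempt) {
            if (PathIsUp(attempt)) return true;     // e.g. StatusCode: SUCCESS
            std::cout << "waiting...\n";            // mirrors the log above
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
        }
        return false;                               // give up after a bound
    }

    int main() {
        std::cout << (WaitPathIsUp() ? "success\n" : "timeout\n");
    }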
TClient is connected to server localhost:26581 2025-05-29T15:24:40.154609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:40.156187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-05-29T15:24:40.156721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:24:40.165210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.192999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:40.233469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:40.273825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.302821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.326672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.341742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.349628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.364234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-29T15:24:40.378810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-05-29T15:24:40.435572Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888962587457454:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.435597Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.435642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888962587457466:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.436395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-05-29T15:24:40.438962Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888962587457468:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710673 completed, doublechecking } 2025-05-29T15:24:40.512618Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888962587457519:2853] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:40.612469Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888962587457528:2380], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 VERIFY failed (2025-05-29T15:24:40.613807Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed 2025-05-29T15:24:40.613114Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTAyODA2ZWYtNGI4NWJhMjMtZDgyNGNlNDAtNjc1ZGM2OTI=, ActorId: [1:7509888962587457436:2370], ActorState: ExecuteState, TraceId: 01jweaaf3j7xywzkgvbey3dwm6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13E8C281) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()+419 (0x13E8BC53) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7F75F1FF3D90) __libc_start_main+128 (0x7F75F1FF3E40) _start+41 (0x12EC8029) 2025-05-29T15:24:43.744338Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888974402454271:2093];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:43.744578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d0b/r3tmp/tmpnAhUxe/pdisk_1.dat 2025-05-29T15:24:43.897053Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17067, node 1 2025-05-29T15:24:43.922924Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:43.922935Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:43.922937Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:43.922977Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15921 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCES ... 
(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13E8C281) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()+419 (0x13E8BC53) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7FB73D904D90) __libc_start_main+128 (0x7FB73D904E40) _start+41 (0x12EC8029) 2025-05-29T15:25:03.553354Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889058969006207:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d0b/r3tmp/tmpI2EV65/pdisk_1.dat 2025-05-29T15:25:03.591394Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:03.646824Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889058969006025:2079] 1748532303393562 != 1748532303393565 2025-05-29T15:25:03.666035Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:03.666660Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:03.666674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:03.667591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7778, node 1 2025-05-29T15:25:03.702931Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:03.702943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:03.702946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:03.702986Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25498 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:03.871668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:03.883063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:25498 2025-05-29T15:25:03.939050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:03.940470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-05-29T15:25:03.940843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:03.952823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.000483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:04.028036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:04.051697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-05-29T15:25:04.052708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:04.067373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.082183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.098369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.108820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.123822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.139958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.367073Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889063263974708:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:04.367099Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:04.367208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889063263974720:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:04.368020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-05-29T15:25:04.370866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2025-05-29T15:25:04.370927Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889063263974722:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710673 completed, doublechecking } 2025-05-29T15:25:04.452479Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889063263974773:2854] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:04.599905Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889063263974782:2380], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:04.600856Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjUyZTEzZGYtMzkwNTdiNjItZmZiNWZhZjQtYmMwYTM5MTI=, ActorId: [1:7509889063263974682:2370], ActorState: ExecuteState, TraceId: 01jweab6fe6tjqf4yg1bnyc0ms, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:25:04.604574Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13E8C281) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()+419 (0x13E8BC53) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7F7400BE9D90) __libc_start_main+128 (0x7F7400BE9E40) _start+41 (0x12EC8029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch Test command err: 2025-05-29T15:24:40.143352Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888959602995036:2261];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:40.143387Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ceb/r3tmp/tmploIjvv/pdisk_1.dat 2025-05-29T15:24:40.227755Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:40.228130Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888959602994814:2079] 1748532280142502 != 1748532280142505 TServer::EnableGrpc on GrpcPort 23216, node 1 2025-05-29T15:24:40.250887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:40.250936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:40.255224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24:40.257165Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:40.257176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:40.257178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:40.257226Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to 
server localhost:25608 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:40.307860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.310679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:25608 2025-05-29T15:24:40.359219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.363169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.368094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:24:40.375409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-05-29T15:24:40.377066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.399262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:40.412840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-29T15:24:40.432774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2025-05-29T15:24:40.435341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.453046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.468402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:40.485060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-05-29T15:24:40.501256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.516710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.540885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.697318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888959602996200:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.697346Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.697474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888959602996212:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.698319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480 2025-05-29T15:24:40.701980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 2025-05-29T15:24:40.702074Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888959602996214:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715673 completed, doublechecking } 2025-05-29T15:24:40.783849Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888959602996265:2853] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:40.846033Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888959602996274:2380], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:40.846555Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmE2MDNhMzAtZjY0OTFjN2EtOTI2ZTdhNzctYjY4MzA0OTA=, ActorId: [1:7509888959602996173:2369], ActorState: ExecuteState, TraceId: 01jweaafbr6tw7vs2hzdmnrva0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:24:40.851078Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13E8C281) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()+419 (0x13E8BC53) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7EFEC2A23D90) __libc_start_main+128 (0x7EFEC2A23E40) _start+41 (0x12EC8029) test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ceb/r3tmp/tmpzXy6GV/pdisk_1.dat 2025-05-29T15:24:44.378830Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:24:44.433082Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888978967355868:2079] 1748532284296004 != 1748532284296007 2025-05-29T15:24:44.458795Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles w ... 
Private::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13E8C281) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()+419 (0x13E8BC53) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7F9A96E83D90) __libc_start_main+128 (0x7F9A96E83E40) _start+41 (0x12EC8029) 2025-05-29T15:25:05.289588Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889066088895370:2205];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:05.289666Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ceb/r3tmp/tmpf8wwtP/pdisk_1.dat 2025-05-29T15:25:05.411579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:05.411600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:05.414928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:05.431107Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6144, node 1 2025-05-29T15:25:05.450626Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:05.450636Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:05.450638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:05.450675Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19499 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:25:05.515797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.519096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:19499 2025-05-29T15:25:05.587302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.595212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:25:05.599193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.601863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-05-29T15:25:05.603057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.653286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:05.680732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:25:05.719955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2025-05-29T15:25:05.721188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:25:05.736035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.762710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.782544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:05.811121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.826274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.845406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.914890Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889066088896566:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:05.914924Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:05.915025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889066088896578:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:05.919577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480 2025-05-29T15:25:05.923691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 2025-05-29T15:25:05.923776Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889066088896580:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715673 completed, doublechecking } 2025-05-29T15:25:06.012091Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889070383863927:2853] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:06.084048Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889070383863936:2380], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:06.084526Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjljNmQ0MzMtODUxOGQ4YjQtM2RhMDFjYzQtYjFiMWM1MjY=, ActorId: [1:7509889066088896563:2370], ActorState: ExecuteState, TraceId: 01jweab7zta6scvxb9w6pm5mqz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:25:06.087109Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13E8C281) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()+419 (0x13E8BC53) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7F420D8D0D90) __libc_start_main+128 (0x7F420D8D0E40) _start+41 (0x12EC8029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch Test command err: 2025-05-29T15:24:40.098112Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888962562607809:2196];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:40.099207Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cf6/r3tmp/tmpswoFiA/pdisk_1.dat 2025-05-29T15:24:40.158913Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888962562607652:2079] 1748532280097278 != 1748532280097281 2025-05-29T15:24:40.158998Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24234, node 1 2025-05-29T15:24:40.174894Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:40.174907Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:40.174909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:40.174947Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62318 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:24:40.203000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:40.203025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:40.207053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:40.238975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.247050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:62318 2025-05-29T15:24:40.319658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.325072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:24:40.331375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.339159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-05-29T15:24:40.340534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.369062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-05-29T15:24:40.384709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.405567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.461211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.475953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.494722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.507935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.524744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.550328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.566130Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888962562609044:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.566158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.566257Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888962562609056:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.567120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480 2025-05-29T15:24:40.569315Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888962562609058:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715673 completed, doublechecking } 2025-05-29T15:24:40.635890Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888962562609109:2854] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:40.748857Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888962562609118:2380], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:40.749756Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWYzN2YxMC03MjQyZTJiNC05Mzc4MzU1Mi1hZGRkY2I2Nw==, ActorId: [1:7509888962562609041:2370], ActorState: ExecuteState, TraceId: 01jweaaf7n9prbpmprjyed2q5w, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:24:40.750924Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13E8C281) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()+419 (0x13E8BC53) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7FC35A090D90) __libc_start_main+128 (0x7FC35A090E40) _start+41 (0x12EC8029) 2025-05-29T15:24:43.797552Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888972866733363:2192];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:43.797588Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cf6/r3tmp/tmp8f198W/pdisk_1.dat 2025-05-29T15:24:43.987391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:43.987413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:43.987852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:24 ... 
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()+419 (0x13E8BC53) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7F1DEF8FCD90) __libc_start_main+128 (0x7F1DEF8FCE40) _start+41 (0x12EC8029) 2025-05-29T15:25:04.451648Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889065241531469:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cf6/r3tmp/tmpm4qkKU/pdisk_1.dat 2025-05-29T15:25:04.634475Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:04.680418Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889065241531258:2079] 1748532304438814 != 1748532304438817 TServer::EnableGrpc on GrpcPort 9767, node 1 2025-05-29T15:25:04.716577Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:25:04.716596Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:25:04.723609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:04.723617Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:04.723619Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:04.723656Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:04.730833Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:04.731055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:04.731071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:04.735290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13751 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:25:04.827591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.843150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:13751 2025-05-29T15:25:04.919294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.923219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:25:04.927205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.933292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-05-29T15:25:04.940094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.971668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:04.995459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715663, at schemeshard: 72057594046644480 2025-05-29T15:25:05.031776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:05.072863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.106227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.123723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.146338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:05.172139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.262506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.286657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.335627Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889069536499941:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:05.335652Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:05.335872Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889069536499953:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:05.336618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480 2025-05-29T15:25:05.339550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 2025-05-29T15:25:05.339624Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889069536499955:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-05-29T15:25:05.431725Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889069536500006:2853] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:05.523800Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889069536500015:2380], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:05.524240Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGI1NjAwNzUtNTFjM2E3ODEtYjE4NWJiZjEtYTU3ODc1MTc=, ActorId: [1:7509889069536499938:2370], ActorState: ExecuteState, TraceId: 01jweab7dp4dpj201q6dvk9n7e, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:25:05.526956Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13E8C281) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()+419 (0x13E8BC53) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7F61141A2D90) __libc_start_main+128 (0x7F61141A2E40) _start+41 (0x12EC8029) >> TConsoleTests::TestAuthorizationExtSubdomain [GOOD] >> TConsoleTests::TestAttributesExtSubdomain >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTable >> TConsoleTxProcessorTests::TestTxProcessorRandom [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization >> TestKinesisHttpProxy::TestRequestBadJson >> TestKinesisHttpProxy::TestWrongRequest >> YdbYqlClient::ConnectDbAclIsStrictlyChecked ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2025-05-29T15:24:40.061142Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888960326244815:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:40.061158Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cf1/r3tmp/tmprBA5Y2/pdisk_1.dat 2025-05-29T15:24:40.123515Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888960326244793:2079] 1748532280060912 != 1748532280060915 2025-05-29T15:24:40.126026Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26807, node 1 2025-05-29T15:24:40.139743Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:40.139758Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:40.139760Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:40.139802Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is 
connected to server localhost:4594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:24:40.163841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:40.163868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:40.166252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:40.203775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:4594 2025-05-29T15:24:40.227934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.229069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:40.246968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:24:40.290787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:40.317014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:40.336844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:24:40.349995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.363690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.387315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.406280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.421089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.483590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.526637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888960326246182:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.526672Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.526784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888960326246194:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.527964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480 2025-05-29T15:24:40.530417Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888960326246196:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-05-29T15:24:40.595495Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888960326246247:2853] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:40.663379Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888960326246256:2380], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:40.664037Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjU0YjBiMC01NDJjYTEyNi01ZWY1MmVmYy01NGIwYjg5Zg==, ActorId: [1:7509888960326246179:2370], ActorState: ExecuteState, TraceId: 01jweaaf6d74qhp8e63qytbsfz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:24:40.667320Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13DFAE11) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()+426 (0x13DFA68A) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7FF7CD6DAD90) __libc_start_main+128 (0x7FF7CD6DAE40) _start+41 (0x12EC8029) 2025-05-29T15:24:43.690965Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888974488513870:2079];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:43.691285Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cf1/r3tmp/tmpx3chqa/pdisk_1.dat 2025-05-29T15:24:43.846237Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:43.847966Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888974488513818:2079] 1748532283678269 != 1748532283678272 TServer::EnableGrpc on GrpcPort 3095, node 1 2025-05-29T15:24:43.882940Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:43.882950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:43.882952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:43.882990Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:24:43.895164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unkno ... 
ticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13DFAE11) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()+426 (0x13DFA68A) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7F2F6DC25D90) __libc_start_main+128 (0x7F2F6DC25E40) _start+41 (0x12EC8029) test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cf1/r3tmp/tmpNk93NK/pdisk_1.dat 2025-05-29T15:25:04.004284Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889064704627735:2192];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:04.004318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 61688, node 1 2025-05-29T15:25:04.199020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:04.199044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:04.199768Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889064704627582:2079] 1748532304002298 != 1748532304002301 2025-05-29T15:25:04.199936Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:04.199938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:04.199940Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:04.199972Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:04.200124Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:04.200941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9103 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:04.271606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.275107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:9103 2025-05-29T15:25:04.323230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.324514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:25:04.324816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.337197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.375228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:04.407862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715663, at schemeshard: 72057594046644480 2025-05-29T15:25:04.411846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:04.435943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:04.451633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.467902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.504067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.526470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.541858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.606850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.671592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889064704628969:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:04.671619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:04.671792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889064704628981:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:04.672602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480 2025-05-29T15:25:04.675683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 2025-05-29T15:25:04.675762Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889064704628983:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-05-29T15:25:04.759502Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889064704629035:2854] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:04.998331Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889064704629044:2380], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:04.998801Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YmFmM2Q5NDQtNWFkY2Q5YzYtMjM4MjgwMGQtN2ZiNGQyZGU=, ActorId: [1:7509889064704628966:2370], ActorState: ExecuteState, TraceId: 01jweab6ry1pbjqctyk48gnhwy, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:25:05.011138Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13DFAE11) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()+426 (0x13DFA68A) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7FE926E10D90) __libc_start_main+128 (0x7FE926E10E40) _start+41 (0x12EC8029) >> TImmediateControlsConfiguratorTests::TestControlsInitialization [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls |66.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/slow/unittest |66.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |66.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |66.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut >> SlowTopicAutopartitioning::CDC_Write >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue >> TImmediateControlsConfiguratorTests::TestModifiedControls [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault >> TOlapReboots::CreateStandaloneTable [GOOD] >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant >> TPQTestSlow::TestOnDiskStoredSourceIds ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream Test command err: 2025-05-29T15:24:40.081047Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888959427723419:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:40.081154Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d01/r3tmp/tmpknWUaU/pdisk_1.dat 2025-05-29T15:24:40.160960Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:40.161128Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888959427723399:2079] 1748532280080871 != 1748532280080874 
TServer::EnableGrpc on GrpcPort 7967, node 1 2025-05-29T15:24:40.187257Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:40.187277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:40.187280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:40.187333Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:24:40.191099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:40.191137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:24:40.192378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:40.226203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.229203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:61067 2025-05-29T15:24:40.279699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.287794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:24:40.295461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:24:40.310142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-05-29T15:24:40.316333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.345884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:24:40.362631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.385188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.403855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.414380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:40.427606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-05-29T15:24:40.441014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.455673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:40.482810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-05-29T15:24:40.546729Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888959427724788:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.546780Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.547597Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888959427724800:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.548532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480 2025-05-29T15:24:40.550551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 2025-05-29T15:24:40.550620Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888959427724802:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-05-29T15:24:40.621585Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888959427724853:2853] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:40.740743Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888959427724869:2380], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:24:40.741814Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YmY1ZDUzNTMtNTEwMTYwLWI3YzNmOTItMzkwZDliZGI=, ActorId: [1:7509888959427724777:2370], ActorState: ExecuteState, TraceId: 01jweaaf718d253zyj8ytfseez, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:24:40.751256Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13DFAE11) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()+426 (0x13DFA68A) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7FD56FB54D90) __libc_start_main+128 (0x7FD56FB54E40) _start+41 (0x12EC8029) 2025-05-29T15:24:43.794846Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888973856396896:2192];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:43.794885Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d01/r3tmp/tmpPTibI2/pdisk_1.dat 2025-05-29T15:24:43.971416Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:43.974414Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:75098889738563967 ... 
cess() failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13DFAE11) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()+426 (0x13DFA68A) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7FF854C78D90) __libc_start_main+128 (0x7FF854C78E40) _start+41 (0x12EC8029) test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d01/r3tmp/tmpEfaQtn/pdisk_1.dat 2025-05-29T15:25:06.474817Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:25:06.530025Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:06.530132Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889074180300649:2079] 1748532306373290 != 1748532306373293 TServer::EnableGrpc on GrpcPort 7356, node 1 2025-05-29T15:25:06.551028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:06.551040Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:06.551043Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:06.551082Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:06.571119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:06.571150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:06.574970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62098 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:06.651703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.671102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:62098 2025-05-29T15:25:06.754304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.771403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:25:06.783494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.796557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-05-29T15:25:06.803743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.872371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:06.896513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:06.925606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.942957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.967719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.978343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:06.992664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:07.006276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:07.020147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:07.114159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889078475269338:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:07.114183Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:07.114338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889078475269350:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:07.115158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480 2025-05-29T15:25:07.123155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 2025-05-29T15:25:07.123252Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889078475269352:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-05-29T15:25:07.179485Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889078475269403:2856] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:07.274711Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889078475269412:2380], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:07.275663Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NmU0YTFiNWQtNDM1NDQ2MjctYjZiM2JkYmEtMjlmMGFjYTY=, ActorId: [1:7509889078475269335:2370], ActorState: ExecuteState, TraceId: 01jweab9596ezb1yaannzghzdw, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:25:07.279281Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13DFAE11) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()+426 (0x13DFA68A) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7FD96E668D90) __libc_start_main+128 (0x7FD96E668E40) _start+41 (0x12EC8029) >> YdbYqlClient::TestReadTableMultiShardWholeTable [FAIL] >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot |66.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/slow/unittest >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey >> TImmediateControlsConfiguratorTests::TestResetToDefault [GOOD] >> TImmediateControlsConfiguratorTests::TestMaxLimit >> YdbYqlClient::ConnectDbAclIsStrictlyChecked [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase >> TPQTestSlow::TestWriteVeryBigMessage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateStandaloneTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:24:38.640700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:38.640731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:38.640737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:38.640742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:38.640748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:38.640752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:38.640761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:38.640774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:38.640874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:38.640965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:38.655784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:38.655808Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:38.655912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:24:38.659050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:38.659079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:38.659108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:38.662491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:38.662592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:38.662705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:38.663018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:38.663688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:38.663728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:38.663976Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:38.663987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:38.664022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:38.664031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:38.664036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:38.664056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:24:38.665462Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:24:38.686865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:38.686932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:38.686992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:38.687033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:38.687043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:38.687711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:38.687739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:38.687793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:38.687812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: 
TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:38.687817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:38.687823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:38.688215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:38.688225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:38.688230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:38.690335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:38.690353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:38.690359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:38.690366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:38.691063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:38.691513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:38.691557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:38.691755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:38.691781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:38.691803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:38.691871Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch 
... EBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:25:11.080881Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:25:11.081989Z node 68 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1002;fline=tx_controller.cpp:214;event=finished_tx;tx_id=1002; FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-05-29T15:25:11.082464Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:25:11.082474Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:25:11.082513Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:25:11.082552Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:11.082559Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [68:208:2209], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2025-05-29T15:25:11.082569Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [68:208:2209], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2025-05-29T15:25:11.082580Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:25:11.082587Z node 68 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:459: TCreateColumnTable TProposedWaitParts operationId# 1002:0 ProgressState at tablet: 72057594046678944 2025-05-29T15:25:11.082595Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:485: TCreateColumnTable TProposedWaitParts operationId# 1002:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-05-29T15:25:11.082918Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:25:11.082935Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:25:11.082940Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-05-29T15:25:11.082949Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:25:11.082954Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:25:11.087047Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:25:11.087069Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:25:11.087074Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-05-29T15:25:11.087080Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-05-29T15:25:11.087086Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:25:11.087105Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2025-05-29T15:25:11.087318Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-05-29T15:25:11.087655Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-05-29T15:25:11.087945Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-05-29T15:25:11.098962Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6146: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1002 2025-05-29T15:25:11.098982Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-05-29T15:25:11.099003Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1002 FAKE_COORDINATOR: Erasing txId 1002 2025-05-29T15:25:11.099499Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:25:11.099532Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:25:11.099539Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1002:0 ProgressState 2025-05-29T15:25:11.099555Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1 2025-05-29T15:25:11.099559Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: 
TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:25:11.099567Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1 2025-05-29T15:25:11.099570Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:25:11.099574Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2025-05-29T15:25:11.099586Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [68:370:2347] message: TxId: 1002 2025-05-29T15:25:11.099593Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:25:11.099598Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1002:0 2025-05-29T15:25:11.099602Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1002:0 2025-05-29T15:25:11.099628Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:25:11.100158Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-05-29T15:25:11.100177Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [68:371:2348] TestWaitNotification: OK eventTxId 1002 2025-05-29T15:25:11.100299Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:25:11.100352Z node 68 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 76us result status StatusSuccess 2025-05-29T15:25:11.100486Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExportToS3WithRebootsTests::CancelShouldSucceedOnViewsAndTables [GOOD] >> SlowTopicAutopartitioning::CDC_Write [FAIL] >> TImmediateControlsConfiguratorTests::TestMaxLimit [GOOD] >> TImmediateControlsConfiguratorTests::TestDynamicMap >> TDatabaseResolverTests::Greenplum_MasterNode >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase [GOOD] >> YdbYqlClient::CopyTables >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscover >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD] >> TBlobStorageProxyTest::TestSingleFailureMirror >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2025-05-29T15:25:13.369778Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. 
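The ColumnTableDescription printed above for TOlapReboots::CreateStandaloneTable describes a standalone column table: a NOT NULL Timestamp key column `timestamp`, a nullable Utf8 column `data`, one column shard, and consistency-hash sharding by `timestamp`. A minimal sketch of client-side DDL that would produce a table of this shape, assuming the YDB C++ SDK's table client; the header paths, endpoint, and database strings are placeholders, not values taken from this run:

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        // Placeholder endpoint/database; the unit tests above spin up their own cluster.
        auto driver = TDriver(TDriverConfig()
            .SetEndpoint("grpc://localhost:2136")
            .SetDatabase("/MyRoot"));
        TTableClient client(driver);

        auto status = client.RetryOperationSync([](TSession session) {
            // DDL mirroring the description logged above: a column-store table
            // keyed by `timestamp`, with a nullable Utf8 `data` column.
            return session.ExecuteSchemeQuery(R"(
                CREATE TABLE ColumnTable (
                    timestamp Timestamp NOT NULL,
                    data Utf8,
                    PRIMARY KEY (timestamp)
                ) WITH (STORE = COLUMN);
            )").GetValueSync();
        });
        if (!status.IsSuccess()) {
            Cerr << status.GetIssues().ToString() << Endl;
            return 1;
        }
        driver.Stop(true);
        return 0;
    }

The `STORE = COLUMN` clause is what makes this a column table rather than a row table; the `HASH_FUNCTION_CONSISTENCY_64` sharding shown in the description is what the server reports for such a table, not something the DDL above sets explicitly.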
>> TOlapReboots::CreateStore [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestWrongRequest Test command err: 2025-05-29T15:24:40.083796Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888962353265587:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:24:40.083813Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d00/r3tmp/tmptQbgGt/pdisk_1.dat 2025-05-29T15:24:40.151942Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888962353265563:2079] 1748532280083444 != 1748532280083447 2025-05-29T15:24:40.153477Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4031, node 1 2025-05-29T15:24:40.170962Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:24:40.170975Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:24:40.170977Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:24:40.171018Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:24:40.187160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:24:40.187196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:18708 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:24:40.191412Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:24:40.225535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:24:40.228149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:18708 waiting... 2025-05-29T15:24:40.248189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:24:40.250340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.251033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:24:40.259885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.282456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:40.304068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:24:40.323225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2025-05-29T15:24:40.324312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.347640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.357724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.376537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.386278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:24:40.446107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.461162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:24:40.563735Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888962353266954:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.563759Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888962353266965:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.563770Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:24:40.564584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480 2025-05-29T15:24:40.572564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 2025-05-29T15:24:40.572656Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509888962353266968:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-05-29T15:24:40.659783Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509888962353267019:2854] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:24:40.729978Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509888962353267028:2380], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:24:40.730665Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWVlNGZhOTMtMWE0MDMyMjEtYTY4ODc0NWMtYzczNGNmMmY=, ActorId: [1:7509888962353266926:2368], ActorState: ExecuteState, TraceId: 01jweaaf7k4gsjrg9n9594tfmh, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:24:40.735316Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed
NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316)
NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317)
THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE)
THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460)
THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B)
NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13DFAE11)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E)
NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()+426 (0x13DFA68A)
NUnitTest::TTestFactory::Execute()+803 (0x141456A3)
NUnitTest::RunMain(int, char**)+3021 (0x1415724D)
??+0 (0x7FD775991D90)
__libc_start_main+128 (0x7FD775991E40)
_start+41 (0x12EC8029)
2025-05-29T15:24:43.755046Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888974209132888:2085];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:24:43.755296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d00/r3tmp/tmpUUUAbp/pdisk_1.dat
2025-05-29T15:24:43.887409Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 17460, node 1
2025-05-29T15:24:43.947043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) ...
roxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13DFAE11) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()+426 (0x13DFA68A) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7F211B6B8D90) __libc_start_main+128 (0x7F211B6B8E40) _start+41 (0x12EC8029) 2025-05-29T15:25:09.624133Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889085934295750:2192];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:09.624172Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d00/r3tmp/tmp1EqNVY/pdisk_1.dat 2025-05-29T15:25:09.795188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:09.795215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:09.812643Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:09.814266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:09.814420Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889085934295597:2079] 1748532309622034 != 1748532309622037 TServer::EnableGrpc on GrpcPort 62012, node 1 2025-05-29T15:25:09.863959Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:09.863972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:09.863974Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:09.864015Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
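Each of the aborted runs above ends the same way: the compile error surfaces as a failed requirement inside RunYqlDataQuery(), and the "VERIFY failed ... requirement operationResult.IsSuccess() failed" line followed by the NPrivate::Panic frames is produced by a verify-style assertion macro. A minimal sketch of that mechanism; the Panic function and VERIFY macro below are hypothetical stand-ins for the util internals visible in the backtraces, not the real implementation:

    #include <cstdarg>
    #include <cstdio>
    #include <cstdlib>

    // Hypothetical stand-in for the panic path seen in the backtraces
    // (NPrivate::Panic -> InternalPanicImpl): print location + message, abort.
    [[noreturn]] static void Panic(const char* file, int line, const char* fmt, ...) {
        std::fprintf(stderr, "VERIFY failed: %s:%d ", file, line);
        va_list args;
        va_start(args, fmt);
        std::vfprintf(stderr, fmt, args);
        va_end(args);
        std::fputc('\n', stderr);
        std::abort();  // the unit-test runner then symbolizes the stack
    }

    // Stringifying the condition is what puts the failed expression into the
    // log verbatim, e.g. "requirement operationResult.IsSuccess() failed".
    #define VERIFY(expr)                                                       \
        do {                                                                   \
            if (!(expr)) {                                                     \
                Panic(__FILE__, __LINE__, "requirement %s failed", #expr);     \
            }                                                                  \
        } while (false)

    struct TResult { bool Ok = false; bool IsSuccess() const { return Ok; } };

    int main() {
        TResult operationResult;             // stays unsuccessful, as in the log
        VERIFY(operationResult.IsSuccess()); // aborts with the message above
    }

Because the whole test fixture aborts rather than failing one assertion, a single compile error in InitKikimr takes down every test in the suite, which is why the same trace repeats across runs.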
2025-05-29T15:25:09.976450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:09.982482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:20610 waiting... 2025-05-29T15:25:10.045907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:25:10.051009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:25:10.056186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:10.071695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:10.075353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:25:10.142358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:10.155331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715663, at schemeshard: 72057594046644480 2025-05-29T15:25:10.156601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:10.178762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:10.194939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:10.208208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:10.220547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:10.235806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:10.253124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:10.269436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:10.350187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889090229264297:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:10.351069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480 2025-05-29T15:25:10.354212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 2025-05-29T15:25:10.354269Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889090229264299:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-05-29T15:25:10.354378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889090229264285:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:10.354394Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:10.427786Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889090229264350:2855] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:10.485486Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889090229264359:2380], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:25:10.486067Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzVjZjNiN2ItOTFiMTM2ZDItZGQyNjlkMTMtOTE1YWEyZDI=, ActorId: [1:7509889090229264258:2369], ActorState: ExecuteState, TraceId: 01jweabcaaap394b879f0vwy52, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:25:10.487065Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed
NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316)
NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317)
THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE)
THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460)
THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B)
NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13DFAE11)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E)
NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()+426 (0x13DFA68A)
NUnitTest::TTestFactory::Execute()+803 (0x141456A3)
NUnitTest::RunMain(int, char**)+3021 (0x1415724D)
??+0 (0x7F21971A1D90)
__libc_start_main+128 (0x7F21971A1E40)
_start+41 (0x12EC8029)
>> TestKinesisHttpProxy::TestConsumersEmptyNames
>> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot [FAIL]
>> YdbYqlClient::TestReadTableMultiShardWithDescribe
>> TConsoleTests::TestAttributesExtSubdomain [GOOD]
>> TConsoleTests::TestDatabaseQuotas
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD]
Test command err:
2025-05-29T15:24:20.995248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:24:20.995275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:24:20.995281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:24:20.995286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:24:20.995300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:24:20.995305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:24:20.995322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:24:20.995346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval#
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:20.995468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:20.995558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:21.000241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:21.000266Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:21.002829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:21.002903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:21.002927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2025-05-29T15:24:21.004923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:21.005056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:21.005191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:21.005308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:24:21.006102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:21.006146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:21.006460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:24:21.006471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:21.006505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:21.006516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-05-29T15:24:21.006524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:21.006600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2025-05-29T15:24:21.064007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2025-05-29T15:24:21.064116Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:21.064194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2025-05-29T15:24:21.064269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2025-05-29T15:24:21.064283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:21.065406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:21.065449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2025-05-29T15:24:21.065537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:21.065550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046578944 2025-05-29T15:24:21.065556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:21.065563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:21.066128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:21.066142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046578944 2025-05-29T15:24:21.066148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:21.066568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:21.066581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:21.066598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:21.066607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:21.067411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
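The schemeshard entries above trace one operation through its sub-state machine: the propose is accepted, TCreateParts finds no shards to create and moves on (2 -> 3), TConfigureParts hands off to TPropose (3 -> 128), and once the coordinator plans the step the operation reaches its terminal publish state (128 -> 240, as in the CreateStandaloneTable trace earlier). A compact sketch of that progression; the state numbers are exactly the ones logged, while the enum names are informal labels inferred from the surrounding messages, not schemeshard's real identifiers:

    #include <cstdio>

    // State codes as logged ("Change state for txid 1:0 2 -> 3", "3 -> 128",
    // "128 -> 240"); the names are guesses from the TCreateParts /
    // TConfigureParts / TPropose / TDone messages around them.
    enum class EOpState : int {
        CreateParts    = 2,    // create shard parts (may be a no-op)
        ConfigureParts = 3,    // push configs to the created parts
        Propose        = 128,  // wait for the coordinator's plan step
        Done           = 240,  // publish to scheme board, notify waiters
    };

    static EOpState Next(EOpState s) {
        switch (s) {
            case EOpState::CreateParts:    return EOpState::ConfigureParts;
            case EOpState::ConfigureParts: return EOpState::Propose;
            case EOpState::Propose:        return EOpState::Done;
            case EOpState::Done:           return EOpState::Done;
        }
        return EOpState::Done;
    }

    int main() {
        // Replays the progression logged for txid 1:0 above.
        for (EOpState s = EOpState::CreateParts; s != EOpState::Done; ) {
            EOpState n = Next(s);
            std::printf("Change state for txid 1:0 %d -> %d\n", (int)s, (int)n);
            s = n;
        }
    }

Keeping the transitions in one switch makes the logged `a -> b` pairs easy to check against the expected path when reading traces like the ones in this run.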
2025-05-29T15:24:21.067947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:24:21.068006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
2025-05-29T15:24:21.068275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944
2025-05-29T15:24:21.068284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true
2025-05-29T15:24:21.068289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944
2025-05-29T15:24:21.690701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944
2025-05-29T15:24:21.690778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944
2025-05-29T15:24:21.690789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944
2025-05-29T15:24:21.690856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:24:21.690865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944
2025-05-29T15:24:21.690901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1
2025-05-29T15:24:21.690911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944
2025-05-29T15:24:21.691536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944
2025-05-29T15:24:21.691548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1]
2025-05-29T15:24:21.691584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944
2025-05-29T15:24:21.691588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:836:2256], at schemeshard: 72057594046578944, txId: 1, path id: 1
202 ... ediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMinInplacedSizeHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMaxInplacedSizeHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMinInplacedSizeSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMaxInplacedSizeSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMinOccupancyPerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMaxOccupancyPerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMinLogChunkCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMaxLogChunkCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.MaxInProgressSyncCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control TabletControls.MaxCommitRedoMB was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.SlowDiskThreshold was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.PredictedDelayMultiplier was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.LongRequestThresholdMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.MaxNumOfSlowDisks was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.SlowDiskThresholdHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.PredictedDelayMultiplierHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.MaxNumOfSlowDisksHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.SlowDiskThresholdSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.PredictedDelayMultiplierSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.MaxNumOfSlowDisksSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.RequestReportingSettings.BucketSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.RequestReportingSettings.LeakDurationMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.RequestReportingSettings.LeakRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control PDiskControls.MaxCommonLogChunksHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control PDiskControls.MaxCommonLogChunksSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control PDiskControls.UseNoopSchedulerHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control PDiskControls.UseNoopSchedulerSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control BlobStorageControllerControls.EnableSelfHealWithDegraded was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control TableServiceControls.EnableMergeDatashardReads was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control TestShardControls.DisableWrites was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
2025-05-29T15:25:13.455921Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:193: StateInit, received event# 273481728, Sender [131:460:2398], Recipient [131:401:2357]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest
2025-05-29T15:25:13.455937Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:202: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest
2025-05-29T15:25:13.474915Z node 131 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" }
2025-05-29T15:25:13.486945Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:193: StateInit, received event# 273285146, Sender [131:407:2357], Recipient [131:401:2357]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false EnableScaleRecommender: true } } RawConsoleConfig { } }
2025-05-29T15:25:13.486956Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:199: StateInit, processing event TEvConsole::TEvConfigSubscriptionNotification
2025-05-29T15:25:13.487001Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: AllowEditYamlInUiItem
2025-05-29T15:25:13.487015Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [131:437:2371]: Config { } ItemKinds: 75 Local: true
2025-05-29T15:25:13.487042Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: MonitoringConfigItem
2025-05-29T15:25:13.487049Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [131:402:2358]: Config { } ItemKinds: 10 Local: true
2025-05-29T15:25:13.487053Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: MonitoringConfigItem
2025-05-29T15:25:13.487064Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [131:405:2356]: Config { } ItemKinds: 10 Local: true
2025-05-29T15:25:13.487089Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: ImmediateControlsConfigItem
2025-05-29T15:25:13.487097Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [131:460:2398]: Config { } ItemKinds: 39 Local: true
2025-05-29T15:25:13.488176Z node 131 :TENANT_POOL DEBUG: tenant_pool.cpp:486: TDomainTenantPool(dc-1) Got new monitoring config:
2025-05-29T15:25:13.488195Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [131:402:2358], Recipient [131:401:2357]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } }
2025-05-29T15:25:13.488201Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse
2025-05-29T15:25:13.488218Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [131:460:2398], Recipient [131:401:2357]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } }
2025-05-29T15:25:13.488225Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse
2025-05-29T15:25:13.488253Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [131:405:2356], Recipient [131:401:2357]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } }
2025-05-29T15:25:13.488257Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse
2025-05-29T15:25:13.488267Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [131:437:2371], Recipient [131:401:2357]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } }
2025-05-29T15:25:13.488270Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse
2025-05-29T15:25:13.511621Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273285146, Sender [131:407:2357], Recipient [131:401:2357]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false EnableScaleRecommender: true } ImmediateControlsConfig { GRpcControls { RequestConfigs { key: "FooBar" value { MaxInFlight: 10 } } } } Version { Items { Kind: 39 Id: 1 Generation: 1 } } } AffectedKinds: 39 RawConsoleConfig { ImmediateControlsConfig { GRpcControls { RequestConfigs { key: "FooBar" value { MaxInFlight: 10 } } } } Version { Items { Kind: 39 Id: 1 Generation: 1 } } } }
2025-05-29T15:25:13.511645Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:221: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification
2025-05-29T15:25:13.511680Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1036: Sending for kinds: ImmediateControlsConfigItem
2025-05-29T15:25:13.511698Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:361: Send TEvConsole::TEvConfigNotificationRequest to [131:460:2398]: Config { ImmediateControlsConfig { GRpcControls { RequestConfigs { key: "FooBar" value { MaxInFlight: 10 } } } } } ItemKinds: 39 Local: true
2025-05-29T15:25:13.511874Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:215: StateWork, received event# 273286162, Sender [131:460:2398], Recipient [131:401:2357]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } }
2025-05-29T15:25:13.511880Z node 131 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:227: StateWork, processing event TEvConsole::TEvConfigNotificationResponse
>> YdbYqlClient::CopyTables [GOOD]
>> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD]
Test command err:
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dad/r3tmp/tmpFWfJWO/pdisk_1.dat
2025-05-29T15:24:20.327766Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888875651450192:2284];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:24:20.327801Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-05-29T15:24:20.360429Z node 1 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#26,[::1]:2668) connection closed with error: Connection refused
2025-05-29T15:24:20.360618Z node 1 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:24:20.367043Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888875651449919:2079] 1748532260247801 != 1748532260247804
2025-05-29T15:24:20.370794Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:24:20.426887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:20.426931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:20.429914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:24:22.628907Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509888882669728050:2208];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:24:22.630833Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dad/r3tmp/tmpNdj1Hm/pdisk_1.dat
2025-05-29T15:24:22.663912Z node 2 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#28,[::1]:1744) connection closed with error: Connection refused
2025-05-29T15:24:22.669794Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:24:22.680184Z node 2 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:24:22.742948Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:22.742985Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:22.744693Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:24:24.948402Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509888891642093106:2066];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:24:24.948418Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dad/r3tmp/tmpSAbPwD/pdisk_1.dat
2025-05-29T15:24:24.966181Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509888891642093081:2079] 1748532264948199 != 1748532264948202
2025-05-29T15:24:24.967812Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:24:24.968325Z node 3 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#26,[::1]:14602) connection closed with error: Connection refused
2025-05-29T15:24:24.968684Z node 3 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:24:25.050310Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:25.050348Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:25.053489Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:24:27.238261Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509888904292573442:2061];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:24:27.238286Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dad/r3tmp/tmpZOSol9/pdisk_1.dat
2025-05-29T15:24:27.254179Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:24:27.254394Z node 4 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#28,[::1]:1708) connection closed with error: Connection refused
2025-05-29T15:24:27.255435Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509888904292573422:2079] 1748532267238123 != 1748532267238126
2025-05-29T15:24:27.257347Z node 4 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:24:27.342492Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:27.342518Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:27.343563Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:24:29.553371Z node 5 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7509888915177057298:2242];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:24:29.553421Z node 5 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dad/r3tmp/tmpcU9Ca7/pdisk_1.dat
2025-05-29T15:24:29.574109Z node 5 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:24:29.574651Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [5:7509888915177057095:2079] 1748532269552715 != 1748532269552718
2025-05-29T15:24:29.576177Z node 5 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#30,[::1]:9999) connection closed with error: Connection refused
2025-05-29T15:24:29.576266Z node 5 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:24:29.661926Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:29.661971Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:29.662986Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:24:31.849285Z node 6 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7509888921836553150:2063];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:24:31.849311Z node 6 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dad/r3tmp/tmpHCKUD1/pdisk_1.dat
2025-05-29T15:24:31.859879Z node 6 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#32,[::1]:28275) connection closed with error: Connection refused
2025-05-29T15:24:31.859949Z node 6 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:24:31.864477Z node 6 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:24:31.864764Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [6:7509888921836553129:2079] 1748532271849199 != 1748532271849202
2025-05-29T15:24:31.951892Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:31.951919Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:31.952937Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dad/r3tmp/tmpEysOoB/pdisk_1.dat
2025-05-29T15:24:34.208735Z node 7 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7509888933469537748:2208];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:24:34.218770Z node 7 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-05-29T15:24:34.243439Z node 7 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#34,[::1]:19445) connection closed with error: Connection refused
2025-05-29T15:24:34.243551Z node 7 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:24:34.258381Z node 7 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:24:34.258861Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [7:7509888933469537566:2079] 1748532274188910 != 1748532274188913
2025-05-29T15:24:34.309325Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:34.309361Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:34.310246Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dad/r3tmp/tmptREbgI/pdisk_1.dat
2025-05-29T15:24:36.538333Z node 8 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-05-29T15:24:36.549779Z node 8 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:24:36.550046Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [8:7509888942504993204:2079] 1748532276532623 != 1748532276532626
2025-05-29T15:24:36.552982Z node 8 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#36,[::1]:28474) connection closed with error: Connection refused
2025-05-29T15:24:36.553177Z node 8 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:24:36.639956Z node 8 :HIVE WARN: node_info.cpp:25: ... 15 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:24:53.013779Z node 15 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:53.013807Z node 15 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:53.014612Z node 15 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:24:55.256482Z node 16 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7509889025675027531:2066];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:24:55.256497Z node 16 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dad/r3tmp/tmpsH1GlD/pdisk_1.dat
2025-05-29T15:24:55.272886Z node 16 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:24:55.273166Z node 16 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [16:7509889025675027506:2079] 1748532295256251 != 1748532295256254
2025-05-29T15:24:55.277064Z node 16 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#38,[::1]:13689) connection closed with error: Connection refused
2025-05-29T15:24:55.277145Z node 16 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:24:55.361951Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:55.361975Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:55.367234Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dad/r3tmp/tmpmV6yEc/pdisk_1.dat
2025-05-29T15:24:57.642833Z node 17 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-05-29T15:24:57.648617Z node 17 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#26,[::1]:11341) connection closed with error: Connection refused
2025-05-29T15:24:57.649008Z node 17 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:24:57.649073Z node 17 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:24:57.715158Z node 17 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:24:57.715190Z node 17 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:24:57.719026Z node 17 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:24:59.942869Z node 18 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7509889041561831312:2092];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:24:59.944544Z node 18 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dad/r3tmp/tmpgTFYaE/pdisk_1.dat
2025-05-29T15:24:59.971758Z node 18 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#28,[::1]:6089) connection closed with error: Connection refused
2025-05-29T15:24:59.974833Z node 18 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:24:59.977579Z node 18 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:25:00.045087Z node 18 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:25:00.045116Z node 18 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:25:00.046252Z node 18 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dad/r3tmp/tmpw4Axnq/pdisk_1.dat
2025-05-29T15:25:02.394897Z node 19 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-05-29T15:25:02.403346Z node 19 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#30,[::1]:19585) connection closed with error: Connection refused
2025-05-29T15:25:02.403759Z node 19 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:25:02.406151Z node 19 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:25:02.475295Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:25:02.475333Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:25:02.476378Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dad/r3tmp/tmpLmZwsP/pdisk_1.dat
2025-05-29T15:25:04.696114Z node 20 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7509889064818620379:2072];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:25:04.696379Z node 20 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-05-29T15:25:04.731141Z node 20 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#32,[::1]:26587) connection closed with error: Connection refused
2025-05-29T15:25:04.732921Z node 20 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:25:04.732993Z node 20 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:25:04.787124Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:25:04.787154Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:25:04.793896Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dad/r3tmp/tmpOlq8Xz/pdisk_1.dat
2025-05-29T15:25:07.158873Z node 21 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-05-29T15:25:07.168729Z node 21 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:25:07.170853Z node 21 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [21:7509889078514365923:2079] 1748532307126084 != 1748532307126087
2025-05-29T15:25:07.175292Z node 21 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#34,[::1]:18838) connection closed with error: Connection refused
2025-05-29T15:25:07.175503Z node 21 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:25:07.236707Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:25:07.236744Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:25:07.240754Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:25:09.558276Z node 22 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7509889084815713219:2202];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:25:09.568874Z node 22 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dad/r3tmp/tmp7DWviF/pdisk_1.dat
2025-05-29T15:25:09.585028Z node 22 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:25:09.585362Z node 22 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [22:7509889084815713054:2079] 1748532309556693 != 1748532309556696
2025-05-29T15:25:09.590067Z node 22 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#24,[::1]:22124) connection closed with error: Connection refused
2025-05-29T15:25:09.594808Z node 22 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:25:09.660947Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:25:09.660973Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:25:09.663073Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:25:11.926982Z node 23 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7509889094310595898:2072];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:25:11.927034Z node 23 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dad/r3tmp/tmpJIADJN/pdisk_1.dat
2025-05-29T15:25:11.963440Z node 23 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#38,[::1]:17533) connection closed with error: Connection refused
2025-05-29T15:25:11.966836Z node 23 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:25:11.967742Z node 23 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:25:11.970772Z node 23 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [23:7509889094310595845:2079] 1748532311922672 != 1748532311922675
2025-05-29T15:25:12.026187Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:25:12.026216Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:25:12.027421Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateStore [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:24:39.253429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:24:39.253452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:24:39.253459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:24:39.253464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:24:39.253469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:24:39.253474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:24:39.253483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:24:39.253499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:24:39.253615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:24:39.253700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:24:39.268254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true }
2025-05-29T15:24:39.268277Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:24:39.268383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062]
2025-05-29T15:24:39.275518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:24:39.275563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:24:39.275608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:24:39.279865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:24:39.279948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:24:39.280073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:24:39.280238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:24:39.280910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:24:39.280953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:24:39.281222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:24:39.281234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:24:39.281269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:24:39.281278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:24:39.281284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:24:39.281303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212]
IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212]
Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212]
2025-05-29T15:24:39.282729Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062]
2025-05-29T15:24:39.314472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:24:39.314554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:24:39.314617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:24:39.314667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:24:39.314677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:24:39.323275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:24:39.323325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:24:39.323405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:24:39.323427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:24:39.323434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:24:39.323440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:24:39.324844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:24:39.324866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:24:39.324873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:24:39.325911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:24:39.325931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:24:39.325940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:24:39.325950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:24:39.326834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:24:39.330953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:24:39.331051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062]
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:24:39.331343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:24:39.331409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:24:39.331437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:24:39.331552Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch
... ode 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1002:0 128 -> 129
2025-05-29T15:25:14.328231Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-05-29T15:25:14.328244Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-05-29T15:25:14.328387Z node 68 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1002;fline=tx_controller.cpp:214;event=finished_tx;tx_id=1002;
FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003
2025-05-29T15:25:14.328679Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:25:14.328687Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:25:14.328725Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-05-29T15:25:14.328748Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:25:14.328754Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [68:208:2209], at schemeshard: 72057594046678944, txId: 1002, path id: 1
2025-05-29T15:25:14.328759Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [68:208:2209], at schemeshard: 72057594046678944, txId: 1002, path id: 3
2025-05-29T15:25:14.328768Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944
2025-05-29T15:25:14.328774Z node 68 :FLAT_TX_SCHEMESHARD INFO: create_store.cpp:245: TCreateOlapStore TProposedWaitParts operationId# 1002:0 ProgressState at tablet: 72057594046678944
2025-05-29T15:25:14.328781Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: create_store.cpp:268: TCreateOlapStore TProposedWaitParts operationId# 1002:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546
2025-05-29T15:25:14.329065Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002
2025-05-29T15:25:14.329079Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002
2025-05-29T15:25:14.329084Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002
2025-05-29T15:25:14.329093Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7
2025-05-29T15:25:14.329098Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-05-29T15:25:14.329344Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002
2025-05-29T15:25:14.329355Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002
2025-05-29T15:25:14.329360Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002
2025-05-29T15:25:14.329364Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3
2025-05-29T15:25:14.329368Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4
2025-05-29T15:25:14.329378Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true
2025-05-29T15:25:14.329514Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275
2025-05-29T15:25:14.329731Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002
2025-05-29T15:25:14.329923Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002
2025-05-29T15:25:14.347493Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6146: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1002
2025-05-29T15:25:14.347518Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0
2025-05-29T15:25:14.347541Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1002
FAKE_COORDINATOR: Erasing txId 1002
2025-05-29T15:25:14.347987Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944
2025-05-29T15:25:14.348026Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944
2025-05-29T15:25:14.348033Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1002:0 ProgressState
2025-05-29T15:25:14.348048Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1
2025-05-29T15:25:14.348053Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1
2025-05-29T15:25:14.348062Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1
2025-05-29T15:25:14.348065Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1
2025-05-29T15:25:14.348070Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true
2025-05-29T15:25:14.348083Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [68:370:2347] message: TxId: 1002
2025-05-29T15:25:14.348090Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1
2025-05-29T15:25:14.348095Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1002:0
2025-05-29T15:25:14.348099Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1002:0
2025-05-29T15:25:14.348128Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-05-29T15:25:14.348584Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult
2025-05-29T15:25:14.348595Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [68:371:2348]
TestWaitNotification: OK eventTxId 1002
2025-05-29T15:25:14.348692Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:25:14.348744Z node 68 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 59us result status StatusSuccess
2025-05-29T15:25:14.348875Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TestYmqHttpProxy::TestReceiveMessageWithAttributes
>> TestKinesisHttpProxy::ListShards
|66.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest
>> TBlobStorageProxyTest::TestProxyLongTailDiscover [GOOD]
>> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi
>> ApplyClusterEndpointTest::NoPorts [GOOD]
>> ApplyClusterEndpointTest::PortFromCds [GOOD]
>> ApplyClusterEndpointTest::PortFromDriver [GOOD]
>> BasicUsage::MaxByteSizeEqualZero
>> TestYmqHttpProxy::TestGetQueueUrlWithIAM
|66.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest
>> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy [GOOD]
>> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter
>> YdbYqlClient::TestReadTableMultiShardWithDescribe [FAIL]
>> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit
>> TargetDiscoverer::Dirs
>> TBlobStorageProxyTest::TestSingleFailureMirror [GOOD]
>> TBlobStorageProxyTest::TestVBlockVPutVGet
>> TargetDiscoverer::IndexedTable
>> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant [GOOD]
>> TConsoleTests::TestRemoveServerlessTenant
>> TargetDiscoverer::Dirs [GOOD]
>> TargetDiscoverer::InvalidCredentials
>> TestKinesisHttpProxy::TestListStreamConsumers
>> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [FAIL]
>> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD]
>> TargetDiscoverer::IndexedTable [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelShouldSucceedOnViewsAndTables [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:23:31.699470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:23:31.699502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:31.699508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:23:31.699514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:23:31.699528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:23:31.699533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:23:31.699543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:23:31.699561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:23:31.699667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:23:31.699762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:23:31.714139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true }
2025-05-29T15:23:31.714172Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:23:31.714275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062]
2025-05-29T15:23:31.716994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:23:31.717039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:23:31.717077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:23:31.720806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:23:31.720933Z node 1 :FLAT_TX_SCHEMESHARD INFO:
schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:31.721069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:31.722076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:31.723129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:31.723200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:31.723524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:31.723539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:31.723588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:31.723599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:31.723607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:31.723638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:23:31.725388Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:23:31.745906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:31.745997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.746072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:31.746127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:31.746141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.748051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:31.748093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:31.748148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.748160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:31.748167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:31.748173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:31.748777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.748793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:31.748797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:31.750072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.750088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:31.750095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:31.750103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:31.750839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:31.752343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:31.752393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
2025-05-29T15:23:31.752645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:31.752680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:31.752686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:31.752765Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... OORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000009 2025-05-29T15:25:12.328335Z node 206 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:12.328362Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 884763265132 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:25:12.328370Z node 206 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:129: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000009, at schemeshard: 72057594046678944 2025-05-29T15:25:12.328396Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:180: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-29T15:25:12.328408Z node 206 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-29T15:25:12.328412Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:25:12.328418Z node 206 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-29T15:25:12.328421Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:25:12.328430Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:25:12.328465Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:25:12.328471Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-29T15:25:12.328477Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:25:12.328481Z node 206 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-29T15:25:12.328485Z node 206 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710761:0 2025-05-29T15:25:12.328494Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:25:12.328499Z node 206 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-29T15:25:12.328503Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-05-29T15:25:12.328506Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-05-29T15:25:12.328675Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:25:12.328705Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:25:12.328714Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:25:12.328775Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:25:12.328965Z node 206 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:25:12.329225Z node 206 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:25:12.329232Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:25:12.329271Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:25:12.329292Z node 206 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:12.329296Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [206:208:2209], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-29T15:25:12.329301Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [206:208:2209], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 5 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-29T15:25:12.329436Z node 206 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 
2025-05-29T15:25:12.329446Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:25:12.329450Z node 206 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-29T15:25:12.329455Z node 206 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-29T15:25:12.329459Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-05-29T15:25:12.329557Z node 206 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:25:12.329566Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:25:12.329569Z node 206 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-29T15:25:12.329573Z node 206 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:25:12.329577Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:25:12.329585Z node 206 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-29T15:25:12.329589Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [206:126:2151] 2025-05-29T15:25:12.329650Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:25:12.329654Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:25:12.329662Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:25:12.330938Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 
2025-05-29T15:25:12.331164Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:25:12.331183Z node 206 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-29T15:25:12.331193Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710761 2025-05-29T15:25:12.331201Z node 206 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute 2025-05-29T15:25:12.331205Z node 206 :EXPORT DEBUG: schemeshard_export__create.cpp:1232: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-29T15:25:12.331210Z node 206 :EXPORT DEBUG: schemeshard_export__create.cpp:1263: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1004, itemIdx# 4294967295 2025-05-29T15:25:12.331256Z node 206 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:25:12.336786Z node 206 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-29T15:25:12.336860Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:25:12.336868Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:25:12.336942Z node 206 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:25:12.336967Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:25:12.336972Z node 206 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [206:763:2720] TestWaitNotification: OK eventTxId 1004 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Dirs [GOOD] Test command err: 2025-05-29T15:25:17.616519Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889120929381749:2196];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:17.618185Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001a83/r3tmp/tmpUUqjR3/pdisk_1.dat 2025-05-29T15:25:17.714825Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889120929381592:2079] 1748532317613331 != 1748532317613334 2025-05-29T15:25:17.717087Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:2588 TServer::EnableGrpc on GrpcPort 25021, node 1 2025-05-29T15:25:17.764882Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:17.764894Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2025-05-29T15:25:17.764896Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:17.764942Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:17.771296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:17.771321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:17.775128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2588 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:17.834261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:17.837720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:17.841738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:17.880023Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:27: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1748532317882, tx_id: 1 } } } 2025-05-29T15:25:17.880037Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:42: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-05-29T15:25:17.889318Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:247: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Dir, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1748532317889, tx_id: 281474976715658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-05-29T15:25:17.889330Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:260: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-05-29T15:25:17.892574Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:247: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532317910, tx_id: 281474976715659 } }] } } 2025-05-29T15:25:17.892582Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:260: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root/Dir 2025-05-29T15:25:18.083528Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:98: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532317910, tx_id: 281474976715659 } } } 2025-05-29T15:25:18.083543Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:113: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Dir/Table 2025-05-29T15:25:18.083549Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:120: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Dir/Table, dstPath# /Root/Replicated/Dir/Table, kind# Table >> TConsoleTests::TestDatabaseQuotas [GOOD] >> TConsoleTests::TestDatabaseQuotasBadOverallQuota >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD] >> TargetDiscoverer::InvalidCredentials [GOOD] |66.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |66.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] Test command err: 2025-05-29T15:25:14.204984Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:415} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/ciyv/001cc6/r3tmp/tmp0Xf3tP//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0} PDiskId# 1 2025-05-29T15:25:14.205626Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:103: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |66.5%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::IndexedTable [GOOD] Test command err: 2025-05-29T15:25:18.169991Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889124644913969:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:18.170163Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001a7e/r3tmp/tmpJFN9dl/pdisk_1.dat 2025-05-29T15:25:18.297111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:18.297265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:18.302009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:18.332017Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:7333 TServer::EnableGrpc on GrpcPort 1961, node 1 2025-05-29T15:25:18.375545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:18.375557Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:18.375559Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:18.375604Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7333 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:18.508444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:18.511705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:18.512687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:18.663827Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:27: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1748532318554, tx_id: 1 } } } 2025-05-29T15:25:18.663839Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:42: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-05-29T15:25:18.667598Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:247: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532318666, tx_id: 281474976715658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-05-29T15:25:18.667608Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:260: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-05-29T15:25:18.851378Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:98: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532318666, tx_id: 281474976715658 } } } 2025-05-29T15:25:18.851389Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:113: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-05-29T15:25:18.851395Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:120: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table 2025-05-29T15:25:18.851406Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:140: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table/Index, dstPath# /Root/Replicated/Table/Index/indexImplTable, kind# IndexTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD] Test command err: 2025-05-29T15:25:09.987193Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889085096783481:2079];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:09.987214Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001eac/r3tmp/tmpLS8VI5/pdisk_1.dat 2025-05-29T15:25:10.191432Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3519, node 1 2025-05-29T15:25:10.217452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:10.217478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:10.229642Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:10.238803Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:10.238814Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:10.238816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:10.238858Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11770 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:10.355844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:10.383114Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:542: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:35822 Call 2025-05-29T15:25:10.395021Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:542: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:35822 2025-05-29T15:25:11.145054Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:542: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:35822 Call Call 2025-05-29T15:25:11.175538Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:578: Skip check permission connect db, user is a admin, database: /Root, user: root@builtin, from ip: ipv6:[::1]:35822 2025-05-29T15:25:11.187291Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:578: Skip check permission connect db, user is a admin, database: /Root, user: root@builtin, from ip: ipv6:[::1]:35830 2025-05-29T15:25:11.187899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001eac/r3tmp/tmpRLG8ix/pdisk_1.dat 2025-05-29T15:25:11.953836Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:25:12.084380Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29708, node 4 2025-05-29T15:25:12.123292Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:12.123304Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:12.123306Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:12.123345Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:25:12.221475Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:12.221505Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:12.231166Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:12.539428Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001eac/r3tmp/tmp1aCz1Z/pdisk_1.dat 2025-05-29T15:25:13.676275Z node 7 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:25:13.771661Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:13.771697Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:13.787029Z node 7 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:13.792751Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2167, node 7 2025-05-29T15:25:13.818279Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:13.818287Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:13.818289Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:13.818331Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29800 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:13.919260Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:13.931293Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:14.543164Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:426: TCreateTable Propose, path: /Root/Table-1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:25:14.543432Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:25:14.543438Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:25:14.548859Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Table-1 2025-05-29T15:25:14.596646Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 1748532314641, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-29T15:25:14.605888Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715658:0 2025-05-29T15:25:14.612726Z node 7 :FLAT_TX_SCHEMESHARD NOT ... arrier, tx: 281474976715687, done: 0, blocked: 1 2025-05-29T15:25:15.189559Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715687:0 2025-05-29T15:25:15.191492Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_table.cpp:492: TDropTable Propose, path: Root/Table-8, pathId: 0, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-05-29T15:25:15.191530Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715688:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:25:15.192236Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715688, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-8 2025-05-29T15:25:15.193804Z node 7 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037892 not found 2025-05-29T15:25:15.194029Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-29T15:25:15.200805Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 1748532315250, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-29T15:25:15.201967Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1103: All parts have reached barrier, tx: 281474976715688, done: 0, blocked: 1 2025-05-29T15:25:15.203873Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715688:0 2025-05-29T15:25:15.223373Z node 7 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 
72075186224037893 not found 2025-05-29T15:25:15.226954Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001eac/r3tmp/tmpB6Aqak/pdisk_1.dat 2025-05-29T15:25:15.901585Z node 10 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:25:16.019132Z node 10 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:16.020671Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:16.020687Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:16.027536Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27005, node 10 2025-05-29T15:25:16.063680Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:16.063692Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:16.063693Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:16.063740Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17891 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:16.137488Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:16.149239Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:17891 2025-05-29T15:25:16.558423Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:17891 TClient::Ls request: Root/Test TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Test" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532316643 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Test" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) 2025-05-29T15:25:16.662451Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:17891 TClient::Ls request: Root/Test TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Test" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532316643 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Test" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... 
(TRUNCATED) 2025-05-29T15:25:17.570830Z node 13 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7509889121110849220:2224];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:17.572683Z node 13 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001eac/r3tmp/tmplVwqNZ/pdisk_1.dat 2025-05-29T15:25:17.690863Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:17.690887Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:17.701017Z node 13 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:17.702398Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14440, node 13 2025-05-29T15:25:17.715257Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:17.715270Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:17.715272Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:17.715317Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2973 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
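TClient::Ls in these traces is the test harness's wrapper over a scheme describe: the PathDescription blocks it prints (PathType, PathVersion, TableSchemaVersion, and so on) are schemeshard state, and the harness polls until the root directory exists. A rough SDK-side equivalent of that WaitRootIsUp loop, with the retry budget and sleep interval as assumptions:

    // Sketch of a WaitRootIsUp-style readiness probe; attempts/interval are assumptions.
    #include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>
    #include <chrono>
    #include <thread>

    bool WaitRootIsUp(NYdb::NScheme::TSchemeClient& client, int attempts = 10) {
        for (int i = 0; i < attempts; ++i) {
            auto result = client.DescribePath("/Root").GetValueSync();
            if (result.IsSuccess() &&
                result.GetEntry().Type == NYdb::NScheme::ESchemeEntryType::Directory) {
                return true;  // the condition the log reports as "WaitRootIsUp 'Root' success."
            }
            std::this_thread::sleep_for(std::chrono::milliseconds(200));
        }
        return false;
    }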
2025-05-29T15:25:17.799260Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:25:17.811000Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:18.684414Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:25:18.747505Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> TestKinesisHttpProxy::ListShardsEmptyFields >> TargetDiscoverer::Basic >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3dc [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::InvalidCredentials [GOOD] Test command err: 2025-05-29T15:25:18.573379Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889122864348743:2079];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:18.573646Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001a72/r3tmp/tmpRJcIdz/pdisk_1.dat 2025-05-29T15:25:18.705993Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:18.747323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:18.747353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:18.752325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29829 TServer::EnableGrpc on GrpcPort 3174, node 1 2025-05-29T15:25:18.810968Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:18.810980Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:18.810982Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:18.811030Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29829 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:18.897766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:18.901005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:25:18.901886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:19.075183Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:27: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: CLIENT_UNAUTHENTICATED, issues: {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:217: Cannot find user: user } } } 2025-05-29T15:25:19.075198Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:78: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# CLIENT_UNAUTHENTICATED, issues# {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:217: Cannot find user: user } |66.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |66.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |66.5%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema >> BasicUsage::MaxByteSizeEqualZero [FAIL] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> TargetDiscoverer::Negative |66.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3dc [GOOD] |66.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative [GOOD] >> TargetDiscoverer::Basic [GOOD] >> ReadSessionImplTest::ForcefulDestroyPartitionStream >> TargetDiscoverer::SystemObjects >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative [GOOD] Test command err: 2025-05-29T15:25:20.895593Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889132455724489:2145];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:20.895828Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001a5d/r3tmp/tmplIF2y5/pdisk_1.dat 2025-05-29T15:25:21.086109Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:3652 TServer::EnableGrpc on GrpcPort 1387, node 1 2025-05-29T15:25:21.171010Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:21.171028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:21.171030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:21.171088Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:21.235095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:21.235135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:3652 2025-05-29T15:25:21.239199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:21.293238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:21.303517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:25:21.334905Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:27: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2025-05-29T15:25:21.334924Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:78: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/Table, status# SCHEME_ERROR, issues# {
: Error: Path not found } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/slow/unittest >> SlowTopicAutopartitioning::CDC_Write [FAIL] Test command err: 2025-05-29T15:25:10.909426Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889091637753182:2212];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00258b/r3tmp/tmpnggG7k/pdisk_1.dat 2025-05-29T15:25:10.983220Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:10.983322Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:25:11.037725Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889091637752995:2079] 1748532310880223 != 1748532310880226 2025-05-29T15:25:11.039808Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25159, node 1 2025-05-29T15:25:11.058926Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/00258b/r3tmp/yandex9XI4f3.tmp 2025-05-29T15:25:11.058937Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/00258b/r3tmp/yandex9XI4f3.tmp 2025-05-29T15:25:11.059006Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/00258b/r3tmp/yandex9XI4f3.tmp 2025-05-29T15:25:11.059055Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:11.063925Z INFO: TTestServer started on Port 28530 GrpcPort 25159 2025-05-29T15:25:11.075263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:11.075285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:11.078997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28530 PQClient connected to localhost:25159 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
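The two TargetDiscoverer failures earlier in this stretch are deliberate negative cases: InvalidCredentials surfaces CLIENT_UNAUTHENTICATED from the login provider ("Cannot find user: user"), and Negative surfaces SCHEME_ERROR ("Path not found"). Both funnel through the same status check at target_discoverer.cpp:78; a hedged sketch of that pattern, with the helper name invented for illustration:

    // Sketch: error handling in the spirit of target_discoverer.cpp:78.
    // DescribeOrLog is a hypothetical helper name, not taken from the source.
    #include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>
    #include <util/stream/output.h>

    void DescribeOrLog(NYdb::NScheme::TSchemeClient& client, const TString& path) {
        auto result = client.DescribePath(path).GetValueSync();
        if (!result.IsSuccess()) {
            // Prints the same shape as the log line:
            //   Describe path failed: path# ..., status# ..., issues# ...
            Cerr << "Describe path failed: path# " << path
                 << ", status# " << result.GetStatus()
                 << ", issues# " << result.GetIssues().ToString() << Endl;
            return;
        }
        Cerr << "Describe path succeeded: path# " << path << Endl;
    }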
2025-05-29T15:25:11.127290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:11.131892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:11.142803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:11.217846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:11.610478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889095932721094:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:11.610511Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889095932721113:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:11.610518Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:11.611732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:25:11.615662Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889095932721140:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:11.615789Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:11.617219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-05-29T15:25:11.617759Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889095932721115:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:25:11.696626Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889095932721171:2431] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:11.710384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:25:11.722021Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889095932721189:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:11.722713Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWYzYzQ3MDctMjVlMjZiMjQtYmNjNjIyN2QtYWZjOWI1Yzk=, ActorId: [1:7509889095932721083:2334], ActorState: ExecuteState, TraceId: 01jweabdhn0d8msaftm5rvw260, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:11.723134Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:11.728376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:25:11.769687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:25:11.809383Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889095932721428:2379], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:11.810056Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWYyYzU0ZmYtYzZhNmM3MDMtOGJiMTM4N2QtN2M1YWQwNDE=, ActorId: [1:7509889095932721425:2377], ActorState: ExecuteState, TraceId: 01jweabdqc870ezykmc1b5ht8w, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x139E343C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B967C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x26196F74) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x261966D8) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x26195922) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x26193977) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x2618C0B8) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x2618B1B6) NKikimr::NPQ::NTest::CreateSetup()+178 (0x260B1C72) NKikimr::NTestSuiteSlowTopicAutopartitioning::TTestCaseCDC_Write::Execute_(NUnitTest::TTestContext&)+33 (0x138CDE31) NKikimr::NTestSuiteSlowTopicAutopartitioning::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x138D2D67) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B9867E) NKikimr::NTestSuiteSlowTopicAutopartitioning::TCurrentTest::Execute()+481 (0x138D25A1) NUnitTest::TTestFactory::Execute()+803 (0x13B98DF3) NUnitTest::RunMain(int, char**)+3021 (0x13BAA99D) ??+0 (0x7FED7ABE0D90) __libc_start_main+128 (0x7FED7ABE0E40) _start+41 (0x12A26029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Basic [GOOD] Test command err: 2025-05-29T15:25:20.554872Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889132039111933:2080];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:20.555147Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001a71/r3tmp/tmpfx6C4v/pdisk_1.dat 2025-05-29T15:25:20.672934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:20.672960Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:20.687965Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:20.693622Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:12671 TServer::EnableGrpc on GrpcPort 22963, node 1 2025-05-29T15:25:20.762955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:20.762967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:20.762970Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-05-29T15:25:20.763015Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12671 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:20.878343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:20.883124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:20.884102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:20.982644Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:27: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1748532320927, tx_id: 1 } } } 2025-05-29T15:25:20.982655Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:42: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-05-29T15:25:20.985620Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:247: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532320983, tx_id: 281474976715658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-05-29T15:25:20.985628Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:260: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-05-29T15:25:21.130281Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:98: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532320983, tx_id: 281474976715658 } } } 2025-05-29T15:25:21.130291Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:113: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-05-29T15:25:21.130296Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:120: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] >> TConsoleTests::TestDatabaseQuotasBadStorageQuota >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-05-29T15:25:21.880029Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.880038Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.880043Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:21.890843Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:21.891253Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:21.897902Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.899910Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:21.900374Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.900380Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.900383Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:21.910591Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-05-29T15:25:21.911373Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:21.911449Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.914823Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:21.914968Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-05-29T15:25:21.916114Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.916120Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.916123Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:21.916184Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:21.930820Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:21.930914Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.931065Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:21.931366Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.934923Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:25:21.935834Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:21.935859Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-05-29T15:25:21.936397Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.936404Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.936412Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:21.947203Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:21.970844Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:21.970944Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.971698Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2025-05-29T15:25:21.972055Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:25:21.972096Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-05-29T15:25:21.972192Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-05-29T15:25:21.972203Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (1-4) 2025-05-29T15:25:21.972231Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:21.972237Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-05-29T15:25:21.972246Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-05-29T15:25:21.972286Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-05-29T15:25:21.972294Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-05-29T15:25:21.972299Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-05-29T15:25:21.972302Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-05-29T15:25:21.972321Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-05-29T15:25:21.972339Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-05-29T15:25:21.972343Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-05-29T15:25:21.972347Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-05-29T15:25:21.972361Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). 
Partition stream id: 1 Getting new event 2025-05-29T15:25:21.972367Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-05-29T15:25:21.972371Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-05-29T15:25:21.972374Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-05-29T15:25:21.972390Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-05-29T15:25:21.972804Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.972811Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.972815Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:21.982800Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:21.991775Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:21.991856Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.991968Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-05-29T15:25:21.992176Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:25:21.992208Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-05-29T15:25:21.994854Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-05-29T15:25:21.994878Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-05-29T15:25:21.995130Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:21.995142Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-05-29T15:25:21.995148Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-05-29T15:25:21.995152Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-05-29T15:25:21.995165Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-05-29T15:25:21.995227Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 5). Partition stream id: 1 Getting new event 2025-05-29T15:25:21.995265Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-05-29T15:25:21.995269Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-05-29T15:25:21.995272Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-05-29T15:25:21.995276Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-05-29T15:25:21.995280Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-05-29T15:25:21.995301Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). 
Partition stream id: 1 2025-05-29T15:25:21.995823Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.995829Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.995833Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:21.998125Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:21.998957Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:21.999019Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:21.999107Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:21.999263Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:25:21.999331Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:25:21.999397Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-05-29T15:25:21.999405Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-05-29T15:25:21.999425Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:21.999430Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-05-29T15:25:21.999434Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-05-29T15:25:21.999437Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-05-29T15:25:21.999445Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-05-29T15:25:21.999448Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-05-29T15:25:21.999478Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). 
Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2025-05-29T15:25:21.999505Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } >> ReadSessionImplTest::DecompressRaw |66.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] >> TestYmqHttpProxy::TestGetQueueAttributes >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> TargetDiscoverer::Transfer >> TargetDiscoverer::SystemObjects [GOOD] >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [FAIL] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults >> TargetDiscoverer::Transfer [GOOD] >> TSchemeShardServerLess::StorageBilling [GOOD] >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects [GOOD] Test command err: 2025-05-29T15:25:22.023161Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889135951444244:2087];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:22.023443Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001a4b/r3tmp/tmp0tbs1t/pdisk_1.dat 2025-05-29T15:25:22.253644Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:5690 TServer::EnableGrpc on GrpcPort 6220, node 1 2025-05-29T15:25:22.318935Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:22.318951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:22.318953Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:22.318999Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:22.363015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
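The ReadSessionImplTest traces above exercise the read-session event loop end to end: reconnect with backoff, "Server session id", partition stream create/confirm, decompression tasks, DataReceived batches, and "Commit offsets [a, b)". A rough client-side loop over the same events, written against the newer topic API (the test targets the older persqueue_public API, so treat the names as an approximation; session-closed handling is omitted):

    // Sketch: topic-API approximation of the persqueue_public event flow in the log.
    #include <ydb/public/sdk/cpp/client/ydb_topic/topic.h>

    void ConsumeOnce(NYdb::TDriver& driver) {
        NYdb::NTopic::TTopicClient client(driver);
        auto settings = NYdb::NTopic::TReadSessionSettings()
            .ConsumerName("consumer")   // assumption: consumer and topic names
            .AppendTopics("TestTopic");
        auto session = client.CreateReadSession(settings);

        while (auto event = session->GetEvent(/*block=*/true)) {
            using namespace NYdb::NTopic;
            if (auto* start = std::get_if<TReadSessionEvent::TStartPartitionSessionEvent>(&*event)) {
                start->Confirm();       // "Confirm partition stream create" in the log
            } else if (auto* data = std::get_if<TReadSessionEvent::TDataReceivedEvent>(&*event)) {
                data->Commit();         // "Commit offsets [a, b)" in the log
                break;
            }
        }
    }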
2025-05-29T15:25:22.363044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:22.367086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5690 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:22.426478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:22.429424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:22.430242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:22.475177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:25:22.476159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:22.526962Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:27: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1748532322474, tx_id: 1 } } } 2025-05-29T15:25:22.526974Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:42: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-05-29T15:25:22.534833Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:247: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532322495, tx_id: 281474976715658 } }, { name: export-100500, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1748532322516, tx_id: 281474976715659 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-05-29T15:25:22.534847Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:260: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-05-29T15:25:22.740461Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:98: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532322495, tx_id: 281474976715658 } } } 2025-05-29T15:25:22.740473Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:113: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-05-29T15:25:22.740478Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:120: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ReadSessionImplTest::UsesOnRetryStateDuringRetries |66.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Transfer [GOOD] Test command err: 2025-05-29T15:25:23.179141Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889144924873508:2228];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:23.181142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001a36/r3tmp/tmp2IfBFS/pdisk_1.dat 2025-05-29T15:25:23.245036Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:9468 TServer::EnableGrpc on GrpcPort 25621, node 1 2025-05-29T15:25:23.362939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:23.362951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:23.362953Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2025-05-29T15:25:23.362990Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9468 2025-05-29T15:25:23.407101Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:23.407126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:23.411207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:23.462005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:23.471271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:23.534115Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:27: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Topic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1748532323538, tx_id: 281474976715658 } } } 2025-05-29T15:25:23.534127Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:42: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root/Topic 2025-05-29T15:25:23.537148Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:166: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTopicResponse { Result: { status: SUCCESS, issues: } } 2025-05-29T15:25:23.537157Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:181: [TargetDiscoverer][rid 1] Describe topic succeeded: path# /Root/Topic 2025-05-29T15:25:23.537163Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:191: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Topic, dstPath# /Root/Replicated/Table, kind# Transfer >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:36.778414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:36.778448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:36.778454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:36.778459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:36.778474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:36.778478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:36.778489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:36.778504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:36.778621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:36.778716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:36.795639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:36.795670Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:36.803262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:36.803473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:36.803553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:36.808490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:36.808709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:36.808868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:36.808962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:36.809594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:36.809647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:36.809993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:36.810010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:36.810032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:36.810044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:36.810051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:36.810096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:24:36.811798Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:36.836996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:36.837111Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:36.837200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:36.837259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:36.837273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:36.839456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:36.839509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:36.839615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:36.839630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:36.839636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:36.839643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:36.842059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:36.842090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:36.842100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:36.847932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:36.847964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:36.847975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:36.847988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:36.848878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-05-29T15:24:36.850267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:36.850336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:36.850581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:36.850626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:36.850637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:36.850760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:36.850771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:36.850818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:36.850834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:36.852981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:36.853000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:36.853074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
d: 72075186233409549, txId: 107, path id: 1 2025-05-29T15:25:23.767776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:664:2575], at schemeshard: 72075186233409549, txId: 107, path id: 2 2025-05-29T15:25:23.767837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-05-29T15:25:23.767848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72075186233409549 2025-05-29T15:25:23.767872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72075186233409549 2025-05-29T15:25:23.767876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 107:0, datashard: 72075186233409552, at schemeshard: 72075186233409549 2025-05-29T15:25:23.767882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 107:0 129 -> 240 2025-05-29T15:25:23.768103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2025-05-29T15:25:23.768114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2025-05-29T15:25:23.768119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2025-05-29T15:25:23.768125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 1], version: 9 2025-05-29T15:25:23.768131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 5 2025-05-29T15:25:23.768261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-05-29T15:25:23.768271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-05-29T15:25:23.768274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2025-05-29T15:25:23.768278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:25:23.768282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 
4 2025-05-29T15:25:23.768291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-05-29T15:25:23.780546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-05-29T15:25:23.780571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72075186233409549 2025-05-29T15:25:23.780686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-05-29T15:25:23.780723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#107:0 progress is 1/1 2025-05-29T15:25:23.780733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-05-29T15:25:23.780739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#107:0 progress is 1/1 2025-05-29T15:25:23.780743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-05-29T15:25:23.780749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-05-29T15:25:23.780770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:803:2683] message: TxId: 107 2025-05-29T15:25:23.780778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-05-29T15:25:23.780784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 107:0 2025-05-29T15:25:23.780789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 107:0 2025-05-29T15:25:23.780811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 2 2025-05-29T15:25:23.781025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2025-05-29T15:25:23.787186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2025-05-29T15:25:23.787501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-05-29T15:25:23.787510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:2195:4039] TestWaitNotification: OK eventTxId 107 2025-05-29T15:25:23.800521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72075186233409549, message: Source { RawX1: 775 RawX2: 4294969959 } TabletId: 72075186233409552 State: 4 2025-05-29T15:25:23.800556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: 
Offline, at schemeshard: 72075186233409549 2025-05-29T15:25:23.807089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72075186233409549:4 hive 72057594037968897 at ss 72075186233409549 2025-05-29T15:25:23.807321Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409552 2025-05-29T15:25:23.808127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72075186233409549 ShardLocalIdx: 4, at schemeshard: 72075186233409549 2025-05-29T15:25:23.808203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 1 2025-05-29T15:25:23.808354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409549 2025-05-29T15:25:23.808359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409549, LocalPathId: 2], at schemeshard: 72075186233409549 2025-05-29T15:25:23.808372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2025-05-29T15:25:23.811383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72075186233409549:4 2025-05-29T15:25:23.811402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72075186233409549:4 tabletId 72075186233409552 2025-05-29T15:25:23.814921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409549 2025-05-29T15:25:23.947041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6632: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-05-29T15:25:23.947083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:56: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-05-29T15:25:23.947102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-05-29T15:25:23.947119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6632: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-05-29T15:25:23.947126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:56: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-05-29T15:25:23.947134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-05-29T15:25:23.947145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6632: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-05-29T15:25:23.947152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:56: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-05-29T15:25:23.947161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 
2025-05-29T15:25:24.005894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:25:24.006003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:191: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":1600452180,"quantity":59,"finish":1600452239,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":0},"id":"72057594046678944-3-1600452180-1600452239-0","cloud_id":"CLOUD_ID_VAL","source_wt":1600452240,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 2020-09-18T18:04:00.028000Z, LastBillTime: 2020-09-18T18:02:00.000000Z, lastBilled: 2020-09-18T18:02:00.000000Z--2020-09-18T18:02:59.000000Z, toBill: 2020-09-18T18:03:00.000000Z--2020-09-18T18:03:59.000000Z, next retry at: 2020-09-18T18:05:00.000000Z 2025-05-29T15:25:24.007846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete grabMeteringMessage has happened 2025-05-29T15:25:24.007905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:335: tests -- TFakeMetering got TEvMetering::TEvWriteMeteringJson >> TConsoleTests::TestRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRegisterComputationalUnitsForPending >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] |66.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease |66.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |66.6%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [FAIL] >> PersQueueSdkReadSessionTest::SettingsValidation >> Compression::WriteRAW |66.6%| [TA] $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |66.6%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasure4Plus2Block [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks >> TProxyActorTest::TestCreateSemaphoreInterrupted ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-05-29T15:25:24.471167Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.471174Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.471177Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:24.471339Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-05-29T15:25:24.471354Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.471358Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.471389Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006926s 2025-05-29T15:25:24.471811Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:24.472143Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-05-29T15:25:24.472165Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.479308Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.479314Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.479318Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:24.492404Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-05-29T15:25:24.492426Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.492429Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.492452Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007090s 2025-05-29T15:25:24.510810Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-05-29T15:25:24.522821Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-05-29T15:25:24.522866Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.523281Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.523285Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.523289Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:24.534916Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-05-29T15:25:24.534934Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.534939Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.534959Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.259405s 2025-05-29T15:25:24.543453Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:24.558827Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-05-29T15:25:24.558875Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.563174Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.563181Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.563184Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:24.571149Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-05-29T15:25:24.571169Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.571174Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.571198Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.160679s 2025-05-29T15:25:24.578915Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:24.595897Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-05-29T15:25:24.595951Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.599148Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.599154Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.599159Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:24.606816Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:24.636862Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:24.638458Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.638571Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-05-29T15:25:24.638581Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.638585Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.638592Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.203920s 2025-05-29T15:25:24.638661Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-05-29T15:25:24.647350Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.647357Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.647362Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:24.651683Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:24.666821Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:24.666909Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.667687Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:24.782840Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.782972Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-05-29T15:25:24.782994Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:24.783002Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-05-29T15:25:24.783022Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-05-29T15:25:24.886801Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-05-29T15:25:24.886889Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-05-29T15:25:24.887266Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.887271Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.887274Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:24.887474Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:24.888036Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:24.888099Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.898810Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:24.994511Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.994802Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-05-29T15:25:24.994827Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:24.994836Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-05-29T15:25:24.994861Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-05-29T15:25:24.994886Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-05-29T15:25:24.994962Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-05-29T15:25:24.994978Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-05-29T15:25:24.995000Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |66.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] |66.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |66.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant >> ReadSessionImplTest::DataReceivedCallbackReal [FAIL] >> ReadSessionImplTest::DataReceivedCallback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-05-29T15:25:25.118727Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.118734Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.126888Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:25.134999Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:25.135216Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-05-29T15:25:25.135239Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.135553Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.135558Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.135561Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:25.143032Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:25.146537Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-05-29T15:25:25.146560Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.147092Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.147098Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.147102Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:25.150814Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-05-29T15:25:25.150830Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.150834Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.150868Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-05-29T15:25:25.151178Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.151182Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.151185Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:25.157691Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-05-29T15:25:25.157705Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.157710Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.157722Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-05-29T15:25:25.167091Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-05-29T15:25:25.167100Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-05-29T15:25:25.167104Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:25.195178Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:25.195383Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:25.197017Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-05-29T15:25:25.197677Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:25.197756Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2025-05-29T15:25:25.198288Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-05-29T15:25:25.198340Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:25.198350Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-05-29T15:25:25.198355Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-05-29T15:25:25.198358Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-05-29T15:25:25.198363Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-05-29T15:25:25.198367Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-05-29T15:25:25.198370Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-05-29T15:25:25.198374Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-05-29T15:25:25.198381Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-05-29T15:25:25.198384Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-05-29T15:25:25.198388Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-05-29T15:25:25.198391Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-05-29T15:25:25.198395Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-05-29T15:25:25.198399Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-05-29T15:25:25.198402Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-05-29T15:25:25.198406Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-05-29T15:25:25.198412Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-05-29T15:25:25.198415Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-05-29T15:25:25.198418Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-05-29T15:25:25.198422Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-05-29T15:25:25.198425Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-05-29T15:25:25.198428Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-05-29T15:25:25.198432Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-05-29T15:25:25.198435Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-05-29T15:25:25.198438Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-05-29T15:25:25.198441Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-05-29T15:25:25.198444Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-05-29T15:25:25.198448Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-05-29T15:25:25.198451Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-05-29T15:25:25.198455Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-05-29T15:25:25.198458Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-05-29T15:25:25.198461Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-05-29T15:25:25.198476Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-05-29T15:25:25.198480Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-05-29T15:25:25.198483Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-05-29T15:25:25.198495Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-05-29T15:25:25.198501Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-05-29T15:25:25.198504Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-05-29T15:25:25.198507Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-05-29T15:25:25.198511Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-05-29T15:25:25.198514Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-05-29T15:25:25.198517Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-05-29T15:25:25.198521Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-05-29T15:25:25.198525Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-05-29T15:25:25.198528Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-05-29T15:25:25.198531Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-05-29T15:25:25.198535Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-05-29T15:25:25.198538Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-05-29T15:25:25.198541Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-05-29T15:25:25.198545Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-05-29T15:25:25.198555Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-05-29T15:25:25.198656Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-05-29T15:25:25.198682Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-05-29T15:25:25.198686Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-05-29T15:25:25.198690Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-05-29T15:25:25.198693Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-05-29T15:25:25.198697Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-05-29T15:25:25.198701Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-05-29T15:25:25.198704Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-05-29T15:25:25.198707Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-05-29T15:25:25.198712Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-05-29T15:25:25.198715Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-05-29T15:25:25.198719Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-05-29T15:25:25.198722Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-05-29T15:25:25.198726Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-05-29T15:25:25.198729Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-05-29T15:25:25.198732Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-05-29T15:25:25.198747Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-05-29T15:25:25.198754Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-05-29T15:25:25.198757Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-05-29T15:25:25.198760Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-05-29T15:25:25.198764Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-05-29T15:25:25.198767Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-05-29T15:25:25.198770Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-05-29T15:25:25.198773Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-05-29T15:25:25.198777Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-05-29T15:25:25.198780Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-05-29T15:25:25.198783Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-05-29T15:25:25.198786Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-05-29T15:25:25.198789Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-05-29T15:25:25.198805Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-05-29T15:25:25.198808Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-05-29T15:25:25.198811Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-05-29T15:25:25.198815Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-05-29T15:25:25.198822Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-05-29T15:25:25.198825Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-05-29T15:25:25.198829Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-05-29T15:25:25.198832Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-05-29T15:25:25.198835Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-05-29T15:25:25.198838Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-05-29T15:25:25.198842Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-05-29T15:25:25.198845Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-05-29T15:25:25.198849Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-05-29T15:25:25.198852Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-05-29T15:25:25.198855Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-05-29T15:25:25.198858Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-05-29T15:25:25.198862Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-05-29T15:25:25.198865Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-05-29T15:25:25.198869Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-05-29T15:25:25.198873Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-05-29T15:25:25.198876Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-05-29T15:25:25.198880Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-05-29T15:25:25.198886Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-05-29T15:25:25.198916Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-05-29T15:25:25.199243Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.199248Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.199251Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:25.216096Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-05-29T15:25:25.218560Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:25.218630Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.218781Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:25.319126Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.319171Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-05-29T15:25:25.319184Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:25.319190Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-05-29T15:25:25.319204Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-05-29T15:25:25.522813Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-05-29T15:25:25.623942Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-05-29T15:25:25.623992Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-05-29T15:25:25.624052Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-05-29T15:25:25.624405Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.624410Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.624423Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:25.638840Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:25.643912Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:25.652837Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.653034Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:25.758967Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.759036Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-05-29T15:25:25.759050Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:25.759057Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-05-29T15:25:25.759077Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-05-29T15:25:25.759095Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-05-29T15:25:25.759144Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-05-29T15:25:25.759162Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-05-29T15:25:25.759185Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> BasicUsage::BrokenCredentialsProvider [FAIL] >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] |66.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |66.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |66.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] Test command err: 2025-05-29T15:24:12.066143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:12.066177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:12.066183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:12.066189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:12.066203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:12.066207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:12.066222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:12.066245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:12.066355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:12.066483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:12.070667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:12.070694Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:12.072729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:12.072790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:12.072809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2025-05-29T15:24:12.074033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: 
TTxUpgradeSchema.Complete 2025-05-29T15:24:12.074134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:12.074247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:12.074344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:24:12.074994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:12.075033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:12.075305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:24:12.075317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:12.075347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:12.075356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-05-29T15:24:12.075362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:12.075429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2025-05-29T15:24:12.126377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2025-05-29T15:24:12.126466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:12.126530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2025-05-29T15:24:12.126590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2025-05-29T15:24:12.126601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:12.127738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:12.127777Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2025-05-29T15:24:12.127833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:12.127845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046578944 2025-05-29T15:24:12.127850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:12.127856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:12.128351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:12.128364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046578944 2025-05-29T15:24:12.128369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:12.128767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:12.128780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:12.128797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:12.128804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:12.129695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:12.130180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:12.130233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:24:12.130487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:12.130498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-05-29T15:24:12.130503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:12.716719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 500, transactions 
count in step: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:12.716769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2025-05-29T15:24:12.716777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:12.716837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:12.716844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:12.716871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2025-05-29T15:24:12.716880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:24:12.717395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:24:12.717406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-05-29T15:24:12.717442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:12.717446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:832:2256], at schemeshard: 72057594046578944, txId: 1, path id: 1 202 ... 
_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-05-29T15:25:23.782169Z node 168 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:25:23.782179Z node 168 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:25:23.782185Z node 169 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-05-29T15:25:23.782191Z node 169 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:25:23.782200Z node 169 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:25:23.782207Z node 170 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-05-29T15:25:23.782213Z node 170 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:25:23.782222Z node 170 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:25:23.782227Z node 171 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-05-29T15:25:23.782249Z node 171 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:25:23.782260Z node 171 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:25:23.782269Z node 164 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-05-29T15:25:23.782275Z node 164 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:25:23.782285Z node 164 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:25:23.782374Z node 164 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:25:23.782383Z node 164 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:25:23.782388Z node 164 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:25:23.782408Z node 164 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[164:1150:2119] 2025-05-29T15:25:23.782460Z node 165 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:25:23.782467Z node 165 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:25:23.782470Z node 165 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:25:23.782479Z node 165 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[165:1152:2119] 2025-05-29T15:25:23.782525Z node 166 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:25:23.782531Z node 166 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:25:23.782535Z node 166 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:25:23.782542Z node 166 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[166:1154:2119] 2025-05-29T15:25:23.782590Z node 167 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 
72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:25:23.782597Z node 167 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:25:23.782601Z node 167 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:25:23.782610Z node 167 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[167:1156:2119] 2025-05-29T15:25:23.782658Z node 168 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:25:23.782663Z node 168 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:25:23.782667Z node 168 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:25:23.782675Z node 168 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[168:1158:2119] 2025-05-29T15:25:23.782723Z node 169 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:25:23.782728Z node 169 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:25:23.782732Z node 169 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:25:23.782751Z node 169 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[169:1160:2119] 2025-05-29T15:25:23.782810Z node 170 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:25:23.782818Z node 170 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:25:23.782824Z node 170 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:25:23.782835Z node 170 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[170:1162:2119] 2025-05-29T15:25:23.782883Z node 171 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:25:23.782890Z node 171 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:25:23.782894Z node 171 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:25:23.782903Z node 171 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[171:1164:2119] 2025-05-29T15:25:23.785565Z node 164 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:25:23.785583Z node 164 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [164:1107:2115] 2025-05-29T15:25:23.785607Z node 165 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:25:23.785613Z node 165 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [165:1108:2115] 2025-05-29T15:25:23.785629Z node 166 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:25:23.785635Z node 166 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [166:1109:2115] 2025-05-29T15:25:23.785650Z node 167 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:25:23.785656Z node 167 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to 
[167:1110:2115] 2025-05-29T15:25:23.785671Z node 168 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:25:23.785677Z node 168 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [168:1111:2115] 2025-05-29T15:25:23.785693Z node 169 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:25:23.785700Z node 169 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [169:1112:2115] 2025-05-29T15:25:23.785713Z node 170 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:25:23.785718Z node 170 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [170:1113:2115] 2025-05-29T15:25:23.785731Z node 171 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:25:23.785736Z node 171 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [171:1114:2115] 2025-05-29T15:25:23.786530Z node 171 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[171:1164:2119]} 2025-05-29T15:25:23.786652Z node 164 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[164:1150:2119]} 2025-05-29T15:25:23.786658Z node 165 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[165:1152:2119]} 2025-05-29T15:25:23.786704Z node 166 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[166:1154:2119]} 2025-05-29T15:25:23.786728Z node 171 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:25:23.798804Z node 171 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:25:23.798824Z node 171 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:25:23.798896Z node 167 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[167:1156:2119]} 2025-05-29T15:25:23.799143Z node 168 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[168:1158:2119]} 2025-05-29T15:25:23.799192Z node 164 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:25:23.799203Z node 164 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:25:23.799206Z node 164 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:25:23.799221Z node 169 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[169:1160:2119]} 2025-05-29T15:25:23.799231Z node 170 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[170:1162:2119]} 2025-05-29T15:25:23.799256Z node 165 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:25:23.799262Z node 165 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:25:23.799265Z node 165 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:25:23.799335Z node 166 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:25:23.799341Z node 166 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 
2025-05-29T15:25:23.799344Z node 166 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:25:23.799399Z node 167 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:25:23.799405Z node 167 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:25:23.799408Z node 167 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:25:23.799438Z node 168 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:25:23.799443Z node 168 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:25:23.799446Z node 168 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:25:23.799503Z node 170 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:25:23.799509Z node 170 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:25:23.799512Z node 170 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk 2025-05-29T15:25:23.799537Z node 169 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:25:23.799542Z node 169 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:25:23.799546Z node 169 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasure4Plus2Block [GOOD] >> TProxyActorTest::TestAttachSession >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-05-29T15:25:23.354956Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.354965Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.354970Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:23.370855Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:23.374926Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:23.376580Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.377203Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:23.377508Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:25:23.377638Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:25:23.377700Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-05-29T15:25:23.377709Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:25:23.377828Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:23.377835Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (2-2) 2025-05-29T15:25:23.377844Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-05-29T15:25:23.377848Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-05-29T15:25:23.378273Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.378278Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.378282Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:23.390484Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:23.394828Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:23.394913Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.395511Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-05-29T15:25:23.395807Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:25:23.395845Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-05-29T15:25:23.398833Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-05-29T15:25:23.398856Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-05-29T15:25:23.402921Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:23.402938Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-05-29T15:25:23.402951Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-05-29T15:25:23.403000Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-05-29T15:25:23.403008Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-05-29T15:25:23.403011Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-05-29T15:25:23.403014Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-05-29T15:25:23.403038Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-05-29T15:25:23.403065Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-05-29T15:25:23.403069Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-05-29T15:25:23.403072Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-05-29T15:25:23.403082Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-05-29T15:25:23.403088Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-05-29T15:25:23.403091Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-05-29T15:25:23.403094Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-05-29T15:25:23.403112Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-05-29T15:25:23.415176Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.415183Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.415188Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:23.434839Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-05-29T15:25:23.434988Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:23.435065Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.438826Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2025-05-29T15:25:23.439043Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:25:23.439078Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-05-29T15:25:23.442846Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-05-29T15:25:23.442877Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-05-29T15:25:23.442913Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:23.442926Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-05-29T15:25:23.442959Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 Getting new event 2025-05-29T15:25:23.442970Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-05-29T15:25:23.442974Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-05-29T15:25:23.442983Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 Getting new event 2025-05-29T15:25:23.442989Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-05-29T15:25:23.442992Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-05-29T15:25:23.443001Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 Getting new event 2025-05-29T15:25:23.443006Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-05-29T15:25:23.443009Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStream ... 
tream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-05-29T15:25:25.766710Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2025-05-29T15:25:25.876045Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-05-29T15:25:25.876052Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-05-29T15:25:25.876056Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:25.890909Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:25.902804Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:25.902880Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-05-29T15:25:25.906835Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-05-29T15:25:25.980802Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-05-29T15:25:25.980999Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:25.981225Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-05-29T15:25:25.981573Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-05-29T15:25:25.981687Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-05-29T15:25:25.982196Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-05-29T15:25:25.982311Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-05-29T15:25:25.982420Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-05-29T15:25:25.982535Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-05-29T15:25:25.983498Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2025-05-29T15:25:25.983609Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-05-29T15:25:25.983627Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-05-29T15:25:25.983687Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-05-29T15:25:25.985115Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2025-05-29T15:25:25.999170Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.999176Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:25.999180Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:26.010799Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-05-29T15:25:26.018803Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:26.018871Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:26.018961Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:26.019032Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-05-29T15:25:26.027077Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:26.027083Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:26.027089Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:26.038512Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:26.070811Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:26.070885Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:26.072218Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:26.072255Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:25:26.072279Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:26.072288Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-05-29T15:25:26.072315Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 >> TestYmqHttpProxy::TestListQueues >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn+WritePortionsOnInsert >> Compression::WriteRAW [FAIL] >> Compression::WriteGZIP |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> RetryPolicy::TWriteSession_TestPolicy [FAIL] >> TProxyActorTest::TestAttachSession [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy >> PersQueueSdkReadSessionTest::SettingsValidation [FAIL] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> TestKinesisHttpProxy::TestListStreamConsumersWithToken >> ReadSessionImplTest::DataReceivedCallback [GOOD] |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest |66.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |66.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |66.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |66.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |66.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |66.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] >> TestYmqHttpProxy::TestDeleteQueue |66.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |66.7%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |66.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [FAIL] Test command err: 2025-05-29T15:25:09.531831Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889083767834550:2143];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:09.531950Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ec7/r3tmp/tmpOAGOQU/pdisk_1.dat 2025-05-29T15:25:09.754244Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25918, node 1 2025-05-29T15:25:09.803838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:09.803850Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:09.803852Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:09.803892Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23848 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:25:09.851346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:09.851376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:09.856128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:09.858212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:09.903027Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:592: Got grpc request# ListEndpointsRequest, traceId# 01jweabbwecnc7njd9mxfb1gk3, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:58398, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.998723s 2025-05-29T15:25:09.913777Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:595: Got grpc request# CreateSessionRequest, traceId# 01jweabbwqb6rcmnb9cn7eacm7, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:58398, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-29T15:25:10.214805Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:595: Got grpc request# CreateTableRequest, traceId# 01jweabc66bkp7wtgx4m9khz2a, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:58398, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-29T15:25:10.214975Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509889083767834704:2115] Handle TEvProposeTransaction 2025-05-29T15:25:10.214982Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509889083767834704:2115] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:25:10.214995Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509889083767834704:2115] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509889088062802752:2590] 2025-05-29T15:25:10.225073Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509889088062802752:2590] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } 
KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:58398" 2025-05-29T15:25:10.225093Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509889088062802752:2590] txid# 281474976715658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:25:10.225198Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:7509889088062802752:2590] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:25:10.225207Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509889088062802752:2590] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:25:10.225234Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509889088062802752:2590] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:25:10.225261Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509889088062802752:2590] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:25:10.225271Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509889088062802752:2590] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:25:10.225317Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509889088062802752:2590] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:25:10.225797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:25:10.230526Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509889088062802752:2590] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-05-29T15:25:10.230542Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509889088062802752:2590] txid# 281474976715658 SEND to# [1:7509889088062802751:2333] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-05-29T15:25:10.231224Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:489: SchemeBoardUpdate /Root 2025-05-29T15:25:10.231239Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:518: Can't update SecurityState for /Root - no PublicKeys 2025-05-29T15:25:10.231241Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:489: SchemeBoardUpdate /Root 2025-05-29T15:25:10.231247Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:518: Can't update SecurityState for /Root - no PublicKeys 2025-05-29T15:25:10.257275Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:7509889088062802791:2627], Recipient [1:7509889088062802958:2341]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:10.257541Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:7509889088062802784:2620], Recipient [1:7509889088062802973:2348]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:10.257636Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 
268828672, Sender [1:7509889088062802786:2622], Recipient [1:7509889088062802956:2339]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:10.257725Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:7509889088062802781:2617], Recipient [1:7509889088062802970:2345]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:10.258997Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:7509889088062802788:2624], Recipient [1:7509889088062802975:2350]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:10.259168Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:7509889088062802790:2626], Recipient [1:7509889088062802957:2340]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:10.259260Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:7509889088062802782:2618], Recipient [1:7509889088062802971:2346]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:10.259345Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:7509889088062802776:2612], Recipient [1:7509889088062802967:2342]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:10.259434Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:7509889088062802783:2619], Recipient [1:7509889088062802972:2347]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:10.259521Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:7509889088062802785:2621], Recipient [1:7509889088062802955:2338]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:10.259606Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:7509889088062802787:2623], Recipient [1:7509889088062802974:2349]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:10.259691Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:7509889088062802777:2613], Recipient [1:7509889088062802968:2343]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:10.259775Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:7509889088062802789:2625], Recipient [1:7509889088062802976:2351]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:10.259859Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:7509889088062802778:2614], Recipient [1:7509889088062802969:2344]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:10.259942Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:7509889088062802779:2615], Recipient [1:7509889088062802953:2336]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:10.260027Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:7509889088062802780:2616], Recipient [1:7509889088062802954:2337]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:10.276318Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:7509889088062802778:2614], Recipient [1:7509889088062802969:2344]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:25:10.276508Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7509889088062802969:2344] 2025-05-29T15:25:1 ... 
TASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [10:7509889123576259830:2863], Recipient [10:7509889123576259558:2338]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:18.148118Z node 10 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-05-29T15:25:18.148128Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [10:7509889123576259830:2863], Recipient [10:7509889123576259559:2339]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:18.148130Z node 10 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-05-29T15:25:18.148143Z node 10 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:489: SchemeBoardUpdate /Root 2025-05-29T15:25:18.148159Z node 10 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:518: Can't update SecurityState for /Root - no PublicKeys 2025-05-29T15:25:18.148161Z node 10 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:489: SchemeBoardUpdate /Root 2025-05-29T15:25:18.148170Z node 10 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:518: Can't update SecurityState for /Root - no PublicKeys 2025-05-29T15:25:18.150495Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [10:7509889123576259830:2863], Recipient [10:7509889123576259556:2336]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:18.150505Z node 10 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037897 2025-05-29T15:25:18.150520Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [10:7509889123576259830:2863], Recipient [10:7509889123576259557:2337]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:18.150522Z node 10 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037896 2025-05-29T15:25:18.150532Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [10:7509889123576259830:2863], Recipient [10:7509889123576259558:2338]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:18.150535Z node 10 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-05-29T15:25:18.150544Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [10:7509889123576259830:2863], Recipient [10:7509889123576259559:2339]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:18.150546Z node 10 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-05-29T15:25:18.150555Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [10:7509889123576259830:2863], Recipient [10:7509889123576259564:2340]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:18.150558Z node 10 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-05-29T15:25:18.150569Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [10:7509889123576259830:2863], Recipient [10:7509889123576259565:2341]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:18.150571Z node 10 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:25:18.150581Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [10:7509889123576259830:2863], Recipient [10:7509889123576259566:2342]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:18.150583Z node 10 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-05-29T15:25:18.150592Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [10:7509889123576259830:2863], Recipient [10:7509889123576259567:2343]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:18.150594Z node 10 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-05-29T15:25:18.150604Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [10:7509889123576259830:2863], Recipient [10:7509889123576259568:2344]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:18.150606Z node 10 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037895 2025-05-29T15:25:18.150616Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [10:7509889123576259830:2863], Recipient [10:7509889123576259588:2345]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:25:18.150618Z node 10 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-05-29T15:25:18.150627Z node 10 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:489: SchemeBoardUpdate /Root 2025-05-29T15:25:18.150640Z node 10 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:518: Can't update SecurityState for /Root - no PublicKeys 2025-05-29T15:25:18.150643Z node 10 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:489: SchemeBoardUpdate /Root 2025-05-29T15:25:18.150651Z node 10 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:518: Can't update SecurityState for /Root - no PublicKeys 2025-05-29T15:25:18.152513Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7509889123576260043:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:25:18.224126Z node 10 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [10:7509889123576260110:3093] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:18.237176Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [10:7509889123576260121:2375], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
    <main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:18.237833Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=10&id=YmU1ZGMxZTEtMjdlNjIwN2EtYjM2MzUxMTgtOWIxMjYwOGY=, ActorId: [10:7509889123576259422:2332], ActorState: ExecuteState, TraceId: 01jweabky03yyf0yy07f6vyv50, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:25:18.349584Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x51491c03cb00] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-05-29T15:25:18.349662Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x51491c071700] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-05-29T15:25:18.349692Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x514937310100] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-05-29T15:25:18.349722Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x51493ac71180] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-05-29T15:25:18.349751Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x51493fc17180] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-05-29T15:25:18.349778Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x51493ad53b80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-05-29T15:25:18.349805Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x5149370d1700] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-05-29T15:25:18.349830Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x5149370bf180] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-05-29T15:25:18.349855Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x51491c03c000] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-05-29T15:25:18.349882Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x51493ad4e000] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-05-29T15:25:18.349909Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x514937310680] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-05-29T15:25:18.349933Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x51493ad8e580] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-05-29T15:25:18.349956Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x51493f967700] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-05-29T15:25:18.349981Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x51491c038b00] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-05-29T15:25:18.350004Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x51493ac6f600] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-05-29T15:25:18.350031Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x51493ad8e000] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-05-29T15:25:18.350053Z node 10 :GRPC_SERVER DEBUG: grpc_server.cpp:283: [0x51493ad4f080] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 equal assertion failed at ydb/services/ydb/ydb_table_ut.cpp:1991, void NTestSuiteYdbYqlClient::TestReadTableMultiShardWithDescribe(bool): result.GetStatus() == EStatus::SUCCESS
TBackTrace::Capture()+28 (0x166D044C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString<char, std::char_traits<char>> const&, bool)+137 (0x16883D79) NTestSuiteYdbYqlClient::TestReadTableMultiShardWithDescribe(bool)+5196 (0x1641659C) NTestSuiteYdbYqlClient::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x164FD547) NUnitTest::TTestBase::Run(std::__y1::function<void ()>, TBasicString<char, std::char_traits<char>> const&, char const*, bool)+126 (0x16885C2E) NTestSuiteYdbYqlClient::TCurrentTest::Execute()+429 (0x164FCF0D) NUnitTest::TTestFactory::Execute()+803 (0x168863A3) NUnitTest::RunMain(int, char**)+3021 (0x16897F4D) ??+0 (0x7F08EF7B9D90) __libc_start_main+128 (0x7F08EF7B9E40) _start+41 (0x151C1029) >> TColumnShardTestSchema::CreateTable >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx4 [GOOD] >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn+WritePortionsOnInsert >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [FAIL] >> PersQueueSdkReadSessionTest::StopResumeReadingData |66.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant >> TConsoleTests::TestRegisterComputationalUnitsForPending [GOOD] >> TConsoleTests::TestNotifyOperationCompletion >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn-WritePortionsOnInsert >> TColumnShardTestSchema::ExportAfterFail >> Compression::WriteGZIP [FAIL] >> Compression::WriteZSTD |66.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |66.7%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |66.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |66.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |66.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn-WritePortionsOnInsert >> TColumnShardTestSchema::RebootExportAfterFail |66.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |66.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |66.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile >> RetryPolicy::TWriteSession_TestBrokenPolicy [FAIL] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> TColumnShardTestSchema::Drop ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx4 [GOOD] Test command err: iteration# 4 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 10 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 16 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 22 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 28 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 34 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 40 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99
blobsUnwritten# 0 iteration# 46 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 52 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 58 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 64 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 70 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 76 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 82 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 88 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 94 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 100 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 106 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 112 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 118 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 124 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 130 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 136 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 142 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 148 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 154 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 160 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 166 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 172 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 178 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 184 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 190 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 196 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 202 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 208 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 214 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 220 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 226 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 232 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 238 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 244 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 250 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 256 BlobsWritten# 490 
blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 262 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 268 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 274 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 280 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 286 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 292 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 298 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 304 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 310 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 316 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 322 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 328 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 334 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 340 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 346 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 352 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 358 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 364 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 370 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 376 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 382 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 388 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 394 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 400 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 406 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 412 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 418 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 424 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 430 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 436 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 442 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 448 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 454 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 460 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 466 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 
blobsUnwritten# 0 iteration# 472 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 478 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 484 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TestKinesisHttpProxy::ListShardsTimestamp >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 >> PersQueueSdkReadSessionTest::StopResumeReadingData [FAIL] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> TestYmqHttpProxy::TestPurgeQueue >> TColumnShardTestSchema::CreateTable [GOOD] >> TSubDomainTest::Boot >> Compression::WriteZSTD [FAIL] >> Compression::WriteWithMixedCodecs >> TestKinesisHttpProxy::TestCounters >> TSubDomainTest::Boot [GOOD] >> TSubDomainTest::CheckAccessCopyTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable [GOOD] Test command err: 2025-05-29T15:25:29.972104Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:25:29.975870Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:25:29.975934Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:25:29.976677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:25:29.976725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:25:29.976762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:25:29.976782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:25:29.976800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:25:29.976819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:25:29.976836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:25:29.976861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:25:29.976879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:25:29.976897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:25:29.976915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:25:29.976933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:25:29.983847Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:25:29.983894Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:25:29.983904Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:25:29.983934Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:25:29.983970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:25:29.983983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:25:29.983989Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:25:29.983999Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:25:29.984007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:25:29.984015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:25:29.984020Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:25:29.984039Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:25:29.984046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:25:29.984053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:25:29.984057Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:25:29.984067Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:25:29.984074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:25:29.984082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:25:29.984086Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:25:29.984109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:25:29.984116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:25:29.984120Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:25:29.984129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:25:29.984138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:25:29.984142Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:25:29.984168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:25:29.984177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:25:29.984182Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:25:29.984202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:25:29.984210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:25:29.984215Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:25:29.984229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:25:29.984236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:25:29.984241Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:25:29.984250Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:25:29.984258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:25:29.984265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:25:29.984270Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:25:29.984327Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=10; 2025-05-29T15:25:29.984337Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-05-29T15:25:29.984345Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute ... 
TA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-05-29T15:25:31.116049Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=119;this=17802604575936;method=TTxController::StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=1748532331022;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=019:0;;fline=schema.h:36;event=sync_schema; 2025-05-29T15:25:31.127593Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1748532331022;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=019:0;;this=17802604575936;op_tx=119:TX_KIND_SCHEMA;min=1748532331022;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1748532331022;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=019:0;;int_this=17802633797216;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-05-29T15:25:31.127626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1748532331022;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=019:0;;this=17802604575936;op_tx=119:TX_KIND_SCHEMA;min=1748532331022;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1748532331022;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=019:0;;int_this=17802633797216;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:167:2190]; 2025-05-29T15:25:31.127638Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1748532331022;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=019:0;;this=17802604575936;op_tx=119:TX_KIND_SCHEMA;min=1748532331022;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1748532331022;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=019:0;;int_this=17802633797216;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2025-05-29T15:25:31.127742Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-05-29T15:25:31.127777Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1748532331022 at tablet 9437184, mediator 0 2025-05-29T15:25:31.127786Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] execute at tablet 9437184 2025-05-29T15:25:31.127887Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EnsureTable for pathId: 20 ttl settings: { Version: 1 } at tablet 9437184 2025-05-29T15:25:31.127908Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:241;method=RegisterTable;path_id=20; 2025-05-29T15:25:31.127918Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:144;event=RegisterTable;path_id=20; 2025-05-29T15:25:31.128060Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:485;event=OnTieringModified;path_id=20; 2025-05-29T15:25:31.128105Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tx_controller.cpp:214;event=finished_tx;tx_id=119; 2025-05-29T15:25:31.139539Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] complete at tablet 9437184 2025-05-29T15:25:31.139600Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-05-29T15:25:31.139968Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=17802604576576;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1748532331025;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-05-29T15:25:31.151481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1748532331025;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=020:0;;this=17802604576576;op_tx=120:TX_KIND_SCHEMA;min=1748532331025;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:167:2190]; 2025-05-29T15:25:31.151512Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:784: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1748532331025;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=020:0;;this=17802604576576;op_tx=120:TX_KIND_SCHEMA;min=1748532331025;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" 
TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-05-29T15:25:31.151893Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=17802604576576;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1748532331026;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-05-29T15:25:31.163542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1748532331026;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=021:0;;this=17802604576576;op_tx=121:TX_KIND_SCHEMA;min=1748532331026;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:167:2190]; 2025-05-29T15:25:31.163572Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:784: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1748532331026;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=021:0;;this=17802604576576;op_tx=121:TX_KIND_SCHEMA;min=1748532331026;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings 
{ Version: 1 } } } } 2025-05-29T15:25:31.163956Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=17802604576576;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1748532331028;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-05-29T15:25:31.175557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1748532331028;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=022:0;;this=17802604576576;op_tx=122:TX_KIND_SCHEMA;min=1748532331028;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:167:2190]; 2025-05-29T15:25:31.175586Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:784: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1748532331028;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=022:0;;this=17802604576576;op_tx=122:TX_KIND_SCHEMA;min=1748532331028;max=18446744073709551615;plan=0;src=[1:167:2190];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; >> RetryPolicy::TWriteSession_RetryOnTargetCluster [FAIL] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> BasicUsage::BrokenCredentialsProvider [FAIL] Test command err: 2025-05-29T15:25:16.986829Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1748532316986821 2025-05-29T15:25:17.264288Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889121028588730:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:17.264380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:17.327874Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889118537118570:2145];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:17.330863Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:17.427213Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:25:17.427239Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00124d/r3tmp/tmpuyAX2M/pdisk_1.dat 2025-05-29T15:25:17.579906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:17.579933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:17.583092Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:25:17.583438Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29438, node 1 2025-05-29T15:25:17.630997Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:25:17.631006Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:25:17.635193Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:17.665079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:17.665112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:17.680818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:17.687362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/00124d/r3tmp/yandexSLtHWz.tmp 2025-05-29T15:25:17.687371Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/00124d/r3tmp/yandexSLtHWz.tmp 2025-05-29T15:25:17.687429Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/00124d/r3tmp/yandexSLtHWz.tmp 2025-05-29T15:25:17.687466Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:17.706847Z INFO: TTestServer started on Port 16119 GrpcPort 29438 TClient is connected to server localhost:16119 PQClient connected to localhost:29438 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:17.784517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:17.795732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:25:18.907536Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889122832086113:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:18.907555Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889122832086081:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:18.907596Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:18.912168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-05-29T15:25:18.928750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:18.928895Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509889122832086118:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-05-29T15:25:19.016117Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509889127127053442:2128] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:19.220692Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509889127127053449:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:19.221210Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=ZTIzZjY4NDktYTk3NDExNmYtNTE1Yzc0YmQtZDIzMjM2NjQ=, ActorId: [2:7509889122832086078:2305], ActorState: ExecuteState, TraceId: 01jweabmnk8raftp60epr4vbvp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:19.223147Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:19.455730Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889129618524210:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:19.456463Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDUyMTFhNzAtZjllZjVlZjctODViMWJmYmMtYjk3OTM0MTQ=, ActorId: [1:7509889129618524164:2335], ActorState: ExecuteState, TraceId: 01jweabn6m7yzzj8bph8pzd06s, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:19.456590Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:19.459554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-05-29T15:25:19.537215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:25:19.632242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:29438", true, true, 1000); 2025-05-29T15:25:19.724570Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889129618524606:2378], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:19.725462Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YmVlYTEyNDctYjRjMmVhNDAtY2M5NzNmMTQtMWI4ZGY4YTE=, ActorId: [1:7509889129618524603:2376], ActorState: ExecuteState, TraceId: 01jweabnenbfdxyvydg2nycxma, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb ... ting -> Connected TServer::EnableGrpc on GrpcPort 19936, node 5 2025-05-29T15:25:24.127234Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/00124d/r3tmp/yandexDn9kw5.tmp 2025-05-29T15:25:24.127248Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/00124d/r3tmp/yandexDn9kw5.tmp 2025-05-29T15:25:24.127317Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/00124d/r3tmp/yandexDn9kw5.tmp 2025-05-29T15:25:24.127378Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:24.142615Z INFO: TTestServer started on Port 20593 GrpcPort 19936 TClient is connected to server localhost:20593 PQClient connected to localhost:19936 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:24.219595Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:24.239349Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:24.251064Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:24.262885Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:25.108768Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509889151999975658:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:25.108803Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:25.108966Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509889151999975693:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:25.110471Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-05-29T15:25:25.134835Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7509889151999975695:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-05-29T15:25:25.214949Z node 6 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [6:7509889151999975723:2131] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:25.220855Z node 6 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [6:7509889151999975730:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:25.221314Z node 6 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=6&id=ODNmNWI4ZTItYjMwM2M5YjAtNzBhMGNiYmQtODUyOTUxZWI=, ActorId: [6:7509889151999975654:2304], ActorState: ExecuteState, TraceId: 01jweabtqk96c19gvtp2q8p070, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:25.221440Z node 6 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:25.296407Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [5:7509889153847374608:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:25.297010Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=5&id=OGNlY2JkMzQtZjJhOGIyNjItNzIwZmNkZjUtNzkzMWY0Mzg=, ActorId: [5:7509889153847374557:2332], ActorState: ExecuteState, TraceId: 01jweabtx89j4seezzwx5vn8a0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:25.297138Z node 5 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:25.299857Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:25:25.411499Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:25:25.454885Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:19936", true, true, 1000); 2025-05-29T15:25:25.595376Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [5:7509889153847374954:2373], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:25.596218Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=5&id=NDM5MDQ1ZmItNzc1NWQ1NzItMjMxNjJmZGUtNGNhNGQ0MzI=, ActorId: [5:7509889153847374951:2371], ActorState: ExecuteState, TraceId: 01jweabv655k15cphe2wmzpsg1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13A6D2CC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C25189) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x138B9A44) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x138B89A8) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x138B7BF2) NPersQueue::SDKTestSetup::Start(bool, bool)+1450 (0x138AD5DA) NPersQueue::SDKTestSetup::SDKTestSetup(TBasicString> const&, bool, TVector> const&, NActors::NLog::EPriority, unsigned int, unsigned long)+675 (0x138AB0E3) void std::__y1::allocator::construct[abi:fe200000](NYdb::NPersQueue::NTests::TPersQueueYdbSdkTestSetup*, char const*&)+72 (0x139492F8) NYdb::NPersQueue::NTests::NTestSuiteBasicUsage::TTestCaseBrokenCredentialsProvider::Execute_(NUnitTest::TTestContext&)+155 (0x13938EEB) NYdb::NPersQueue::NTests::NTestSuiteBasicUsage::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1393C577) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C2703E) NYdb::NPersQueue::NTests::NTestSuiteBasicUsage::TCurrentTest::Execute()+428 (0x1393BF3C) NUnitTest::TTestFactory::Execute()+803 (0x13C277B3) NUnitTest::RunMain(int, char**)+3021 (0x13C390FD) ??+0 (0x7FE5FD450D90) __libc_start_main+128 (0x7FE5FD450E40) _start+41 (0x129BB029) |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TColumnShardTestSchema::Drop [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 [FAIL] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop [GOOD] Test command err: 2025-05-29T15:25:30.594067Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:25:30.598321Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:25:30.598417Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:25:30.607442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:25:30.607520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:25:30.607585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:25:30.607606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:25:30.607627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:25:30.607649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:25:30.607667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:25:30.607696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:25:30.607718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:25:30.607738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:25:30.607758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:25:30.607778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:25:30.623813Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:25:30.623885Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:25:30.623898Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:25:30.623940Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:25:30.623980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:25:30.623996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:25:30.624003Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:25:30.624016Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:25:30.624026Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:25:30.624035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:25:30.624041Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:25:30.624063Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:25:30.624072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:25:30.624080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:25:30.624085Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:25:30.624100Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:25:30.624108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:25:30.624117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:25:30.624122Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:25:30.624142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:25:30.624151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:25:30.624157Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:25:30.624168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:25:30.624177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:25:30.624184Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:25:30.624215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:25:30.624224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:25:30.624229Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:25:30.624255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:25:30.624264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:25:30.624270Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:25:30.624287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:25:30.624295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:25:30.624300Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:25:30.624311Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:25:30.624320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:25:30.624328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:25:30.624333Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:25:30.624419Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=14; 2025-05-29T15:25:30.624432Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=7; 2025-05-29T15:25:30.624441Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute ... 
es;periodic=0; 2025-05-29T15:25:32.819541Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;task_id=28f1b696-3ca111f0-94cac554-e32a37b0;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:25:32.819555Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;task_id=28f1b696-3ca111f0-94cac554-e32a37b0;tablet_id=9437184;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=1; 2025-05-29T15:25:32.819571Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;task_id=28f1b696-3ca111f0-94cac554-e32a37b0;tablet_id=9437184;fline=column_engine_logs.cpp:288;event=StartCleanupStop;snapshot=plan_step=1748532031710;tx_id=18446744073709551615;;current_snapshot_ts=1748532331707; 2025-05-29T15:25:32.819581Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;task_id=28f1b696-3ca111f0-94cac554-e32a37b0;tablet_id=9437184;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:25:32.819595Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;task_id=28f1b696-3ca111f0-94cac554-e32a37b0;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:25:32.819601Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;task_id=28f1b696-3ca111f0-94cac554-e32a37b0;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:25:32.819621Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;task_id=28f1b696-3ca111f0-94cac554-e32a37b0;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:164;event=skip_actualization;waiting=0.882500s; 2025-05-29T15:25:32.819635Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;task_id=28f1b696-3ca111f0-94cac554-e32a37b0;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-05-29T15:25:32.819673Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 712 2025-05-29T15:25:32.819848Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=28f1b696-3ca111f0-94cac554-e32a37b0;mem=15169165;cpu=0; 2025-05-29T15:25:32.820054Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:25:32.820071Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[14] complete at tablet 9437184 2025-05-29T15:25:32.820099Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:25:32.820314Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1748532331710:max} readable: {1748532331710:max} at tablet 9437184 2025-05-29T15:25:32.820358Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-05-29T15:25:32.820852Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1748532331710:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-05-29T15:25:32.820865Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1748532331710:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-05-29T15:25:32.821023Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1748532331710:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-05-29T15:25:32.821043Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1748532331710:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:141;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-05-29T15:25:32.821239Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1748532331710:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:169;event=TTxScan started;actor_id=[1:713:2726];trace_detailed=; 2025-05-29T15:25:32.821418Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-05-29T15:25:32.821467Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-05-29T15:25:32.821541Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:713:2726];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:25:32.821556Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:713:2726];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:25:32.821567Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:713:2726];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:25:32.821585Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:410: Scan [1:713:2726] finished for tablet 9437184 2025-05-29T15:25:32.821644Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[1:713:2726];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:707:2720];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1748532332821229,"name":"_full_task","f":1748532332821229,"d_finished":0,"c":0,"l":1748532332821595,"d":366},"events":[{"name":"bootstrap","f":1748532332821279,"d_finished":218,"c":1,"l":1748532332821497,"d":218},{"a":1748532332821535,"name":"ack","f":1748532332821535,"d_finished":0,"c":0,"l":1748532332821595,"d":60},{"a":1748532332821530,"name":"processing","f":1748532332821530,"d_finished":0,"c":0,"l":1748532332821595,"d":65},{"name":"ProduceResults","f":1748532332821491,"d_finished":29,"c":2,"l":1748532332821570,"d":29},{"a":1748532332821571,"name":"Finish","f":1748532332821571,"d_finished":0,"c":0,"l":1748532332821595,"d":24}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:25:32.821658Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:713:2726];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:707:2720];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-05-29T15:25:32.821696Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
SelfId=[1:713:2726];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:707:2720];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1748532332821229,"name":"_full_task","f":1748532332821229,"d_finished":0,"c":0,"l":1748532332821664,"d":435},"events":[{"name":"bootstrap","f":1748532332821279,"d_finished":218,"c":1,"l":1748532332821497,"d":218},{"a":1748532332821535,"name":"ack","f":1748532332821535,"d_finished":0,"c":0,"l":1748532332821664,"d":129},{"a":1748532332821530,"name":"processing","f":1748532332821530,"d_finished":0,"c":0,"l":1748532332821664,"d":134},{"name":"ProduceResults","f":1748532332821491,"d_finished":29,"c":2,"l":1748532332821570,"d":29},{"a":1748532332821571,"name":"Finish","f":1748532332821571,"d_finished":0,"c":0,"l":1748532332821664,"d":93}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:25:32.821713Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:713:2726];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-05-29T15:25:32.821037Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-05-29T15:25:32.821719Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[1:713:2726];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:190;event=scan_aborted;reason=unexpected on destructor; 2025-05-29T15:25:32.821731Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[1:713:2726];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; >> TSubDomainTest::CheckAccessCopyTable [GOOD] >> TSubDomainTest::ConsistentCopyTable >> TOlapReboots::CreateDropTable [GOOD] >> TOlapReboots::CreateDropStore >> TestYmqHttpProxy::TestListDeadLetterSourceQueues |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [FAIL] >> RetryPolicy::TWriteSession_SeqNoShift >> Compression::WriteWithMixedCodecs [FAIL] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> TExternalDataSourceTestReboots::DropExternalDataSourceWithReboots |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TOlapReboots::CreateMultipleTables [GOOD] >> TConsoleTests::TestNotifyOperationCompletion [GOOD] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain >> TestKinesisHttpProxy::ListShardsToken |66.9%| 
[LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |66.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |66.9%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut >> TPQTest::TestTimeRetention [GOOD] >> TPQTest::TestStorageRetention >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2025-05-29T15:25:22.845268Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.845276Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.845281Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:22.858812Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:22.870825Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:22.870927Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.874842Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:22.875016Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.875059Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:25:22.875083Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:22.875092Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-05-29T15:25:22.875342Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.875346Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.875350Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:22.883366Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:22.883814Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:22.883850Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.883948Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:22.884041Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.884126Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:25:22.884147Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:22.884156Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2025-05-29T15:25:22.884472Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.884477Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.884481Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:22.885103Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:22.885408Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:22.885445Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.885575Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:22.885840Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.887580Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:25:22.887621Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:22.887634Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-05-29T15:25:22.887999Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.888003Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.888008Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:22.902803Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:22.914825Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:22.914926Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.920881Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:22.921853Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.921972Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:25:22.924559Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:22.924581Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-05-29T15:25:22.938991Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.938999Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.939004Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:22.946801Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-05-29T15:25:22.958458Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:22.958551Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.962823Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:22.962989Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.963428Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:25:22.966766Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:22.966785Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-05-29T15:25:22.970410Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.970424Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:22.970429Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:22.998809Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:23.006816Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:23.006903Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.010855Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:23.011064Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.014837Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:25:23.015116Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:23.015140Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-05-29T15:25:23.016125Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.016130Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.016134Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:23.018922Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:23.019187Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:23.019211Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.022834Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:23.023113Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.023183Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:25:23.023206Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-05-29T15:25:23.023215Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-05-29T15:25:23.023557Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.023561Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.023568Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:23.023691Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:23.030810Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:23.030884Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.032103Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:23.032427Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:23.032488Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:25:23.032507Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:23.032516Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-05-29T15:25:23.112183Z :ReadSession INFO: Random seed for debugging is 1748532323112173 2025-05-29T15:25:23.402120Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889146834634572:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:23.402242Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:23.421354Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889145583434963:2094];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:23.421652Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;p ... atus: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:25:23.920148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-05-29T15:25:24.349673Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889149878402496:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:24.349734Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:25:24.349900Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889149878402546:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:25:24.351736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480
2025-05-29T15:25:24.387978Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509889149878402548:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking }
2025-05-29T15:25:24.474815Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509889149878402585:2134] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:25:24.484470Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509889149878402592:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-05-29T15:25:24.484925Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=NzA0Y2FmY2QtOGU4Y2MwNjEtMzQwYTMwYWEtZDI1ZDIyMDk=, ActorId: [2:7509889149878402493:2304], ActorState: ExecuteState, TraceId: 01jweabszvfqf3h4bq6c535nw3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-05-29T15:25:24.485000Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-05-29T15:25:24.483915Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889151129602773:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-05-29T15:25:24.484457Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YmZmZTJlZjYtMzdiYzQ4YjYtNGYzMTE1MjAtYjZhOWQxOWI=, ActorId: [1:7509889151129602723:2333], ActorState: ExecuteState, TraceId: 01jweabt2a4pze75k8ksq06myx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-05-29T15:25:24.484909Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-05-29T15:25:24.490547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-05-29T15:25:24.591270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-05-29T15:25:24.703049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
=== Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:18543", true, true, 1000);
2025-05-29T15:25:24.774390Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889151129603169:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:25:24.775358Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjRhYzlhOTItYTdmZDM3MWMtYTg1NjhlOGItMTY1ODI1ZDk=, ActorId: [1:7509889151129603166:2374], ActorState: ExecuteState, TraceId: 01jweabtcedzry7yc1287kqmty, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
TBackTrace::Capture()+28 (0x13A6D2CC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C25189)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x138B9A44)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x138B89A8)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x138B7BF2)
NPersQueue::SDKTestSetup::Start(bool, bool)+1450 (0x138AD5DA)
NPersQueue::SDKTestSetup::SDKTestSetup(TBasicString> const&, bool, TVector> const&, NActors::NLog::EPriority, unsigned int, unsigned long)+675 (0x138AB0E3)
NTestSuiteReadSessionImplTest::TTestCaseDataReceivedCallbackReal::Execute_(NUnitTest::TTestContext&)+124 (0x13895BAC)
NTestSuiteReadSessionImplTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x138C5C37)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C2703E)
NTestSuiteReadSessionImplTest::TCurrentTest::Execute()+426 (0x138C55FA)
NUnitTest::TTestFactory::Execute()+803 (0x13C277B3)
NUnitTest::RunMain(int, char**)+3021 (0x13C390FD)
??+0 (0x7F96BA9C2D90)
__libc_start_main+128 (0x7F96BA9C2E40)
_start+41 (0x129BB029)
2025-05-29T15:25:26.234417Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-05-29T15:25:26.234425Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-05-29T15:25:26.234430Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s
2025-05-29T15:25:26.258960Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session
2025-05-29T15:25:26.266822Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321
2025-05-29T15:25:26.266918Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-05-29T15:25:26.270842Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL)
2025-05-29T15:25:26.271060Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
Post function
2025-05-29T15:25:26.271101Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
Post function
2025-05-29T15:25:26.271160Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2)
2025-05-29T15:25:26.271169Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1)
2025-05-29T15:25:26.271180Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1)
2025-05-29T15:25:26.271189Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2)
2025-05-29T15:25:26.271227Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes
2025-05-29T15:25:26.271231Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client.
Number of messages 1, size 8 bytes >> TSubDomainTest::ConsistentCopyTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateMultipleTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:24:39.353313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:39.353342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:39.353348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:39.353355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:39.353361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:39.353365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:39.353376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:39.353393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:39.353526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:39.353633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:39.368654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:39.368682Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:39.368803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:24:39.372557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:39.372601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:39.372650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:39.376012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:39.376110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:39.376281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.376482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:39.377074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:39.377120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:39.377428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:39.377443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:39.377488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:39.377499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:39.377522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:39.377550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:24:39.378923Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:24:39.399368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-05-29T15:24:39.399465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.399553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:39.399614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:39.399628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.400401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.400435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:39.400494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.400512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:39.400519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:39.400527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:39.401003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.401017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:39.401023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:39.401408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.401420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.401427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:39.401435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:39.402224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: 
true } CoordinatorID: 72057594046316545 2025-05-29T15:24:39.402715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:39.402787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:39.403032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.403067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:39.403091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:39.403181Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... SHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 4 2025-05-29T15:25:34.509644Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:25:34.509655Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-05-29T15:25:34.510069Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-05-29T15:25:34.510088Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:5 msg type: 268697639 2025-05-29T15:25:34.510103Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 0, tablet: 72057594037968897 2025-05-29T15:25:34.510180Z node 89 :HIVE INFO: tablet_helpers.cpp:1441: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 1003 TxPartId: 0 2025-05-29T15:25:34.510206Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5972: Update tablets object reply, message: Status: OK TxId: 1003 TxPartId: 0, at schemeshard: 72057594046678944 2025-05-29T15:25:34.510219Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 1003 TxPartId: 0 2025-05-29T15:25:34.510363Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:25:34.510705Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:25:34.510799Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:25:34.522064Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6146: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 2025-05-29T15:25:34.522086Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-05-29T15:25:34.522106Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 FAKE_COORDINATOR: Erasing txId 1003 2025-05-29T15:25:34.522511Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:25:34.522547Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:25:34.522555Z node 89 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-05-29T15:25:34.522571Z node 89 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:25:34.522576Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:25:34.522582Z node 89 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:25:34.522585Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:25:34.522590Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-05-29T15:25:34.522602Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [89:362:2339] message: TxId: 1003 2025-05-29T15:25:34.522611Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:25:34.522617Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:25:34.522621Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:25:34.522651Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:25:34.523036Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:25:34.523048Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- 
TTxNotificationSubscriber for txId 1003: satisfy waiter [89:435:2405] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:25:34.523158Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:25:34.523223Z node 89 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable1" took 72us result status StatusSuccess 2025-05-29T15:25:34.523346Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable1" PathDescription { Self { Name: "ColumnTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable1" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 3 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:25:34.523487Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-05-29T15:25:34.523510Z node 89 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable2" took 25us result status StatusSuccess 2025-05-29T15:25:34.523575Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable2" PathDescription { Self { Name: "ColumnTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable2" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 3 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TestYmqHttpProxy::TestSendMessageBatch |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> RetryPolicy::TWriteSession_SeqNoShift [FAIL] >> RetryPolicy::RetryWithBatching |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [FAIL] >> PersQueueSdkReadSessionTest::ReadSessionWithClose ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::ConsistentCopyTable [GOOD] Test command err: 2025-05-29T15:25:31.843638Z node 1 :METADATA_PROVIDER WARN: 
log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889177980246146:2196];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:31.843736Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001a7f/r3tmp/tmp06HnhT/pdisk_1.dat 2025-05-29T15:25:31.926111Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889177980245989:2079] 1748532331840697 != 1748532331840700 2025-05-29T15:25:31.928963Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:12517 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-05-29T15:25:31.966956Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509889177980246218:2087] Handle TEvNavigate describe path dc-1 2025-05-29T15:25:31.968269Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509889177980246526:2250] HANDLE EvNavigateScheme dc-1 2025-05-29T15:25:31.968300Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7509889177980246271:2114], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:25:31.968308Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:7509889177980246271:2114], path# /dc-1, domainOwnerId# 72057594046644480 2025-05-29T15:25:31.968345Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:7509889177980246527:2251][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:25:31.968642Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509889177980245959:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509889177980246531:2251] 2025-05-29T15:25:31.968658Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509889177980245959:2049] Subscribe: subscriber# [1:7509889177980246531:2251], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:25:31.968668Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509889177980245962:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509889177980246532:2251] 2025-05-29T15:25:31.968670Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509889177980245962:2052] Subscribe: subscriber# [1:7509889177980246532:2251], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:25:31.968673Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509889177980245965:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509889177980246533:2251] 2025-05-29T15:25:31.968675Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509889177980245965:2055] Subscribe: subscriber# [1:7509889177980246533:2251], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:25:31.968682Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: 
[replica][1:7509889177980246531:2251][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509889177980245959:2049] 2025-05-29T15:25:31.968684Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509889177980246532:2251][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509889177980245962:2052] 2025-05-29T15:25:31.968687Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509889177980246533:2251][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509889177980245965:2055] 2025-05-29T15:25:31.968691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509889177980246527:2251][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509889177980246528:2251] 2025-05-29T15:25:31.968695Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509889177980246527:2251][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509889177980246529:2251] 2025-05-29T15:25:31.968703Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:7509889177980246527:2251][/dc-1] Set up state: owner# [1:7509889177980246271:2114], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:25:31.968744Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509889177980246527:2251][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509889177980246530:2251] 2025-05-29T15:25:31.968749Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:7509889177980246527:2251][/dc-1] Path was already updated: owner# [1:7509889177980246271:2114], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:25:31.968753Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509889177980246531:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509889177980246528:2251], cookie# 1 2025-05-29T15:25:31.968755Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509889177980246532:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509889177980246529:2251], cookie# 1 2025-05-29T15:25:31.968757Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509889177980246533:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509889177980246530:2251], cookie# 1 2025-05-29T15:25:31.968760Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509889177980245959:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7509889177980246531:2251] 2025-05-29T15:25:31.968762Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509889177980245959:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509889177980246531:2251], cookie# 1 2025-05-29T15:25:31.968765Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509889177980245962:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509889177980246532:2251] 2025-05-29T15:25:31.968767Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509889177980245962:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509889177980246532:2251], cookie# 1 2025-05-29T15:25:31.968768Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509889177980245965:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509889177980246533:2251] 2025-05-29T15:25:31.968770Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509889177980245965:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509889177980246533:2251], cookie# 1 2025-05-29T15:25:31.970781Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509889177980246531:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509889177980245959:2049], cookie# 1 2025-05-29T15:25:31.970788Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509889177980246532:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509889177980245962:2052], cookie# 1 2025-05-29T15:25:31.970792Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509889177980246533:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509889177980245965:2055], cookie# 1 2025-05-29T15:25:31.970798Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509889177980246527:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509889177980246528:2251], cookie# 1 2025-05-29T15:25:31.970807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:7509889177980246527:2251][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-05-29T15:25:31.970811Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509889177980246527:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509889177980246529:2251], cookie# 1 2025-05-29T15:25:31.970814Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:7509889177980246527:2251][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-05-29T15:25:31.970819Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509889177980246527:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509889177980246530:2251], cookie# 1 2025-05-29T15:25:31.970821Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:7509889177980246527:2251][/dc-1] Unexpected sync response: sender# [1:7509889177980246530:2251], cookie# 1 2025-05-29T15:25:31.975680Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:7509889177980246271:2114], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: 
"/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-05-29T15:25:31.975767Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:7509889177980246271:2114], notify ... ify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [5:7509889189675322161:2057] 2025-05-29T15:25:35.025246Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][7:7509889196786863226:3044][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [7:7509889196786863227:3044] 2025-05-29T15:25:35.025256Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][7:7509889196786863226:3044][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [7:7509889196786863228:3044] 2025-05-29T15:25:35.025265Z node 7 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][7:7509889196786863226:3044][/dc-1/USER_0/.metadata/initialization/migrations] Set up state: owner# [7:7509889192491894474:2100], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:25:35.025271Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][7:7509889196786863226:3044][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [7:7509889196786863229:3044] 2025-05-29T15:25:35.025276Z node 7 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][7:7509889196786863226:3044][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [7:7509889192491894474:2100], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:25:35.025299Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [7:7509889192491894474:2100], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/initialization/migrations PathId: Strong: 1 } 2025-05-29T15:25:35.025320Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [7:7509889192491894474:2100], notify# 
NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [7:7509889196786863226:3044] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:25:35.025349Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [7:7509889192491894474:2100], cacheItem# { Subscriber: { Subscriber: [7:7509889196786863226:3044] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:25:35.025369Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [7:7509889196786863233:3045], recipient# [7:7509889196786863225:2401], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:25:35.025010Z node 5 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [5:7509889189675322155:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [7:7509889196786863230:3044] 2025-05-29T15:25:35.025033Z node 5 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [5:7509889189675322155:2051] Upsert description: path# /dc-1/USER_0/.metadata/initialization/migrations 2025-05-29T15:25:35.025058Z node 5 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [5:7509889189675322155:2051] Subscribe: subscriber# [7:7509889196786863230:3044], path# /dc-1/USER_0/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:25:35.025079Z node 5 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [5:7509889189675322158:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [7:7509889196786863231:3044] 2025-05-29T15:25:35.025092Z node 5 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [5:7509889189675322158:2054] Upsert description: path# /dc-1/USER_0/.metadata/initialization/migrations 2025-05-29T15:25:35.025101Z node 5 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [5:7509889189675322158:2054] Subscribe: subscriber# [7:7509889196786863231:3044], path# /dc-1/USER_0/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:25:35.025116Z node 5 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [5:7509889189675322161:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [7:7509889196786863232:3044] 2025-05-29T15:25:35.025119Z node 5 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: 
[5:7509889189675322161:2057] Upsert description: path# /dc-1/USER_0/.metadata/initialization/migrations 2025-05-29T15:25:35.025126Z node 5 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [5:7509889189675322161:2057] Subscribe: subscriber# [7:7509889196786863232:3044], path# /dc-1/USER_0/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:25:35.025636Z node 5 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [5:7509889189675322155:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7509889196786863230:3044] 2025-05-29T15:25:35.025645Z node 5 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [5:7509889189675322158:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7509889196786863231:3044] 2025-05-29T15:25:35.025653Z node 5 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [5:7509889189675322161:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7509889196786863232:3044] 2025-05-29T15:25:35.121644Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7509889191308197622:2103], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:25:35.121676Z node 6 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [6:7509889191308197622:2103], path# /dc-1/USER_1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-05-29T15:25:35.121741Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][6:7509889195603165167:2231][/dc-1/USER_1/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:25:35.121848Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][6:7509889195603165167:2231][/dc-1/USER_1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/initialization/migrations Version: 0 }: sender# [6:7509889195603165168:2231] 2025-05-29T15:25:35.121871Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][6:7509889195603165167:2231][/dc-1/USER_1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/initialization/migrations Version: 0 }: sender# [6:7509889195603165169:2231] 2025-05-29T15:25:35.121881Z node 6 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][6:7509889195603165167:2231][/dc-1/USER_1/.metadata/initialization/migrations] Set up state: owner# [6:7509889191308197622:2103], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:25:35.121887Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][6:7509889195603165167:2231][/dc-1/USER_1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/initialization/migrations Version: 0 }: sender# [6:7509889195603165170:2231] 2025-05-29T15:25:35.121893Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][6:7509889195603165167:2231][/dc-1/USER_1/.metadata/initialization/migrations] Ignore empty state: owner# [6:7509889191308197622:2103], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 
elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:25:35.121902Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [6:7509889191308197622:2103], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_1/.metadata/initialization/migrations PathId: Strong: 0 } 2025-05-29T15:25:35.121915Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [6:7509889191308197622:2103], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_1/.metadata/initialization/migrations PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [6:7509889195603165167:2231] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:25:35.121933Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [6:7509889191308197622:2103], cacheItem# { Subscriber: { Subscriber: [6:7509889195603165167:2231] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:25:35.121946Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [6:7509889195603165174:2232], recipient# [6:7509889195603165166:2320], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-05-29T15:25:35.122249Z node 6 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/initialization/migrations;error=incorrect path status: LookupError; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-05-29T15:25:23.098815Z :ReadSession INFO: Random seed for debugging is 1748532323098805 2025-05-29T15:25:23.478514Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889143371064085:2084];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:23.478552Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00121f/r3tmp/tmp8KjTcO/pdisk_1.dat 2025-05-29T15:25:23.595291Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:25:23.596203Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889145372189357:2213];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:23.596307Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:25:23.598197Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:23.714148Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509889145372189172:2071] 1748532323512060 != 1748532323512057 2025-05-29T15:25:23.734451Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:23.737158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:23.737181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:23.743222Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:25:23.743724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9025, node 1 2025-05-29T15:25:23.823031Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:23.823060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:23.843513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:23.868164Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/00121f/r3tmp/yandexFmsD98.tmp 2025-05-29T15:25:23.868177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/00121f/r3tmp/yandexFmsD98.tmp 2025-05-29T15:25:23.868247Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/00121f/r3tmp/yandexFmsD98.tmp 2025-05-29T15:25:23.868296Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:23.882316Z INFO: TTestServer started on Port 4141 GrpcPort 9025 TClient is connected to server localhost:4141 PQClient connected to localhost:9025 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:23.979689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:23.989372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:24.004320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:24.019567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:24.340542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889147666032293:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:24.340569Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:24.340721Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889147666032329:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:24.341645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-05-29T15:25:24.345750Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889147666032362:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:24.345864Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:24.354549Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889147666032331:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-05-29T15:25:24.396722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:25:24.443381Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889147666032513:2720] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:24.492789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:25:24.495697Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889147666032527:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:24.496461Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjhjOWVmYjUtOTY5MzE1ZmQtYTY5YzVkYi1hOTI2NTUyNw==, ActorId: [1:7509889147666032290:2333], ActorState: ExecuteState, TraceId: 01jweabsze5dqkhrvnsfst9q92, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:24.496930Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:24.578575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:9025", true, true, 1000); 2025-05-29T15:25:24.635301Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889147666032830:2382], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:24.636131Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDQ0YTUyNjYtYzI4Nzc4NjctNDk0ZWQ5MWMtMjczNzNhMmY=, ActorId: [1:7509889147666032827:2380], ActorState: ExecuteState, TraceId: 01jweabt8b7z0wr7jzz8tb3nvm, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:25:24.941278Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" ty ... sition { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:30.435456Z node 7 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [7:7509889173669362356:2789] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:30.449712Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [7:7509889173669362366:2357], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:30.450297Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=7&id=NWY4ZDU2NDItNzhmMDg4OTMtMmNmYThmMTQtODIwYjFlZTA=, ActorId: [7:7509889173669362026:2331], ActorState: ExecuteState, TraceId: 01jweabztn6barh1c67q2zv7cv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:30.450451Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:30.468907Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:7152", true, true, 1000); 2025-05-29T15:25:30.516617Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [7:7509889173669362561:2380], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:30.517466Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=7&id=NjY0NzUxODEtMzIyYmJlOGItYjJhZjU3ZTktYjgxYWQwZmI=, ActorId: [7:7509889173669362556:2378], ActorState: ExecuteState, TraceId: 01jweac003fby43yn9mxd0c3mj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13A6D2CC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C25189) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x138B9A44) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x138B89A8) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x138B7BF2) NPersQueue::SDKTestSetup::Start(bool, bool)+1450 (0x138AD5DA) NPersQueue::SDKTestSetup::SDKTestSetup(TBasicString> const&, bool, TVector> const&, NActors::NLog::EPriority, unsigned int, unsigned long)+675 (0x138AB0E3) NTestSuitePersQueueSdkReadSessionTest::TTestCaseStopResumeReadingData::Execute_(NUnitTest::TTestContext&)+127 (0x1386ECBF) NTestSuitePersQueueSdkReadSessionTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x138AAB07) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C2703E) NTestSuitePersQueueSdkReadSessionTest::TCurrentTest::Execute()+481 (0x138AA4A1) NUnitTest::TTestFactory::Execute()+803 (0x13C277B3) NUnitTest::RunMain(int, char**)+3021 (0x13C390FD) ??+0 (0x7FE3B078AD90) __libc_start_main+128 (0x7FE3B078AE40) _start+41 (0x129BB029) 2025-05-29T15:25:31.202836Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:31.202846Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:31.202852Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:31.202967Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:31.206933Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:31.208799Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:31.209010Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-05-29T15:25:31.211181Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:31.211189Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:31.211193Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:31.211264Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:31.212214Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:31.212270Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:31.212376Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-05-29T15:25:31.212565Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-05-29T15:25:31.212731Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-05-29T15:25:31.212749Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-05-29T15:25:31.212800Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:31.212808Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-05-29T15:25:31.212813Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-05-29T15:25:31.212824Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-05-29T15:25:31.216426Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:31.216432Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:31.216437Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:31.216532Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:31.230826Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:31.230921Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:31.234846Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-05-29T15:25:31.235058Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:31.235119Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:25:31.235143Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:31.235152Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-05-29T15:25:31.235163Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). 
Partition stream id: 1 2025-05-29T15:25:31.235654Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:31.235659Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:31.235663Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-05-29T15:25:31.235734Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-05-29T15:25:31.235847Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-05-29T15:25:31.235886Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:31.236349Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:31.236385Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-05-29T15:25:31.236398Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-05-29T15:25:31.236415Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> TestKinesisHttpProxy::TestEmptyHttpBody >> DataStreams::TestControlPlaneAndMeteringData >> DataShardVolatile::DistributedWrite >> TOlapReboots::DropTableThenStore [GOOD] >> DstCreator::ReplicationModeMismatch >> KqpCost::IndexLookupAndTake+useSink >> TestYmqHttpProxy::TestListQueueTags >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasureMirror3of4 [GOOD] >> RetryPolicy::RetryWithBatching [FAIL] >> DstCreator::NonExistentSrc >> DataShardVolatile::DistributedWriteThenImmediateUpsert |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasureMirror3of4 [GOOD] >> DstCreator::WithSyncIndex >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx5 [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose [FAIL] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::DropTableThenStore [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:24:40.378667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 
600.000000s, MaxRate# 1 2025-05-29T15:24:40.378690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:40.378697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:40.378704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:40.378711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:40.378715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:40.378725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:40.378762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:40.378880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:40.378966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:40.394728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:40.394764Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:40.394877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:24:40.397683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:40.397716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:40.397758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:40.400633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:40.400719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:40.400860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:40.401054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:40.401770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:40.401820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:40.402109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:40.402120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:40.402159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:40.402168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:40.402175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:40.402198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:24:40.403668Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:24:40.426369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:40.426445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.426512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:40.426559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:40.426572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.427244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:40.427272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:40.427327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-05-29T15:24:40.427344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:40.427350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:40.427355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:40.427788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.427801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:40.427807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:40.428155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.428165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.428172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:40.428178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:40.428954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:40.429381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:40.429415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:40.429640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:40.429667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:40.429691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:40.429763Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 4046678944, cookie: 1005 2025-05-29T15:25:37.481157Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:25:37.481161Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:25:37.481166Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:25:37.481248Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:25:37.481258Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:25:37.481261Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:25:37.481265Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-29T15:25:37.481269Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:25:37.481278Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2025-05-29T15:25:37.481802Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1005:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-05-29T15:25:37.481827Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 0, tablet: 72075186233409546 2025-05-29T15:25:37.482020Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6146: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2025-05-29T15:25:37.482028Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409546, partId: 0 2025-05-29T15:25:37.482040Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2025-05-29T15:25:37.482048Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1005:0 129 -> 130 2025-05-29T15:25:37.482193Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:25:37.482242Z node 
88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:25:37.482508Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2025-05-29T15:25:37.482534Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-05-29T15:25:37.482540Z node 88 :FLAT_TX_SCHEMESHARD INFO: drop_store.cpp:235: TDropOlapStore TProposedDeleteParts operationId# 1005:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:25:37.482556Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:25:37.482588Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:25:37.482593Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:25:37.482598Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:25:37.482601Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:25:37.482606Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2025-05-29T15:25:37.482610Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:25:37.482614Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:0 2025-05-29T15:25:37.482619Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:0 2025-05-29T15:25:37.482642Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:25:37.483041Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:25:37.483203Z node 88 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:25:37.483316Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:37.483483Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:25:37.483624Z node 88 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186233409546;self_id=[88:332:2318];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; Forgetting tablet 72075186233409546 2025-05-29T15:25:37.485061Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in 
candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:25:37.485072Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:25:37.485088Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:25:37.485645Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:25:37.485660Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:25:37.485888Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1005 2025-05-29T15:25:37.485937Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-05-29T15:25:37.485945Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-05-29T15:25:37.486017Z node 88 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-05-29T15:25:37.486037Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-05-29T15:25:37.486042Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [88:546:2515] TestWaitNotification: OK eventTxId 1005 2025-05-29T15:25:37.486131Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:25:37.486170Z node 88 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 52us result status StatusPathDoesNotExist 2025-05-29T15:25:37.486210Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/OlapStore/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:25:37.486277Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:25:37.486289Z node 88 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 13us result status StatusPathDoesNotExist 2025-05-29T15:25:37.486305Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/OlapStore" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> DstCreator::ReplicationModeMismatch [GOOD] >> DstCreator::ReplicationConsistencyLevelMismatch ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [FAIL] Test command err: 2025-05-29T15:25:31.456250Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889181383555480:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:31.456300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:31.478053Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889180823915765:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:31.478065Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:31.511541Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001671/r3tmp/tmp5XxZls/pdisk_1.dat 2025-05-29T15:25:31.523260Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:25:31.676472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:31.676498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:31.684458Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:31.685076Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:25:31.691069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23053, node 1 2025-05-29T15:25:31.730965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001671/r3tmp/yandexum2gRr.tmp 2025-05-29T15:25:31.730978Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001671/r3tmp/yandexum2gRr.tmp 2025-05-29T15:25:31.731043Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001671/r3tmp/yandexum2gRr.tmp 2025-05-29T15:25:31.731091Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:31.746379Z INFO: TTestServer started on Port 11751 GrpcPort 23053 2025-05-29T15:25:31.771184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:31.771212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:31.775775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11751 PQClient connected to localhost:23053 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:31.855529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:31.867394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:31.903141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:25:32.203235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889185678523714:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:32.203266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:32.203525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889185678523741:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:32.204456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-05-29T15:25:32.214793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889185678523775:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:32.214959Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:32.230958Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889185678523743:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-05-29T15:25:32.336724Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889185678523822:2768] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:32.355536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:25:32.419985Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889185678523833:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:32.420536Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzZmMTgyNTEtMTE0NWUxNWItN2U2ZjZjYjctMTM0NWVlOWU=, ActorId: [1:7509889185678523711:2338], ActorState: ExecuteState, TraceId: 01jweac1n6csbfhv7mfyfvsdey, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:32.421024Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:32.458437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:25:32.501435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:25:32.641532Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889185678524172:2382], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:32.642240Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWVlNDUxZDctZDhlMGFiMDQtOWM2M2M1YmYtZDI2ODcxYzU=, ActorId: [1:7509889185678524169:2380], ActorState: ExecuteState, TraceId: 01jweac2259jw01tsjjjn046y2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13968F2C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B1B9F9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x1381C3A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x1381B788) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator Disconnected 2025-05-29T15:25:33.791184Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:33.792175Z node 3 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-05-29T15:25:33.792685Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10036, node 3 2025-05-29T15:25:33.803568Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001671/r3tmp/yandexfLj2MJ.tmp 2025-05-29T15:25:33.803581Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001671/r3tmp/yandexfLj2MJ.tmp 2025-05-29T15:25:33.803648Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001671/r3tmp/yandexfLj2MJ.tmp 2025-05-29T15:25:33.803692Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:33.809710Z INFO: TTestServer started on Port 14561 GrpcPort 10036 TClient is connected to server localhost:14561 PQClient connected to localhost:10036 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:25:33.835586Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:33.835616Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:33.837993Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:33.855723Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:33.873464Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:25:34.180648Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509889190927355487:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:34.180690Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:34.180788Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509889190927355523:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:34.181832Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2025-05-29T15:25:34.199686Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-05-29T15:25:34.213358Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7509889190927355525:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2025-05-29T15:25:34.288809Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-05-29T15:25:34.295770Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:7509889190927355755:2869] txid# 281474976720665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:34.302435Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:7509889190927355778:2359], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:34.302998Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=ZWVmMTQ5ZTMtZTkyOTgzMjItZjRmNWJiNGEtZjkyMzdkM2M=, ActorId: [3:7509889190927355483:2336], ActorState: ExecuteState, TraceId: 01jweac3k27j94xkecknqw1zcn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:34.303124Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:34.325848Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:25:34.474705Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:7509889190927355949:2381], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:34.475709Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=MTIzNTYyOTQtYmEwZDk3MzAtMzFhNmFlMjAtY2QxZGQ0MTI=, ActorId: [3:7509889190927355946:2379], ActorState: ExecuteState, TraceId: 01jweac3vs4f3mpqfcqxb09hf7, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13968F2C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B1B9F9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x1381C3A4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x1381B788) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x1381A9D2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x137F74F7) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x137F9978) NKikimr::NPersQueueTests::NTestSuiteTPersQueueNewSchemeCacheTest::TestReadAtTimestampImpl(unsigned int, std::__y1::function> (unsigned int)>)+98 (0x137FD1F2) NKikimr::NPersQueueTests::NTestSuiteTPersQueueNewSchemeCacheTest::TTestCaseTestReadAtTimestamp_10::Execute_(NUnitTest::TTestContext&)+43 (0x1380085B) NKikimr::NPersQueueTests::NTestSuiteTPersQueueNewSchemeCacheTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13819BD7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B1D8AE) NKikimr::NPersQueueTests::NTestSuiteTPersQueueNewSchemeCacheTest::TCurrentTest::Execute()+481 (0x13819411) NUnitTest::TTestFactory::Execute()+803 (0x13B1E023) NUnitTest::RunMain(int, char**)+3021 (0x13B2F99D) ??+0 (0x7F28BFD05D90) __libc_start_main+128 (0x7F28BFD05E40) _start+41 (0x12954029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx5 [GOOD] Test command err: iteration# 5 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 11 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 17 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 23 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 29 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 35 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 41 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 47 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 53 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 59 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 65 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 71 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 77 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 83 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 89 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 95 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 
blobsUnwritten# 0 [iterations 101 through 485 (step 6) omitted: each reports the identical counters BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain [GOOD] >> TConsoleTests::TestRemoveAttributes >> DstCreator::NonExistentSrc [GOOD] >> DstCreator::KeyColumnsSizeMismatch >> DataStreams::TestControlPlaneAndMeteringData [GOOD] >> DataStreams::ChangeBetweenRetentionModes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ListShardsToken Test command err: 2025-05-29T15:25:02.559624Z node 1 :METADATA_PROVIDER WARN: log.cpp:784:
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889054462626377:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:02.574228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ce4/r3tmp/tmpw0WK2W/pdisk_1.dat 2025-05-29T15:25:02.711229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:02.711255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:02.713443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:02.722802Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889054462626181:2079] 1748532302554167 != 1748532302554170 2025-05-29T15:25:02.723076Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31779, node 1 2025-05-29T15:25:02.750946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:02.750957Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:02.750960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:02.750997Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4990 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-05-29T15:25:02.803091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-29T15:25:02.805871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:4990 2025-05-29T15:25:02.880483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:02.887162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-05-29T15:25:02.887699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:03.015957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:03.081793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:03.100477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:03.119877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-05-29T15:25:03.175861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:03.186692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:03.202620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:03.212351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:03.227163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:03.240391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:03.255081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:03.324011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889058757594865:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:03.324038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:03.324206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889058757594877:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:03.325027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-05-29T15:25:03.328001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2025-05-29T15:25:03.328074Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889058757594879:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-05-29T15:25:03.419193Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889058757594930:2854] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } VERIFY failed (2025-05-29T15:25:03.530544Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed 2025-05-29T15:25:03.528591Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889058757594939:2380], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:03.529142Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjY5NzU5YjktYjEzZjA0M2UtODZiNjZhNjItMTJkNDcwYmE=, ActorId: [1:7509889058757594862:2370], ActorState: ExecuteState, TraceId: 01jweab5evd6kq4hy2vghx0n1n, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13DFAE11) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()+426 (0x13DFA68A) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7F2903A87D90) __libc_start_main+128 (0x7F2903A87E40) _start+41 (0x12EC8029) test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ce4/r3tmp/tmpEWqMKC/pdisk_1.dat 2025-05-29T15:25:07.207471Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:25:07.276131Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:07.278939Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889077800445118:2079] 1748532307112476 != 1748532307112479 TServer::EnableGrpc on GrpcPort 20581, node 1 2025-05-29T15:25:07.291351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or ... 
cBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13DFAE11) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()+426 (0x13DFA68A) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7F82FB112D90) __libc_start_main+128 (0x7F82FB112E40) _start+41 (0x12EC8029) 2025-05-29T15:25:35.104322Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889198875801248:2194];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:35.104351Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ce4/r3tmp/tmpDfgYRl/pdisk_1.dat 2025-05-29T15:25:35.188530Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:35.192486Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889198875801093:2079] 1748532335103347 != 1748532335103350 TServer::EnableGrpc on GrpcPort 17043, node 1 2025-05-29T15:25:35.206905Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:35.206916Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:35.206918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:35.206956Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:35.210566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:35.210590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:35.211484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13783 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:35.283639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:35.295080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:13783 2025-05-29T15:25:35.375281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:35.383673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:25:35.392237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:35.418857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-05-29T15:25:35.424229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:35.471439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:35.496569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:35.529428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-29T15:25:35.551180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:25:35.565880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:35.576784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-05-29T15:25:35.590126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:35.602942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:35.615635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-05-29T15:25:35.658549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889198875802482:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:35.658567Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:35.658687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889198875802494:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:35.659439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480 2025-05-29T15:25:35.662609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 2025-05-29T15:25:35.662676Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889198875802496:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-05-29T15:25:35.728636Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889198875802547:2853] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:35.816153Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889198875802563:2380], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:35.816740Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OGVkZGYyMDEtN2VlYmNlOTUtNjg1NDRkOC00YzhkNmVkZg==, ActorId: [1:7509889198875802479:2370], ActorState: ExecuteState, TraceId: 01jweac51a9dbt6e2vqj70mkpx, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:25:35.818043Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13DFAE11) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()+426 (0x13DFA68A) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7FABC4F80D90) __libc_start_main+128 (0x7FABC4F80E40) _start+41 (0x12EC8029) >> DstCreator::WithIntermediateDir >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] Test command err: 2025-05-29T15:25:37.894862Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889207025659782:2196];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:37.895861Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0015ee/r3tmp/tmpBBIxyB/pdisk_1.dat 2025-05-29T15:25:38.037311Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889207025659625:2079] 1748532337892789 != 1748532337892792 2025-05-29T15:25:38.048550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:38.048580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:38.048931Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:38.049878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28360 TServer::EnableGrpc on GrpcPort 31624, node 1 2025-05-29T15:25:38.130980Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:38.130992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:38.130994Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2025-05-29T15:25:38.131036Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28360 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:38.295084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:38.303159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:38.307116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:38.339699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532338343 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532338406 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532338343 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532338406 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-05-29T15:25:38.367039Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:38.367062Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:38.367064Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:38.367460Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:39.038042Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532338371, tx_id: 281474976715658 } } } 2025-05-29T15:25:39.038138Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:39.038532Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges), source_location: 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484} 2025-05-29T15:25:39.039010Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:335: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532338406 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact ... FIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:39.543521Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11821 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:39.651899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:39.653759Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:39.654559Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:39.682932Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532339701 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532339750 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532339701 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532339750 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-05-29T15:25:39.718975Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:39.719004Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:39.719006Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:39.719329Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:39.986853Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532339722, tx_id: 281474976715658 } } } 2025-05-29T15:25:39.986936Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:39.987349Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges), source_location: 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484} 2025-05-29T15:25:39.987625Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:335: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532339750 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-05-29T15:25:39.987658Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:594: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication consistency level mismatch: expected: CONSISTENCY_LEVEL_ROW, got: 1 >> DstCreator::WithSyncIndex [GOOD] >> DstCreator::Basic >> DstCreator::WithIntermediateDir [GOOD] >> DstCreator::WithAsyncIndex >> DstCreator::SameOwner >> DstCreator::KeyColumnsSizeMismatch [GOOD] >> DataShardVolatile::DistributedWrite [FAIL] >> DataShardVolatile::DistributedWriteBrokenLock >> TGRpcCmsTest::SimpleTenantsTest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndex [GOOD] Test command err: 2025-05-29T15:25:38.935539Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889209169200331:2078];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0015d2/r3tmp/tmpX4knJ8/pdisk_1.dat 2025-05-29T15:25:39.118846Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:39.208379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:39.208403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:39.213750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:39.213842Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889209169200280:2079] 1748532338912571 != 1748532338912574 2025-05-29T15:25:39.243908Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:8295 TServer::EnableGrpc on GrpcPort 4152, node 1 2025-05-29T15:25:39.314781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:39.314792Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
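The StatusSchemeError above is the point of this test: the destination table described by schemeshard carries `ReplicationConfig { ... ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL }`, while DstCreator expects `CONSISTENCY_LEVEL_ROW`, so it rejects the pre-existing path. Note that the error text prints the raw enum value (`got: 1`) rather than the symbolic name. Below is a minimal sketch of such a validation; the enum numbering and function names are assumptions for illustration, not the actual `dst_creator.cpp` code, so the printed number will not match the log's.

```cpp
// Hypothetical sketch of the consistency-level check behind the
// "Replication consistency level mismatch" error above. Enum values and
// names are placeholders; the real ones live in the YDB protobufs.
#include <iostream>
#include <sstream>
#include <string>

enum EConsistencyLevel {
    CONSISTENCY_LEVEL_UNKNOWN = 0,
    CONSISTENCY_LEVEL_ROW = 1,    // numbering assumed, not taken from the proto
    CONSISTENCY_LEVEL_GLOBAL = 2,
};

static const char* ToString(EConsistencyLevel level) {
    switch (level) {
        case CONSISTENCY_LEVEL_ROW:    return "CONSISTENCY_LEVEL_ROW";
        case CONSISTENCY_LEVEL_GLOBAL: return "CONSISTENCY_LEVEL_GLOBAL";
        default:                       return "CONSISTENCY_LEVEL_UNKNOWN";
    }
}

// Returns an empty string on success, or a reason mirroring the log line.
std::string CheckConsistencyLevel(EConsistencyLevel expected, EConsistencyLevel got) {
    if (expected == got) {
        return {};
    }
    std::ostringstream reason;
    // Printing the numeric value of `got` reproduces the slightly confusing
    // "expected: CONSISTENCY_LEVEL_ROW, got: 1" wording seen above.
    reason << "Replication consistency level mismatch: expected: "
           << ToString(expected) << ", got: " << static_cast<int>(got);
    return reason.str();
}

int main() {
    std::cout << CheckConsistencyLevel(CONSISTENCY_LEVEL_ROW, CONSISTENCY_LEVEL_GLOBAL)
              << "\n";
    return 0;
}
```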
2025-05-29T15:25:39.314794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:39.314841Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8295 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:39.449410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:39.457497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:25:39.458408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1748532339575 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532339505 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1748532339575 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... (TRUNCATED) 2025-05-29T15:25:39.574315Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:39.574339Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:39.574342Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:39.574539Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:40.548604Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532339575, tx_id: 281474976710658 } } } 2025-05-29T15:25:40.548714Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:40.549214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:25:40.549793Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 
281474976710659} 2025-05-29T15:25:40.549796Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:306: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 TClient::Ls request: /Root/Replicated 2025-05-29T15:25:40.567488Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-05-29T15:25:40.567811Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:335: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Replicated" PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1748532340611 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourc ... 
untToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 7 PathOwnerId: 72057594046644480 } 2025-05-29T15:25:40.584446Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:585: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 7] TClient::Ls request: /Root/Replicated/index_by_value TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "index_by_value" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTableIndex 
CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1748532340611 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1748532340611 ParentPathId: 6 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { ... (TRUNCATED) TClient::Ls request: /Root/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1748532340611 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } ... (TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1748532340611 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Replicated/index_by_value/indexImplTable" >> DataStreams::ChangeBetweenRetentionModes [GOOD] >> DataStreams::TestCreateExistingStream >> TGRpcCmsTest::AuthTokenTest |67.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |67.0%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |67.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [FAIL] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody Test command err: 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cbd/r3tmp/tmpHE7Eq8/pdisk_1.dat 2025-05-29T15:25:06.082881Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:25:06.142565Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889069724283726:2079] 1748532305977012 != 1748532305977015 2025-05-29T15:25:06.153591Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18965, node 1 2025-05-29T15:25:06.190860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:06.190890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:06.195394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:06.203771Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:06.203781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:06.203784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:06.203827Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19199 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:06.311057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.315194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:19199 2025-05-29T15:25:06.359438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
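Most of the bulk in the TEvDescribeSchemeResult dumps earlier in this section comes from the default `CompactionPolicy`, a three-generation ladder: generation 0 compacts at 8 parts regardless of size, generation 1 at 40 MiB (41943040), generation 2 at 400 MiB (419430400), with force-compaction caps of 128 MiB, 512 MiB, and 16 GiB respectively; the resource-broker task names are offset by one (GenerationId 0 runs as `compaction_gen1`). A condensed way to read those numbers, using an illustrative struct rather than the NKikimrSchemeOp protobuf:

```cpp
// Hypothetical condensed view of the compaction ladder dumped above;
// the struct is illustrative, the values are copied from the log.
#include <cstdint>
#include <cstdio>

struct TGenPolicy {
    int generationId;
    uint64_t sizeToCompact;      // regular compaction size threshold, bytes
    uint32_t countToCompact;     // regular compaction part-count threshold
    uint64_t forceSizeToCompact; // forced compaction cap, bytes
    const char* brokerTask;      // note the off-by-one naming vs. GenerationId
};

int main() {
    const TGenPolicy ladder[] = {
        {0, 0,         8, 134217728ULL,   "compaction_gen1"}, // force cap 128 MiB
        {1, 41943040,  5, 536870912ULL,   "compaction_gen2"}, // 40 MiB -> 512 MiB
        {2, 419430400, 5, 17179869184ULL, "compaction_gen3"}, // 400 MiB -> 16 GiB
    };
    for (const auto& g : ladder) {
        std::printf("gen %d: compact at %llu bytes / %u parts, force at %llu bytes (%s)\n",
                    g.generationId,
                    static_cast<unsigned long long>(g.sizeToCompact),
                    g.countToCompact,
                    static_cast<unsigned long long>(g.forceSizeToCompact),
                    g.brokerTask);
    }
    return 0;
}
```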
2025-05-29T15:25:06.361104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:25:06.361548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.368875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.411042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:06.453007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:06.489476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.504956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.523497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.556869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.576920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.586211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.600688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.705454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889074019252408:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:06.705515Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:06.705707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889074019252420:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:06.706605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480 2025-05-29T15:25:06.709709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 2025-05-29T15:25:06.709774Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889074019252422:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-05-29T15:25:06.802592Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889074019252473:2856] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:06.853084Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889074019252482:2380], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:06.853704Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTc5ODk0M2MtNjU1OWVlODktZWYyMmU5ZmQtZTUwMDc2MDQ=, ActorId: [1:7509889074019252381:2369], ActorState: ExecuteState, TraceId: 01jweab8rf8kcjm7yr2w1yxbwh, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:25:06.858928Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13DFAE11) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()+426 (0x13DFA68A) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7FD01C688D90) __libc_start_main+128 (0x7FD01C688E40) _start+41 (0x12EC8029) 2025-05-29T15:25:09.440233Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889083955982277:2195];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:09.440271Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cbd/r3tmp/tmp8r5BL5/pdisk_1.dat 2025-05-29T15:25:09.541116Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889083955982120:2079] 1748532309437943 != 1748532309437946 2025-05-29T15:25:09.543633Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21147, node 1 2025-05-29T15:25:09.570310Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:09.570323Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:09 ... 
ticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13DFAE11) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()+426 (0x13DFA68A) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7F40CC268D90) __libc_start_main+128 (0x7F40CC268E40) _start+41 (0x12EC8029) 2025-05-29T15:25:37.176577Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889206920483725:2217];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cbd/r3tmp/tmpOPcAPB/pdisk_1.dat 2025-05-29T15:25:37.273396Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:37.342126Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:37.342364Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889206920483522:2079] 1748532337161227 != 1748532337161230 TServer::EnableGrpc on GrpcPort 19368, node 1 2025-05-29T15:25:37.367674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:37.367699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:37.368892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:37.369328Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:37.369330Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:37.369332Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:37.369372Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4860 WaitRootIsUp 'Root'... 
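Both http_proxy suites in this section die the same way: a setup query fails to compile ("yql_expr.h:1874: index out of range"), `RunYqlDataQuery` receives an unsuccessful operation result, and the fixture's VERIFY aborts the whole test binary, which is why the identical stack trace repeats verbatim for each test. The sketch below is a minimal stand-in for that VERIFY-style assertion, assuming only the "requirement <expr> failed" wording visible in the traces; the real YDB/util macro also prints a timestamp and a symbolized backtrace.

```cpp
// Minimal stand-in for the VERIFY-style abort seen in the traces above;
// only the "requirement <expr> failed" wording is reproduced.
#include <cstdio>
#include <cstdlib>

#define VERIFY_SKETCH(expr)                                                        \
    do {                                                                           \
        if (!(expr)) {                                                             \
            std::fprintf(stderr,                                                   \
                         "VERIFY failed: %s:%d %s(): requirement %s failed\n",     \
                         __FILE__, __LINE__, __func__, #expr);                     \
            std::abort();                                                          \
        }                                                                          \
    } while (false)

struct TOperationResult {
    bool success = false;
    bool IsSuccess() const { return success; }
};

void RunSetupQuery() {
    TOperationResult operationResult; // imagine this came back from a failed compile
    VERIFY_SKETCH(operationResult.IsSuccess()); // aborts, taking every later test with it
}

int main() {
    RunSetupQuery();
    return 0;
}
```

The practical consequence for reading a log like this: once the fixture aborts inside `InitKikimr`, every subsequent test in the same binary reports the same trace, so the repeated failures point at one root cause (the compile error), not at N independent bugs.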
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:37.430230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:37.433493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:4860 2025-05-29T15:25:37.493678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:37.507766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:25:37.508780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:37.511901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-05-29T15:25:37.513259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:37.588893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:37.609641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-05-29T15:25:37.644700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:25:37.665546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:37.693156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:37.705800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-05-29T15:25:37.757648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:37.795402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:37.805744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:37.882899Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889206920484911:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:37.882936Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:37.883151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889206920484923:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:37.884133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480 2025-05-29T15:25:37.887415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 2025-05-29T15:25:37.887502Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889206920484925:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-05-29T15:25:37.951259Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889206920484976:2855] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:38.147660Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889206920484985:2380], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:38.148737Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OGE1Njk4MGItYzQ5ODZiMTQtNDAyOGFhZmUtODRjMDBhYTU=, ActorId: [1:7509889206920484883:2368], ActorState: ExecuteState, TraceId: 01jweac76s5nfm2j5924q0fb23, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:25:38.151424Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13DFAE11) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestKinesisHttpProxy::TCurrentTest::Execute()+426 (0x13DFA68A) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7FE275E60D90) __libc_start_main+128 (0x7FE275E60E40) _start+41 (0x12EC8029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ce1/r3tmp/tmplVrbr1/pdisk_1.dat 2025-05-29T15:25:03.666867Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:25:03.711048Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:03.731033Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889058891856832:2079] 1748532303530428 != 1748532303530431 TServer::EnableGrpc on GrpcPort 26957, node 1 2025-05-29T15:25:03.756061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:03.756090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:03.759088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:03.774984Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:03.774995Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:03.774997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:03.775040Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23470 WaitRootIsUp 'Root'... 
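The resource-pool warnings that precede each crash are a separate, benign pattern: on first start two fetchers race, both see `NOT_FOUND` for the default pool, one creates it, and the other's create comes back with "path exist, request accepts it", which is logged as a TX_PROXY ERROR but treated as success (hence the "doublechecking" retry). A hedged sketch of that create-or-accept-existing idiom follows; statuses and names are placeholders, not the actual schemeshard API.

```cpp
// Sketch of the "create, and accept StatusAlreadyExists" idiom implied by the
// TX_PROXY errors above; the simulated call stands in for a schemeshard propose.
#include <atomic>
#include <stdexcept>
#include <string>

enum class EStatus { Accepted, AlreadyExists, SchemeError };

// Simulated schemeshard call: the first caller creates the pool, later
// callers get AlreadyExists, mimicking the race between the two fetchers.
EStatus ProposeCreateResourcePool(const std::string& /*path*/) {
    static std::atomic<bool> created{false};
    return created.exchange(true) ? EStatus::AlreadyExists : EStatus::Accepted;
}

void EnsureDefaultPool(const std::string& path) {
    switch (ProposeCreateResourcePool(path)) {
        case EStatus::Accepted:      // we created it
        case EStatus::AlreadyExists: // someone else did; "request accepts it"
            return;
        case EStatus::SchemeError:
            throw std::runtime_error("cannot create resource pool at " + path);
    }
}

int main() {
    EnsureDefaultPool("/Root/.metadata/workload_manager/pools/default");
    EnsureDefaultPool("/Root/.metadata/workload_manager/pools/default"); // benign loser
    return 0;
}
```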
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:03.865365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:03.868206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:23470 waiting... 2025-05-29T15:25:03.939050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:25:03.946422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:25:03.958944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:03.964828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-05-29T15:25:03.965880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:04.051658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.079971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:04.112603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:04.153030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.178079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.193741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.205708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.229395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.249514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:04.859384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889063186825527:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:04.859404Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:04.859545Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889063186825539:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:04.860274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480 2025-05-29T15:25:04.862797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 2025-05-29T15:25:04.864147Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889063186825541:2377], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715673 completed, doublechecking } 2025-05-29T15:25:04.953247Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889063186825592:2859] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:05.101703Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889063186825601:2381], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:05.102207Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWY4NjM2ODctOTY0ZThhODAtN2IyYzdjNWEtNzYzNjU0NjI=, ActorId: [1:7509889063186825509:2371], ActorState: ExecuteState, TraceId: 01jweab6yq5fcq7ed9zk0ye36t, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:25:05.103256Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13E8C281) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()+419 (0x13E8BC53) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7F89C05EED90) __libc_start_main+128 (0x7F89C05EEE40) _start+41 (0x12EC8029) 2025-05-29T15:25:08.319132Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889081487867265:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:08.339674Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ce1/r3tmp/tmpVkvgPI/pdisk_1.dat 2025-05-29T15:25:08.447683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:08.447710Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:08.448285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:08.448646Z node 1 :CONFIGS_ ... 
NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13E8C281) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()+419 (0x13E8BC53) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7FE439FDFD90) __libc_start_main+128 (0x7FE439FDFE40) _start+41 (0x12EC8029) 2025-05-29T15:25:35.887528Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889198196389854:2194];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:35.887603Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ce1/r3tmp/tmprNJrp1/pdisk_1.dat 2025-05-29T15:25:36.094150Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889198196389699:2079] 1748532335875814 != 1748532335875817 2025-05-29T15:25:36.100600Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:36.106282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:36.106309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:36.106989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63293, node 1 2025-05-29T15:25:36.120207Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:36.120218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:36.120220Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:36.120253Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17607 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:36.223807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:36.227061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:17607 2025-05-29T15:25:36.322397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:36.339400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-05-29T15:25:36.349866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:36.360484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-05-29T15:25:36.361896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:36.397971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:36.430868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:36.464899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 
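
Note: the `[WorkloadService]` records below repeat a benign create-if-missing race: the pool fetcher reports `NOT_FOUND`, a creator actor proposes `ESchemeOpCreateResourcePool`, a concurrent proposal gets `path exist, request accepts it`, and the service retries the fetch after `Transaction ... completed, doublechecking`. A self-contained sketch of that idempotent ensure-then-recheck loop follows; `FetchPool`/`CreatePool`-style callbacks here are illustrative, not YDB's actual workload-service API.

    #include <functional>
    #include <stdexcept>
    #include <string>

    enum class EStatus { Ok, NotFound, AlreadyExists, Error };

    struct TPoolOps {
        std::function<EStatus(const std::string&)> Fetch;   // role of TPoolFetcherActor
        std::function<EStatus(const std::string&)> Create;  // role of TPoolCreatorActor
    };

    // Ensure a pool exists; tolerate the concurrent-create race and re-fetch
    // ("doublecheck") before giving up, as the log records below show.
    inline void EnsurePool(const TPoolOps& ops, const std::string& pool, int retries = 3) {
        for (int i = 0; i < retries; ++i) {
            if (ops.Fetch(pool) == EStatus::Ok) {
                return;  // pool is visible
            }
            const EStatus st = ops.Create(pool);
            if (st == EStatus::Ok || st == EStatus::AlreadyExists) {
                continue;  // scheduled retry: fetch again to doublecheck
            }
        }
        if (ops.Fetch(pool) != EStatus::Ok) {
            throw std::runtime_error("resource pool not available: " + pool);
        }
    }
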
2025-05-29T15:25:36.486756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:36.514725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:25:36.529462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:36.549539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:36.566330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:36.586321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-05-29T15:25:36.905610Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889202491358390:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:36.906955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889202491358402:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:36.907792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-05-29T15:25:36.910614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2025-05-29T15:25:36.910688Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889202491358404:2377], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710673 completed, doublechecking } 2025-05-29T15:25:36.914815Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:36.967593Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889202491358455:2856] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:37.032658Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889202491358464:2381], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:37.035414Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDdiZWMxODUtZjI4MjdmY2EtYjU3OWE5YmUtN2JjYWE5YzA=, ActorId: [1:7509889202491358354:2369], ActorState: ExecuteState, TraceId: 01jweac68752twa78k79vadj5f, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:25:37.042999Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13E8C281) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()+419 (0x13E8BC53) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7F90074DBD90) __libc_start_main+128 (0x7F90074DBE40) _start+41 (0x12EC8029) >> DstCreator::WithAsyncIndex [GOOD] >> DstCreator::SameOwner [GOOD] >> DstCreator::SamePartitionCount ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnsSizeMismatch [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0015c5/r3tmp/tmpAOqxCl/pdisk_1.dat 2025-05-29T15:25:38.510867Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:25:38.604094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:38.604119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:38.630874Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889208592013912:2079] 1748532338298382 != 1748532338298385 2025-05-29T15:25:38.631152Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:38.647279Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9732 TServer::EnableGrpc on GrpcPort 19587, node 1 2025-05-29T15:25:38.746129Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:38.746143Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:38.746145Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:38.746188Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9732 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:38.983186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:38.999451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532339029 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532339029 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... 
(TRUNCATED) 2025-05-29T15:25:39.015235Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:39.015263Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:39.015266Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:39.015512Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:39.561143Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { status: SCHEME_ERROR, issues: } } 2025-05-29T15:25:39.561161Z node 1 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:594: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot describe table: status: SCHEME_ERROR, issue: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0015c5/r3tmp/tmprDhBFT/pdisk_1.dat 2025-05-29T15:25:40.038088Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889217459126620:2093];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:40.038333Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:40.112563Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:40.131354Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:40.131386Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:40.135240Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19723 TServer::EnableGrpc on GrpcPort 8844, node 2 2025-05-29T15:25:40.185463Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:40.185476Z node 2 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:40.185478Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:40.185527Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19723 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:40.247294Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:40.256998Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:40.258504Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:40.297357Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532340296 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532340359 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532340296 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532340359 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-05-29T15:25:40.331714Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:40.331741Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:40.331743Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:40.331946Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:40.796432Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532340331, tx_id: 281474976715658 } } } 2025-05-29T15:25:40.796522Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:40.796918Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges), source_location: 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484} 2025-05-29T15:25:40.797421Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:335: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532340359 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnNames: "value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 
ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-05-29T15:25:40.797450Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:594: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key columns size mismatch: expected: 1, got: 2 >> DataShardVolatile::DistributedWriteThenImmediateUpsert [FAIL] >> DataShardVolatile::DistributedWriteThenSplit >> DstCreator::Basic [GOOD] >> DstCreator::CannotFindColumn >> TOlapReboots::CreateTable [GOOD] |67.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest |67.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |67.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |67.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithAsyncIndex [GOOD] Test command err: 2025-05-29T15:25:40.350964Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889218358197822:2267];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:40.351011Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0015be/r3tmp/tmpCZxyr4/pdisk_1.dat 2025-05-29T15:25:40.460309Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889218358197585:2079] 1748532340346534 != 1748532340346537 2025-05-29T15:25:40.460512Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:64187 TServer::EnableGrpc on GrpcPort 31341, node 1 2025-05-29T15:25:40.503015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:40.503042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:40.505514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:40.505522Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:40.505524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:40.505564Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:40.511160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:40.549931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:40.552895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:40.554072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532340667 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532340597 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532340667 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-05-29T15:25:40.646078Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:40.646102Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:40.646104Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:40.646304Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:40.821756Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532340667, tx_id: 281474976715658 } } } 2025-05-29T15:25:40.821853Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:40.822285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-05-29T15:25:40.822665Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 
281474976715659} 2025-05-29T15:25:40.822669Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:306: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2025-05-29T15:25:40.845252Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-05-29T15:25:40.845267Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:585: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 4] TClient::Ls request: /Root/Dir/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532340891 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-05-29T15:25:41.106184Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889224874076969:2196];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:41.122512Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0015be/r3tmp/tmpoNtlIR/pdisk_1.dat 2025-05-29T15:25:41.156197Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:41.157847Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509889224874076812:2079] 1748532341105458 != 1748532341105461 TClient is connected to server localhost:9746 TServer::EnableGrpc on GrpcPort 24981, node 2 2025-05-29T15:25:41.199281Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:41.199290Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:41.199292Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:41.199332Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9746 2025-05-29T15:25:41.231421Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:41.231449Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:41.235351Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:41.250175Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:41.252297Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:25:41.255360Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1748532341353 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532341297 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1748532341353 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... 
(TRUNCATED) 2025-05-29T15:25:41.332554Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:41.332580Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:41.332582Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:41.338797Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:41.602821Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532341353, tx_id: 281474976710658 } } } 2025-05-29T15:25:41.602952Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:41.603367Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:25:41.603688Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-05-29T15:25:41.603690Z node 2 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:306: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-05-29T15:25:41.640205Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-05-29T15:25:41.640216Z node 2 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:585: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 5] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1748532341675 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" 
ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key... (TRUNCATED) >> DataStreams::TestCreateExistingStream [GOOD] >> DataStreams::ListStreamsValidation |67.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |67.0%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |67.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> TGRpcCmsTest::AlterRemoveTest |67.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestListQueueTags Test command err: 2025-05-29T15:25:05.492841Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889066915300412:2128];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:05.492904Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cd1/r3tmp/tmpOdfz8d/pdisk_1.dat 2025-05-29T15:25:05.740488Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889066915300323:2079] 1748532305491146 != 1748532305491149 2025-05-29T15:25:05.752101Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64554, node 1 2025-05-29T15:25:05.766964Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:05.766974Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:05.766976Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:05.767021Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1915 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:05.822605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.831234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:05.834654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:05.834677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:05.835653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1915 2025-05-29T15:25:05.899327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.903401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:25:05.907291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.921207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:05.958166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:05.985855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-29T15:25:06.054917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2025-05-29T15:25:06.056117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.074884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.088748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.111898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.131852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.154379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.171964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:06.312946Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889071210269009:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:06.312968Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:06.313153Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889071210269021:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:06.313928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480 2025-05-29T15:25:06.316456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 2025-05-29T15:25:06.316523Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889071210269023:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715673 completed, doublechecking } 2025-05-29T15:25:06.405755Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889071210269074:2853] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:06.515725Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889071210269083:2380], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:06.516387Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWMzOTc2YjctMzk2ZWVkYzQtOWIxMGNlMGMtNGVkMzg4Zjc=, ActorId: [1:7509889071210268982:2369], ActorState: ExecuteState, TraceId: 01jweab8c8b64xwhada7s3tbek, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:25:06.517740Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf<char, std::char_traits<char>>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString<char, std::char_traits<char>>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13E8C281) NUnitTest::TTestBase::Run(std::__y1::function<void ()>, TBasicString<char, std::char_traits<char>> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()+419 (0x13E8BC53) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7F04F8068D90) __libc_start_main+128 (0x7F04F8068E40) _start+41 (0x12EC8029) test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cd1/r3tmp/tmp7K3ZZJ/pdisk_1.dat 2025-05-29T15:25:11.109061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:25:11.146438Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889089801870163:2079] 1748532310900839 != 1748532310900842 TServer::EnableGrpc on GrpcPort 21568, node 1 2025-05-29T15:25:11.147393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:11.147414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node ...
NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString<char, std::char_traits<char>>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13E8C281) NUnitTest::TTestBase::Run(std::__y1::function<void ()>, TBasicString<char, std::char_traits<char>> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()+419 (0x13E8BC53) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7F350AEFAD90) __libc_start_main+128 (0x7F350AEFAE40) _start+41 (0x12EC8029) 2025-05-29T15:25:38.226972Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889211820270456:2192];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:38.227012Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cd1/r3tmp/tmpWbDTK8/pdisk_1.dat 2025-05-29T15:25:38.330897Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889211820270303:2079] 1748532338224003 != 1748532338224006 TServer::EnableGrpc on GrpcPort 26177, node 1 2025-05-29T15:25:38.374969Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:38.383565Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:38.383589Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:38.384497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:38.386934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:38.386943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:38.386945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:38.386983Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29220 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:38.480801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:38.487181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:29220 2025-05-29T15:25:38.551779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:38.563511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-05-29T15:25:38.567476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:38.577393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-05-29T15:25:38.578705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:38.653112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:38.700189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:38.724648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:38.762506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:38.793985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:38.824133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:38.885973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:38.946148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:38.961494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:39.283726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889216115239001:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:39.283748Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:39.283882Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889216115239013:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:39.284659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-05-29T15:25:39.294035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2025-05-29T15:25:39.294138Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889216115239015:2377], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710673 completed, doublechecking } 2025-05-29T15:25:39.351742Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889216115239066:2857] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:39.484566Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889216115239075:2381], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 VERIFY failed (2025-05-29T15:25:39.489893Z): ydb/core/http_proxy/ut/datastreams_fixture.h:484 RunYqlDataQuery(): requirement operationResult.IsSuccess() failed 2025-05-29T15:25:39.485154Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjM0ZGU2Ni0zNzNmZmY3Ni0zMWE1YjA2Yi01ODc4MzE0YQ==, ActorId: [1:7509889216115238973:2369], ActorState: ExecuteState, TraceId: 01jweac8j7e25d7eh2k22ghv33, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf<char, std::char_traits<char>>, char const*, unsigned long)+438 (0x13FAF316) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x13FA6317) THttpProxyTestMock::RunYqlDataQuery(TBasicString<char, std::char_traits<char>>)+1470 (0x13E152BE) THttpProxyTestMock::InitKikimr(bool, bool)+10304 (0x13E0F460) THttpProxyTestMock::InitAll(bool, bool)+475 (0x13E0CB7B) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13E8C281) NUnitTest::TTestBase::Run(std::__y1::function<void ()>, TBasicString<char, std::char_traits<char>> const&, char const*, bool)+126 (0x14144F2E) NTestSuiteTestYmqHttpProxy::TCurrentTest::Execute()+419 (0x13E8BC53) NUnitTest::TTestFactory::Execute()+803 (0x141456A3) NUnitTest::RunMain(int, char**)+3021 (0x1415724D) ??+0 (0x7F35C9333D90) __libc_start_main+128 (0x7F35C9333E40) _start+41 (0x12EC8029) >> TGRpcCmsTest::SimpleTenantsTest [GOOD] >> DstCreator::SamePartitionCount [GOOD] >> DstCreator::CannotFindColumn [GOOD] >> TGRpcCmsTest::AuthTokenTest [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [FAIL] >> TGRpcCmsTest::DisabledTxTest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:24:39.078772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:39.078803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:39.078809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:24:39.078816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:39.078822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:39.078826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:39.078837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:39.078852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:39.078965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:39.079054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:39.092367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:39.092386Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:39.092471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:24:39.098842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:39.098884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:39.098928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:39.101859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:39.101924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:39.102020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.102133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:39.102546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:39.102578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:39.102808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:39.102819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-05-29T15:24:39.102854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:39.102862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:39.102868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:39.102885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:24:39.103923Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:24:39.119114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:39.119167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.119212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:39.119247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:39.119255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.119861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.119890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:39.119932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.119947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:39.119953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:39.119958Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:39.120364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.120376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:39.120381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:39.120739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.120751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.120757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:39.120764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:39.121429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:39.121846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:39.121884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:39.122077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.122105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:39.122123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:39.122193Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
meBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:42.147711Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [105:208:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-05-29T15:25:42.147718Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [105:208:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:25:42.147837Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:25:42.147846Z node 105 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:459: TCreateColumnTable TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2025-05-29T15:25:42.147857Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:485: TCreateColumnTable TProposedWaitParts operationId# 1003:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-05-29T15:25:42.148004Z node 105 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:25:42.148019Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:25:42.148024Z node 105 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:25:42.148031Z node 105 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:25:42.148038Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:25:42.148213Z node 105 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:25:42.148229Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:25:42.148234Z node 105 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:25:42.148239Z node 105 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:25:42.148244Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:25:42.148256Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation 
IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-05-29T15:25:42.148853Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-05-29T15:25:42.148878Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:4 msg type: 268697639 2025-05-29T15:25:42.148900Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 0, tablet: 72057594037968897 2025-05-29T15:25:42.149075Z node 105 :HIVE INFO: tablet_helpers.cpp:1441: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 1003 TxPartId: 0 2025-05-29T15:25:42.149232Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5972: Update tablets object reply, message: Status: OK TxId: 1003 TxPartId: 0, at schemeshard: 72057594046678944 2025-05-29T15:25:42.149252Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 1003 TxPartId: 0 2025-05-29T15:25:42.149311Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:25:42.149401Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:25:42.149705Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:25:42.161110Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6146: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 2025-05-29T15:25:42.161136Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-05-29T15:25:42.161162Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 FAKE_COORDINATOR: Erasing txId 1003 2025-05-29T15:25:42.161663Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:25:42.161705Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:25:42.161714Z node 105 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-05-29T15:25:42.161733Z node 105 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:25:42.161739Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:25:42.161744Z node 105 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:25:42.161747Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:25:42.161753Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-05-29T15:25:42.161768Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [105:364:2341] message: TxId: 1003 2025-05-29T15:25:42.161776Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:25:42.161782Z node 105 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:25:42.161787Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:25:42.161823Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:25:42.162985Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:25:42.163002Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [105:425:2395] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:25:42.163130Z node 105 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:25:42.163209Z node 105 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 87us result status StatusSuccess 2025-05-29T15:25:42.163347Z node 105 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 
50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 3 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TGRpcCmsTest::DescribeOptionsTest >> DataShardVolatile::DistributedWriteBrokenLock [FAIL] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD] Test command err: 2025-05-29T15:25:41.931181Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889222435102339:2080];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:41.931204Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0021e1/r3tmp/tmprM3Dfe/pdisk_1.dat 2025-05-29T15:25:42.035087Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:42.043828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:42.043863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:42.055262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23875, node 1 2025-05-29T15:25:42.090930Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:42.090945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:42.090947Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:42.090988Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6428 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:42.163684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:42.200808Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7509889226730070360:2313], Recipient [1:7509889222435102757:2206]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:59768" } 2025-05-29T15:25:42.200827Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-05-29T15:25:42.200833Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:42.200836Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:42.200859Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:59768" 2025-05-29T15:25:42.200903Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1748532342198759) 2025-05-29T15:25:42.200990Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2577: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1748532342198759 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-05-29T15:25:42.201030Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2637: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-05-29T15:25:42.204874Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2025-05-29T15:25:42.205057Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: 
"ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532342198759&action=1" } } } 2025-05-29T15:25:42.205091Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:42.205110Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-05-29T15:25:42.205140Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-05-29T15:25:42.205251Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-05-29T15:25:42.205274Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-05-29T15:25:42.207369Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7509889226730070368:2314], Recipient [1:7509889222435102757:2206]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532342198759&action=1" } UserToken: "" } 2025-05-29T15:25:42.207373Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-05-29T15:25:42.207409Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3353: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532342198759&action=1" } } 2025-05-29T15:25:42.211853Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-05-29T15:25:42.211872Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-05-29T15:25:42.211883Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7509889226730070365:2206], Recipient [1:7509889222435102757:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-05-29T15:25:42.211886Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-05-29T15:25:42.211890Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:42.211893Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:42.211902Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-05-29T15:25:42.211906Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3047: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-05-29T15:25:42.211921Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3206: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-05-29T15:25:42.222986Z node 1 :CMS_TENANTS DEBUG: 
console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-05-29T15:25:42.223000Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:42.223002Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:42.223003Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:42.223016Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-05-29T15:25:42.223024Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3146: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1748532342198759 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-05-29T15:25:42.231054Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2025-05-29T15:25:42.231101Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:42.231113Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:784: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-05-29T15:25:42.231116Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:596: TSubDomainManip(/Root/users/user-1) create subdomain 2025-05-29T15:25:42.232003Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:620: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-05-29T15:25:42.232337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480 2025-05-29T15:25:42.233153Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:832: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-05-29T15:25:42.233169Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:768: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715658 2025-05-29T15:25:42.234164Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:804: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715658 2025-05-29T15:25:42.235801Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:809: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715658 2025-05-29T15:25:42.235901Z node 1 :CMS_TENANTS DEBUG: console_tenants_man ... 
} } 2025-05-29T15:25:42.553401Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:809: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715660 2025-05-29T15:25:42.553409Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:694: TSubdomainManip(/Root/users/user-1) done 2025-05-29T15:25:42.553420Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:710: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-05-29T15:25:42.553434Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7509889226730071268:2206], Recipient [1:7509889222435102757:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-05-29T15:25:42.553436Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-05-29T15:25:42.553441Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:42.553442Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:42.553450Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-05-29T15:25:42.553455Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3146: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1748532342531767 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-05-29T15:25:42.553469Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2927: Remove computational units of /Root/users/user-1 from database txid=1748532342531767 issue= 2025-05-29T15:25:42.556438Z node 3 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888 2025-05-29T15:25:42.563132Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-05-29T15:25:42.563161Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2114: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-05-29T15:25:42.563166Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:42.563217Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7509889222435102670:2205], Recipient [1:7509889222435102757:2206]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-05-29T15:25:42.563220Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-05-29T15:25:42.563225Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:42.563227Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:42.563232Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-05-29T15:25:42.563238Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3146: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1748532342531767 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-05-29T15:25:42.564189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:39: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to 
Hive 72057594037968897 shardIdx 72057594046644480:1 2025-05-29T15:25:42.566805Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-05-29T15:25:42.566816Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-05-29T15:25:42.566819Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-05-29T15:25:42.566821Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-05-29T15:25:42.566824Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-05-29T15:25:42.566826Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-05-29T15:25:42.566829Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-05-29T15:25:42.566831Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-05-29T15:25:42.566833Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-05-29T15:25:42.567577Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2025-05-29T15:25:42.567591Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:42.567598Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-05-29T15:25:42.567632Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-05-29T15:25:42.567993Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-05-29T15:25:42.568003Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-05-29T15:25:42.572240Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-05-29T15:25:42.572626Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-05-29T15:25:42.572646Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7509889226730071389:2206], Recipient [1:7509889222435102757:2206]: 
NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-05-29T15:25:42.572659Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-05-29T15:25:42.572663Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:42.572665Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:42.572672Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-05-29T15:25:42.572677Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3047: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-05-29T15:25:42.579387Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-05-29T15:25:42.579399Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:42.579402Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:42.579403Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:42.579415Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1748532342531767 2025-05-29T15:25:42.579417Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2927: Remove computational units of /Root/users/user-1 from database txid=1748532342531767 issue= 2025-05-29T15:25:42.579420Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2958: Remove tenant /Root/users/user-1 from database txid=1748532342531767 issue= 2025-05-29T15:25:42.579421Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2963: Remove pool /Root/users/user-1:hdd from database 2025-05-29T15:25:42.579441Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3083: Add tenant removal info for /Root/users/user-1 txid=1748532342531767 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-05-29T15:25:42.587287Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2025-05-29T15:25:42.587319Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:42.599257Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7509889226730071432:2507], Recipient [1:7509889222435102757:2206]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532342531767&action=2" } UserToken: "" } 2025-05-29T15:25:42.599268Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-05-29T15:25:42.599304Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3353: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532342531767&action=2" ready: true status: SUCCESS } } 2025-05-29T15:25:42.607255Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7509889226730071435:2509], Recipient [1:7509889222435102757:2206]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" PeerName: "ipv6:[::1]:59768" } 
2025-05-29T15:25:42.607267Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-05-29T15:25:42.607304Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3377: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2025-05-29T15:25:42.615094Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285123, Sender [1:7509889226730071438:2510], Recipient [1:7509889222435102757:2206]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:59768" } 2025-05-29T15:25:42.615103Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:967: StateWork, processing event TEvConsole::TEvListTenantsRequest 2025-05-29T15:25:42.615154Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3421: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2025-05-29T15:25:42.619189Z node 1 :HIVE WARN: tx__status.cpp:57: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-05-29T15:25:42.619250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected
|67.1%| [TA] $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TGRpcCmsTest::RemoveWithAnotherTokenTest
>> TOlapReboots::DropMultipleTables [GOOD]
>> DataStreams::ListStreamsValidation [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::SamePartitionCount [GOOD]
Test command err:
2025-05-29T15:25:41.266566Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889224080315911:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:41.266650Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0015b2/r3tmp/tmp0hoSvw/pdisk_1.dat 2025-05-29T15:25:41.404782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:41.404808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:41.421573Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:41.422923Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13628 TServer::EnableGrpc on GrpcPort 27292, node 1 2025-05-29T15:25:41.463078Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:41.463091Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:41.463093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:41.463136Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable
configuration TClient is connected to server localhost:13628 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:41.599991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:41.611515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:25:41.612175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:25:41.613455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1748532341724 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532341647 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1748532341724 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-05-29T15:25:41.714349Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:41.714372Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:41.714375Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:41.716905Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:41.902368Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532341724, tx_id: 281474976710659 } } } 2025-05-29T15:25:41.902458Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:41.902869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-29T15:25:41.903225Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 
281474976710660} 2025-05-29T15:25:41.903234Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:306: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710660 2025-05-29T15:25:41.916655Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710660 2025-05-29T15:25:41.916670Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:585: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1748532341955 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0015b2/r3tmp/tmpk227sE/pdisk_1.dat 2025-05-29T15:25:42.215618Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:42.215838Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:25:42.216654Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509889225856110955:2079] 1748532342175419 != 1748532342175422 TClient is connected to server localhost:27215 TServer::EnableGrpc on GrpcPort 19521, node 2 2025-05-29T15:25:42.258977Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:42.258989Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:42.258991Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:42.259037Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27215 2025-05-29T15:25:42.274683Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:42.274712Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:42.275119Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:42.324185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:42.334899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:42.335714Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532342410 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532342368 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532342410 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-05-29T15:25:42.374828Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:42.374860Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:42.374863Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:42.378812Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:42.979769Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532342410, tx_id: 281474976715658 } } } 2025-05-29T15:25:42.979900Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:42.980352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:25:42.980708Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-05-29T15:25:42.980711Z node 2 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:306: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 TClient::Ls request: /Root/Table 2025-05-29T15:25:43.002514Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-05-29T15:25:43.002527Z node 2 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:585: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532342410 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532343047 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED)
------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AuthTokenTest [GOOD]
Test command err:
2025-05-29T15:25:41.905178Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889220680982540:2076];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:41.905213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0021d9/r3tmp/tmpQh986Q/pdisk_1.dat 2025-05-29T15:25:42.072114Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:42.075894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:42.075930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:42.087152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8428, node 1 2025-05-29T15:25:42.102945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:42.102961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:42.102963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:42.103011Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17861 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:42.196277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:42.241144Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7509889224975950567:2313], Recipient [1:7509889220680982960:2199]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "ipv6:[::1]:55918" } 2025-05-29T15:25:42.241159Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-05-29T15:25:42.241164Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:42.241167Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:42.241191Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "ipv6:[::1]:55918" 2025-05-29T15:25:42.241236Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1748532342238769) 2025-05-29T15:25:42.241324Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2577: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1748532342238769 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-05-29T15:25:42.241363Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2637: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-05-29T15:25:42.247020Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 
2025-05-29T15:25:42.247212Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532342238769&action=1" } } } 2025-05-29T15:25:42.247247Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:42.247266Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-05-29T15:25:42.247303Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-05-29T15:25:42.247404Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-05-29T15:25:42.247425Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-05-29T15:25:42.255207Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-05-29T15:25:42.255225Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-05-29T15:25:42.255239Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7509889224975950572:2199], Recipient [1:7509889220680982960:2199]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-05-29T15:25:42.255242Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-05-29T15:25:42.255246Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:42.255248Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:42.255258Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-05-29T15:25:42.255263Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3047: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-05-29T15:25:42.255278Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3206: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-05-29T15:25:42.256921Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7509889224975950578:2314], Recipient [1:7509889220680982960:2199]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532342238769&action=1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-05-29T15:25:42.256928Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-05-29T15:25:42.256965Z 
node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3353: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532342238769&action=1" } } 2025-05-29T15:25:42.263286Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-05-29T15:25:42.263297Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:42.263299Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:42.263300Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:42.263317Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-05-29T15:25:42.263325Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3146: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1748532342238769 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-05-29T15:25:42.267224Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2025-05-29T15:25:42.267276Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:42.267287Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:784: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-05-29T15:25:42.267288Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:596: TSubDomainManip(/Root/users/user-1) create subdomain 2025-05-29T15:25:42.268102Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:620: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "Root" 2025-05-29T15:25:42.268471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480 2025-05-29T15:25:42.269392Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:832: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-05-29T15:25:42.269405Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:768: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCom ... 
EvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-05-29T15:25:42.536903Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7509889224975951637:2508], Recipient [1:7509889220680982960:2199]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "ipv6:[::1]:55918" } 2025-05-29T15:25:42.536909Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-05-29T15:25:42.536914Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2130: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-05-29T15:25:42.536926Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7509889220680982859:2198], Recipient [1:7509889220680982960:2199]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-05-29T15:25:42.536927Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-05-29T15:25:42.536997Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3753: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-05-29T15:25:42.538436Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7509889224975951641:2509], Recipient [1:7509889220680982960:2199]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "ipv6:[::1]:55918" } 2025-05-29T15:25:42.538441Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-05-29T15:25:42.538446Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2130: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-05-29T15:25:42.538487Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7509889220680982859:2198], Recipient [1:7509889220680982960:2199]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-05-29T15:25:42.538488Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-05-29T15:25:42.538568Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3753: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units 
{ unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-05-29T15:25:42.540934Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7509889224975951654:2510], Recipient [1:7509889220680982960:2199]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "ipv6:[::1]:55918" } 2025-05-29T15:25:42.540939Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-05-29T15:25:42.540944Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2130: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-05-29T15:25:42.541018Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7509889220680982859:2198], Recipient [1:7509889220680982960:2199]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-05-29T15:25:42.541020Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-05-29T15:25:42.541098Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3753: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-05-29T15:25:42.542102Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:809: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715659 2025-05-29T15:25:42.542108Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:694: TSubdomainManip(/Root/users/user-1) done 2025-05-29T15:25:42.542114Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:710: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-05-29T15:25:42.542125Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435076, Sender [1:7509889224975950671:2199], Recipient [1:7509889220680982960:2199]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-05-29T15:25:42.542127Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:979: StateWork, processing event TEvPrivate::TEvSubdomainReady 2025-05-29T15:25:42.542130Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:42.542132Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:42.542140Z node 1 :CMS_TENANTS DEBUG: console__update_confirmed_subdomain.cpp:22: TTxUpdateConfirmedSubdomain for tenant /Root/users/user-1 to 2 2025-05-29T15:25:42.542146Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3146: Update tenant state in database for /Root/users/user-1 state=RUNNING txid=1748532342238769 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-05-29T15:25:42.542158Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2913: Update database for /Root/users/user-1 confirmedsubdomain=2 2025-05-29T15:25:42.542417Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, 
received event# 273285122, Sender [1:7509889224975951673:2511], Recipient [1:7509889220680982960:2199]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "ipv6:[::1]:55918" } 2025-05-29T15:25:42.542422Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-05-29T15:25:42.542426Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2130: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-05-29T15:25:42.542438Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7509889220680982859:2198], Recipient [1:7509889220680982960:2199]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-05-29T15:25:42.542440Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-05-29T15:25:42.542518Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3753: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-05-29T15:25:42.543631Z node 1 :CMS_TENANTS DEBUG: console__update_confirmed_subdomain.cpp:42: TTxUpdateConfirmedSubdomain complete for /Root/users/user-1 2025-05-29T15:25:42.543639Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:42.543747Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7509889224975951677:2512], Recipient [1:7509889220680982960:2199]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "ipv6:[::1]:55918" } 2025-05-29T15:25:42.543751Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-05-29T15:25:42.543757Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2130: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-05-29T15:25:42.543769Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7509889220680982859:2198], Recipient [1:7509889220680982960:2199]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-05-29T15:25:42.543770Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-05-29T15:25:42.543857Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3753: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: RUNNING required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } TClient is connected to server localhost:17861 TClient::Ls request: 
/Root/users/user-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root/users/user-1" PathId: 1 SchemeshardId: 72075186224037897 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037897 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 3 ProcessingParams { Version: 3 PlanReso... (TRUNCATED) 2025-05-29T15:25:42.624172Z node 1 :HIVE WARN: tx__status.cpp:57: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-05-29T15:25:42.624275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected |67.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |67.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |67.1%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} |67.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots >> TGRpcCmsTest::AlterRemoveTest [GOOD] >> DataShardVolatile::DistributedWriteThenSplit [FAIL] >> DataShardVolatile::DistributedWriteThenReadIterator >> DstCreator::ExistingDst |67.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::CannotFindColumn [GOOD] Test command err: 2025-05-29T15:25:40.956764Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889216960230549:2212];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0015ad/r3tmp/tmp8c1RLD/pdisk_1.dat 2025-05-29T15:25:40.999608Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:41.067142Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:41.070225Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889216960230362:2079] 1748532340951382 != 1748532340951385 2025-05-29T15:25:41.097559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:41.097613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:41.098327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10685 TServer::EnableGrpc on GrpcPort 63805, node 1 2025-05-29T15:25:41.178970Z node 
1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:41.178985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:41.178989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:41.179034Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10685 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:41.329124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:41.341965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:41.356316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532341444 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532341381 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532341444 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-05-29T15:25:41.429134Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:41.429159Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:41.429161Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:41.429350Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:41.794486Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532341444, tx_id: 281474976715658 } } } 2025-05-29T15:25:41.794605Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:41.795041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:25:41.795390Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 
281474976715659} 2025-05-29T15:25:41.795394Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:306: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 TClient::Ls request: /Root/Replicated 2025-05-29T15:25:41.857400Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-05-29T15:25:41.857416Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:585: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532341899 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-05-29T15:25:42.308655Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889228830685534:2091];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:42.308906Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0015ad/r3tmp/tmpZtOPuG/pdisk_1.dat 2025-05-29T15:25:42.355652Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:15123 TServer::EnableGrpc on GrpcPort 2712, node 2 2025-05-29T15:25:42.410965Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:42.410980Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:42.410982Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:42.411032Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:42.416551Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:42.416575Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:42.417466Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15123 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:42.547291Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:42.549312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:42.550238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:42.567005Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532342592 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532342627 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532342592 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532342627 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-05-29T15:25:42.585848Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:42.585881Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:42.585883Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:42.586119Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:43.170086Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532342613, tx_id: 281474976715658 } } } 2025-05-29T15:25:43.170161Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:43.170528Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges), source_location: 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484} 2025-05-29T15:25:43.170790Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:335: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532342627 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-05-29T15:25:43.170823Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:594: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot find column: name: value ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/datastreams/ut/unittest >> DataStreams::ListStreamsValidation [GOOD] Test command err: 2025-05-29T15:25:37.540018Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889204517058542:2272];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:37.540048Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000a4a/r3tmp/tmpaq2UAm/pdisk_1.dat 2025-05-29T15:25:37.758974Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27211, node 1 2025-05-29T15:25:37.803125Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:37.803136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:37.803138Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:37.803181Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11575 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:37.875746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:25:37.888947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:37.888975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:37.891422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:25:37.899168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:37.956967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:11575 2025-05-29T15:25:38.035448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:38.539513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:25:38.643835Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037890:1][1:7509889208812027203:2370] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:6:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-05-29T15:25:38.724808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:25:38.869446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:25:38.901880Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2025-05-29T15:25:38.901894Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-05-29T15:25:38.901897Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-05-29T15:25:38.901899Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-05-29T15:25:38.901902Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-05-29T15:25:38.901904Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2025-05-29T15:25:38.901906Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-05-29T15:25:38.901908Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2025-05-29T15:25:38.901910Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-05-29T15:25:38.901912Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2025-05-29T15:25:38.901914Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-05-29T15:25:38.901916Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-05-29T15:25:38.901918Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-05-29T15:25:38.901920Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-05-29T15:25:38.901922Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-05-29T15:25:38.901925Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-05-29T15:25:38.921602Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,19) wasn't found 2025-05-29T15:25:38.921627Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found 2025-05-29T15:25:38.921631Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,13) wasn't found 2025-05-29T15:25:38.921636Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,10) wasn't found 2025-05-29T15:25:38.921640Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,16) wasn't found 2025-05-29T15:25:38.921644Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,21) wasn't found 2025-05-29T15:25:38.921649Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,18) wasn't found 2025-05-29T15:25:38.921653Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,15) wasn't found 2025-05-29T15:25:38.921657Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,12) wasn't found 2025-05-29T15:25:38.921662Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2025-05-29T15:25:38.921666Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,9) wasn't found 2025-05-29T15:25:38.921671Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,20) wasn't found 2025-05-29T15:25:38.921675Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,17) wasn't found 2025-05-29T15:25:38.921680Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,14) wasn't found 2025-05-29T15:25:38.921685Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,11) wasn't found 2025-05-29T15:25:38.921689Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,8) wasn't found 2025-05-29T15:25:40.167965Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509889219344717242:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:40.168049Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/ciyv/000a4a/r3tmp/tmpQI2AnE/pdisk_1.dat 2025-05-29T15:25:40.261070Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:40.262205Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:40.262220Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:40.272464Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8425, node 4 2025-05-29T15:25:40.355017Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:40.355029Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:40.355031Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:40.355092Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21532 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:40.399792Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:40.418881Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:40.484148Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:21532 2025-05-29T15:25:40.524497Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:40.657303Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:25:40.698902Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:25:40.738264Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:25:41.599163Z node 7 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7509889224362421545:2143];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:41.599183Z node 7 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000a4a/r3tmp/tmpIuKfQB/pdisk_1.dat 2025-05-29T15:25:41.729940Z node 7 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:41.731392Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:41.731422Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:41.742130Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5266, node 7 2025-05-29T15:25:41.767023Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:41.767035Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:41.767037Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:41.767081Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5385 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:25:41.803568Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:41.810869Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:25:41.855573Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:5385 2025-05-29T15:25:41.899367Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:41.903449Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-05-29T15:25:42.010506Z node 7 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [7:7509889228657390651:3215] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/stream_TestCreateExistingStream\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypePersQueueGroup, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp:345" severity: 1 } 2025-05-29T15:25:42.986508Z node 10 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7509889226247156637:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:42.986559Z node 10 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000a4a/r3tmp/tmpu6o4wL/pdisk_1.dat 2025-05-29T15:25:43.037838Z node 10 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27517, node 10 2025-05-29T15:25:43.090967Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:43.090983Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:43.090985Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:43.091035Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:43.091533Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:43.091560Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:43.095429Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14999 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:43.135388Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:43.138945Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:43.172173Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:14999 2025-05-29T15:25:43.219553Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> TGRpcCmsTest::DisabledTxTest [GOOD] >> TGRpcCmsTest::SimpleTenantsTestSyncOperation >> TGRpcCmsTest::DescribeOptionsTest [GOOD] |67.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |67.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |67.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots >> DstCreator::GlobalConsistency ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AlterRemoveTest [GOOD] Test command err: 2025-05-29T15:25:43.364877Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889233454758530:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:43.364969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00215f/r3tmp/tmpmdjkIJ/pdisk_1.dat 2025-05-29T15:25:43.488280Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:43.494213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:43.494230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:43.496287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28194, node 1 2025-05-29T15:25:43.520794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:43.520808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:43.520810Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:43.520862Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15502 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:25:43.584650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:43.637268Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7509889233454759125:2313], Recipient [1:7509889233454758830:2200]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:48248" } 2025-05-29T15:25:43.637286Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-05-29T15:25:43.637294Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:43.637297Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:43.637330Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:48248" 2025-05-29T15:25:43.637383Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1748532343634780) 2025-05-29T15:25:43.637494Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2577: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1748532343634780 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-05-29T15:25:43.637586Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2637: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-05-29T15:25:43.639471Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2025-05-29T15:25:43.639694Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532343634780&action=1" } } } 2025-05-29T15:25:43.639733Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:43.639758Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-05-29T15:25:43.639793Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-05-29T15:25:43.639950Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-05-29T15:25:43.639979Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 
Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-05-29T15:25:43.641363Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-05-29T15:25:43.641384Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-05-29T15:25:43.641401Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7509889233454759130:2200], Recipient [1:7509889233454758830:2200]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-05-29T15:25:43.641405Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-05-29T15:25:43.641410Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:43.641412Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:43.641427Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-05-29T15:25:43.641434Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3047: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-05-29T15:25:43.641456Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3206: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-05-29T15:25:43.644145Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-05-29T15:25:43.644159Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:43.644161Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:43.644162Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:43.644179Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-05-29T15:25:43.644187Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3146: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1748532343634780 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-05-29T15:25:43.645165Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2025-05-29T15:25:43.645214Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:43.645224Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:784: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-05-29T15:25:43.645226Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:596: TSubDomainManip(/Root/users/user-1) create subdomain 2025-05-29T15:25:43.646205Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:620: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: 
true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-05-29T15:25:43.646679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480 2025-05-29T15:25:43.647744Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:832: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-05-29T15:25:43.647762Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:768: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715658 2025-05-29T15:25:43.648419Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7509889233454759183:2315], Recipient [1:7509889233454758830:2200]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532343634780&action=1" } UserToken: "" } 2025-05-29T15:25:43.648424Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-05-29T15:25:43.648466Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3353: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532343634780&action=1" } } 2025-05-29T15:25:43.649421Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:804: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715658 2025-05-29T15:25:43.658266Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:809: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715658 2025-05-29T15:25:43.658420Z node 1 :CMS_TENANTS DEBUG: console_tenants_ma ... 
eUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-05-29T15:25:43.719405Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:634: TSubDomainManip(/Root/users/user-1) drop subdomain 2025-05-29T15:25:43.719444Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:651: TSubdomainManip(/Root/users/user-1) send subdomain drop cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root/users" OperationType: ESchemeOpForceDropExtSubDomain Drop { Name: "user-1" } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-05-29T15:25:43.719655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5412: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976715660 2025-05-29T15:25:43.719734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:25:43.722592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5412: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976715660 2025-05-29T15:25:43.722730Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:809: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715659 2025-05-29T15:25:43.722732Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:694: TSubdomainManip(/Root/users/user-1) done 2025-05-29T15:25:43.723152Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:710: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-05-29T15:25:43.723187Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:832: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-05-29T15:25:43.723197Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:768: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715660 2025-05-29T15:25:43.723211Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435076, Sender [1:7509889233454759228:2200], Recipient [1:7509889233454758830:2200]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-05-29T15:25:43.723218Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:979: StateWork, processing event TEvPrivate::TEvSubdomainReady 2025-05-29T15:25:43.723222Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:3661: Ignoring ready subdomain for tenant /Root/users/user-1 in REMOVING_SUBDOMAIN state 2025-05-29T15:25:43.723551Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7509889233454759347:2322], Recipient [1:7509889233454758830:2200]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532343716075&action=2" } UserToken: "" } 2025-05-29T15:25:43.723560Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-05-29T15:25:43.723607Z node 1 :CMS_TENANTS TRACE: 
console_tenants_manager.cpp:3353: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532343716075&action=2" } } 2025-05-29T15:25:43.724960Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:804: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715660 2025-05-29T15:25:43.730726Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:809: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715660 2025-05-29T15:25:43.730751Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:694: TSubdomainManip(/Root/users/user-1) done 2025-05-29T15:25:43.730766Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:710: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-05-29T15:25:43.730783Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7509889233454759337:2200], Recipient [1:7509889233454758830:2200]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-05-29T15:25:43.730787Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-05-29T15:25:43.730794Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:43.730796Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:43.730806Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-05-29T15:25:43.730814Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3146: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1748532343716075 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-05-29T15:25:43.730832Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2927: Remove computational units of /Root/users/user-1 from database txid=1748532343716075 issue= 2025-05-29T15:25:43.735402Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-05-29T15:25:43.735444Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2114: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-05-29T15:25:43.735451Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:43.735526Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7509889233454758721:2198], Recipient [1:7509889233454758830:2200]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-05-29T15:25:43.735530Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-05-29T15:25:43.735538Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:43.735540Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:43.735546Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-05-29T15:25:43.735577Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3146: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1748532343716075 
errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-05-29T15:25:43.736716Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2025-05-29T15:25:43.736734Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:43.736745Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-05-29T15:25:43.736794Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-05-29T15:25:43.736987Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-05-29T15:25:43.736999Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-05-29T15:25:43.739384Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-05-29T15:25:43.739414Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7509889233454759392:2200], Recipient [1:7509889233454758830:2200]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-05-29T15:25:43.739430Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-05-29T15:25:43.739435Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:43.739437Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:43.739449Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-05-29T15:25:43.739456Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3047: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-05-29T15:25:43.742309Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-05-29T15:25:43.742321Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:43.742323Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:43.742325Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:43.742344Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1748532343716075 2025-05-29T15:25:43.742348Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2927: Remove computational units of /Root/users/user-1 from database txid=1748532343716075 issue= 2025-05-29T15:25:43.742351Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2958: Remove tenant /Root/users/user-1 from database txid=1748532343716075 issue= 
2025-05-29T15:25:43.742353Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2963: Remove pool /Root/users/user-1:hdd from database
2025-05-29T15:25:43.742381Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3083: Add tenant removal info for /Root/users/user-1 txid=1748532343716075 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue=
2025-05-29T15:25:43.744023Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete
2025-05-29T15:25:43.744056Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx
2025-05-29T15:25:43.778976Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7509889233454759411:2324], Recipient [1:7509889233454758830:2200]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532343716075&action=2" } UserToken: "" }
2025-05-29T15:25:43.778994Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest
2025-05-29T15:25:43.779088Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3353: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532343716075&action=2" ready: true status: SUCCESS } }

------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DisabledTxTest [GOOD]
Test command err:
2025-05-29T15:25:43.835342Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889231270079625:2141];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:25:43.835362Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002141/r3tmp/tmpz6tuI6/pdisk_1.dat
2025-05-29T15:25:44.050038Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 15030, node 1
2025-05-29T15:25:44.102900Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:25:44.102911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:25:44.102913Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:25:44.102953Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:17648
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:25:44.175023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:25:44.175049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:25:44.175680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-05-29T15:25:44.180552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
waiting...
2025-05-29T15:25:44.230450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480
2025-05-29T15:25:44.239819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480

>> TConsoleTests::TestRemoveAttributes [GOOD]
>> TConsoleTests::TestRemoveAttributesExtSubdomain

------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> RetryPolicy::RetryWithBatching [FAIL]
Test command err:
2025-05-29T15:25:24.838980Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-05-29T15:25:24.838995Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-05-29T15:25:24.839938Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s
2025-05-29T15:25:24.850851Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description:
2025-05-29T15:25:24.850876Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-05-29T15:25:24.850880Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-05-29T15:25:24.850919Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008119s
2025-05-29T15:25:24.860089Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR.
Description: 2025-05-29T15:25:24.860111Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.860116Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.860143Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007903s 2025-05-29T15:25:24.869164Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-05-29T15:25:24.869184Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.869188Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-29T15:25:24.869213Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009315s 2025-05-29T15:25:24.920792Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1748532324920783 2025-05-29T15:25:25.384484Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889154787892344:2093];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:25.389050Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:25.500316Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001210/r3tmp/tmpx41U5C/pdisk_1.dat 2025-05-29T15:25:25.509253Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:25:25.518531Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:25:25.662979Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:25.667698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:25.667727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:25.687470Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:25:25.687937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21259, node 1 2025-05-29T15:25:25.727073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:25.727095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:25.727705Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001210/r3tmp/yandexOGacx8.tmp 2025-05-29T15:25:25.727713Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001210/r3tmp/yandexOGacx8.tmp 2025-05-29T15:25:25.727787Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: 
/home/runner/.ya/build/build_root/ciyv/001210/r3tmp/yandexOGacx8.tmp 2025-05-29T15:25:25.727825Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:25.736803Z INFO: TTestServer started on Port 10432 GrpcPort 21259 2025-05-29T15:25:25.739551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10432 PQClient connected to localhost:21259 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:25.844980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:25.866655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:25:26.459373Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889159082859935:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:26.459406Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889159082859903:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:26.459420Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:26.461791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-05-29T15:25:26.475467Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509889159082859940:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-05-29T15:25:26.589938Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509889159082859968:2128] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:26.672272Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509889159082859975:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:26.672932Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=OGVjZmNiZTctN2YwMTc5OTUtNDY5ZGQ4N2YtMWU4ZjljZjU=, ActorId: [2:7509889159082859901:2306], ActorState: ExecuteState, TraceId: 01jweabw1hf0ww446m4jrwfb1m, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:26.673490Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:26.676383Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889159751702823:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:26.677569Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWE1Mzk2ZTctNGU2NTJiYzAtNzA3ODhlN2QtNTliZmZkNTg=, ActorId: [1:7509889159751702797:2334], ActorState: ExecuteState, TraceId: 01jweabw5jccc5p4ne5f477jpv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:26.677778Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:26.683719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-05-29T15:25:26.762864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo ... EnableGrpc on GrpcPort 1338, node 11 2025-05-29T15:25:36.501998Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001210/r3tmp/yandexxJeMQb.tmp 2025-05-29T15:25:36.502010Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001210/r3tmp/yandexxJeMQb.tmp 2025-05-29T15:25:36.502055Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001210/r3tmp/yandexxJeMQb.tmp 2025-05-29T15:25:36.502072Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:36.520722Z INFO: TTestServer started on Port 5308 GrpcPort 1338 TClient is connected to server localhost:5308 PQClient connected to localhost:1338 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:25:36.587760Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:36.618973Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:36.629480Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:37.025173Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7509889204261601121:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:37.025200Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:37.025316Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7509889204261601149:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:37.026825Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-05-29T15:25:37.038587Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976720657, at schemeshard: 72057594046644480 2025-05-29T15:25:37.038993Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7509889204261601151:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-05-29T15:25:37.140668Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:25:37.142870Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [11:7509889203639025422:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:37.143185Z node 11 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=11&id=MjQyOWI4Yi01ZjFlNTliNi0yNjE4NTU5Mi03N2JjMzE1ZQ==, ActorId: [11:7509889203639025381:2333], ActorState: ExecuteState, TraceId: 01jweac6fca2ab8bn0ntj1pnwe, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:37.143325Z node 11 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:37.143507Z node 12 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [12:7509889204261601179:2126] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:37.154462Z node 12 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [12:7509889204261601186:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:37.154952Z node 12 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=12&id=N2RmMTc2ZjQtYzkwOTYwZjMtMzY5NDA2MTQtNzNiYTE5YTE=, ActorId: [12:7509889204261601119:2305], ActorState: ExecuteState, TraceId: 01jweac6c0b314s50drfyvwjx3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:37.155083Z node 12 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:37.240101Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:25:37.285528Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:1338", true, true, 1000); 2025-05-29T15:25:37.354508Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [11:7509889203639025793:2373], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:25:37.355240Z node 11 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=11&id=NGU5Mjk2MGItOTk1ZDExMWQtNDU0YzAwMjAtZjFmMWMxNw==, ActorId: [11:7509889203639025790:2371], ActorState: ExecuteState, TraceId: 01jweac6nndgb9dtxtg5jtpmry, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
TBackTrace::Capture()+28 (0x13A6D2CC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C25189)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x138B9A44)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x138B89A8)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x138B7BF2)
NPersQueue::SDKTestSetup::Start(bool, bool)+1450 (0x138AD5DA)
NPersQueue::SDKTestSetup::SDKTestSetup(TBasicString> const&, bool, TVector> const&, NActors::NLog::EPriority, unsigned int, unsigned long)+675 (0x138AB0E3)
void std::__y1::allocator::construct[abi:fe200000](NYdb::NPersQueue::NTests::TPersQueueYdbSdkTestSetup*, char const*&)+72 (0x139492F8)
NYdb::NPersQueue::NTests::NTestSuiteRetryPolicy::TTestCaseRetryWithBatching::Execute_(NUnitTest::TTestContext&)+79 (0x13966A8F)
NYdb::NPersQueue::NTests::NTestSuiteRetryPolicy::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1396A947)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C2703E)
NYdb::NPersQueue::NTests::NTestSuiteRetryPolicy::TCurrentTest::Execute()+429 (0x1396A30D)
NUnitTest::TTestFactory::Execute()+803 (0x13C277B3)
NUnitTest::RunMain(int, char**)+3021 (0x13C390FD)
??+0 (0x7FAB26387D90)
__libc_start_main+128 (0x7FAB26387E40)
_start+41 (0x129BB029)

>> DstCreator::ExistingDst [GOOD]
>> DstCreator::EmptyReplicationConfig
>> DstCreator::ColumnsSizeMismatch

------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD]
Test command err:
2025-05-29T15:25:44.011317Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889236961433437:2145];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:25:44.011422Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00214f/r3tmp/tmpOSS0FZ/pdisk_1.dat
2025-05-29T15:25:44.167033Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:25:44.168946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:25:44.168967Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:25:44.176274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 28305, node 1
2025-05-29T15:25:44.210907Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:25:44.210917Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:25:44.210919Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:25:44.210959Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is
connected to server localhost:10528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:44.302545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:44.316666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:10528 2025-05-29T15:25:44.358974Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:131: TTxProcessor(tenants) is now locking 2025-05-29T15:25:44.358985Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:143: TTxProcessor(tenants) is now locked by parent 2025-05-29T15:25:44.370977Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:102: TTxProcessor(tenants) is now active 2025-05-29T15:25:44.416545Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285140, Sender [1:7509889236961434130:2314], Recipient [1:7509889236961433838:2193]: NKikimr::NConsole::TEvConsole::TEvDescribeTenantOptionsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:43252" } 2025-05-29T15:25:44.416562Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:964: StateWork, processing event TEvConsole::TEvDescribeTenantOptionsRequest 2025-05-29T15:25:44.417323Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3335: Send TEvConsole::TEvDescribeTenantOptionsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.DescribeDatabaseOptionsResult] { storage_units { kind: "hdd2" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd1" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "ssd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "test" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } availability_zones { name: "dc-1" labels { key: "collocation" value: "disabled" } labels { key: "fixed_data_center" value: "DC-1" } } availability_zones { name: "any" labels { key: "any_data_center" value: "true" } labels { key: "collocation" value: "disabled" } } computational_units { kind: "slot" labels { key: "slot_type" value: "default" } labels { key: "type" value: "dynamic_slot" } allowed_availability_zones: 
"any" allowed_availability_zones: "dc-1" } } } } } >> OperationMapping::IndexBuildRejected [GOOD] >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] >> SplitPathTests::WithDatabaseShouldFail [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::DropMultipleTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:24:38.969214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:38.969242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:38.969248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:38.969254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:38.969262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:38.969268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:38.969278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:38.969294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:38.969400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:38.969473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:38.982081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:38.982110Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table 
profiles were not loaded 2025-05-29T15:24:38.982233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:24:38.986015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:38.986058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:38.986115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:38.990091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:38.990191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:38.990322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:38.990498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:38.991180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:38.991228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:38.991532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:38.991545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:38.991585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:38.991595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:38.991601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:38.991621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:24:38.993040Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:24:39.015978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:39.016074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.016153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:39.016205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:39.016218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.017103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.017130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:39.017190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.017208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:39.017215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:39.017222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:39.017669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.017682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:39.017688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:39.018016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.018026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.018034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:39.018043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:39.018825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:39.019308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:39.019356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:39.019594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.019623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:39.019644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:39.019718Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 78944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:25:43.826025Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:25:43.826030Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:25:43.826037Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:25:43.826043Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:25:43.826134Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:25:43.826144Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:25:43.826148Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:25:43.826152Z node 83 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:25:43.826157Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:25:43.826507Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:25:43.826527Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:25:43.826532Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:25:43.826536Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 11 2025-05-29T15:25:43.826542Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:25:43.826560Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2025-05-29T15:25:43.826854Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1005:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-05-29T15:25:43.827684Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:25:43.827842Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:25:43.827909Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:25:43.843785Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6146: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2025-05-29T15:25:43.843815Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409546, partId: 0 2025-05-29T15:25:43.843847Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2025-05-29T15:25:43.843859Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1005:0 129 -> 130 FAKE_COORDINATOR: Erasing txId 1005 2025-05-29T15:25:43.844490Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2025-05-29T15:25:43.844536Z 
node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-05-29T15:25:43.844544Z node 83 :FLAT_TX_SCHEMESHARD INFO: drop_table.cpp:315: TDropColumnTable TProposedDeleteParts operationId# 1005:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:25:43.844569Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:25:43.844590Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:25:43.844595Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:25:43.844602Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:25:43.844606Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:25:43.844612Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2025-05-29T15:25:43.844629Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [83:366:2343] message: TxId: 1005 2025-05-29T15:25:43.844636Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:25:43.844642Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:0 2025-05-29T15:25:43.844648Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:0 2025-05-29T15:25:43.844678Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:25:43.844759Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:25:43.844766Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:25:43.844779Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:25:43.845364Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-05-29T15:25:43.845378Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [83:527:2496] 2025-05-29T15:25:43.845497Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1005 2025-05-29T15:25:43.845601Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable1" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:25:43.845662Z node 83 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable1" took 71us result status StatusPathDoesNotExist 2025-05-29T15:25:43.845701Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore/ColumnTable1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/OlapStore\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/OlapStore/ColumnTable1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/OlapStore" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "OlapStore" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:25:43.845778Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:25:43.845792Z node 83 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable2" took 16us result status StatusPathDoesNotExist 2025-05-29T15:25:43.845809Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore/ColumnTable2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/OlapStore\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/OlapStore/ColumnTable2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/OlapStore" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "OlapStore" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] |67.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildRejected [GOOD] >> DstCreator::WithSyncIndexAndIntermediateDir >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink [FAIL] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink |67.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] |67.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> 
SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] Test command err: 2025-05-29T15:25:44.580982Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889235878916924:2241];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:44.581004Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002132/r3tmp/tmpbi5tnT/pdisk_1.dat 2025-05-29T15:25:44.792708Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19045, node 1 2025-05-29T15:25:44.839599Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:44.839616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:44.839619Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:44.839668Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15418 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:25:44.920104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:44.920133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:44.927620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:44.932719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:15418 2025-05-29T15:25:44.991385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:25:45.033323Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7509889240173884802:2313], Recipient [1:7509889235878917185:2198]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" PeerName: "ipv6:[::1]:54524" } 2025-05-29T15:25:45.033350Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-05-29T15:25:45.033357Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:45.033361Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:45.033401Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" PeerName: "ipv6:[::1]:54524" 2025-05-29T15:25:45.033447Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1748532345028440) 2025-05-29T15:25:45.033561Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2577: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1748532345028440 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-05-29T15:25:45.033640Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2637: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-05-29T15:25:45.039034Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2025-05-29T15:25:45.039324Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532345028440&action=1" } } } 2025-05-29T15:25:45.039374Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:45.039409Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-05-29T15:25:45.039452Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-05-29T15:25:45.039642Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: 
Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-05-29T15:25:45.039676Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-05-29T15:25:45.045701Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-05-29T15:25:45.045723Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-05-29T15:25:45.045739Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7509889240173884807:2198], Recipient [1:7509889235878917185:2198]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-05-29T15:25:45.045742Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-05-29T15:25:45.045747Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:45.045749Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:45.045766Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-05-29T15:25:45.045772Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3047: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-05-29T15:25:45.045797Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3206: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-05-29T15:25:45.047938Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7509889240173884824:2315], Recipient [1:7509889235878917185:2198]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532345028440&action=1" } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2025-05-29T15:25:45.047953Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-05-29T15:25:45.047988Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3353: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532345028440&action=1" } } 2025-05-29T15:25:45.049816Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-05-29T15:25:45.049832Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:45.049834Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:45.049835Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:45.049857Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 
2025-05-29T15:25:45.049866Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3146: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1748532345028440 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-05-29T15:25:45.050702Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2025-05-29T15:25:45.050759Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:45.050768Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:784: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-05-29T15:25:45.050770Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:596: TSubDomainManip(/Root/users/user-1) create subdomain 2025-05-29T15:25:45.051633Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:620: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" DatabaseName: "Root" 2025-05-29T15:25:45.052115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-05-29T15:25:45.056339Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:832: TSubdomainManip(/R ... 
-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710663 2025-05-29T15:25:45.444403Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:804: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710663 2025-05-29T15:25:45.444971Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7509889240173885639:2445], Recipient [1:7509889235878917185:2198]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532345440385&action=2" } UserToken: "" } 2025-05-29T15:25:45.444979Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-05-29T15:25:45.445026Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3353: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532345440385&action=2" } } 2025-05-29T15:25:45.449094Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:809: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710663 2025-05-29T15:25:45.449101Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:694: TSubdomainManip(/Root/users/user-1) done 2025-05-29T15:25:45.449108Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:710: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-05-29T15:25:45.449121Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7509889240173885629:2198], Recipient [1:7509889235878917185:2198]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-05-29T15:25:45.449124Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-05-29T15:25:45.449128Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:45.449130Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:45.449137Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-05-29T15:25:45.449144Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3146: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1748532345440385 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-05-29T15:25:45.449161Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2927: Remove computational units of /Root/users/user-1 from database txid=1748532345440385 issue=AccessDenied: Access denied for request 2025-05-29T15:25:45.451922Z node 3 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888 2025-05-29T15:25:45.452176Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-05-29T15:25:45.452214Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2114: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-05-29T15:25:45.452219Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:45.452269Z node 1 :CMS_TENANTS TRACE: 
console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7509889235878917090:2197], Recipient [1:7509889235878917185:2198]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-05-29T15:25:45.452280Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-05-29T15:25:45.452286Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:45.452288Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:45.452294Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-05-29T15:25:45.452303Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3146: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1748532345440385 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-05-29T15:25:45.458968Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2025-05-29T15:25:45.458996Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:45.459015Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-05-29T15:25:45.459068Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-05-29T15:25:45.459383Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 2 } } Success: true ConfigTxSeqNo: 10 2025-05-29T15:25:45.459398Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 2 } } } 2025-05-29T15:25:45.464307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:39: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-05-29T15:25:45.464456Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-05-29T15:25:45.464469Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-05-29T15:25:45.464471Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-05-29T15:25:45.464473Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-05-29T15:25:45.464476Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-05-29T15:25:45.464478Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, 
TabletId: 72075186224037892 not found 2025-05-29T15:25:45.464481Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-05-29T15:25:45.464483Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-05-29T15:25:45.464486Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-05-29T15:25:45.471798Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 11 2025-05-29T15:25:45.471837Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7509889240173885690:2198], Recipient [1:7509889235878917185:2198]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-05-29T15:25:45.471853Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-05-29T15:25:45.471858Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:45.471860Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:45.471876Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-05-29T15:25:45.471883Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3047: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-05-29T15:25:45.473296Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-05-29T15:25:45.473312Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:45.473314Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:45.473316Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:45.473333Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1748532345440385 2025-05-29T15:25:45.473336Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2927: Remove computational units of /Root/users/user-1 from database txid=1748532345440385 issue=AccessDenied: Access denied for request 2025-05-29T15:25:45.473339Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2958: Remove tenant /Root/users/user-1 from database txid=1748532345440385 issue=AccessDenied: Access denied for request 2025-05-29T15:25:45.473340Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2963: Remove pool /Root/users/user-1:hdd from database 2025-05-29T15:25:45.473357Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3083: Add tenant removal info for /Root/users/user-1 txid=1748532345440385 code=SUCCESS errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-05-29T15:25:45.473834Z node 1 :HIVE WARN: tx__block_storage_result.cpp:43: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037888 2025-05-29T15:25:45.480526Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2025-05-29T15:25:45.480550Z node 1 :CMS_TENANTS TRACE: 
tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:45.481439Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-05-29T15:25:45.499093Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7509889240173885749:2451], Recipient [1:7509889235878917185:2198]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532345440385&action=2" } UserToken: "" } 2025-05-29T15:25:45.499105Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-05-29T15:25:45.499161Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3353: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532345440385&action=2" ready: true status: SUCCESS } } 2025-05-29T15:25:45.509952Z node 1 :HIVE WARN: tx__status.cpp:57: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-05-29T15:25:45.510022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected >> DstCreator::GlobalConsistency [GOOD] >> DstCreator::KeyColumnNameMismatch >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] |67.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |67.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |67.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |67.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |67.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |67.2%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut >> YdbTableSplit::MergeByNoLoadAfterSplit >> DstCreator::EmptyReplicationConfig [GOOD] >> DataShardVolatile::DistributedWriteThenReadIterator [FAIL] >> DataShardVolatile::DistributedWriteThenReadIteratorStream |67.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes >> DstCreator::ColumnsSizeMismatch [GOOD] >> DstCreator::ColumnTypeMismatch >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] >> YdbTableSplit::SplitByLoadWithReads >> KqpSplit::BorderKeys+Ascending ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] Test command err: 2025-05-29T15:25:45.541829Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889239665428476:2211];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:45.541937Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002110/r3tmp/tmp0IeMPD/pdisk_1.dat 2025-05-29T15:25:45.739191Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2361, node 1 
2025-05-29T15:25:45.763372Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:45.763387Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:45.763388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:45.763428Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:45.771510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:45.771542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:45.774190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6208 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:45.862839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:45.924288Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7509889239665429077:2313], Recipient [1:7509889239665428805:2213]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:52150" } 2025-05-29T15:25:45.924307Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-05-29T15:25:45.924315Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:45.924318Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:45.924350Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:52150" 2025-05-29T15:25:45.924398Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1748532345922893) 2025-05-29T15:25:45.924482Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2577: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1748532345922893 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-05-29T15:25:45.924543Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2637: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-05-29T15:25:45.936832Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2025-05-29T15:25:45.937032Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532345922893&action=1" } } } 2025-05-29T15:25:45.937080Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:45.937114Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-05-29T15:25:45.937151Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-05-29T15:25:45.937323Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-05-29T15:25:45.937353Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-05-29T15:25:45.938145Z node 1 
:CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285139, Sender [1:7509889239665429077:2313], Recipient [1:7509889239665428805:2213]: NKikimr::NConsole::TEvConsole::TEvNotifyOperationCompletionRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532345922893&action=1" } UserToken: "" PeerName: "ipv6:[::1]:52150" } 2025-05-29T15:25:45.938152Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:968: StateWork, processing event TEvConsole::TEvNotifyOperationCompletionRequest 2025-05-29T15:25:45.938230Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:3443: Add subscription to /Root/users/user-1 for [1:7509889239665429077:2313] 2025-05-29T15:25:45.938245Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3451: Send TEvConsole::TEvNotifyOperationCompletionResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532345922893&action=1" } } 2025-05-29T15:25:45.943739Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-05-29T15:25:45.943761Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-05-29T15:25:45.943781Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7509889239665429082:2213], Recipient [1:7509889239665428805:2213]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-05-29T15:25:45.943784Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-05-29T15:25:45.943788Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:45.943791Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:45.943809Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-05-29T15:25:45.943816Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3047: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-05-29T15:25:45.943843Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3206: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-05-29T15:25:45.945577Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-05-29T15:25:45.945589Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:45.945591Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:45.945592Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:45.945624Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-05-29T15:25:45.945632Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3146: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1748532345922893 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-05-29T15:25:45.946991Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState 
complete for /Root/users/user-1 2025-05-29T15:25:45.947028Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:45.947038Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:784: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-05-29T15:25:45.947040Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:596: TSubDomainManip(/Root/users/user-1) create subdomain 2025-05-29T15:25:45.947924Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:620: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-05-29T15:25:45.948379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-05-29T15:25:45.952179Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:832: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-05-29T15:25:45.952201Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:768: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-05-29T15:25:45.960199Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:804: TSubdomainManip(/Root/users/user-1) ... 
te in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=2 2025-05-29T15:25:46.227154Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:804: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710660 2025-05-29T15:25:46.231221Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-05-29T15:25:46.231232Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:46.233889Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:809: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2025-05-29T15:25:46.233899Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:694: TSubdomainManip(/Root/users/user-1) done 2025-05-29T15:25:46.233913Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:710: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-05-29T15:25:46.233930Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7509889243960397007:2213], Recipient [1:7509889239665428805:2213]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-05-29T15:25:46.233934Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-05-29T15:25:46.233940Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:46.233942Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:46.233952Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-05-29T15:25:46.233960Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3146: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1748532346214772 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-05-29T15:25:46.233977Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2927: Remove computational units of /Root/users/user-1 from database txid=1748532346214772 issue= 2025-05-29T15:25:46.234375Z node 3 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888 2025-05-29T15:25:46.235650Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-05-29T15:25:46.235679Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2114: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-05-29T15:25:46.235683Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:46.235750Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7509889239665428672:2198], Recipient [1:7509889239665428805:2213]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-05-29T15:25:46.235753Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-05-29T15:25:46.235758Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:46.235760Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:46.235766Z node 1 :CMS_TENANTS DEBUG: 
console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-05-29T15:25:46.235774Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3146: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1748532346214772 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-05-29T15:25:46.238419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:39: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-05-29T15:25:46.239925Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2025-05-29T15:25:46.239944Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:46.239954Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-05-29T15:25:46.240003Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-05-29T15:25:46.240325Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-05-29T15:25:46.240338Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-05-29T15:25:46.245505Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-05-29T15:25:46.245518Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-05-29T15:25:46.245522Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-05-29T15:25:46.245524Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-05-29T15:25:46.245526Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-05-29T15:25:46.245530Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-05-29T15:25:46.245533Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-05-29T15:25:46.245535Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-05-29T15:25:46.245537Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-05-29T15:25:46.246463Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-05-29T15:25:46.247079Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-05-29T15:25:46.247115Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7509889243960397131:2213], Recipient [1:7509889239665428805:2213]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-05-29T15:25:46.247133Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-05-29T15:25:46.247140Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:46.247143Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:46.247158Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-05-29T15:25:46.247165Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3047: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-05-29T15:25:46.256191Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-05-29T15:25:46.256211Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-05-29T15:25:46.256214Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:46.256216Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-05-29T15:25:46.256247Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1748532346214772 2025-05-29T15:25:46.256250Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2927: Remove computational units of /Root/users/user-1 from database txid=1748532346214772 issue= 2025-05-29T15:25:46.256253Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2958: Remove tenant /Root/users/user-1 from database txid=1748532346214772 issue= 2025-05-29T15:25:46.256255Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2963: Remove pool /Root/users/user-1:hdd from database 2025-05-29T15:25:46.256296Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3083: Add tenant removal info for /Root/users/user-1 txid=1748532346214772 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-05-29T15:25:46.258991Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2025-05-29T15:25:46.259062Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2431: Send /Root/users/user-1 notification to [1:7509889243960397000:2414]: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1748532346214772&action=2" ready: true status: SUCCESS } } 2025-05-29T15:25:46.259095Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-05-29T15:25:46.260405Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7509889243960397172:2417], Recipient [1:7509889239665428805:2213]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" PeerName: "ipv6:[::1]:52150" } 2025-05-29T15:25:46.260416Z node 1 :CMS_TENANTS TRACE: 
console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-05-29T15:25:46.260455Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3377: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2025-05-29T15:25:46.261384Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285123, Sender [1:7509889243960397175:2418], Recipient [1:7509889239665428805:2213]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:52150" } 2025-05-29T15:25:46.261393Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:967: StateWork, processing event TEvConsole::TEvListTenantsRequest 2025-05-29T15:25:46.261453Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3421: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2025-05-29T15:25:46.263065Z node 1 :HIVE WARN: tx__status.cpp:57: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-05-29T15:25:46.263145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake+useSink Test command err: Trying to start YDB, gRPC: 8859, MsgBus: 27319 2025-05-29T15:25:38.103388Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889209562575201:2214];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:38.183113Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0024ef/r3tmp/tmporA4uI/pdisk_1.dat 2025-05-29T15:25:38.293821Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:38.294570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:38.294591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:38.295218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:38.295686Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889209562575009:2079] 1748532338067151 != 1748532338067154 TServer::EnableGrpc on GrpcPort 8859, node 1 2025-05-29T15:25:38.315024Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:38.315035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:38.315037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:38.315072Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27319 TClient is connected to server localhost:27319 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:38.495643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:38.503248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:38.519722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:38.576336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:38.653827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:38.696555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:25:39.527023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889213857543953:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:39.527089Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:39.680191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:25:39.702928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:25:39.718464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:25:39.738071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:25:39.750867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:25:39.810677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:25:39.826798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:25:39.863299Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889213857544604:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:39.863348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:39.863542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889213857544612:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:39.864423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:25:39.871613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:25:39.871688Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889213857544614:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:25:39.971993Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889213857544665:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:40.225152Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889213857544674:2475], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:40.226843Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjNmNDg3NzgtYTlmNWY1ZTItNjAyNjVhZWQtNGRhZTNiZjc=, ActorId: [1:7509889213857543935:2402], ActorState: ExecuteState, TraceId: 01jweac94j8g83nv94nt7gytgh, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:25:40.227700Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:25:43.090876Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7509889209562575201:2214];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:43.090918Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13ACE825 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13AC5826 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C675B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x2612A222 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26129B22 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2614B47C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2614B47C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2614B47C 8. /-S/util/thread/pool.h:71: Process @ 0x2614B47C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AD61A9 10. /-S/util/thread/factory.h:15: Execute @ 0x13AD4B99 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AD4B99 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AD000C 13. ??:0: ?? @ 0x7F1D0BB5BAC2 14. ??:0: ?? @ 0x7F1D0BBED84F >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::EmptyReplicationConfig [GOOD] Test command err: 2025-05-29T15:25:44.785287Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889234870141523:2196];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:44.785379Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00159c/r3tmp/tmpilYsn4/pdisk_1.dat 2025-05-29T15:25:44.991328Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889234870141366:2079] 1748532344778335 != 1748532344778338 2025-05-29T15:25:45.001536Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:45.002687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:45.002702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:45.009189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11071 TServer::EnableGrpc on GrpcPort 27657, node 1 2025-05-29T15:25:45.067097Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:45.067111Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:45.067114Z node 1 :NET_CLASSIFIER WARN:
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:45.067163Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:45.195702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:45.202894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:45.207822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:25:45.243598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532345245 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532345308 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532345245 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532345308 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-05-29T15:25:45.268274Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:45.268302Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:45.268305Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:45.268516Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:45.378924Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532345273, tx_id: 281474976715658 } } } 2025-05-29T15:25:45.379015Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:45.379468Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges), source_location: 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484} 2025-05-29T15:25:45.380022Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:335: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532345308 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact ... 
2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:45.923221Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:45.926389Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13675 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:45.954410Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:45.956564Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:45.957393Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:45.975509Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532346001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532346036 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532346001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532346036 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-05-29T15:25:45.994009Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:45.994038Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:45.994040Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:45.994240Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:46.740527Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532346022, tx_id: 281474976715658 } } } 2025-05-29T15:25:46.740630Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:46.741059Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges), source_location: 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484} 2025-05-29T15:25:46.741563Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:335: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532346036 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 
RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-05-29T15:25:46.741605Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:594: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Empty replication config >> DstCreator::ColumnTypeMismatch [GOOD] >> DstCreator::KeyColumnNameMismatch [GOOD] |67.2%| [TA] $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001584/r3tmp/tmpo7iiFI/pdisk_1.dat 2025-05-29T15:25:46.576657Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889242230738798:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:46.578622Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:46.638812Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889242230738616:2079] 1748532346488136 != 1748532346488139 2025-05-29T15:25:46.640010Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:46.690024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:46.690050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:11190 2025-05-29T15:25:46.699091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11623, node 1 2025-05-29T15:25:46.730022Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:46.730032Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:46.730034Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:46.730070Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11190 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:46.825314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:46.829703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:46.839881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532346974 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532346876 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532346974 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... 
(TRUNCATED) 2025-05-29T15:25:46.966694Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:46.966717Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:46.966719Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:46.966924Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:47.058916Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532346974, tx_id: 281474976715658 } } } 2025-05-29T15:25:47.059031Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:47.059543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-05-29T15:25:47.060128Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-05-29T15:25:47.060131Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:306: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2025-05-29T15:25:47.085437Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-05-29T15:25:47.085902Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:335: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dir/Replicated" PathDescription { Self { Name: "Replicated" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532347128 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverhead ... 
: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 8 PathOwnerId: 72057594046644480 } 2025-05-29T15:25:47.096096Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:585: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 8] TClient::Ls request: /Root/Dir/Replicated/index_by_value TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "index_by_value" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTableIndex CreateFinished: 
true CreateTxId: 281474976715659 CreateStep: 1748532347128 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532347128 ParentPathId: 7 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { ... (TRUNCATED) TClient::Ls request: /Root/Dir/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532347128 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } ... (TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532347128 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 
10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Dir/Replicated/index_by_value/indexImplTable" >> TSequenceReboots::CreateSequence >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads >> YdbTableSplit::RenameTablesAndSplit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx2 [GOOD] Test command err: iteration# 2 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 8 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 14 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 20 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 
iteration# 26 through iteration# 446 (every 6th iteration, 71 records) each report the identical counters: BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 [repeated records condensed]
iteration# 452 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 458 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 464 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 470 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 476 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 482 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 488 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnNameMismatch [GOOD] Test command err: 2025-05-29T15:25:45.570927Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889238203827122:2206];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:45.572954Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00158c/r3tmp/tmpzRmRI8/pdisk_1.dat 2025-05-29T15:25:45.780334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:45.780356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:45.807352Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:45.807903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:45.808174Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889238203826943:2079] 1748532345560641 != 1748532345560644 TClient is connected to server localhost:29535 TServer::EnableGrpc on GrpcPort 15717, node 1 2025-05-29T15:25:45.906452Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:45.906464Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:45.906466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:45.906512Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29535 WaitRootIsUp 'Root'... 
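Note: the DstCreator unit tests in this block exercise the replication controller's target-table creation step — the same path a user triggers with CREATE ASYNC REPLICATION (the __async_replica attribute and REPLICATION_MODE_READ_ONLY config in the describe output above are stamped on such targets). A minimal YQL sketch of that entry point follows; the replication name, paths, and WITH options are illustrative placeholders, not taken from this test:

    -- Sketch only: names and connection parameters are hypothetical.
    CREATE ASYNC REPLICATION `my_replication`
    FOR `/Root/Table` AS `/Root/Dir/Replicated`
    WITH (
        CONNECTION_STRING = 'grpcs://source-cluster:2135/?database=/Root', -- placeholder endpoint
        TOKEN_SECRET_NAME = 'source_cluster_token'                         -- placeholder secret name
    );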
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:46.089897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:46.098259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:25:46.099418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1748532346211 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532346141 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1748532346211 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-05-29T15:25:46.186542Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:46.186565Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:46.186567Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:46.186759Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:46.670654Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532346211, tx_id: 281474976710658 } } } 2025-05-29T15:25:46.670780Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:46.671268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:25:46.671612Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-05-29T15:25:46.671614Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:306: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 TClient::Ls request: /Root/Replicated 2025-05-29T15:25:46.689713Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-05-29T15:25:46.689727Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:585: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1748532346736 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" 
ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-05-29T15:25:46.875132Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889246077996935:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:46.875600Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00158c/r3tmp/tmpV4KcdE/pdisk_1.dat 2025-05-29T15:25:46.919644Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:16993 TServer::EnableGrpc on GrpcPort 27257, node 2 2025-05-29T15:25:46.963301Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:46.963313Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:46.963315Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:46.963359Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:46.991233Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:46.991260Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:16993 2025-05-29T15:25:46.999063Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-05-29T15:25:47.064898Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:25:47.066937Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:47.067724Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:47.091530Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532347114 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532347156 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532347114 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532347156 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-05-29T15:25:47.115674Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:47.115697Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:47.115699Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:47.119029Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:47.699178Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532347135, tx_id: 281474976715658 } } } 2025-05-29T15:25:47.699274Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:47.699738Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484} 2025-05-29T15:25:47.700004Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:335: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532347156 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: 
"value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 
DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-05-29T15:25:47.700041Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:594: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key column name mismatch: position: 0, expected: key, got: value ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ColumnTypeMismatch [GOOD] Test command err: 2025-05-29T15:25:46.078855Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889243614410717:2206];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:46.145588Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001597/r3tmp/tmpcqZEte/pdisk_1.dat 2025-05-29T15:25:46.251242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:46.251266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:46.262002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:46.266357Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:9915 TServer::EnableGrpc on GrpcPort 18834, node 1 2025-05-29T15:25:46.339006Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:46.339017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:46.339020Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:46.339059Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9915 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
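Note: DstCreator::ColumnTypeMismatch (this block) is the sibling check for column types: the primary keys agree, but the destination's `value` column is Uint32 where the source has Utf8. A minimal YQL sketch of the offending pair — illustrative, consistent with the describe output below:

    -- Source: `value` is Utf8.
    CREATE TABLE `/Root/Src` (
        key   Uint32,
        value Utf8,
        PRIMARY KEY (key)
    );

    -- Pre-existing destination: `value` is Uint32; DstCreator then fails with
    -- StatusSchemeError: Column type mismatch: name: value, expected: Utf8, got: Uint32.
    CREATE TABLE `/Root/Dst` (
        key   Uint32,
        value Uint32,
        PRIMARY KEY (key)
    );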
2025-05-29T15:25:46.475776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:46.483287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:25:46.484257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:46.534115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532346526 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1748532346596 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532346526 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1748532346596 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-05-29T15:25:46.559190Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:46.559220Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:46.559222Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:46.559432Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:47.114038Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532346561, tx_id: 281474976710658 } } } 2025-05-29T15:25:47.114115Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:47.114432Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484} 2025-05-29T15:25:47.114865Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:335: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1748532346596 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: 
"value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "extra" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } ... node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:47.490959Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:47.523263Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:47.524968Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:47.525737Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:47.551229Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532347569 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532347611 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532347569 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532347611 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-05-29T15:25:47.579766Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:47.579796Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:56: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-05-29T15:25:47.579799Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:87: [DstCreator][rid 1][tid 1] Get table profiles 2025-05-29T15:25:47.580075Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:106: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-05-29T15:25:47.948037Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:158: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1748532347597, tx_id: 281474976715658 } } } 2025-05-29T15:25:47.948131Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:249: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-05-29T15:25:47.948571Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:279: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges), source_location: 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484} 2025-05-29T15:25:47.948832Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:335: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1748532347611 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 
0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-05-29T15:25:47.948873Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:594: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Column type mismatch: name: value, expected: Utf8, got: Uint32 >> TSequenceReboots::CreateSequencesWithIndexedTable >> TExternalDataSourceTestReboots::DropExternalDataSourceWithReboots [GOOD] >> TSequenceReboots::CopyTableWithSequence >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink [FAIL] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink |67.3%| [TA] $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [FAIL] Test command err: 2025-05-29T15:25:25.522908Z :WriteRAW INFO: Random seed for debugging is 1748532325522893 2025-05-29T15:25:25.980389Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889153883600805:2271];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:25.980412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:26.000812Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889153437529567:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00121c/r3tmp/tmphxiyjq/pdisk_1.dat 2025-05-29T15:25:26.071133Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:25:26.071185Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:25:26.072262Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:25:26.171104Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:26.179981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
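Note: the SCHEME_ERROR entries below are emitted while the PQ cluster tracker polls its config table before the test harness has created it; any read of a not-yet-existing path fails the same way. A minimal YQL illustration — the query text is an assumption, only the table path comes from the log:

    -- Fails with SCHEME_ERROR (issue code 2003) until the table exists:
    -- "Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist
    --  or you do not have access permissions."
    SELECT * FROM `/Root/PQ/Config/V2/Cluster`;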
2025-05-29T15:25:26.180003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:26.184294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1504, node 1 2025-05-29T15:25:26.212937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:26.212955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:26.219339Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/00121c/r3tmp/yandexgM7Jnj.tmp 2025-05-29T15:25:26.219350Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/00121c/r3tmp/yandexgM7Jnj.tmp 2025-05-29T15:25:26.219400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/00121c/r3tmp/yandexgM7Jnj.tmp 2025-05-29T15:25:26.219433Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:26.220648Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:25:26.226931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:26.247257Z INFO: TTestServer started on Port 16517 GrpcPort 1504 TClient is connected to server localhost:16517 PQClient connected to localhost:1504 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:26.291396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-05-29T15:25:26.728816Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889157732497013:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:26.728841Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889157732497026:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:26.728848Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:26.730598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-05-29T15:25:26.735726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:25:26.735871Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509889157732497029:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-05-29T15:25:26.801702Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889158178568861:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:26.802223Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzJhNWJmNWEtNTU1YTNhNTYtMTczMjRmNTEtMzU0YzUwZWQ=, ActorId: [1:7509889158178568819:2332], ActorState: ExecuteState, TraceId: 01jweabwaq1kpbkh6jwkt40sq4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:26.802610Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:26.804965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2025-05-29T15:25:26.812351Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509889157732497057:2129] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:26.821111Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509889157732497064:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:26.821478Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=MmI1N2FlNTktZTU5ZGNiYjgtMTBlOWY4MDAtNzJmNWY0YmI=, ActorId: [2:7509889157732497003:2305], ActorState: ExecuteState, TraceId: 01jweabwa7bs8q6ydj9yn12zdt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:26.821592Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:26.905334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-05-29T15:25:26.959057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:1504", true, true, 1000); 2025-05-29T15:25:27.066999Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889162473536524:2373], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:27.067629Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjA5YTc2MmEtY2FlZDA3ZTMtOTdiMmZjNDYtNDdjZDIzNjc=, ActorId: [1:7509889162473536521:2371], ActorState: ExecuteState, TraceId: 01jweabwm5an9c0w67bz1cxbx2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13A6D2CC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:42.476800Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:42.477020Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7509889225556775537:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:42.478833Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-05-29T15:25:42.479588Z node 15 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [15:7509889225926460172:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:42.479624Z node 15 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:42.481121Z node 15 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [15:7509889225926460210:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:42.482717Z node 15 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [15:7509889225926460232:2612] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:42.484873Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [15:7509889225926460231:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-05-29T15:25:42.484972Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7509889225556775539:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-05-29T15:25:42.492993Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:25:42.551775Z node 15 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [15:7509889225926460364:2688] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:42.561069Z node 15 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [15:7509889225926460375:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:42.561504Z node 15 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=15&id=ODU2ODkyMTMtZTEyYmM2OGItNmU5YzlmZGYtNDhmMmY3YWI=, ActorId: [15:7509889225926460166:2331], ActorState: ExecuteState, TraceId: 01jweacbpe8nrwda45yhra49nz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:42.561628Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:42.569339Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:25:42.584562Z node 16 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [16:7509889225556775629:2165] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:42.590396Z node 16 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [16:7509889225556775638:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:25:42.590912Z node 16 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=16&id=M2Y1Mjk3MzUtMTA5ZmMwZGMtZWYxMzE0MTUtNGVhNGM1ODA=, ActorId: [16:7509889225556775499:2304], ActorState: ExecuteState, TraceId: 01jweacbpb6bpqt9ykc0yacc75, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:25:42.591032Z node 16 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:25:42.619470Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:11850", true, true, 1000); 2025-05-29T15:25:42.747398Z node 15 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [15:7509889225926460660:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:42.747970Z node 15 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=15&id=ZTczZjliNDYtNzhiOWNjMTktNjI3Y2YwYmMtNjZkMjMzY2E=, ActorId: [15:7509889225926460657:2374], ActorState: ExecuteState, TraceId: 01jweacbya6kf3n515r6nnnrva, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13A6D2CC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C25189) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x138B9A44) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x138B89A8) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x138B7BF2) NPersQueue::SDKTestSetup::Start(bool, bool)+1450 (0x138AD5DA) NPersQueue::SDKTestSetup::SDKTestSetup(TBasicString> const&, bool, TVector> const&, NActors::NLog::EPriority, unsigned int, unsigned long)+675 (0x138AB0E3) NTestSuitePersQueueSdkReadSessionTest::TTestCaseClosesAfterFailedConnectionToCds::Execute_(NUnitTest::TTestContext&)+151 (0x1386CAF7) NTestSuitePersQueueSdkReadSessionTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x138AAB07) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C2703E) NTestSuitePersQueueSdkReadSessionTest::TCurrentTest::Execute()+481 (0x138AA4A1) NUnitTest::TTestFactory::Execute()+803 (0x13C277B3) NUnitTest::RunMain(int, char**)+3021 (0x13C390FD) ??+0 (0x7FEF0BEE3D90) __libc_start_main+128 (0x7FEF0BEE3E40) _start+41 (0x129BB029) >> TSchemeShardTestExtSubdomainReboots::CreateForceDrop-AlterDatabaseCreateHiveFirst-true >> TSchemeShardTestExtSubdomainReboots::AlterForceDrop-AlterDatabaseCreateHiveFirst-false >> TSchemeShardTestExtSubdomainReboots::Fake [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream [FAIL] >> DataShardVolatile::DistributedWriteThenScanQuery ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::DropExternalDataSourceWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:25:34.922598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:25:34.922625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:34.922630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:25:34.922636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:25:34.922652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:25:34.922657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:25:34.922666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:34.922679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:25:34.922801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:25:34.922876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:25:34.941970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:25:34.941996Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:34.942103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:25:34.955107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:25:34.955151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:25:34.955194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:25:34.957981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:25:34.958053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:25:34.958182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:34.958332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:25:34.958887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:34.958935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:25:34.959210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:25:34.959219Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:34.959255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:25:34.959263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:25:34.959269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:25:34.959286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:25:34.960540Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:25:34.989371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:25:34.989470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:34.989547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:25:34.989603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:25:34.989614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:34.990425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:34.990450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:25:34.990515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:34.990525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:25:34.990531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:25:34.990536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:25:34.990879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:34.990889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:25:34.990894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:25:34.991173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:34.991180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:34.991186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:34.991193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:25:34.995411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:25:34.995817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:25:34.995856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:25:34.996065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:34.996088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:25:34.996095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:34.996151Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
ep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000006 2025-05-29T15:25:48.731088Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:25:48.731112Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:48.731131Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 141733922924 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:25:48.731139Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_external_data_source.cpp:40: [72057594046678944] TDropExternalDataSource TPropose opId# 1004:0 HandleReply TEvOperationPlan: step# 5000006 2025-05-29T15:25:48.731166Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:25:48.731179Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1004:0 128 -> 240 2025-05-29T15:25:48.731205Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:25:48.731213Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:25:48.731301Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2025-05-29T15:25:48.731684Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:25:48.731693Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:25:48.731716Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:25:48.731740Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:48.731745Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [33:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-05-29T15:25:48.731751Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [33:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2025-05-29T15:25:48.731797Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:25:48.731805Z node 33 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:25:48.731817Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:25:48.731821Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:25:48.731827Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:25:48.731843Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:25:48.731848Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-05-29T15:25:48.731854Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:25:48.731859Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:25:48.731864Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:25:48.731876Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:25:48.731885Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2025-05-29T15:25:48.731889Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-05-29T15:25:48.731893Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-29T15:25:48.731946Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:25:48.731957Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:25:48.731962Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:25:48.731967Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:25:48.731972Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:25:48.732020Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 
72057594046678944 2025-05-29T15:25:48.732026Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:25:48.732034Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:25:48.732061Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:25:48.732069Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:25:48.732072Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:25:48.732076Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-29T15:25:48.732080Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:25:48.732088Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-05-29T15:25:48.732823Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:25:48.732870Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:25:48.732881Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:25:48.732929Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:25:48.732937Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:25:48.733010Z node 33 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:25:48.733028Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:25:48.733032Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [33:411:2401] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:25:48.733111Z node 33 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:25:48.733138Z node 33 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 35us result status StatusPathDoesNotExist 2025-05-29T15:25:48.733169Z node 33 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |67.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes >> TOlapReboots::CreateDropStandaloneTable [GOOD] >> TOlapReboots::AlterTtlSettings >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-false >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-true |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |67.3%| [TA] {RESULT} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |67.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... 
results_accumulator.log}
|67.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes
>> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-true
>> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-false
|67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::Fake [GOOD]
>> TSchemeShardTestExtSubdomainReboots::SchemeLimits-AlterDatabaseCreateHiveFirst-true
>> TPQTestSlow::TestWriteVeryBigMessage [GOOD]
>> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink [FAIL]
>> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestWriteVeryBigMessage [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135]
Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:109:2057] recipient: [1:102:2135]
2025-05-29T15:25:12.147747Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-05-29T15:25:12.147773Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info
Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170]
IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170]
Leader for TabletID 72057594037927938 is [1:154:2174] sender: [1:155:2057] recipient: [1:148:2170]
Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:180:2057] recipient: [1:14:2061]
2025-05-29T15:25:12.156759Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-05-29T15:25:12.159680Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false }
2025-05-29T15:25:12.159907Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2198]
2025-05-29T15:25:12.162158Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:186:2198]
2025-05-29T15:25:12.162816Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2199]
2025-05-29T15:25:12.165046Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:187:2199]
2025-05-29T15:25:12.166597Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|fb22917c-b2fb3031-d3378439-c9d3cbfc_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:25:12.218914Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|93248261-c7fe141c-213fbe67-82885b53_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:25:12.328743Z node 1 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0
2025-05-29T15:25:12.337897Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b9b308f4-3b489c61-7637fdc0-8a330f71_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-05-29T15:25:12.454413Z node 1 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0
Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:286:2057] recipient: [1:100:2134]
Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:288:2057] recipient: [1:14:2061]
Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:290:2057] recipient: [1:289:2283]
Leader for TabletID 72057594037927937 is [1:291:2284] sender: [1:292:2057] recipient: [1:289:2283]
2025-05-29T15:25:12.468377Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-05-29T15:25:12.468398Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info
2025-05-29T15:25:12.468501Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:340:2325]
2025-05-29T15:25:12.469559Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:341:2326]
2025-05-29T15:25:12.473581Z node 1 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized.
2025-05-29T15:25:12.473604Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:340:2325]
2025-05-29T15:25:12.474110Z node 1 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized.
2025-05-29T15:25:12.474126Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:341:2326] Leader for TabletID 72057594037927937 is [1:291:2284] sender: [1:371:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:104:2057] recipient: [2:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:104:2057] recipient: [2:102:2135] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:109:2057] recipient: [2:102:2135] 2025-05-29T15:25:12.796155Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:12.796177Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:150:2057] recipient: [2:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:150:2057] recipient: [2:148:2170] Leader for TabletID 72057594037927938 is [2:154:2174] sender: [2:155:2057] recipient: [2:148:2170] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:178:2057] recipient: [2:14:2061] 2025-05-29T15:25:12.799492Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:12.799675Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 2 actor [2:176:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-05-29T15:25:12.799780Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2196] 2025-05-29T15:25:12.800432Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:184:2196] 2025-05-29T15:25:12.800775Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:185:2197] 2025-05-29T15:25:12.801280Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:185:2197] 2025-05-29T15:25:12.802491Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|747a1b0b-c8041fd9-a8296e9f-1a2f96a6_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:12.846140Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4552bf97-ff23d871-5fc09d69-f192ca0b_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:13.005068Z node 2 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-05-29T15:25:13.027676Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e5ea7205-1e8b3a4a-92135e38-ebee298_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:13.169111Z node 2 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 !Reboot 72057594037927937 (actor [2:108:2139]) on event NKikimr::TEvPersQueue::TEvOffsets ! Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:285:2057] recipient: [2:100:2134] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:288:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:289:2057] recipient: [2:287:2282] Leader for TabletID 72057594037927937 is [2:290:2283] sender: [2:291:2057] recipient: [2:287:2282] 2025-05-29T15:25:13.185189Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:13.185210Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:25:13.185337Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:339:2324] 2025-05-29T15:25:13.185988Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:340:2325] 2025-05-29T15:25:13.189664Z node 2 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:25:13.189690Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:339:2324] 2025-05-29T15:25:13.191063Z node 2 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:25:13.191087Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:340:2325] !Reboot 72057594037927937 (actor [2:108:2139]) rebooted! !Reboot 72057594037927937 (actor [2:108:2139]) tablet resolver refreshed! new actor is[2:290:2283] Leader for TabletID 72057594037927937 is [2:290:2283] sender: [2:397:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:290:2283] sender: [2:400:2057] recipient: [2:100:2134] Leader for TabletID 72057594037927937 is [2:290:2283] sender: [2:403:2057] recipient: [2:402:2359] Leader for TabletID 72057594037927937 is [2:290:2283] sender: [2:404:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:405:2360] sender: [2:406:2057] recipient: [2:402:2359] 2025-05-29T15:25:14.640116Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:14.640146Z node 2 ... ' partition 0 generation 4 [53:431:2391] 2025-05-29T15:25:47.726136Z node 53 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-05-29T15:25:47.726151Z node 53 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 4 [53:432:2392] !Reboot 72057594037927937 (actor [53:293:2286]) rebooted! !Reboot 72057594037927937 (actor [53:293:2286]) tablet resolver refreshed! new actor is[53:380:2348] Leader for TabletID 72057594037927937 is [53:380:2348] sender: [53:487:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:104:2057] recipient: [54:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:104:2057] recipient: [54:102:2135] Leader for TabletID 72057594037927937 is [54:108:2139] sender: [54:109:2057] recipient: [54:102:2135] 2025-05-29T15:25:49.662520Z node 54 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:49.662553Z node 54 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:150:2057] recipient: [54:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:150:2057] recipient: [54:148:2170] Leader for TabletID 72057594037927938 is [54:154:2174] sender: [54:155:2057] recipient: [54:148:2170] Leader for TabletID 72057594037927937 is [54:108:2139] sender: [54:180:2057] recipient: [54:14:2061] 2025-05-29T15:25:49.666553Z node 54 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:49.666803Z node 54 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 54 actor [54:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 54 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 54 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 54 Important: false } 2025-05-29T15:25:49.666930Z node 54 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:186:2198] 2025-05-29T15:25:49.667521Z node 54 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [54:186:2198] 2025-05-29T15:25:49.667935Z node 54 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:187:2199] 2025-05-29T15:25:49.668364Z node 54 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [54:187:2199] 2025-05-29T15:25:49.676130Z node 54 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1b8b4650-329f671a-aa75c1ee-d6c486d9_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:49.692305Z node 54 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d0a5214f-a7a64252-40420bd4-be75d87e_1 generated for partition 1 topic 
'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:49.743944Z node 54 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-05-29T15:25:49.751329Z node 54 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|abb1ea3b-856f4892-1608acd3-f6f62603_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:49.808615Z node 54 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Leader for TabletID 72057594037927937 is [54:108:2139] sender: [54:288:2057] recipient: [54:100:2134] Leader for TabletID 72057594037927937 is [54:108:2139] sender: [54:291:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [54:108:2139] sender: [54:292:2057] recipient: [54:290:2285] Leader for TabletID 72057594037927937 is [54:293:2286] sender: [54:294:2057] recipient: [54:290:2285] 2025-05-29T15:25:49.817384Z node 54 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:49.817407Z node 54 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:25:49.817548Z node 54 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:342:2327] 2025-05-29T15:25:49.818194Z node 54 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:343:2328] 2025-05-29T15:25:49.821949Z node 54 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:25:49.821978Z node 54 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [54:342:2327] 2025-05-29T15:25:49.823009Z node 54 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-05-29T15:25:49.823028Z node 54 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [54:343:2328] Leader for TabletID 72057594037927937 is [54:293:2286] sender: [54:373:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:104:2057] recipient: [55:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:104:2057] recipient: [55:102:2135] Leader for TabletID 72057594037927937 is [55:108:2139] sender: [55:109:2057] recipient: [55:102:2135] 2025-05-29T15:25:50.124038Z node 55 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:50.124073Z node 55 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:150:2057] recipient: [55:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:150:2057] recipient: [55:148:2170] Leader for TabletID 72057594037927938 is [55:154:2174] sender: [55:155:2057] recipient: [55:148:2170] Leader for TabletID 72057594037927937 is [55:108:2139] sender: [55:180:2057] recipient: [55:14:2061] 2025-05-29T15:25:50.128434Z node 55 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:50.128663Z node 55 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 55 actor [55:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } 2025-05-29T15:25:50.128791Z node 55 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:186:2198] 2025-05-29T15:25:50.129446Z node 55 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [55:186:2198] 2025-05-29T15:25:50.129898Z node 55 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:187:2199] 2025-05-29T15:25:50.130349Z node 55 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [55:187:2199] 2025-05-29T15:25:50.131752Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|9199a6b5-f997bcd5-bd617cdd-dd375ddc_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:50.143661Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|fae9838a-148d4a18-eb4860fe-4a852db_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:50.195297Z node 55 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-05-29T15:25:50.203135Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f11d856b-bd34b6e1-f7bfaa68-c3f3dc8b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:50.300215Z node 55 :PERSQUEUE NOTICE: read.h:361: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Leader for TabletID 72057594037927937 is [55:108:2139] sender: [55:288:2057] recipient: [55:100:2134] Leader for TabletID 72057594037927937 is [55:108:2139] sender: [55:291:2057] recipient: [55:14:2061] Leader for TabletID 72057594037927937 is [55:108:2139] sender: [55:292:2057] recipient: [55:290:2285] Leader for TabletID 72057594037927937 is [55:293:2286] sender: [55:294:2057] recipient: [55:290:2285] 2025-05-29T15:25:50.334693Z node 55 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:50.334724Z node 55 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:25:50.334896Z node 55 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:342:2327] 2025-05-29T15:25:50.335520Z node 55 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:343:2328] 2025-05-29T15:25:50.344392Z node 55 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:25:50.344426Z node 55 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [55:342:2327] 2025-05-29T15:25:50.345229Z node 55 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-05-29T15:25:50.345246Z node 55 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [55:343:2328] Leader for TabletID 72057594037927937 is [55:293:2286] sender: [55:373:2057] recipient: [55:14:2061] >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery [FAIL] >> DataShardVolatile::DistributedWriteWithAsyncIndex >> TSchemeShardTestExtSubdomainReboots::SchemeLimits-AlterDatabaseCreateHiveFirst-false >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx0 [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink [FAIL] >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] Test command err: 2025-05-29T15:24:20.559574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:20.559605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:20.559612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:20.559617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:20.559633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:20.559638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:20.559657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:20.559685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:20.559819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:20.559914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:20.564898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:20.564928Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:20.575588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:20.575754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:20.575783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState 
as Uninitialized, schemeshardId: 72057594046578944 2025-05-29T15:24:20.581615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:20.581745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:20.581870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:20.581982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:24:20.582811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:20.582850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:20.583154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:24:20.583167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:20.583187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:20.583198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-05-29T15:24:20.583206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:20.583261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2025-05-29T15:24:20.644200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2025-05-29T15:24:20.644313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:20.644392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2025-05-29T15:24:20.644477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2025-05-29T15:24:20.644490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:20.647367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:20.647432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2025-05-29T15:24:20.647509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:20.647523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046578944 2025-05-29T15:24:20.647531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:20.647539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:20.648296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:20.648313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046578944 2025-05-29T15:24:20.648324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:20.648746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:20.648760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:20.648779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:20.648790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:20.649609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:20.650348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:20.650403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:24:20.650683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:20.650696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-05-29T15:24:20.650702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 
2025-05-29T15:24:21.290909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:21.290984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2025-05-29T15:24:21.290999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:21.291097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:21.291111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:21.291155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2025-05-29T15:24:21.291182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:24:21.292687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:24:21.292712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-05-29T15:24:21.292763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:21.292767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:841:2259], at schemeshard: 72057594046578944, txId: 1, path id: 1 202 ... 
046578944, LocalPathId: 3] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2025-05-29T15:25:49.855471Z node 163 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2540: ResolveCacheItemForNotify: this is update from GSS, the update us ignored, TSS is prefered: self# [163:984:2302], path# /dc-1/users/tenant-1, pathId# [OwnerId: 72057594046578944, LocalPathId: 3] 2025-05-29T15:25:49.855885Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5809: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046578944, msg: DomainSchemeShard: 72057594046578944 DomainPathId: 3 TabletID: 72075186233409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 3 TenantHive: 18446744073709551615 TenantSysViewProcessor: 72075186233409553 TenantRootACL: "" TenantStatisticsAggregator: 72075186233409554 TenantGraphShard: 18446744073709551615 2025-05-29T15:25:49.855906Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], at schemeshard: 72057594046578944 2025-05-29T15:25:49.855935Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:567: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046578944, LocalPathId: 3], Generation: 2, ActorId:[163:1584:2639], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 3, TenantHive: 18446744073709551615, TenantSysViewProcessor: 72075186233409553, TenantStatisticsAggregator: 72075186233409554, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 3, tenantHive: 18446744073709551615, tenantSysViewProcessor: 72075186233409553, at schemeshard: 72057594046578944 2025-05-29T15:25:49.855973Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-05-29T15:25:49.855979Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-05-29T15:25:49.856020Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-05-29T15:25:49.856025Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [163:1885:2858], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-05-29T15:25:49.856516Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72075186233409546, cookie: 0 2025-05-29T15:25:49.856684Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046578944, cookie: 281474976715661 2025-05-29T15:25:49.856700Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], at schemeshard: 72057594046578944 2025-05-29T15:25:49.856779Z node 163 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [163:984:2302], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: 
/dc-1/users/tenant-1 PathId: [OwnerId: 72075186233409546, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "dc-1/users/tenant-1" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 3 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } SecurityState { Audience: "/dc-1/users/tenant-1" } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 1 PathOwnerId: 72075186233409546 } 2025-05-29T15:25:49.856830Z node 163 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [163:984:2302], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/users/tenant-1 PathId: [OwnerId: 72075186233409546, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "dc-1/users/tenant-1" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 3 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } SecurityState { Audience: 
"/dc-1/users/tenant-1" } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 1 PathOwnerId: 72075186233409546 }, by path# { Subscriber: { Subscriber: [163:1448:2556] DomainOwnerId: 72057594046578944 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72075186233409546, LocalPathId: 1] DomainId: [OwnerId: 72057594046578944, LocalPathId: 3] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [163:1448:2556] DomainOwnerId: 72057594046578944 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72075186233409546, LocalPathId: 1] DomainId: [OwnerId: 72057594046578944, LocalPathId: 3] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } Reply: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "tenant-1" PathId: 3 SchemeshardId: 72057594046578944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } SecurityState { } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 3 PathOwnerId: 72057594046578944 Reply: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "tenant-1" PathId: 3 SchemeshardId: 72057594046578944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } 
StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } SecurityState { } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 3 PathOwnerId: 72057594046578944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx0 [GOOD] Test command err: iteration# 0 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 6 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 12 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 18 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 24 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 30 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 36 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 42 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 48 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 54 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 60 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 66 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 72 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 78 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 84 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 90 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 96 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 102 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 108 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 114 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 120 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 126 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 132 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 138 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 144 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 150 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 156 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 162 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 168 BlobsWritten# 490 blobsWrittenFull# 391 
blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 174 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 180 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 186 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 192 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 198 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 204 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 210 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 216 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 222 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 228 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 234 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 240 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 246 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 252 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 258 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 264 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 270 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 276 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 282 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 288 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 294 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 300 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 306 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 312 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 318 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 324 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 330 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 336 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 342 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 348 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 354 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 360 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 366 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 372 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 378 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 
iteration# 384 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 390 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 396 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 402 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 408 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 414 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 420 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 426 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 432 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 438 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 444 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 450 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 456 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 462 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 468 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 474 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 480 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 486 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx1 [GOOD] >> DataShardVolatile::DistributedWriteWithAsyncIndex [FAIL] >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx1 [GOOD] Test command err: iteration# 1 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 7 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 13 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 19 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 25 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 31 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 37 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 43 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 49 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 55 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 61 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 67 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 73 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 79 BlobsWritten# 490 blobsWrittenFull# 391 
blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 85 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 91 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 97 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 103 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 109 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 115 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 121 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 127 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 133 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 139 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 145 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 151 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 157 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 163 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 169 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 175 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 181 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 187 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 193 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 199 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 205 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 211 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 217 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 223 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 229 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 235 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 241 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 247 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 253 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 259 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 265 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 271 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 277 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 283 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 289 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 
295 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 301 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 307 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 313 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 319 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 325 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 331 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 337 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 343 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 349 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 355 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 361 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 367 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 373 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 379 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 385 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 391 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 397 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 403 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 409 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 415 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 421 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 427 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 433 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 439 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 445 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 451 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 457 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 463 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 469 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 475 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 481 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 487 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn+WritePortionsOnInsert [GOOD] |67.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |67.3%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |67.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |67.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt >> KqpSplit::BorderKeys+Descending >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked [FAIL] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn+WritePortionsOnInsert [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2025-05-29T15:25:27.880616Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:389: StateInit, received event# 268828672, Sender [1:103:2136], Recipient [1:139:2170]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:27.883040Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:389: StateInit, received event# 268828673, Sender [1:103:2136], Recipient [1:139:2170]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:25:27.883148Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:25:27.887114Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:25:27.887178Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:25:27.887897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:25:27.887943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:25:27.887982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:25:27.888003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:25:27.888021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:25:27.888039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:25:27.888056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:25:27.888121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:25:27.888144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:25:27.888161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:25:27.888180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:25:27.888198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:25:27.906004Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:389: StateInit, received event# 268828684, Sender [1:103:2136], Recipient [1:139:2170]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:25:27.919016Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:25:27.919093Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:25:27.919104Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:25:27.919137Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:25:27.919177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:25:27.919190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:25:27.919196Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:25:27.919206Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:25:27.919216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:25:27.919224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:25:27.919228Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:25:27.919247Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:25:27.919255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:25:27.919262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:25:27.919267Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:25:27.919277Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:25:27.919284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:25:27.919292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:25:27.919297Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:25:27.919309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:25:27.919319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:25:27.919324Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:25:27.919333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:25:27.919340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:25:27.919344Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:25:27.919370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:25:27.919378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:25:27.919383Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:25:27.919404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:25:27.919411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:25:27.919416Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:25:27.919431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:25:27.919438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:25:27.919442Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:25:27.919451Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:25:27.919459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:25:27.919467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:25:27.919471Z node ... 
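
The WARN/NOTICE lines above trace the column shard's normalizer chain during TTxInitSchema/TTxUpdateSchema: each normalizer is registered (normalizer_register), initialized (normalizer_init), runs to completion (normalizer_finished), and hands control to the next one (normalizer_switched). Below is a small illustrative Python helper — not part of the YDB tree — that extracts exactly these key=value events from a log like this one; the field layout is copied from the lines above.

    import re
    import sys
    from collections import OrderedDict

    # Matches the normalizer lifecycle events exactly as logged above, e.g.
    # "event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;"
    EVENT_RE = re.compile(
        r'event=(normalizer_register|normalizer_init|'
        r'normalizer_finished|normalizer_switched);([^ ]*)')

    def normalizer_events(log_text):
        """Yield (event, attrs) for every normalizer event in the log text."""
        for match in EVENT_RE.finditer(log_text):
            event, tail = match.group(1), match.group(2)
            # Remaining "key=value;" pairs up to the next space belong to
            # this event (description, seq_id, type, id, ...).
            attrs = OrderedDict(
                kv.split('=', 1)
                for kv in tail.strip(';').split(';') if '=' in kv)
            yield event, attrs

    if __name__ == '__main__':
        for event, attrs in normalizer_events(sys.stdin.read()):
            print(event, dict(attrs))

Fed this section on stdin, it prints the Granules -> Chunks -> TablesCleaner -> CleanGranuleId -> ... sequence together with each event's seq_id and CLASS_NAME attributes.
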
OLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=90; 2025-05-29T15:25:54.938706Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1000;merger=0;interval_id=90; 2025-05-29T15:25:54.938713Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-05-29T15:25:54.938724Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:25:54.938729Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=1000;finished=1; 2025-05-29T15:25:54.938735Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:199;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-05-29T15:25:54.938848Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:25:54.938872Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:25:54.938879Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-05-29T15:25:54.938892Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:230;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=1000; 2025-05-29T15:25:54.938904Z node 5 :TX_COLUMNSHARD_SCAN 
DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:250;stage=data_format;batch_size=8000;num_rows=1000;batch_columns=timestamp; 2025-05-29T15:25:54.938958Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[5:476:2480];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2025-05-29T15:25:54.938974Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:25:54.938987Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:25:54.938997Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:25:54.939029Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:25:54.939039Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:25:54.939049Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:25:54.939056Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:410: Scan [5:477:2481] finished for tablet 9437184 2025-05-29T15:25:54.939141Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[5:476:2480];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.01}],"full":{"a":1748532354928710,"name":"_full_task","f":1748532354928710,"d_finished":0,"c":0,"l":1748532354939069,"d":10359},"events":[{"name":"bootstrap","f":1748532354928768,"d_finished":499,"c":1,"l":1748532354929267,"d":499},{"a":1748532354939026,"name":"ack","f":1748532354938840,"d_finished":160,"c":1,"l":1748532354939000,"d":203},{"a":1748532354939024,"name":"processing","f":1748532354929482,"d_finished":8839,"c":8,"l":1748532354939000,"d":8884},{"name":"ProduceResults","f":1748532354929075,"d_finished":401,"c":11,"l":1748532354939052,"d":401},{"a":1748532354939053,"name":"Finish","f":1748532354939053,"d_finished":0,"c":0,"l":1748532354939069,"d":16},{"name":"task_result","f":1748532354929486,"d_finished":8654,"c":7,"l":1748532354938773,"d":8654}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:25:54.939156Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[5:476:2480];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-05-29T15:25:54.939198Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[5:476:2480];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.01}],"full":{"a":1748532354928710,"name":"_full_task","f":1748532354928710,"d_finished":0,"c":0,"l":1748532354939163,"d":10453},"events":[{"name":"bootstrap","f":1748532354928768,"d_finished":499,"c":1,"l":1748532354929267,"d":499},{"a":1748532354939026,"name":"ack","f":1748532354938840,"d_finished":160,"c":1,"l":1748532354939000,"d":297},{"a":1748532354939024,"name":"processing","f":1748532354929482,"d_finished":8839,"c":8,"l":1748532354939000,"d":8978},{"name":"ProduceResults","f":1748532354929075,"d_finished":401,"c":11,"l":1748532354939052,"d":401},{"a":1748532354939053,"name":"Finish","f":1748532354939053,"d_finished":0,"c":0,"l":1748532354939163,"d":110},{"name":"task_result","f":1748532354929486,"d_finished":8654,"c":7,"l":1748532354938773,"d":8654}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:25:54.939215Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-05-29T15:25:54.928376Z;index_granules=0;index_portions=1;index_batches=10;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2025-05-29T15:25:54.939227Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:190;event=scan_aborted;reason=unexpected on destructor; 2025-05-29T15:25:54.939310Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[5:477:2481];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit [FAIL] >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink >> TOlapReboots::CreateMultipleStandaloneTables [GOOD] >> THiveTest::TestCreateTablet >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink [FAIL] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink >> TargetTrackingScaleRecommenderPolicy::ScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::SpikeResistance [GOOD] >> TargetTrackingScaleRecommenderPolicy::NearTarget [GOOD] >> 
TargetTrackingScaleRecommenderPolicy::AtTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::Fluctuations [GOOD] >> TargetTrackingScaleRecommenderPolicy::FluctuationsBigNumbers [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleInToMaxSeen [GOOD] >> TargetTrackingScaleRecommenderPolicy::Idle [GOOD] >> TStorageBalanceTest::TestScenario1 >> THiveTest::TestCreateTablet [GOOD] >> THiveTest::TestCreate100Tablets ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateMultipleStandaloneTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:24:39.073014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:39.073037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:39.073042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:39.073047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:39.073053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:39.073057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:39.073066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:39.073080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:39.073178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:39.073253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 
2025-05-29T15:24:39.085929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:39.085949Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:39.086020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:24:39.088989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:39.089020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:39.089054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:39.091614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:39.091683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:39.091787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.091943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:39.092494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:39.092531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:39.092765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:39.092775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:39.092806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:39.092814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:39.092821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:39.092838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:24:39.094030Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:24:39.115680Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:39.115753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.115812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:39.115856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:39.115866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.116571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.116601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:39.116651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.116668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:39.116674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:39.116679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:39.117066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.117077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:39.117082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:39.117390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.117400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.117407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:39.117414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:39.118118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:39.118506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:39.118545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:39.118766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.118791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:39.118807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:39.118870Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
o_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-05-29T15:25:57.391082Z node 120 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:25:57.391088Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:25:57.391227Z node 120 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:25:57.391237Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:25:57.391240Z node 120 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-05-29T15:25:57.391244Z node 120 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:25:57.391248Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:25:57.391256Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2025-05-29T15:25:57.391314Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 275382275 2025-05-29T15:25:57.391667Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-05-29T15:25:57.391688Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 TestWaitNotification: OK eventTxId 1003 2025-05-29T15:25:57.402386Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6146: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 1002 2025-05-29T15:25:57.402407Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409547, partId: 0 2025-05-29T15:25:57.402429Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 1002 FAKE_COORDINATOR: Erasing txId 1002 2025-05-29T15:25:57.402856Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:25:57.402890Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1002:0, at 
schemeshard: 72057594046678944 2025-05-29T15:25:57.402899Z node 120 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1002:0 ProgressState 2025-05-29T15:25:57.402914Z node 120 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1 2025-05-29T15:25:57.402919Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:25:57.402924Z node 120 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1 2025-05-29T15:25:57.402927Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:25:57.402932Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2025-05-29T15:25:57.402943Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [120:427:2392] message: TxId: 1002 2025-05-29T15:25:57.402950Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:25:57.402955Z node 120 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1002:0 2025-05-29T15:25:57.402960Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1002:0 2025-05-29T15:25:57.402989Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:25:57.403299Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-05-29T15:25:57.403308Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [120:428:2393] TestWaitNotification: OK eventTxId 1002 2025-05-29T15:25:57.403405Z node 120 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:25:57.403460Z node 120 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable1" took 62us result status StatusSuccess 2025-05-29T15:25:57.403575Z node 120 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable1" PathDescription { Self { Name: "ColumnTable1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 
0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable1" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:25:57.403684Z node 120 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:25:57.403701Z node 120 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable2" took 18us result status StatusSuccess 2025-05-29T15:25:57.403753Z node 120 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable2" PathDescription { Self { Name: "ColumnTable2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable2" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409547 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |67.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |67.4%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |67.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace >> TSchemeShardServerLess::StorageBillingLabels [GOOD] >> THiveTest::TestCreate100Tablets [GOOD] >> THiveTest::TestCreateSubHiveCreateTablet >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink [FAIL] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink [FAIL] >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBillingLabels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:24:39.964805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:39.964824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:39.964828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:39.964831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:39.964835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:39.964837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: 
type TxSplitTablePartition, limit 10000 2025-05-29T15:24:39.964843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:39.964851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:39.964933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:39.964992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:39.977368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:39.977390Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:39.979718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:39.979822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:39.979855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:39.982863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:39.983077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:39.983939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.984126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:39.985039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:39.985092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:39.985412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:39.985425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:39.985446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:39.985455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:39.985463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:39.985518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-05-29T15:24:39.987188Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:24:40.011000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:40.011107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.011194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:40.011248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:40.011260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.012274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:40.012309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:40.012386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.012402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:40.012408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:40.012415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:40.012986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.013001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:40.013008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:40.013418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.013427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:40.013434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:40.013442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:40.014229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:40.014798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:40.014860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:40.015095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:40.015130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:40.015138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:40.015225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:40.015235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:40.015276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:24:40.015307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:24:40.015942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:40.015956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:40.016011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
72075186233409549 2025-05-29T15:24:40.086059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:588: Cannot publish paths for unknown operation id#0 FAKE_COORDINATOR: Erasing txId 105 2025-05-29T15:24:40.086193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:24:40.086205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:24:40.086210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:24:40.086215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:24:40.086221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:24:40.086235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-05-29T15:24:40.086855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5809: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 3 TabletID: 72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 2 UserAttributesVersion: 2 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-05-29T15:24:40.086872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:24:40.086890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:567: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:564:2501], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 2, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 2, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:24:40.086924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409549 2025-05-29T15:24:40.086928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 0, path id: [OwnerId: 72075186233409549, LocalPathId: 1] 2025-05-29T15:24:40.086954Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409549 2025-05-29T15:24:40.086960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:664:2575], at schemeshard: 72075186233409549, txId: 0, path id: 1 2025-05-29T15:24:40.087070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72075186233409549, cookie: 0 2025-05-29T15:24:40.087085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:24:40.087095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-05-29T15:24:40.087156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-05-29T15:24:40.087164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-05-29T15:24:40.087244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-05-29T15:24:40.087262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-29T15:24:40.087267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:741:2632] TestWaitNotification: OK eventTxId 105 ... 
waiting for metering 2025-05-29T15:24:44.846952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:24:44.846982Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:44.901987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:24:44.902016Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:44.942875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:24:44.942905Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:04.752543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:25:04.752604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:90: TTxServerlessStorageBilling: initiate at first time, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 1970-01-01T00:01:00.000000Z, set LastBillTime: 1970-01-01T00:01:00.000000Z, next retry at: 1970-01-01T00:02:00.000000Z 2025-05-29T15:25:04.753714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:25:04.844985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6632: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-05-29T15:25:04.845033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:56: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-05-29T15:25:04.845050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-05-29T15:25:04.915600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6632: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-05-29T15:25:04.915664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:56: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-05-29T15:25:04.915689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-05-29T15:25:04.947779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6632: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-05-29T15:25:04.947843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:56: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-05-29T15:25:04.947867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-05-29T15:25:31.819278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:25:31.819344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:121: TTxServerlessStorageBilling: too soon call, wait until current period ends, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 1970-01-01T00:02:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, next retry at: 1970-01-01T00:03:00.000000Z 
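The two TTxServerlessStorageBilling records above and the bill record that follows show the period logic: the first call only initializes LastBillTime, a wake-up while the current minute is still open is skipped as a "too soon call", and once a full period has closed it is billed as a "delta" usage record in byte*second units with quantity = finish - start (here 120..179 gives 59). A minimal sketch of that window computation, assuming minute-aligned periods; the names below (BillWindow, TryGetBillWindow) are hypothetical illustrations, not the actual YDB API:

#include <cstdint>
#include <iostream>

// One-minute billing period, as in the log records above (hypothetical constant).
constexpr uint64_t kPeriodSeconds = 60;

struct BillWindow {
    uint64_t start;   // seconds since epoch, inclusive
    uint64_t finish;  // seconds since epoch, inclusive
};

// Returns true and fills `w` when the period following lastBillTime has
// already closed at `now`; otherwise billing is skipped ("too soon call").
bool TryGetBillWindow(uint64_t lastBillTime, uint64_t now, BillWindow& w) {
    const uint64_t periodStart = lastBillTime + kPeriodSeconds; // next unbilled period
    if (now < periodStart + kPeriodSeconds) {
        return false; // the period is still open
    }
    w = {periodStart, periodStart + kPeriodSeconds - 1};
    return true;
}

int main() {
    BillWindow w;
    // 00:02:00 wake-up with LastBillTime=00:01:00: period 120..179 still open.
    std::cout << TryGetBillWindow(60, 120, w) << "\n"; // prints 0
    // 00:03:00 wake-up: window 120..179 is billed, quantity = 179 - 120 = 59,
    // matching "usage":{"start":120,"quantity":59,"finish":179,...} in the log.
    if (TryGetBillWindow(60, 180, w)) {
        std::cout << w.start << ".." << w.finish
                  << " quantity=" << (w.finish - w.start) << "\n";
    }
    return 0;
}

Run against the timestamps in this log, the sketch skips the 00:02:00 wake-up and bills 120..179 with quantity 59 at 00:03:00, which is exactly the record emitted below.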
2025-05-29T15:25:31.819368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:25:31.923037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6632: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-05-29T15:25:31.923100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:56: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-05-29T15:25:31.923125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-05-29T15:25:31.982951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6632: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-05-29T15:25:31.983018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:56: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-05-29T15:25:31.983043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-05-29T15:25:32.030391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6632: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-05-29T15:25:32.030453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:56: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-05-29T15:25:32.030478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-05-29T15:25:59.918144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:25:59.918283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:191: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":120,"quantity":59,"finish":179,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":0},"id":"72057594046678944-3-120-179-0","cloud_id":"CLOUD_ID_VAL","source_wt":180,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","labels":{"k":"v"},"folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 1970-01-01T00:03:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:02:00.000000Z--1970-01-01T00:02:59.000000Z, next retry at: 1970-01-01T00:04:00.000000Z 2025-05-29T15:25:59.919440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete ... blocking NKikimr::NMetering::TEvMetering::TEvWriteMeteringJson from FLAT_SCHEMESHARD_ACTOR to TFakeMetering cookie 0 ... 
waiting for metering (done) >> THiveTest::TestHiveBalancerWithPrefferedDC1 >> THiveTest::TestCreateSubHiveCreateTablet [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTablets >> THiveTest::TestUpdateChannelValues >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn+WritePortionsOnInsert [GOOD] |67.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |67.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |67.4%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |67.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |67.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |67.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn-WritePortionsOnInsert [GOOD] |67.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |67.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |67.4%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut >> THiveTest::TestUpdateChannelValues [GOOD] >> THiveTest::TestReassignUseRelativeSpace ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::BorderKeys+Descending Test command err: Trying to start YDB, gRPC: 28777, MsgBus: 20488 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001eb4/r3tmp/tmpoMspCR/pdisk_1.dat 2025-05-29T15:25:47.859038Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:25:47.863672Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889247570113900:2079] 1748532347760958 != 1748532347760961 2025-05-29T15:25:47.870184Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28777, node 1 2025-05-29T15:25:47.900626Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:47.900636Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:47.900638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:47.900680Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:47.905293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:47.905323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:47.907036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20488 TClient is connected to server localhost:20488 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:48.045613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:48.055095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:25:48.071791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:48.128983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:48.184631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:48.244077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:48.373404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889251865082830:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.373434Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.422096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:25:48.433475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:25:48.447507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:25:48.461133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:25:48.475337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:25:48.490188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:25:48.504421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:25:48.524685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889251865083481:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.524717Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.524923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889251865083486:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.525805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:25:48.529573Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889251865083488:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:25:48.591964Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889251865083539:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:48.779000Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889251865083548:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:48.780806Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjllOWVjYWMtMTcwYTY3NWEtNGM3OWJmYzgtMTA2ZmY4MDg=, ActorId: [1:7509889251865082802:2399], ActorState: ExecuteState, TraceId: 01jweachkc9pkygj3xh87fafyj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:25:48.781810Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13B4AA75 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13B41A76 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13CE3136 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x139B03B2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26246832 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x262654DC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x262654DC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x262654DC 8. /-S/util/thread/pool.h:71: Process @ 0x262654DC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13B523F9 10. /-S/util/thread/factory.h:15: Execute @ 0x13B50DE9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13B50DE9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13B4C25C 13. ??:0: ?? @ 0x7F3D21443AC2 14. ??:0: ?? @ 0x7F3D214D584F Trying to start YDB, gRPC: 5412, MsgBus: 16910 2025-05-29T15:25:55.589038Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889284302483268:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:55.595338Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001eb4/r3tmp/tmpfE5jeq/pdisk_1.dat 2025-05-29T15:25:55.656767Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:55.659951Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889284302483086:2079] 1748532355496954 != 1748532355496957 TServer::EnableGrpc on GrpcPort 5412, node 1 2025-05-29T15:25:55.698749Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:55.698761Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:55.698762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:55.698808Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:25:55.699043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:55.699060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:55.699976Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16910 TClient is connected to server localhost:16910 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:55.891762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:55.895898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:25:55.901595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:55.961681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:56.028004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:56.062397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:56.185816Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889288597452017:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:56.185848Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:56.287413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:25:56.352418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:25:56.370069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:25:56.395874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:25:56.409487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:25:56.472594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:25:56.497226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:25:56.524473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889288597452672:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:56.524519Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:56.524640Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889288597452680:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:56.525567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:25:56.530121Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889288597452682:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:25:56.632096Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889288597452742:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:56.866019Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889288597452758:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:56.867848Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTRmYzE3ZjYtOTUzMWNiYzQtYzM4MzMwNTgtZDNiZDFkZGE=, ActorId: [1:7509889288597451989:2399], ActorState: ExecuteState, TraceId: 01jweacsdbecbqcy7bare473sh, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:25:56.875670Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13B4AA75 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13B41A76 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13CE3136 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x139B03B2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26246832 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x262654DC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x262654DC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x262654DC 8. /-S/util/thread/pool.h:71: Process @ 0x262654DC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13B523F9 10. /-S/util/thread/factory.h:15: Execute @ 0x13B50DE9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13B50DE9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13B4C25C 13. ??:0: ?? @ 0x7F45D472AAC2 14. ??:0: ?? @ 0x7F45D47BC84F |67.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |67.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |67.4%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |67.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |67.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |67.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn+WritePortionsOnInsert [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2025-05-29T15:25:29.875626Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:389: StateInit, received event# 268828672, Sender [1:103:2136], Recipient [1:139:2170]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:29.877691Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:389: StateInit, received event# 268828673, Sender [1:103:2136], Recipient [1:139:2170]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:25:29.877784Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:25:29.887995Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:25:29.888076Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:25:29.888802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:25:29.888856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:25:29.888897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:25:29.888915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:25:29.888931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:25:29.888948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:25:29.888963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:25:29.888982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:25:29.889003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:25:29.889019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:25:29.889036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:25:29.889053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:25:29.898584Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:389: StateInit, received event# 268828684, Sender [1:103:2136], Recipient [1:139:2170]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:25:29.904072Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:25:29.904136Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:25:29.904147Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:25:29.904178Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:25:29.904211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:25:29.904224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:25:29.904229Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:25:29.904238Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:25:29.904248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:25:29.904255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:25:29.904259Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:25:29.904276Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:25:29.904284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:25:29.904290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:25:29.904294Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:25:29.904304Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:25:29.904311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:25:29.904320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:25:29.904324Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:25:29.904335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:25:29.904344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
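The TTxUpdateSchema records above and below walk a fixed chain of normalizers registered at tablet init (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, ...): each one runs once, reports how many chunks it found to repair ("0 chunks found" throughout this run), is logged as normalizer_finished, and the executor switches to the next entry. A minimal sketch of such a sequential registry; the types below are hypothetical illustrations, not the actual NKikimr interfaces:

#include <cstddef>
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// A registered normalizer: a name plus a one-shot repair pass that
// returns the number of chunks it found to fix.
struct Normalizer {
    std::string name;
    std::function<size_t()> run;
};

int main() {
    // Registration order matches the normalizer_register records above.
    std::vector<Normalizer> chain = {
        {"Granules",       [] { return size_t(0); }},
        {"Chunks",         [] { return size_t(0); }},
        {"TablesCleaner",  [] { return size_t(0); }},
        {"CleanGranuleId", [] { return size_t(0); }},
    };
    for (size_t i = 0; i < chain.size(); ++i) {
        const size_t found = chain[i].run();
        std::cout << "normalizer=" << chain[i].name
                  << ";message=" << found << " chunks found\n";
        std::cout << "event=normalizer_finished;description=CLASS_NAME="
                  << chain[i].name << "\n";
        if (i + 1 < chain.size()) {
            std::cout << "event=normalizer_switched;description=CLASS_NAME="
                      << chain[i + 1].name << "\n";
        }
    }
    return 0;
}

The sequential finished/switched output mirrors the record pattern this tablet logs as it advances through the chain below.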
2025-05-29T15:25:29.904348Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:25:29.904356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:25:29.904362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:25:29.904367Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:25:29.904389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:25:29.904397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:25:29.904401Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:25:29.904435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:25:29.904442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:25:29.904446Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:25:29.904458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:25:29.904465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:25:29.904470Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:25:29.904478Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:25:29.904485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:25:29.904492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:25:29.904496Z node ... 
UMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=170; 2025-05-29T15:26:01.294284Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1000;merger=0;interval_id=170; 2025-05-29T15:26:01.294291Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-05-29T15:26:01.294302Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.294308Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=1000;finished=1; 2025-05-29T15:26:01.294314Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:199;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-05-29T15:26:01.294405Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:26:01.294437Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.294443Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-05-29T15:26:01.294454Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:230;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=1000; 2025-05-29T15:26:01.294466Z node 5 :TX_COLUMNSHARD_SCAN 
DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:250;stage=data_format;batch_size=8000;num_rows=1000;batch_columns=timestamp; 2025-05-29T15:26:01.294514Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[5:506:2510];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2025-05-29T15:26:01.294529Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.294542Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.294552Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.294580Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:26:01.294593Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.294602Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.294609Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:410: Scan [5:507:2511] finished for tablet 9437184 2025-05-29T15:26:01.294690Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[5:506:2510];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.009}],"full":{"a":1748532361285320,"name":"_full_task","f":1748532361285320,"d_finished":0,"c":0,"l":1748532361294621,"d":9301},"events":[{"name":"bootstrap","f":1748532361285377,"d_finished":383,"c":1,"l":1748532361285760,"d":383},{"a":1748532361294578,"name":"ack","f":1748532361294398,"d_finished":157,"c":1,"l":1748532361294555,"d":200},{"a":1748532361294576,"name":"processing","f":1748532361285944,"d_finished":8002,"c":8,"l":1748532361294556,"d":8047},{"name":"ProduceResults","f":1748532361285588,"d_finished":349,"c":11,"l":1748532361294605,"d":349},{"a":1748532361294606,"name":"Finish","f":1748532361294606,"d_finished":0,"c":0,"l":1748532361294621,"d":15},{"name":"task_result","f":1748532361285948,"d_finished":7821,"c":7,"l":1748532361294331,"d":7821}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.294705Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[5:506:2510];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-05-29T15:26:01.294780Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[5:506:2510];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.009}],"full":{"a":1748532361285320,"name":"_full_task","f":1748532361285320,"d_finished":0,"c":0,"l":1748532361294712,"d":9392},"events":[{"name":"bootstrap","f":1748532361285377,"d_finished":383,"c":1,"l":1748532361285760,"d":383},{"a":1748532361294578,"name":"ack","f":1748532361294398,"d_finished":157,"c":1,"l":1748532361294555,"d":291},{"a":1748532361294576,"name":"processing","f":1748532361285944,"d_finished":8002,"c":8,"l":1748532361294556,"d":8138},{"name":"ProduceResults","f":1748532361285588,"d_finished":349,"c":11,"l":1748532361294605,"d":349},{"a":1748532361294606,"name":"Finish","f":1748532361294606,"d_finished":0,"c":0,"l":1748532361294712,"d":106},{"name":"task_result","f":1748532361285948,"d_finished":7821,"c":7,"l":1748532361294331,"d":7821}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.294798Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-05-29T15:26:01.285034Z;index_granules=0;index_portions=1;index_batches=10;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2025-05-29T15:26:01.294805Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:190;event=scan_aborted;reason=unexpected on destructor; 2025-05-29T15:26:01.294870Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[5:507:2511];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; >> TCutHistoryRestrictions::BasicTest [GOOD] >> TCutHistoryRestrictions::EmptyAllowList [GOOD] >> TCutHistoryRestrictions::EmptyDenyList [GOOD] >> TCutHistoryRestrictions::BothListsEmpty [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn-WritePortionsOnInsert [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2025-05-29T15:25:30.098569Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:389: StateInit, received event# 
268828672, Sender [1:103:2136], Recipient [1:139:2170]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:30.100773Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:389: StateInit, received event# 268828673, Sender [1:103:2136], Recipient [1:139:2170]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:25:30.100862Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:25:30.105229Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:25:30.105297Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:25:30.106043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:25:30.106083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:25:30.106119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:25:30.106141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:25:30.106161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:25:30.106181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:25:30.106199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:25:30.106227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:25:30.106247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:25:30.106265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:25:30.106284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:25:30.106304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:25:30.112211Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:389: StateInit, received event# 268828684, Sender [1:103:2136], Recipient [1:139:2170]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:25:30.113501Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:25:30.113586Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:25:30.113597Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:25:30.113631Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:25:30.113665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:25:30.113678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:25:30.113684Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:25:30.113694Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:25:30.113703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:25:30.113711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:25:30.113715Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:25:30.113732Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:25:30.113740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:25:30.113747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:25:30.113752Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:25:30.113761Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:25:30.113768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:25:30.113776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:25:30.113780Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:25:30.113800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:25:30.113807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:25:30.113812Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:25:30.113821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:25:30.113828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:25:30.113833Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:25:30.113858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:25:30.113867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:25:30.113872Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:25:30.113893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:25:30.113900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:25:30.113905Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:25:30.113919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:25:30.113927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:25:30.113932Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:25:30.113941Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:25:30.113949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:25:30.113957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:25:30.113963Z node ... 631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=220; 2025-05-29T15:26:01.866509Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1000;merger=0;interval_id=220; 2025-05-29T15:26:01.866515Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-05-29T15:26:01.866526Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.866531Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=1000;finished=1; 2025-05-29T15:26:01.866536Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:199;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-05-29T15:26:01.866599Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:26:01.866616Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.866623Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-05-29T15:26:01.866633Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:230;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=1000; 2025-05-29T15:26:01.866641Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:250;stage=data_format;batch_size=8000;num_rows=1000;batch_columns=timestamp; 2025-05-29T15:26:01.866681Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[5:626:2630];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2025-05-29T15:26:01.866695Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.866707Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.866716Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.866757Z 
node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:26:01.866768Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.866779Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.866785Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:410: Scan [5:627:2631] finished for tablet 9437184 2025-05-29T15:26:01.866845Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[5:626:2630];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.004}],"full":{"a":1748532361862403,"name":"_full_task","f":1748532361862403,"d_finished":0,"c":0,"l":1748532361866795,"d":4392},"events":[{"name":"bootstrap","f":1748532361862447,"d_finished":1714,"c":1,"l":1748532361864161,"d":1714},{"a":1748532361866754,"name":"ack","f":1748532361866594,"d_finished":126,"c":1,"l":1748532361866720,"d":167},{"a":1748532361866752,"name":"processing","f":1748532361864373,"d_finished":1764,"c":8,"l":1748532361866720,"d":1807},{"name":"ProduceResults","f":1748532361862641,"d_finished":304,"c":11,"l":1748532361866782,"d":304},{"a":1748532361866782,"name":"Finish","f":1748532361866782,"d_finished":0,"c":0,"l":1748532361866795,"d":13},{"name":"task_result","f":1748532361864377,"d_finished":1618,"c":7,"l":1748532361866549,"d":1618}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.866857Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[5:626:2630];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-05-29T15:26:01.866897Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[5:626:2630];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.004}],"full":{"a":1748532361862403,"name":"_full_task","f":1748532361862403,"d_finished":0,"c":0,"l":1748532361866863,"d":4460},"events":[{"name":"bootstrap","f":1748532361862447,"d_finished":1714,"c":1,"l":1748532361864161,"d":1714},{"a":1748532361866754,"name":"ack","f":1748532361866594,"d_finished":126,"c":1,"l":1748532361866720,"d":235},{"a":1748532361866752,"name":"processing","f":1748532361864373,"d_finished":1764,"c":8,"l":1748532361866720,"d":1875},{"name":"ProduceResults","f":1748532361862641,"d_finished":304,"c":11,"l":1748532361866782,"d":304},{"a":1748532361866782,"name":"Finish","f":1748532361866782,"d_finished":0,"c":0,"l":1748532361866863,"d":81},{"name":"task_result","f":1748532361864377,"d_finished":1618,"c":7,"l":1748532361866549,"d":1618}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:01.866910Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-05-29T15:26:01.862165Z;index_granules=0;index_portions=1;index_batches=10;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59648;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59648;selected_rows=0; 2025-05-29T15:26:01.866916Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:190;event=scan_aborted;reason=unexpected on destructor; 2025-05-29T15:26:01.866956Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[5:627:2631];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort [FAIL] >> DataShardVolatile::DistributedWriteAsymmetricExecute >> TOlapReboots::DropMultipleStandaloneTables [GOOD] >> THiveTest::TestReassignUseRelativeSpace [GOOD] >> THiveTest::TestStorageBalancer >> TCutHistoryRestrictions::SameTabletInBothLists [GOOD] >> THeavyPerfTest::TTestLoadEverything >> KqpVectorIndexes::CoveredVectorIndexWithFollowers+StaleRO >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink [FAIL] >> THiveTest::TestHiveBalancerWithPrefferedDC1 [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink >> THiveTest::TestHiveBalancerWithPrefferedDC2 >> THiveTest::TestStorageBalancer [GOOD] >> 
THiveTest::TestRestartsWithFollower |67.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |67.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |67.5%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::DropMultipleStandaloneTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:24:38.690002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:38.690027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:38.690033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:38.690038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:38.690043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:38.690047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:38.690055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:38.690069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:38.690163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:38.690233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:38.704369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { 
AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:38.704394Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:38.704498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:24:38.707331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:38.707360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:38.707394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:38.710420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:38.710494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:38.710607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:38.710828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:38.711517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:38.711557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:38.711795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:38.711805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:38.711841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:38.711850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:38.711856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:38.711872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:24:38.713252Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:24:38.732596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { 
WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:38.732671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:38.732732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:38.732782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:38.732795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:38.733700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:38.733729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:38.733782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:38.733799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:38.733805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:38.733809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:38.734267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:38.734279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:38.734286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:38.734628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:38.734639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:38.734645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:38.734652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:38.735296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:38.735754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:38.735792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:38.736011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:38.736038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:38.736055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:38.736115Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:26:03.128840Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2025-05-29T15:26:03.129010Z node 111 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:26:03.129349Z node 111 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186233409546;self_id=[111:331:2317];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; Forgetting tablet 72075186233409546 2025-05-29T15:26:03.135484Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:03.135751Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:26:03.135973Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:26:03.135984Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:26:03.136003Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:26:03.136558Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1004:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 275382275 2025-05-29T15:26:03.143751Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:26:03.144113Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:26:03.144282Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:26:03.144301Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:26:03.144371Z node 111 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1005 2025-05-29T15:26:03.169778Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6146: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 1004 2025-05-29T15:26:03.169810Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409547, partId: 0 2025-05-29T15:26:03.169851Z node 111 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 1004 2025-05-29T15:26:03.169866Z node 111 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1004:0 129 -> 130 FAKE_COORDINATOR: Erasing txId 1004 2025-05-29T15:26:03.172746Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:26:03.172832Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:26:03.172845Z node 111 :FLAT_TX_SCHEMESHARD INFO: drop_table.cpp:315: TDropColumnTable TProposedDeleteParts operationId# 1004:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:03.172891Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:26:03.172933Z node 111 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:26:03.172939Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:26:03.172946Z node 111 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:26:03.172950Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:26:03.172958Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2025-05-29T15:26:03.172988Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [111:365:2342] message: TxId: 1004 2025-05-29T15:26:03.172997Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:26:03.173003Z node 111 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:26:03.173009Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:26:03.173047Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:26:03.179314Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:26:03.179392Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:26:03.179401Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [111:537:2494] 2025-05-29T15:26:03.179554Z node 111 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2025-05-29T15:26:03.179828Z node 111 :TX_COLUMNSHARD WARN: 
log.cpp:784: tablet_id=72075186233409547;self_id=[111:436:2403];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:26:03.181267Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:26:03.181525Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:26:03.181783Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:26:03.181797Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:26:03.181816Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:26:03.185238Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:26:03.185269Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:26:03.185475Z node 111 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1004 2025-05-29T15:26:03.185616Z node 111 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:26:03.185690Z node 111 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable1" took 74us result status StatusPathDoesNotExist 2025-05-29T15:26:03.185742Z node 111 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ColumnTable1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/ColumnTable1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:26:03.185826Z node 111 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:26:03.185842Z node 111 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable2" took 17us result status StatusPathDoesNotExist 2025-05-29T15:26:03.185862Z node 111 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ColumnTable2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/ColumnTable2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> THiveTest::TestLocalDisconnect >> DataShardVolatile::DistributedWriteAsymmetricExecute [FAIL] >> DataShardVolatile::DistributedWriteThenDropTable |67.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |67.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |67.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx3 [GOOD] >> THiveTest::TestRestartsWithFollower [GOOD] >> THiveTest::TestStartTabletTwiceInARow >> THiveTest::TestNoMigrationToSelf >> THiveTest::TestLocalDisconnect [GOOD] >> THiveTest::TestLocalReplacement >> TSchemeShardTestExtSubdomainReboots::CreateForceDrop-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx3 [GOOD] Test command err: iteration# 3 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 9 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 15 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 21 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 27 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 33 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 39 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 45 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 51 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 57 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 63 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 69 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 
75 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 81 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 87 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 93 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 99 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 105 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 111 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 117 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 123 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 129 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 135 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 141 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 147 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 153 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 159 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 165 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 171 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 177 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 183 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 189 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 195 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 201 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 207 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 213 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 219 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 225 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 231 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 237 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 243 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 249 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 255 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 261 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 267 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 273 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 279 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 285 BlobsWritten# 490 blobsWrittenFull# 391 
blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 291 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 297 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 303 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 309 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 315 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 321 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 327 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 333 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 339 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 345 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 351 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 357 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 363 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 369 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 375 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 381 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 387 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 393 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 399 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 405 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 411 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 417 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 423 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 429 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 435 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 441 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 447 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 453 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 459 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 465 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 471 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 477 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 483 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 489 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 >> THiveTest::TestStartTabletTwiceInARow [GOOD] >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject >> 
KqpVectorIndexes::OrderByCosineLevel2-Nullable-UseSimilarity >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink [FAIL] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink >> THiveTest::TestNoMigrationToSelf [GOOD] >> THiveTest::TestReCreateTablet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateForceDrop-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:25:49.661218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:25:49.661244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:49.661249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:25:49.661254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:25:49.661264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:25:49.661268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:25:49.661277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:49.661290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:25:49.661387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:25:49.661458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:25:49.691207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: 
QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:25:49.691232Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:49.691348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:25:49.702995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:25:49.703031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:25:49.703062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:25:49.705943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:25:49.706016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:25:49.706127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:49.706275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:25:49.706859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:49.706911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:25:49.707141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:25:49.707150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:49.707183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:25:49.707190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:25:49.707196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:25:49.707214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:25:49.708388Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:25:49.728596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:25:49.728681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.728742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:25:49.728792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:25:49.728803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.731278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:49.731317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:25:49.731368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.731379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:25:49.731385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:25:49.731390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:25:49.732074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.732088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:25:49.732094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:25:49.733591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.733606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.733613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:49.733621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:25:49.734278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:25:49.736754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:25:49.736798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:25:49.736999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:49.737030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:25:49.737041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:49.737111Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
25-05-29T15:26:06.818824Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:0 135 -> 240 2025-05-29T15:26:06.818945Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:26:06.818958Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:26:06.818966Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:26:06.818971Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-05-29T15:26:06.818976Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:26:06.819113Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:26:06.819127Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:26:06.819132Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:26:06.819136Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:26:06.819141Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:26:06.819151Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-05-29T15:26:06.824113Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:26:06.824137Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-05-29T15:26:06.824171Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:26:06.824177Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:26:06.824185Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:26:06.824189Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:26:06.824195Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-05-29T15:26:06.824203Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:26:06.824209Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:26:06.824215Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:26:06.824236Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:26:06.824329Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:26:06.824342Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:26:06.824359Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:26:06.824410Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:26:06.824417Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:26:06.824429Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:26:06.824571Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:26:06.824597Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:26:06.827638Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:26:06.827664Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1002 2025-05-29T15:26:06.827717Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-05-29T15:26:06.827727Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 TestWaitNotification wait txId: 1003 2025-05-29T15:26:06.827743Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 
2025-05-29T15:26:06.827748Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:26:06.827828Z node 41 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-05-29T15:26:06.827850Z node 41 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:26:06.827858Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-05-29T15:26:06.827863Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [41:344:2334] 2025-05-29T15:26:06.827884Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:26:06.827888Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [41:344:2334] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2025-05-29T15:26:06.827962Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:26:06.828004Z node 41 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 57us result status StatusPathDoesNotExist 2025-05-29T15:26:06.828039Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:26:06.828087Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:26:06.828113Z node 41 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 28us result status StatusSuccess 2025-05-29T15:26:06.828196Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> THiveTest::TestLocalReplacement [GOOD] >> THiveTest::TestHiveRestart >> KqpIndexes::InnerJoinWithNonIndexWherePredicate >> TColumnShardTestSchema::ExportAfterFail [GOOD] >> DataShardVolatile::DistributedWriteThenDropTable [FAIL] >> DataShardVolatile::DistributedWriteThenCopyTable >> THiveTest::TestReCreateTablet [GOOD] >> THiveTest::TestReCreateTabletError >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn-WritePortionsOnInsert [GOOD] >> THiveTest::TestHiveRestart [GOOD] >> THiveTest::TestLimitedNodeList >> THiveTest::TestHiveBalancerWithPrefferedDC2 [GOOD] >> THiveTest::TestHiveBalancerWithPreferredDC3 >> THiveTest::TestReCreateTabletError [GOOD] >> THiveTest::TestNodeDisconnect >> KqpUniqueIndex::UpdateOnFkSelectResultSameValue >> THiveTest::TestFollowers |67.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |67.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |67.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148532929.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=128532929.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=128531729.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-05-29T15:25:30.988438Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:25:30.992182Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:25:30.992246Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:25:30.992896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:25:30.992937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:25:30.992967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:25:30.992990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:25:30.993010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:25:30.993029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:25:30.993047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:25:30.993066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:25:30.993088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:25:30.993107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:25:30.993127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:25:30.993146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:25:31.025668Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:25:31.025736Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:25:31.025746Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:25:31.025777Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:25:31.025809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:25:31.025821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:25:31.025826Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:25:31.025835Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:25:31.025844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:25:31.025851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:25:31.025855Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:25:31.025873Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:25:31.025881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:25:31.025888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:25:31.025892Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:25:31.025903Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:25:31.025910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:25:31.025917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:25:31.025922Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:25:31.025933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:25:31.025942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:25:31.025947Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:25:31.025955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:25:31.025962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:25:31.025966Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:25:31.025989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:25:31.025996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:25:31.026001Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:25:31.026030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:25:31.026038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:25:31.026043Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:25:31.026055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:25:31.026063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:25:31.026067Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecated ... pp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.232331Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-05-29T15:26:08.232338Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:199;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-05-29T15:26:08.232362Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:26:08.232372Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.232376Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-05-29T15:26:08.232384Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:230;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=80000; 2025-05-29T15:26:08.232390Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: 
TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:250;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=timestamp; 2025-05-29T15:26:08.232418Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:846:2815];bytes=640000;rows=80000;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:847:2816]->[1:846:2815] 2025-05-29T15:26:08.232429Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.232438Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.232447Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.232461Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:26:08.232469Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.232475Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.232480Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:410: Scan [1:847:2816] finished for tablet 9437184 2025-05-29T15:26:08.232544Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:846:2815];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.267},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.268}],"full":{"a":1748532367964368,"name":"_full_task","f":1748532367964368,"d_finished":0,"c":0,"l":1748532368232491,"d":268123},"events":[{"name":"bootstrap","f":1748532367964436,"d_finished":624,"c":1,"l":1748532367965060,"d":624},{"a":1748532368232459,"name":"ack","f":1748532368232190,"d_finished":242,"c":2,"l":1748532368232450,"d":274},{"a":1748532368232457,"name":"processing","f":1748532367965335,"d_finished":237229,"c":16,"l":1748532368232450,"d":237263},{"name":"ProduceResults","f":1748532367964749,"d_finished":727,"c":20,"l":1748532368232478,"d":727},{"a":1748532368232478,"name":"Finish","f":1748532368232478,"d_finished":0,"c":0,"l":1748532368232491,"d":13},{"name":"task_result","f":1748532367965339,"d_finished":236907,"c":14,"l":1748532368232125,"d":236907}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.232558Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:846:2815];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-05-29T15:26:08.232596Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:846:2815];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.267},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.268}],"full":{"a":1748532367964368,"name":"_full_task","f":1748532367964368,"d_finished":0,"c":0,"l":1748532368232564,"d":268196},"events":[{"name":"bootstrap","f":1748532367964436,"d_finished":624,"c":1,"l":1748532367965060,"d":624},{"a":1748532368232459,"name":"ack","f":1748532368232190,"d_finished":242,"c":2,"l":1748532368232450,"d":347},{"a":1748532368232457,"name":"processing","f":1748532367965335,"d_finished":237229,"c":16,"l":1748532368232450,"d":237336},{"name":"ProduceResults","f":1748532367964749,"d_finished":727,"c":20,"l":1748532368232478,"d":727},{"a":1748532368232478,"name":"Finish","f":1748532368232478,"d_finished":0,"c":0,"l":1748532368232564,"d":86},{"name":"task_result","f":1748532367965339,"d_finished":236907,"c":14,"l":1748532368232125,"d":236907}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:847:2816]->[1:846:2815] 2025-05-29T15:26:08.232612Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-05-29T15:26:07.964246Z;index_granules=0;index_portions=2;index_batches=1038;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=9739224;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=9739224;selected_rows=0; 2025-05-29T15:26:08.232618Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:190;event=scan_aborted;reason=unexpected on destructor; 2025-05-29T15:26:08.232672Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: TEST_STEP=3;SelfId=[1:847:2816];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 160000/9739224 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn-WritePortionsOnInsert [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2025-05-29T15:25:30.275305Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:389: 
StateInit, received event# 268828672, Sender [1:103:2136], Recipient [1:139:2170]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:30.287649Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:389: StateInit, received event# 268828673, Sender [1:103:2136], Recipient [1:139:2170]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:25:30.287768Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:25:30.299725Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:25:30.299807Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:25:30.300548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:25:30.300587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:25:30.300633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:25:30.300654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:25:30.300672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:25:30.300691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:25:30.300707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:25:30.300732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:25:30.300751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:25:30.300769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:25:30.300787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:25:30.300805Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:25:30.318232Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:389: StateInit, received event# 268828684, Sender [1:103:2136], Recipient [1:139:2170]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:25:30.330947Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:25:30.331031Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:25:30.331044Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:25:30.331075Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:25:30.331112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:25:30.331123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:25:30.331129Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:25:30.331138Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:25:30.331148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:25:30.331156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:25:30.331161Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:25:30.331177Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:25:30.331185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:25:30.331191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:25:30.331195Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:25:30.331204Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:25:30.331211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:25:30.331219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:25:30.331223Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:25:30.331234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:25:30.331240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:25:30.331246Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:25:30.331255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:25:30.331262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:25:30.331266Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:25:30.331288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:25:30.331296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:25:30.331300Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:25:30.331319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:25:30.331326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:25:30.331330Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:25:30.331343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:25:30.331350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:25:30.331354Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:25:30.331362Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:25:30.331370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:25:30.331378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:25:30.331383Z node ... COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=220; 2025-05-29T15:26:08.246091Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1000;merger=0;interval_id=220; 2025-05-29T15:26:08.246095Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-05-29T15:26:08.246100Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.246103Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=1000;finished=1; 2025-05-29T15:26:08.246106Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:199;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-05-29T15:26:08.246164Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:26:08.246176Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.246180Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-05-29T15:26:08.246187Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:230;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=1000; 2025-05-29T15:26:08.246193Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:250;stage=data_format;batch_size=8000;num_rows=1000;batch_columns=timestamp; 2025-05-29T15:26:08.246227Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[5:745:2723];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2025-05-29T15:26:08.246238Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.246246Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.246253Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.246275Z 
node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:26:08.246283Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.246292Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.246297Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:410: Scan [5:746:2724] finished for tablet 9437184 2025-05-29T15:26:08.246355Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[5:745:2723];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.002}],"full":{"a":1748532368243803,"name":"_full_task","f":1748532368243803,"d_finished":0,"c":0,"l":1748532368246307,"d":2504},"events":[{"name":"bootstrap","f":1748532368243866,"d_finished":396,"c":1,"l":1748532368244262,"d":396},{"a":1748532368246273,"name":"ack","f":1748532368246160,"d_finished":96,"c":1,"l":1748532368246256,"d":130},{"a":1748532368246271,"name":"processing","f":1748532368244443,"d_finished":1355,"c":8,"l":1748532368246256,"d":1391},{"name":"ProduceResults","f":1748532368244111,"d_finished":239,"c":11,"l":1748532368246295,"d":239},{"a":1748532368246295,"name":"Finish","f":1748532368246295,"d_finished":0,"c":0,"l":1748532368246307,"d":12},{"name":"task_result","f":1748532368244446,"d_finished":1245,"c":7,"l":1748532368246115,"d":1245}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.246364Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[5:745:2723];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-05-29T15:26:08.246387Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[5:745:2723];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.002}],"full":{"a":1748532368243803,"name":"_full_task","f":1748532368243803,"d_finished":0,"c":0,"l":1748532368246368,"d":2565},"events":[{"name":"bootstrap","f":1748532368243866,"d_finished":396,"c":1,"l":1748532368244262,"d":396},{"a":1748532368246273,"name":"ack","f":1748532368246160,"d_finished":96,"c":1,"l":1748532368246256,"d":191},{"a":1748532368246271,"name":"processing","f":1748532368244443,"d_finished":1355,"c":8,"l":1748532368246256,"d":1452},{"name":"ProduceResults","f":1748532368244111,"d_finished":239,"c":11,"l":1748532368246295,"d":239},{"a":1748532368246295,"name":"Finish","f":1748532368246295,"d_finished":0,"c":0,"l":1748532368246368,"d":73},{"name":"task_result","f":1748532368244446,"d_finished":1245,"c":7,"l":1748532368246115,"d":1245}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-05-29T15:26:08.246398Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-05-29T15:26:08.243526Z;index_granules=0;index_portions=1;index_batches=10;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59648;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59648;selected_rows=0; 2025-05-29T15:26:08.246404Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:190;event=scan_aborted;reason=unexpected on destructor; 2025-05-29T15:26:08.246442Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[5:746:2724];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] >> THiveTest::TestLimitedNodeList [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC >> THiveTest::TestDrain >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink [FAIL] >> DataShardVolatile::VolatileTxAbortedOnSplit >> THiveTest::TestHiveBalancerWithPreferredDC3 [GOOD] >> THiveTest::TestHiveBalancerWithFollowers >> THiveTest::TestNodeDisconnect [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet >> KqpOlapJson::BrokenJsonWriting [GOOD] >> THiveTest::TestFollowers [GOOD] >> THiveTest::TestFollowersReconfiguration >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject [GOOD] >> THiveTest::TestSpreadNeighboursDifferentOwners ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148532930.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=128532930.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=128531730.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-05-29T15:25:30.633289Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:25:30.640299Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:25:30.640370Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:25:30.640857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:25:30.640889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:25:30.640923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:25:30.640946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:25:30.640960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:25:30.640973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:25:30.640984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:25:30.640996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:25:30.641011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:25:30.641024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:25:30.641036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:25:30.641054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:25:30.650103Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:25:30.650418Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:25:30.650437Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:25:30.650477Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:25:30.650522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:25:30.650538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:25:30.650544Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:25:30.650555Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:25:30.650567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:25:30.650576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:25:30.650581Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:25:30.650602Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:25:30.650611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:25:30.650619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:25:30.650624Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:25:30.650637Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:25:30.650645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:25:30.650653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:25:30.650658Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:25:30.650672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:25:30.650680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:25:30.650685Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:25:30.650694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:25:30.650705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:25:30.650710Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:25:30.650752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:25:30.650762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:25:30.650767Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:25:30.650790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:25:30.650798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:25:30.650803Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:25:30.650818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:25:30.650826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:25:30.650830Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecated ... 0; 2025-05-29T15:26:09.107978Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=562; 2025-05-29T15:26:09.107986Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-05-29T15:26:09.108019Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=29; 2025-05-29T15:26:09.108023Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=672; 2025-05-29T15:26:09.108039Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=9; 2025-05-29T15:26:09.108160Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11; 2025-05-29T15:26:09.108172Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=127; 2025-05-29T15:26:09.108188Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=11; 2025-05-29T15:26:09.108202Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=8; 2025-05-29T15:26:09.108220Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=13; 2025-05-29T15:26:09.108232Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=7; 2025-05-29T15:26:09.108626Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=385; 2025-05-29T15:26:09.109002Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=363; 2025-05-29T15:26:09.109018Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=6; 2025-05-29T15:26:09.109026Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=3; 2025-05-29T15:26:09.109033Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2025-05-29T15:26:09.109039Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-05-29T15:26:09.109046Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-05-29T15:26:09.109062Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=8; 2025-05-29T15:26:09.109068Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-05-29T15:26:09.109085Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=10; 2025-05-29T15:26:09.109092Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2025-05-29T15:26:09.109103Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=5; 2025-05-29T15:26:09.109116Z node 
1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=7; 2025-05-29T15:26:09.109158Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=37; 2025-05-29T15:26:09.109163Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=2968; 2025-05-29T15:26:09.109199Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-05-29T15:26:09.109228Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;self_id=[1:1317:3177];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-05-29T15:26:09.109237Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: TEST_STEP=3;tablet_id=9437184;self_id=[1:1317:3177];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-05-29T15:26:09.109252Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;self_id=[1:1317:3177];process=SwitchToWork;fline=columnshard_impl.cpp:1614;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-05-29T15:26:09.110314Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;self_id=[1:1317:3177];process=SwitchToWork;fline=column_engine_logs.cpp:493;event=OnTieringModified;new_count_tierings=1; 2025-05-29T15:26:09.110368Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-05-29T15:26:09.110382Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:26:09.110402Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=0; 2025-05-29T15:26:09.110420Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:26:09.110433Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:26:09.110441Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:26:09.110464Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; 2025-05-29T15:26:09.110634Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:26:09.110656Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: 
TEST_STEP=3;self_id=[1:1352:3204];tablet_id=9437184;parent=[1:1317:3177];fline=manager.cpp:85;event=ask_data;request=request_id=48;1={portions_count=2};; 2025-05-29T15:26:09.111088Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;self_id=[1:1317:3177];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:254;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-05-29T15:26:09.111205Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;self_id=[1:1317:3177];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:243;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-05-29T15:26:09.111211Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 2025-05-29T15:26:09.111215Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-05-29T15:26:09.111221Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;self_id=[1:1317:3177];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-05-29T15:26:09.111232Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;self_id=[1:1317:3177];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:784;event=start_indexation_tasks;insert_overload_size=0; 2025-05-29T15:26:09.111243Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;self_id=[1:1317:3177];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:246;event=StartCleanup;portions_count=0; 2025-05-29T15:26:09.111254Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;self_id=[1:1317:3177];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:321;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-05-29T15:26:09.111262Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;self_id=[1:1317:3177];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1060;background=cleanup;skip_reason=no_changes; 2025-05-29T15:26:09.111268Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;self_id=[1:1317:3177];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1092;background=cleanup;skip_reason=no_changes; 2025-05-29T15:26:09.111286Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:784: TEST_STEP=3;tablet_id=9437184;self_id=[1:1317:3177];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1001;background=ttl;skip_reason=no_changes; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 160000/9739224 >> KqpIndexes::UpsertWithoutExtraNullDelete+UseSink >> DataShardVolatile::DistributedWriteThenCopyTable [FAIL] >> DataShardVolatile::DistributedWriteThenBulkUpsert >> THiveTest::TestReassignGroupsWithRecreateTablet [GOOD] >> THiveTest::TestManyFollowersOnOneNode >> TSequenceReboots::CreateSequence [GOOD] >> KqpNewEngine::Update-UseSink >> THiveTest::TestFollowersReconfiguration [GOOD] >> 
THiveTest::TestFollowerPromotion >> THiveTest::TestSpreadNeighboursDifferentOwners [GOOD] >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence_reboots/unittest >> TSequenceReboots::CreateSequence [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:25:48.493804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:25:48.493829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:48.493835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:25:48.493841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:25:48.493855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:25:48.493859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:25:48.493867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:48.493880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:25:48.493998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:25:48.494071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:25:48.511705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:25:48.511730Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:48.511841Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:25:48.515538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:25:48.515572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:25:48.515606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:25:48.518265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:25:48.518349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:25:48.518451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:48.518619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:25:48.519174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:48.519224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:25:48.519478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:25:48.519487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:48.519532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:25:48.519540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:25:48.519546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:25:48.519562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:25:48.520840Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:25:48.541048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } 
} } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:25:48.541141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:48.541218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:25:48.541261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:25:48.541271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:48.542188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:48.542219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:25:48.542273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:48.542284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:25:48.542290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:25:48.542296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:25:48.542722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:48.542735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:25:48.542767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:25:48.543180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:48.543193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:48.543199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:48.543206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:25:48.543839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 
MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:25:48.544312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:25:48.544360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:25:48.544594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:48.544640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:25:48.544648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:48.544716Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:26:10.885548Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:10.885553Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [52:206:2207], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2025-05-29T15:26:10.885564Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [52:206:2207], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2025-05-29T15:26:10.885661Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:26:10.885668Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1002:0 ProgressState 2025-05-29T15:26:10.885679Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:26:10.885697Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1 2025-05-29T15:26:10.885701Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:26:10.885707Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1 2025-05-29T15:26:10.885710Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 
ready parts: 1/1 2025-05-29T15:26:10.885715Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2025-05-29T15:26:10.885720Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:26:10.885725Z node 52 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1002:0 2025-05-29T15:26:10.885729Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1002:0 2025-05-29T15:26:10.885759Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:26:10.885765Z node 52 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1002, publications: 2, subscribers: 1 2025-05-29T15:26:10.885770Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-05-29T15:26:10.885773Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:26:10.885896Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 274137603, Sender [52:206:2207], Recipient [52:124:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2025-05-29T15:26:10.885902Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:26:10.885917Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:26:10.885928Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:26:10.885933Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2025-05-29T15:26:10.885941Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:26:10.885946Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:26:10.885961Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:26:10.886061Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 274137603, Sender [52:206:2207], Recipient [52:124:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Version: 2 } 
2025-05-29T15:26:10.886067Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:26:10.886074Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:26:10.886084Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:26:10.886088Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2025-05-29T15:26:10.886092Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:26:10.886096Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:26:10.886105Z node 52 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2025-05-29T15:26:10.886110Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [52:355:2334] 2025-05-29T15:26:10.886115Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:26:10.886466Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:26:10.886762Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-05-29T15:26:10.886772Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:26:10.886787Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-05-29T15:26:10.886791Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:26:10.886806Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [52:355:2334] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1002 at schemeshard: 72057594046678944 2025-05-29T15:26:10.886822Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-05-29T15:26:10.886828Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [52:356:2335] 2025-05-29T15:26:10.886863Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, 
Sender [52:358:2337], Recipient [52:124:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:26:10.886869Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:26:10.886873Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 2025-05-29T15:26:10.886947Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [52:378:2356], Recipient [52:124:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-05-29T15:26:10.886952Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:26:10.886965Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/seq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:26:10.887003Z node 52 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/seq" took 32us result status StatusSuccess 2025-05-29T15:26:10.887087Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/seq" PathDescription { Self { Name: "seq" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "seq" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpRanges::UpdateMulti >> THiveTest::TestManyFollowersOnOneNode [GOOD] >> THiveTest::TestLockTabletExecutionTimeout |67.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |67.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |67.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap >> DataShardVolatile::VolatileTxAbortedOnSplit [FAIL] >> DataShardVolatile::VolatileTxAbortedOnDrop >> 
KqpVectorIndexes::CoveredVectorIndexWithFollowers-StaleRO >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics [GOOD] >> THiveTest::TestRestartTablets >> THiveTest::TestFollowerPromotion [GOOD] >> THiveTest::TestFollowerPromotionFollowerDies |67.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |67.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |67.6%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut >> DataShardVolatile::DistributedWriteThenBulkUpsert [FAIL] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc >> THiveTest::TestRestartTablets [GOOD] >> THiveTest::TestServerlessComputeResourcesMode |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut_trace/unittest >> THiveTest::TestHiveFollowersWithChangingDC [GOOD] >> THiveTest::TestFollowerPromotionFollowerDies [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets >> THiveTest::TestHiveBalancer >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx1 [GOOD] >> KqpVectorIndexes::SimpleVectorIndexOrderByCosineDistanceWithCover+Nullable >> DataShardVolatile::VolatileTxAbortedOnDrop [FAIL] >> DataShardVolatile::UpsertNoLocksArbiter+UseSink >> THiveTest::TestLockTabletExecutionTimeout [GOOD] >> THiveTest::TestLockTabletExecutionRebootTimeout ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::BrokenJsonWriting [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; Trying to start YDB, gRPC: 7956, MsgBus: 7082 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026d6/r3tmp/tmpxCywJZ/pdisk_1.dat 2025-05-29T15:21:33.597158Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509888158350999994:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:21:33.599245Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:21:33.693491Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888158350999812:2079] 1748532093547908 != 1748532093547911 2025-05-29T15:21:33.699926Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:21:33.706871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.706893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 7956, node 1 2025-05-29T15:21:33.711464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:21:33.718889Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.718899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.718901Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.718933Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7082 TClient is connected to server localhost:7082 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:33.902384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:21:33.905100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:21:34.196907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509888162645967766:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.196934Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:21:34.272751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:21:34.288463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162645967839:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.288514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162645967839:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.288578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162645967839:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.288599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162645967839:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.288622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162645967839:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.288642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162645967839:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.288664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162645967839:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.288684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162645967839:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.288704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162645967839:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.288726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162645967839:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.288744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888162645967839:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.288763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888162645967839:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.294429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:34.294453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:34.294467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:34.294478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:34.294498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:34.294509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:21:34.294521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:21:34.294528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:21:34.294538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:21:34.294543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:21:34.294550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:21:34.294555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:21:34.294582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:21:34.294592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:21:34.294624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:21:34.294629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:21: ... : log.cpp:784: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:26:09.512897Z node 324 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:26:09.524047Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:26:09.524047Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:26:09.525200Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:26:09.525273Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:26:09.526441Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:26:09.526463Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:26:09.527325Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:26:09.528006Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:26:09.528139Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:26:09.528978Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-05-29T15:26:09.540168Z node 324 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [324:7509889341574551674:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:09.540215Z node 324 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:09.541168Z node 324 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:26:09.550939Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:26:09.551066Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:26:09.551189Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:26:09.551285Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:26:09.551418Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:26:09.551488Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:26:09.551521Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:26:09.551603Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:26:09.551637Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:26:09.551725Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-05-29T15:26:09.571993Z node 324 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [324:7509889341574551740:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:09.572016Z node 324 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:09.572715Z node 324 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:26:09.581787Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:26:09.581967Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:26:09.582106Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:26:09.582234Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:26:09.582355Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:26:09.582471Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:26:09.582586Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:26:09.582692Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:26:09.582838Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:26:09.583188Z node 324 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=240;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=240;columns=2; 2025-05-29T15:26:09.597710Z node 324 :TX_COLUMNSHARD ERROR: log.cpp:784: tablet_id=72075186224037894;parent_id=[324:7509889341574551334:2336];path_id=2;fline=abstract_scheme.cpp:343;event=cannot build accessor;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-05-29T15:26:09.597730Z node 324 :TX_COLUMNSHARD ERROR: log.cpp:784: tablet_id=72075186224037894;parent_id=[324:7509889341574551334:2336];path_id=2;fline=pack_builder.cpp:106;event=cannot prepare for write;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 
2025-05-29T15:26:09.597734Z node 324 :TX_COLUMNSHARD ERROR: log.cpp:784: tablet_id=72075186224037894;parent_id=[324:7509889341574551334:2336];path_id=2;fline=pack_builder.cpp:217;event=cannot build slice;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-05-29T15:26:09.597773Z node 324 :TX_COLUMNSHARD_WRITE WARN: log.cpp:784: tablet_id=72075186224037894;self_id=[324:7509889341574551334:2336];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037894;event=TEvWritePortionResult;fline=columnshard__write.cpp:124;writing_size=240;event=data_write_error;writing_id=3f2fa54e-3ca111f0-b7f17833-f7a161cc;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-05-29T15:26:09.597830Z node 324 :TX_COLUMNSHARD_WRITE WARN: log.cpp:784: tablet_id=72075186224037894;self_id=[324:7509889341574551334:2336];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037894;event=TEvWritePortionResult;tablet_id=72075186224037894;local_tx_no=8;method=execute;tx_info=;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.;tx_id=140737488355652; Cannot write data into shard(Incorrect request) 72075186224037894 in longTx ydb://long-tx/01jwead65w3j2cfha5g2jerpbj?node_id=324 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx1 [GOOD] Test command err: iteration# 1 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 7 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 13 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 19 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 25 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 31 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 37 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 43 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 49 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 55 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 61 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 67 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 73 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 79 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 85 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 91 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 97 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 103 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 109 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 iteration# 115 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 ... iteration# 1963 BlobsWritten# 2041
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1969 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1975 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1981 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1987 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1993 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1999 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2005 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2011 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2017 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2023 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2029 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2035 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc [FAIL] >> DataShardVolatile::DistributedWriteLostPlanThenDrop >> THiveTest::TestHiveBalancerWithSystemTablets [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage >> KqpIndexes::JoinWithNonPKColumnsInPredicate+UseStreamJoin >> THiveTest::TestServerlessComputeResourcesMode [GOOD] >> THiveTest::TestResetServerlessComputeResourcesMode |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> KqpUniqueIndex::UpdateOnFkAlreadyExist >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::AlterForceDrop-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardTestExtSubdomainReboots::AlterForceDrop-AlterDatabaseCreateHiveFirst-true >> THiveTest::TestDrain [GOOD] >> THiveTest::TestDrainWithMaxTabletsScheduled >> ObjectDistribution::TestManyIrrelevantNodes [GOOD] >> Sequencer::Basic1 [GOOD] >> StoragePool::TestDistributionRandomProbability >> DataShardVolatile::UpsertNoLocksArbiter+UseSink [FAIL] >> DataShardVolatile::UpsertNoLocksArbiter-UseSink |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] >> THiveTest::TestHiveBalancer [GOOD] >> THiveTest::TestHiveBalancerDifferentResources >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] >> THiveTest::TestSkipBadNode >> KqpIndexes::UpsertWithNullKeysSimple |67.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |67.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |67.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots >> THiveTest::TestHiveBalancerWithFollowers [GOOD] >> THiveTest::TestHiveBalancerWithLimit >> KqpRanges::UpdateWhereInBigLiteralList |67.7%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest >> THiveTest::TestSkipBadNode [GOOD] >> THiveTest::TestStopTenant >> THealthCheckTest::OneIssueListing |67.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |67.7%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |67.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter-UseSink [FAIL] >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:109:2057] recipient: [1:102:2135] 2025-05-29T15:25:11.779914Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:11.779942Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927938 is [1:154:2174] sender: [1:155:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:180:2057] recipient: [1:14:2061] 2025-05-29T15:25:11.790495Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:11.793255Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:25:11.800542Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2198] 2025-05-29T15:25:11.801176Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:186:2198] 2025-05-29T15:25:11.801652Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2199] 2025-05-29T15:25:11.802092Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 
1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:187:2199] 2025-05-29T15:25:11.807598Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|96e47be9-a2fb377-2b61472e-84419b5f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:11.812551Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f6181a04-675cf03e-3530dca8-6a0d4c8a_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:11.834392Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a2d32a0d-6321b33c-ea6fc8a5-98856ec4_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:11.841770Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|eb1fc783-19959636-6dfffa5a-983318a2_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:11.846460Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8d5d1abb-8eebe45-d4de81f1-ef23a199_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:11.852763Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|428aec0d-607715d8-fdcab70c-50c509d9_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:104:2057] recipient: [2:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:104:2057] recipient: [2:102:2135] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:109:2057] recipient: [2:102:2135] 2025-05-29T15:25:12.125241Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:12.125270Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:150:2057] recipient: [2:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:150:2057] recipient: [2:148:2170] Leader for TabletID 72057594037927938 is [2:154:2174] sender: [2:155:2057] recipient: [2:148:2170] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:178:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:108:2139]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:180:2057] recipient: [2:100:2134] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:183:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:184:2057] recipient: [2:182:2193] Leader for TabletID 72057594037927937 is [2:185:2194] sender: [2:186:2057] recipient: [2:182:2193] 2025-05-29T15:25:12.159306Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:12.159336Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:108:2139]) rebooted! !Reboot 72057594037927937 (actor [2:108:2139]) tablet resolver refreshed! 
new actor is[2:185:2194] Leader for TabletID 72057594037927937 is [2:185:2194] sender: [2:265:2057] recipient: [2:14:2061] 2025-05-29T15:25:14.027489Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:14.027692Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 2 actor [2:176:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-05-29T15:25:14.027833Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:271:2256] 2025-05-29T15:25:14.028492Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:271:2256] 2025-05-29T15:25:14.028875Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:272:2257] 2025-05-29T15:25:14.029333Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:272:2257] 2025-05-29T15:25:14.048071Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d890331b-23ee562-30094702-7f7f3d24_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:14.048930Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5d6ff204-d9b63b6-f2128640-6a161802_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:14.071491Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|32128023-464bd43a-417a390d-54d901d6_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:14.087314Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|74ba14b3-d9a79925-500d8e86-b7e961bb_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:14.090631Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|6a3594c-7774424e-59e10180-fbcb3266_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:25:14.097363Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a8b4200a-3021baab-e00df6e9-e06cd66d_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:104:2057] recipient: [3:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:104:2057] recipient: [3:102:2135] Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:109:2057] recipient: [3:102:2135] 2025-05-29T15:25:14.533256Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:14.533285Z node 3 :PERSQUEUE INFO: 
pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:150:2057] recipient: [3:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:150:2057] recipient: [3:148:2170] Leader for TabletID 72057594037927938 is [3:154:2174] sender: [3:155:2057] recipient: [3:148:2170] Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:180:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:108:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:182:2057] recipient: [3:100:2134] Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:185:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:186:2057] recipient: [3:184:2195] Leader for TabletID 72057594037927937 is [3:187:2196] sender: [3:188:2057] recipient: [3:184:2195] 2025-05-29T15:25:14.541212Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:14.541247Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [3:108:2139]) rebooted! !Reboot 72057594037927937 (actor [3:108:2139]) tablet resolver refreshed! new actor is[3:187:2196] Leader for TabletID 72057594037927937 is [3:187:2196] sender: [3:267:2057] recipient: [3:14:2061] 2025-05-29T15:25:16.405920Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:25:16.406164Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 3 actor [3:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-05-29T15:25:16.406364Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 7205759403 ... 
7594037927937, Partition: 0, State: StateInit] bootstrapping 0 [47:186:2198] 2025-05-29T15:26:16.768654Z node 47 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [47:186:2198] 2025-05-29T15:26:16.769101Z node 47 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [47:187:2199] 2025-05-29T15:26:16.769511Z node 47 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [47:187:2199] 2025-05-29T15:26:16.771283Z node 47 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1298faf9-6999e01f-3597db06-5c18132f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:26:16.779464Z node 47 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|bf606c35-24b5d5cc-78440f6a-61dd3e9e_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:26:16.784454Z node 47 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a6f1590b-3b4a85c-6cd47201-956d9524_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:26:16.790196Z node 47 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|458f3d4a-30a40643-e431ec34-676d1a60_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:26:16.792578Z node 47 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|adf35923-b9b69474-3ca0fbf8-cba91920_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:26:16.794023Z node 47 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|71ddec2b-a31466b1-c250a215-881ae680_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [47:108:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [47:108:2139] sender: [47:285:2057] recipient: [47:100:2134] Leader for TabletID 72057594037927937 is [47:108:2139] sender: [47:288:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [47:108:2139] sender: [47:289:2057] recipient: [47:287:2281] Leader for TabletID 72057594037927937 is [47:290:2282] sender: [47:291:2057] recipient: [47:287:2281] 2025-05-29T15:26:16.852829Z node 47 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:26:16.852854Z node 47 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:26:16.852995Z node 47 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [47:339:2323] 2025-05-29T15:26:16.853561Z node 47 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [47:340:2324] 2025-05-29T15:26:16.855205Z node 47 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-05-29T15:26:16.855222Z node 47 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [47:340:2324] 2025-05-29T15:26:16.855723Z node 47 :PERSQUEUE INFO: partition_init.cpp:774: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:26:16.855736Z node 47 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [47:339:2323] !Reboot 72057594037927937 (actor [47:108:2139]) rebooted! !Reboot 72057594037927937 (actor [47:108:2139]) tablet resolver refreshed! new actor is[47:290:2282] Leader for TabletID 72057594037927937 is [47:290:2282] sender: [47:390:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:104:2057] recipient: [48:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:104:2057] recipient: [48:102:2135] Leader for TabletID 72057594037927937 is [48:108:2139] sender: [48:109:2057] recipient: [48:102:2135] 2025-05-29T15:26:18.422441Z node 48 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:26:18.422471Z node 48 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:150:2057] recipient: [48:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:150:2057] recipient: [48:148:2170] Leader for TabletID 72057594037927938 is [48:154:2174] sender: [48:155:2057] recipient: [48:148:2170] Leader for TabletID 72057594037927937 is [48:108:2139] sender: [48:178:2057] recipient: [48:14:2061] 2025-05-29T15:26:18.438613Z node 48 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:26:18.438850Z node 48 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 48 actor [48:176:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 48 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 48 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 48 Important: false } 2025-05-29T15:26:18.439067Z node 48 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [48:184:2196] 2025-05-29T15:26:18.439679Z node 48 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [48:184:2196] 2025-05-29T15:26:18.440076Z node 48 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [48:185:2197] 2025-05-29T15:26:18.440520Z node 48 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init 
complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [48:185:2197] 2025-05-29T15:26:18.442872Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5902b051-523f017a-4f07a830-5198618b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:26:18.444904Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|486d4936-e03bb087-8e44a815-dbb21699_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:26:18.453541Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4707f4cc-b4b717ec-9ca3190-231ee9c3_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:26:18.458175Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|21000905-7166ac8-fc496fba-54bb1a01_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:26:18.460883Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|da4f36d6-2b3a6df-67fe0652-a661a2c2_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:26:18.467192Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3a9e6d1f-477702ea-49dde0ca-55a9d2f4_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:104:2057] recipient: [49:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:104:2057] recipient: [49:102:2135] Leader for TabletID 72057594037927937 is [49:108:2139] sender: [49:109:2057] recipient: [49:102:2135] 2025-05-29T15:26:18.757695Z node 49 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:26:18.757741Z node 49 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:150:2057] recipient: [49:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:150:2057] recipient: [49:148:2170] Leader for TabletID 72057594037927938 is [49:154:2174] sender: [49:155:2057] recipient: [49:148:2170] Leader for TabletID 72057594037927937 is [49:108:2139] sender: [49:180:2057] recipient: [49:14:2061] 2025-05-29T15:26:18.762586Z node 49 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:26:18.762805Z node 49 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 49 actor [49:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 49 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 49 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 49 Important: false } 2025-05-29T15:26:18.762955Z node 49 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [49:186:2198] 2025-05-29T15:26:18.763655Z node 49 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init 
complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [49:186:2198] 2025-05-29T15:26:18.764102Z node 49 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [49:187:2199] 2025-05-29T15:26:18.764579Z node 49 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [49:187:2199] 2025-05-29T15:26:18.766563Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|62038f2d-62bed453-14d0be0a-6c0953da_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:26:18.767934Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4ab61551-4f9131d7-9af8d8be-b386fc9a_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:26:18.772555Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|75439ecc-cb69dd8c-f3fb7c1d-74140b05_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:26:18.774122Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1cdc608b-6f7866f6-cde4043e-cdcb1e40_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:26:18.776474Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|6b3bfe38-db68b60-67502cd0-6411c833_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:26:18.778141Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|71f4b39a-3af8effb-ddeb85ba-6cdc36f5_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpVectorIndexes::CoveredVectorIndexWithFollowers-StaleRO Test command err: Trying to start YDB, gRPC: 28579, MsgBus: 19932 2025-05-29T15:26:04.931779Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889322015244733:2214];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:04.932122Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001db2/r3tmp/tmpPpH2Ff/pdisk_1.dat 2025-05-29T15:26:05.010953Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889322015244543:2079] 1748532364803708 != 1748532364803711 2025-05-29T15:26:05.011927Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28579, node 1 2025-05-29T15:26:05.027023Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:05.027037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:05.027040Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:05.027094Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:26:05.039153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:05.039222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:05.041501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19932 TClient is connected to server localhost:19932 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:26:05.262979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:05.275509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:26:05.283186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:26:05.372461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:26:05.444165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:05.475460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:05.725488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889326310213474:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:05.725521Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:05.796477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:26:05.819776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:26:05.843237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:26:05.872012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:26:05.896676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:26:05.922294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:26:05.944388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:26:05.975559Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889326310214134:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:05.975598Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:05.975802Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889326310214142:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:05.976843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:26:05.981296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-05-29T15:26:05.981386Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889326310214144:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:26:06.067144Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889330605181491:3398] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } VERIFY failed (2025-05-29T15:26:06.233192Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:26:06.231769Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889330605181500:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:26:06.232387Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGYzNmQzMGYtZGZmZWNkZDktOWI5YmE1MjAtZmMyYjkyOTQ=, ActorId: [1:7509889326310213447:2400], ActorState: ExecuteState, TraceId: 01jwead2mkbsq4g07m5xwbex8v, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DDD325 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DD4326 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F75616 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13C2F4E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26431D72 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26452CCC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26452CCC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26452CCC 8. /-S/util/thread/pool.h:71: Process @ 0x26452CCC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE4AC9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DE34B9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DE34B9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDE92C 13. ??:0: ?? @ 0x7F1D833C1AC2 14. ??:0: ?? @ 0x7F1D8345384F Trying to start YDB, gRPC: 12717, MsgBus: 30357 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001db2/r3tmp/tmpRg72Ak/pdisk_1.dat 2025-05-29T15:26:12.220638Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889357577909938:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:12.226854Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:26:12.315274Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:12.317931Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889357577909756:2079] 1748532372125102 != 1748532372125105 2025-05-29T15:26:12.327178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:12.327204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:12.331329Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12717, node 1 2025-05-29T15:26:12.370982Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:12.370992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:12.370994Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:12.371035Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30357 TClient is connected to server localhost:30357 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:26:12.566301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:12.574928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:26:12.589484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:12.660186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:26:12.750035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.766374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:12.879502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889357577911399:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:12.879527Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:12.930124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.941778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.961198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.983211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:26:13.011809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:26:13.025556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:26:13.041107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:26:13.083682Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889361872879347:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:13.083715Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:13.083824Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889361872879355:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:13.084706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:26:13.096902Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889361872879357:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:26:13.158357Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889361872879408:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:26:13.311781Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889361872879424:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:26:13.313505Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTlkOGU2MDItNTczMjhiZWQtM2Q2YjgzNjMtODU0NDhhZTM=, ActorId: [1:7509889357577911371:2399], ActorState: ExecuteState, TraceId: 01jwead9jn8m7g0fhhpb577841, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:26:13.315278Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DDD325
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DD4326
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F75616
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13C2F4E2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26431D72
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26452CCC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26452CCC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26452CCC
8. /-S/util/thread/pool.h:71: Process @ 0x26452CCC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE4AC9
10. /-S/util/thread/factory.h:15: Execute @ 0x13DE34B9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DE34B9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDE92C
13. ??:0: ?? @ 0x7F391332DAC2
14. ??:0: ?? @ 0x7F39133BF84F
>> StoragePool::TestDistributionRandomProbability [GOOD]
>> StoragePool::TestDistributionRandomProbabilityWithOverflow [GOOD]
>> StoragePool::TestDistributionExactMin
>> THiveTest::TestStopTenant [GOOD]
>> TScaleRecommenderTest::BasicTest
>> KqpNewEngine::UpdateFromParams
>> THiveTest::TestHiveBalancerWithLimit [GOOD]
>> THiveTest::TestHiveBalancerIgnoreTablet
|67.7%| [TA] $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log}
>> THiveTest::TestHiveBalancerDifferentResources [GOOD]
>> THiveTest::TestFollowersCrossDC_Easy
>> Cdc::DocApi[PqRunner]
>> Cdc::UuidExchange[PqRunner]
>> TScaleRecommenderTest::BasicTest [GOOD]
|67.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut
|67.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut
>> THiveTest::TestCreateSubHiveCreateManyTablets [GOOD]
>> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots
>> Cdc::KeysOnlyLog[PqRunner]
>> DataShardVolatile::UpsertBrokenLockArbiter+UseSink [FAIL]
>> DataShardVolatile::UpsertBrokenLockArbiter-UseSink
|67.7%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log}
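Both attempts of the KqpVectorIndexes module above die identically: the KQP compiler reports "index out of range" from yql_expr.h:1874, the session replies INTERNAL_ERROR, and AssertSuccessResult() inside CreateSampleTables trips on the non-success status from a thread-pool thread; RaiseError() then fails its UnittestThread requirement and the whole binary aborts, which is why the second run (gRPC 12717) reproduces the first (gRPC 28579) verbatim. A rough sketch of that assert-and-abort shape, reconstructed from the stack trace rather than from the actual kqp_ut_common.h source:

```cpp
#include <cstdio>
#include <cstdlib>

// Stand-in for NYdb::TStatus: just enough surface for this sketch.
struct TStatus {
    bool Ok;
    const char* Issues;
    bool IsSuccess() const { return Ok; }
};

// Reconstructed shape of the helper named in frame 3 of the trace
// (AssertSuccessResult at kqp_ut_common.h:375). The real helper uses the
// unittest assertion macros; the observable effect is the same: a failed
// check raised from a worker thread (frames 5-12: a thread pool running
// CreateSampleTables) cannot be reported as a normal test failure, so it
// escalates to VERIFY and aborts the whole test binary.
inline void AssertSuccessResult(const TStatus& result) {
    if (!result.IsSuccess()) {
        std::fprintf(stderr, "assertion failed: (result.IsSuccess())\n%s\n",
                     result.Issues);
        std::abort();  // -> "VERIFY failed ... in non-unittest thread"
    }
}

int main() {
    AssertSuccessResult(TStatus{true, ""});  // success: no output, no abort
    AssertSuccessResult(TStatus{false, ": Fatal: Execution, code: 1060"});
}
```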
|67.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut
>> THealthCheckTest::OneIssueListing [GOOD]
>> THealthCheckTest::OnlyDiskIssueOnFaultyPDisks
>> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes
>> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx3 [GOOD]
>> Cdc::DocApi[PqRunner] [FAIL]
>> Cdc::DocApi[YdsRunner]
>> TPQTest::TestStorageRetention [GOOD]
>> TPQTest::TestStatusWithMultipleConsumers
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx3 [GOOD]
Test command err: iteration# 3 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218
[the line above repeats with identical counters for iteration# 9 through iteration# 687, in steps of 6; the original output is then truncated mid-entry]
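For orientation: every "iteration#" line in this test's output is a snapshot of the same set of blobs, bucketed into fully written (157), almost fully written (666) and unwritten (1218); the three buckets always sum to the total, BlobsWritten# 2041, and iteration numbers advance in steps of 6 throughout the excerpt. A minimal sketch of bookkeeping that would emit lines in this shape; this is an illustration of the output format under assumed names, not the actual ut_ftol code:

```cpp
#include <cstdio>
#include <vector>

// Assumed blob states for the sketch; the real test tracks richer state.
enum class EBlobState { Unwritten, AlmostFull, Full };

int main() {
    // 2041 tracked blobs, classified after each fault-injection round.
    std::vector<EBlobState> blobs(2041, EBlobState::Unwritten);
    // ... fault injection and writes would mutate `blobs` between rounds ...
    for (int iteration = 3; iteration <= 1563; iteration += 6) {
        size_t full = 0, almost = 0, unwritten = 0;
        for (EBlobState s : blobs) {
            if (s == EBlobState::Full)            ++full;
            else if (s == EBlobState::AlmostFull) ++almost;
            else                                  ++unwritten;
        }
        // Matches the log line shape: the three buckets sum to blobs.size().
        std::printf("iteration# %d BlobsWritten# %zu blobsWrittenFull# %zu "
                    "blobsWrittenAlmostFull# %zu blobsUnwritten# %zu\n",
                    iteration, blobs.size(), full, almost, unwritten);
    }
    return 0;
}
```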
blobsUnwritten# 1218 iteration# 1365 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1371 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1377 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1383 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1389 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1395 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1401 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1407 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1413 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1419 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1425 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1431 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1437 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1443 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1449 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1455 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1461 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1467 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1473 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1479 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1485 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1491 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1497 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1503 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1509 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1515 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1521 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1527 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1533 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1539 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1545 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1551 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1557 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1563 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1569 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1575 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1581 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1587 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1593 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1599 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1605 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1611 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1617 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1623 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1629 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1635 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1641 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1647 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1653 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1659 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1665 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1671 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1677 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1683 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1689 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1695 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1701 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1707 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1713 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1719 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1725 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1731 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1737 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1743 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1749 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1755 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1761 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 iteration# 1767 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1773 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1779 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1785 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1791 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1797 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1803 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1809 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1815 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1821 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1827 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1833 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1839 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1845 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1851 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1857 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1863 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1869 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1875 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1881 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1887 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1893 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1899 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1905 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1911 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1917 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1923 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1929 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1935 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1941 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1947 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1953 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1959 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1965 BlobsWritten# 2041 
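For anyone scanning the collapsed block above: the only field that moves is iteration#, and the counters obey a single invariant. A minimal, self-contained sketch of that bookkeeping — a hypothetical reconstruction for illustration, not the actual YDB test source (the struct and member names here are invented):

```cpp
#include <cassert>
#include <cstdio>

// Hypothetical mirror of the progress dump above; the real test's internals may differ.
struct TBlobWriteProgress {
    int BlobsWritten = 2041;          // total blobs tracked by the test
    int BlobsWrittenFull = 157;       // blobs in the "written full" bucket
    int BlobsWrittenAlmostFull = 666; // blobs in the "written almost full" bucket
    int BlobsUnwritten = 1218;        // blobs not yet written

    // Invariant visible in every dump: the three buckets partition the total.
    bool IsConsistent() const {
        return BlobsWrittenFull + BlobsWrittenAlmostFull + BlobsUnwritten == BlobsWritten;
    }
};

int main() {
    TBlobWriteProgress p;
    assert(p.IsConsistent()); // 157 + 666 + 1218 == 2041
    // iteration# advances in steps of 6 while the counters stay frozen,
    // which is what made the raw log above so repetitive.
    for (int iteration = 375; iteration <= 2037; iteration += 6) {
        std::printf("BlobsWritten# %d blobsWrittenFull# %d blobsWrittenAlmostFull# %d "
                    "blobsUnwritten# %d iteration# %d\n",
                    p.BlobsWritten, p.BlobsWrittenFull,
                    p.BlobsWrittenAlmostFull, p.BlobsUnwritten, iteration);
    }
}
```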
>> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin >> TStorageBalanceTest::TestScenario1 [GOOD] >> TStorageBalanceTest::TestScenario2 >> Cdc::UuidExchange[PqRunner] [FAIL] >> Cdc::UuidExchange[YdsRunner] >> TPQTest::TestStatusWithMultipleConsumers [GOOD] >> TPQTest::TestTabletRestoreEventsOrder >> Cdc::KeysOnlyLog[PqRunner] [FAIL] >> Cdc::KeysOnlyLog[YdsRunner] >> StoragePool::TestDistributionExactMin [GOOD] >> StoragePool::TestDistributionExactMinWithOverflow [GOOD] >> StoragePool::TestDistributionRandomMin7p ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> TScaleRecommenderTest::BasicTest [GOOD] Test command err: 2025-05-29T15:26:01.657076Z node 1 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:26:01.657126Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:26:01.658204Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:01.658301Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-05-29T15:26:01.658505Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:26:01.659330Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-05-29T15:26:01.659354Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:26:01.659569Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23}
EstablishPipe AvailDomainId# 0 PipeClientId# [1:29:2075] ControllerId# 72057594037932033 2025-05-29T15:26:01.659574Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:26:01.659612Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:26:01.659644Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:26:01.661235Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:26:01.661288Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:26:01.664005Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:26:01.664023Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:294: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:26:01.664445Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.664502Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.664534Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.664565Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.664598Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.664635Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:44:2087] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.664676Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:45:2088] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.664683Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:26:01.664703Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037932033] ::Bootstrap [1:29:2075] 2025-05-29T15:26:01.664709Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037932033] lookup [1:29:2075] 2025-05-29T15:26:01.664722Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:26:01.664764Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:29:2075] 2025-05-29T15:26:01.664778Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:01.664785Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:26:01.664796Z node 1 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:26:01.665018Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:01.665042Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 
2025-05-29T15:26:01.665050Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-05-29T15:26:01.665979Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-05-29T15:26:01.666658Z node 1 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-05-29T15:26:01.666714Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-05-29T15:26:01.666733Z node 1 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:26:01.666757Z node 1 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:26:01.666810Z node 1 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:50:2091] 2025-05-29T15:26:01.676616Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:29:2075] 2025-05-29T15:26:01.676729Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:01.676790Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:26:01.677432Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037936129] ::Bootstrap [1:33:2063] 2025-05-29T15:26:01.677451Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037936129] lookup [1:33:2063] 2025-05-29T15:26:01.677466Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-05-29T15:26:01.677478Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-05-29T15:26:01.677487Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-05-29T15:26:01.677570Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:01.677584Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037927937] ::Bootstrap [1:50:2091] 2025-05-29T15:26:01.677588Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037927937] lookup [1:50:2091] 2025-05-29T15:26:01.677622Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-05-29T15:26:01.677630Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:26:01.677680Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-05-29T15:26:01.677701Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-05-29T15:26:01.677715Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-05-29T15:26:01.677725Z node 1 :BS_NODE 
DEBUG: {NWDC15@distconf.cpp:361} StateFunc Type# 2146435075 Sender# [1:47:2090] SessionId# [0:0:0] Cookie# 0 2025-05-29T15:26:01.677735Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.012662s 2025-05-29T15:26:01.677807Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-05-29T15:26:01.677867Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:01.677904Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:01.677916Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037927937] queue send [1:50:2091] 2025-05-29T15:26:01.677936Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:361} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-05-29T15:26:01.680236Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-05-29T15:26:01.680325Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:610: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:24343667:0] : 2}, {[1:2199047599219:0] : 8}, {[1:1099535971443:0] : 5}}}} 2025-05-29T15:26:01.680334Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:361: DropEntry tabletId: 72057594037932033 followers: 0 2025-05-29T15:26:01.680373Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:172: TClient[72057594037932033] forward result error, check reconnect [1:29:2075] 2025-05-29T15:26:01.680381Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:562: TClient[72057594037932033] schedule retry [1:29:2075] 2025-05-29T15:26:01.680450Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:26:01.682044Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-05-29T15:26:01.682065Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-05-29T15:26:01.682683Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-05-29T15:26:01.682878Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit r ... 
71: TClient[72075186224037888]::SendEvent [24:568:2486] 2025-05-29T15:26:20.472410Z node 24 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037888] Accept Connect Originator# [24:568:2486] 2025-05-29T15:26:20.472426Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72075186224037888] connected with status OK role: Leader [24:568:2486] 2025-05-29T15:26:20.472431Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72075186224037888] send queued [24:568:2486] 2025-05-29T15:26:20.472435Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:629: TClient[72075186224037888] push event to server [24:568:2486] 2025-05-29T15:26:20.472439Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:332: TClient[72075186224037888] shutdown pipe due to pending shutdown request [24:568:2486] 2025-05-29T15:26:20.472443Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:505: TClient[72075186224037888] notify reset [24:568:2486] 2025-05-29T15:26:20.472448Z node 24 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72075186224037888] HandleSend Sender# [24:567:2485] EventType# 268697612 2025-05-29T15:26:20.472463Z node 24 :HIVE TRACE: hive_impl.cpp:114: HIVE#72075186224037888 Handle TEvTabletPipe::TEvServerConnected([24:568:2486]) [24:569:2487] 2025-05-29T15:26:20.472492Z node 24 :HIVE TRACE: hive_impl.cpp:755: HIVE#72075186224037888 THive::Handle::TEvTabletMetrics, NodeId 24 TotalNodeCpuUsage: 0.95 2025-05-29T15:26:20.472506Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:12} Tx{22, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 2025-05-29T15:26:20.472513Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:12} Tx{22, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:26:20.472526Z node 24 :HIVE TRACE: tx__update_tablet_metrics.cpp:66: HIVE#72075186224037888 THive::TTxUpdateTabletMetrics UpdateResourceTotalUsage node 24 value (0,0,0,0) accumulated to (0,0,0,0) 2025-05-29T15:26:20.472567Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:12} Tx{22, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{15, redo 82b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-05-29T15:26:20.472575Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:12} Tx{22, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:26:20.491039Z node 24 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [5db9675187442428] bootstrap ActorId# [24:571:2489] Group# 2147483648 BlobCount# 1 BlobIDs# [[72075186224037888:1:12:0:0:92:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-05-29T15:26:20.491081Z node 24 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5db9675187442428] Id# [72075186224037888:1:12:0:0:92:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:26:20.491089Z node 24 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [5db9675187442428] restore Id# [72075186224037888:1:12:0:0:92:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:26:20.491098Z node 24 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [5db9675187442428] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224037888:1:12:0:0:92:1] Marker# BPG33 2025-05-29T15:26:20.491104Z node 24 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [5db9675187442428] Sending missing VPut part# 0 to# 0 blob Id# [72075186224037888:1:12:0:0:92:1] Marker# BPG32 
2025-05-29T15:26:20.491129Z node 24 :BS_PROXY DEBUG: group_sessions.h:165: Send to queueActorId# [24:423:2369] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72075186224037888:1:12:0:0:92:1] FDS# 92 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-05-29T15:26:20.491564Z node 24 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [5db9675187442428] received {EvVPutResult Status# OK ID# [72075186224037888:1:12:0:0:92:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 18 } Cost# 80724 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 19 }}}} from# [80000000:1:0:0:0] Marker# BPP01 2025-05-29T15:26:20.491588Z node 24 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [5db9675187442428] Result# TEvPutResult {Id# [72075186224037888:1:12:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 2147483648 Marker# BPP12 2025-05-29T15:26:20.491597Z node 24 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [5db9675187442428] SendReply putResult# TEvPutResult {Id# [72075186224037888:1:12:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:26:20.491622Z node 24 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2147483648 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.126 sample PartId# [72075186224037888:1:12:0:0:92:1] QueryCount# 1 VDiskId# [80000000:1:0:0:0] NodeId# 24 } TEvVPutResult{ TimestampMs# 0.569 VDiskId# [80000000:1:0:0:0] NodeId# 24 Status# OK } ] } 2025-05-29T15:26:20.491647Z node 24 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72075186224037888:1:12:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-05-29T15:26:20.491674Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:13} commited cookie 1 for step 12 2025-05-29T15:26:20.491773Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72075186224037888] ::Bootstrap [24:573:2491] 2025-05-29T15:26:20.491794Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72075186224037888] lookup [24:573:2491] 2025-05-29T15:26:20.491813Z node 24 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:20.491824Z node 24 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 24 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [24:417:2366] 2025-05-29T15:26:20.491834Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72075186224037888] queue send [24:573:2491] 2025-05-29T15:26:20.491843Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:411: TClient[72075186224037888] received pending shutdown [24:573:2491] 2025-05-29T15:26:20.491852Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:190: TClient[72075186224037888] forward result local node, try to connect [24:573:2491] 2025-05-29T15:26:20.491857Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72075186224037888]::SendEvent [24:573:2491] 2025-05-29T15:26:20.491872Z node 24 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037888] Accept Connect Originator# [24:573:2491] 2025-05-29T15:26:20.491901Z node 24 
:PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72075186224037888] connected with status OK role: Leader [24:573:2491] 2025-05-29T15:26:20.491906Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72075186224037888] send queued [24:573:2491] 2025-05-29T15:26:20.491910Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:629: TClient[72075186224037888] push event to server [24:573:2491] 2025-05-29T15:26:20.491915Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:332: TClient[72075186224037888] shutdown pipe due to pending shutdown request [24:573:2491] 2025-05-29T15:26:20.491920Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:505: TClient[72075186224037888] notify reset [24:573:2491] 2025-05-29T15:26:20.491927Z node 24 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72075186224037888] HandleSend Sender# [24:572:2490] EventType# 2146435094 2025-05-29T15:26:20.491943Z node 24 :HIVE TRACE: hive_impl.cpp:114: HIVE#72075186224037888 Handle TEvTabletPipe::TEvServerConnected([24:573:2491]) [24:574:2492] 2025-05-29T15:26:20.491957Z node 24 :HIVE DEBUG: hive_impl.cpp:3579: HIVE#72075186224037888 [MSR] Started 2025-05-29T15:26:20.491965Z node 24 :HIVE TRACE: hive_impl.cpp:3619: HIVE#72075186224037888 [MSR] Node 24 is ready, avg CPU usage: 0.95 2025-05-29T15:26:20.491970Z node 24 :HIVE TRACE: hive_impl.cpp:3627: HIVE#72075186224037888 [MSR] Total avg CPU usage: 0.95, ready nodes: 1 2025-05-29T15:26:20.491975Z node 24 :HIVE TRACE: hive_impl.cpp:3636: HIVE#72075186224037888 [MSR] Avg CPU usage history: [0.95] 2025-05-29T15:26:20.491983Z node 24 :HIVE TRACE: domain_info.cpp:74: HIVE#0 [TargetTracking] [MSR] Scale in window: [0.95], bottom threshold: 0.5 2025-05-29T15:26:20.491989Z node 24 :HIVE TRACE: domain_info.cpp:99: HIVE#0 [TargetTracking] [MSR] Scale out window: [0.95], target: 0.6 2025-05-29T15:26:20.491995Z node 24 :HIVE TRACE: domain_info.cpp:113: HIVE#0 [TargetTracking] [MSR] Need scale out, rounded recommended nodes: 2 2025-05-29T15:26:20.491999Z node 24 :HIVE TRACE: hive_impl.cpp:3649: HIVE#72075186224037888 [MSR] Recommended nodes: 2, current nodes: 1 2025-05-29T15:26:20.492048Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72075186224037888] ::Bootstrap [24:576:2494] 2025-05-29T15:26:20.492053Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72075186224037888] lookup [24:576:2494] 2025-05-29T15:26:20.492064Z node 24 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:20.492071Z node 24 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 24 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [24:417:2366] 2025-05-29T15:26:20.492077Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72075186224037888] queue send [24:576:2494] 2025-05-29T15:26:20.492083Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:411: TClient[72075186224037888] received pending shutdown [24:576:2494] 2025-05-29T15:26:20.492091Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:190: TClient[72075186224037888] forward result local node, try to connect [24:576:2494] 2025-05-29T15:26:20.492095Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72075186224037888]::SendEvent [24:576:2494] 2025-05-29T15:26:20.492108Z node 24 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: 
[72075186224037888] Accept Connect Originator# [24:576:2494] 2025-05-29T15:26:20.492133Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72075186224037888] connected with status OK role: Leader [24:576:2494] 2025-05-29T15:26:20.492138Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72075186224037888] send queued [24:576:2494] 2025-05-29T15:26:20.492142Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:629: TClient[72075186224037888] push event to server [24:576:2494] 2025-05-29T15:26:20.492150Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:332: TClient[72075186224037888] shutdown pipe due to pending shutdown request [24:576:2494] 2025-05-29T15:26:20.492154Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:505: TClient[72075186224037888] notify reset [24:576:2494] 2025-05-29T15:26:20.492160Z node 24 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72075186224037888] HandleSend Sender# [24:575:2493] EventType# 268697642 2025-05-29T15:26:20.492171Z node 24 :HIVE TRACE: hive_impl.cpp:114: HIVE#72075186224037888 Handle TEvTabletPipe::TEvServerConnected([24:576:2494]) [24:577:2495] 2025-05-29T15:26:20.492195Z node 24 :HIVE DEBUG: hive_impl.cpp:3668: HIVE#72075186224037888 Handle TEvHive::TEvRequestScaleRecommendation(DomainKey { SchemeShard: 72057594046678944 PathId: 2 })
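The [MSR] and [TargetTracking] lines above are the heart of TScaleRecommenderTest::BasicTest: one ready node averaging 0.95 CPU, a scale-out target of 0.6 and a scale-in threshold of 0.5, yielding a recommendation of 2 nodes. A compact sketch of target-tracking arithmetic consistent with those numbers — inferred from the logged values, not taken from the Hive sources (hive_impl.cpp / domain_info.cpp implement the real policy):

```cpp
#include <cassert>
#include <cmath>

// Target-tracking recommendation consistent with the trace above.
// Inferred from the logged values; the actual Hive logic may differ in detail.
int RecommendNodes(int readyNodes, double avgCpu,
                   double target /* 0.6 in the log */,
                   double bottom /* 0.5 in the log */) {
    if (avgCpu > target) {
        // Scale out: smallest node count that brings average CPU back to the
        // target, assuming load spreads evenly across nodes.
        return static_cast<int>(std::ceil(readyNodes * avgCpu / target));
    }
    if (avgCpu < bottom) {
        // Scale in (assumed symmetric rule), but never below one node.
        int n = static_cast<int>(std::ceil(readyNodes * avgCpu / target));
        return n < 1 ? 1 : n;
    }
    return readyNodes; // within the [bottom, target] band: keep the fleet size
}

int main() {
    // From the trace: 1 ready node, avg CPU 0.95, target 0.6, bottom 0.5.
    // ceil(1 * 0.95 / 0.6) = ceil(1.58) = 2, matching "recommended nodes: 2".
    assert(RecommendNodes(1, 0.95, 0.6, 0.5) == 2);
}
```

The "Avg CPU usage history: [0.95]" line suggests both windows are built from a history of samples; with a single sample, the scale-in and scale-out checks both see the same 0.95.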
>> TPQTest::TestTabletRestoreEventsOrder [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex >> THiveTest::TestFollowersCrossDC_Easy [GOOD] >> THiveTest::TestFollowers_LocalNodeOnly |67.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |67.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |67.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpVectorIndexes::SimpleVectorIndexOrderByCosineDistanceWithCover+Nullable Test command err: Trying to start YDB, gRPC: 3357, MsgBus: 9131 2025-05-29T15:26:07.232182Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889335815758583:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dae/r3tmp/tmpEFqV5u/pdisk_1.dat 2025-05-29T15:26:07.267490Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:26:07.328533Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:07.329382Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889335815758372:2079] 1748532367217125 != 1748532367217128 TServer::EnableGrpc on GrpcPort 3357, node 1 2025-05-29T15:26:07.346723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:07.346735Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:07.346753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:07.346797Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:26:07.370975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:07.371002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:07.375149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9131 TClient is connected to server localhost:9131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:26:07.504114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:07.515182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:26:07.527501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:07.572567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:07.627745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:07.647373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:07.759291Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889335815760014:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:07.759316Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:07.851610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:26:07.870682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:26:07.895163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:26:07.919133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:26:07.933840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:26:07.947457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:26:07.974007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:26:07.993781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889335815760665:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:07.993812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:07.993915Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889335815760670:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:07.994971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:26:07.997840Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889335815760672:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:26:08.088596Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889340110728019:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:26:08.264565Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889340110728028:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:26:08.267231Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTE0YTBiZWEtYTc3OGIzZDgtOTcwOGU5NDUtMjNjODY5ODg=, ActorId: [1:7509889335815759987:2400], ActorState: ExecuteState, TraceId: 01jwead4ks7cpnjwadwhtww1ap, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:26:08.269591Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DDD325 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DD4326 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F75616 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13C2F4E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26431D72 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26452CCC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26452CCC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26452CCC 8. /-S/util/thread/pool.h:71: Process @ 0x26452CCC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE4AC9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DE34B9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DE34B9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDE92C 13. ??:0: ?? @ 0x7F61A4D32AC2 14. ??:0: ?? @ 0x7F61A4DC484F
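Both dumps of this failure funnel through the helper named in the stack trace: NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &) at ydb/core/kqp/ut/common/kqp_ut_common.h:375, which asserts result.IsSuccess() and surfaces the query issues. A minimal standalone sketch of that pattern — the TStatus stand-in below is invented so the example compiles on its own; the real helper takes the SDK type and aborts through the unittest framework (the VERIFY above) instead of throwing:

```cpp
#include <iostream>
#include <stdexcept>
#include <string>

// Stand-in for NYdb::TStatus, just enough to show the assertion pattern;
// the real type comes from the YDB C++ SDK.
struct TStatus {
    bool Success;
    std::string Issues;
    bool IsSuccess() const { return Success; }
    const std::string& GetIssues() const { return Issues; }
};

// Mirrors the logged signature: void AssertSuccessResult(const NYdb::TStatus &).
// On failure it surfaces the issues, which is what the log shows
// (": Fatal: Execution, code: 1060 / : Fatal: ... index out of range, code: 1").
void AssertSuccessResult(const TStatus& result) {
    if (!result.IsSuccess()) {
        throw std::runtime_error("assertion failed: (result.IsSuccess())\n" + result.GetIssues());
    }
}

int main() {
    AssertSuccessResult({true, ""}); // passes silently
    try {
        AssertSuccessResult({false, ": Fatal: Execution, code: 1060"});
    } catch (const std::exception& e) {
        std::cerr << e.what() << '\n'; // the real test aborts the whole run here (VERIFY failed)
    }
}
```

What follows looks like a second attempt of the same scenario on a fresh port; it reproduces the identical yql_expr.h:1874 compile failure and ends in the same assertion.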
Trying to start YDB, gRPC: 29109, MsgBus: 28127 2025-05-29T15:26:14.488964Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889365187993273:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dae/r3tmp/tmpcynSzG/pdisk_1.dat 2025-05-29T15:26:14.653028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:26:14.784190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:14.784219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:14.794320Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:14.794805Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889365187993064:2079] 1748532374468520 != 1748532374468523 2025-05-29T15:26:14.795178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29109, node 1 2025-05-29T15:26:14.835154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:14.835167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:14.835169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:14.835220Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28127 TClient is connected to server localhost:28127 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:26:15.173838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:15.177294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:26:15.182926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:15.231638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:15.348181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:15.375951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:15.509313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889369482961999:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:15.509367Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:15.579195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:26:15.589203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:26:15.600214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:26:15.647669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:26:15.658722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:26:15.670264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:26:15.684039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:26:15.759260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889369482962663:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:15.759284Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:15.759386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889369482962668:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:15.760394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:26:15.767391Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889369482962670:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:26:15.863125Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889369482962721:3401] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:26:15.991523Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889369482962730:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:26:15.991904Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWEwOGFmOGYtM2UxYzgyNDAtNDRjODE4MDctNDFkY2RkMjI=, ActorId: [1:7509889369482961996:2401], ActorState: ExecuteState, TraceId: 01jweadc6df670fbzkz0qwzt6k, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:26:15.995411Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
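[editor's note] The VERIFY above comes from a test helper that hard-asserts a query result succeeded (AssertSuccessResult at kqp_ut_common.h:375 in the stack below). A minimal sketch of that kind of helper, assuming only a status object with IsSuccess() and a printable issue list; TStatusLike and its fields are hypothetical stand-ins, not YDB's actual types. The ": Fatal:" issue lines and the stack trace that follow show the resulting abort path.

    // Minimal sketch, assuming a result type with IsSuccess() and a
    // printable list of issues; names here are invented for illustration.
    #include <cstdio>
    #include <cstdlib>
    #include <string>
    #include <vector>

    struct TStatusLike {
        bool Success = false;
        std::vector<std::string> Issues;  // e.g. "Fatal: Execution, code: 1060"
        bool IsSuccess() const { return Success; }
    };

    // On failure, dump every attached issue and hard-stop the process.
    // This is why a single INTERNAL_ERROR compile takes down the whole
    // test binary ("VERIFY failed ... in non-unittest thread") instead
    // of failing one assertion.
    void AssertSuccessResult(const TStatusLike& result) {
        if (result.IsSuccess()) {
            return;
        }
        for (const auto& issue : result.Issues) {
            std::fprintf(stderr, ": %s\n", issue.c_str());
        }
        std::abort();
    }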
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DDD325 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DD4326 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F75616 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13C2F4E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26431D72 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26452CCC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26452CCC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26452CCC 8. /-S/util/thread/pool.h:71: Process @ 0x26452CCC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE4AC9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DE34B9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DE34B9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDE92C 13. ??:0: ?? @ 0x7F5168D3CAC2 14. ??:0: ?? @ 0x7F5168DCE84F >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink [FAIL] >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink >> TPQCachingProxyTest::TestWrongSessionOrGeneration >> Cdc::KeysOnlyLog[YdsRunner] [FAIL] >> Cdc::KeysOnlyLog[TopicRunner] >> Cdc::DocApi[YdsRunner] [FAIL] >> Cdc::DocApi[TopicRunner] >> Cdc::UuidExchange[YdsRunner] [FAIL] >> Cdc::UuidExchange[TopicRunner] >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestTabletRestoreEventsOrder [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2135] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:109:2057] recipient: [1:102:2135] 2025-05-29T15:22:14.047845Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:14.047876Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:150:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927938 is [1:154:2174] sender: [1:155:2057] recipient: [1:148:2170] Leader for TabletID 72057594037927937 is [1:108:2139] sender: [1:180:2057] recipient: [1:14:2061] 2025-05-29T15:22:14.052068Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:14.054925Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 1 actor [1:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { 
PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-05-29T15:22:14.055163Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2198] 2025-05-29T15:22:14.055828Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:186:2198] 2025-05-29T15:22:14.056284Z node 1 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2199] 2025-05-29T15:22:14.056596Z node 1 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:187:2199] 2025-05-29T15:22:14.057778Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|526ec0d6-a0beedf1-6b587cbf-d5278ee4_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:14.058582Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f5d345d2-57d95b75-17acc24a-36f658ef_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:14.059969Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|19bae9e0-8ab048f9-9e348454-fa3339a7_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:14.061789Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5b8591c0-b8ba6a7-a0c25f4d-79bddff2_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:14.062469Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ac97a6a4-d5bfc1fb-6dae0219-26aaecd9_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:14.063123Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c39b38e0-3a8a3dc-ba2a4864-f3a253a0_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:104:2057] recipient: [2:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:104:2057] recipient: [2:102:2135] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:109:2057] recipient: [2:102:2135] 2025-05-29T15:22:14.288415Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:14.288447Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:150:2057] recipient: [2:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:150:2057] recipient: [2:148:2170] Leader for TabletID 72057594037927938 is [2:154:2174] sender: [2:155:2057] recipient: [2:148:2170] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:178:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:108:2139]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! 
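[editor's note] The "!Reboot 72057594037927937 (actor [...]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder !" marker above records fault injection: the harness restarts the tablet the first time a chosen event type is seen, then lets the test verify recovery (note the partition generation going 2 -> 3 -> 4 across reboots in this run). A conceptual sketch of that trigger; Tablet and RebootInjector are hypothetical types illustrating the pattern, not YDB's actual test runtime.

    #include <cstdio>
    #include <string>

    struct Tablet {
        int Generation = 1;
        void Restart() { ++Generation; }  // recovery bumps the generation
    };

    struct RebootInjector {
        std::string TriggerEvent;  // e.g. "TEvUpdateConfigBuilder"
        bool Fired = false;

        // Called for every event the tablet is about to process.
        void OnEvent(const std::string& eventType, Tablet& tablet) {
            if (!Fired && eventType == TriggerEvent) {
                Fired = true;
                std::printf("!Reboot on event %s !\n", eventType.c_str());
                tablet.Restart();  // the test then checks state is rebuilt
            }
        }
    };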
Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:180:2057] recipient: [2:100:2134] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:183:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:108:2139] sender: [2:184:2057] recipient: [2:182:2193] Leader for TabletID 72057594037927937 is [2:185:2194] sender: [2:186:2057] recipient: [2:182:2193] 2025-05-29T15:22:14.298147Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:14.298171Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:108:2139]) rebooted! !Reboot 72057594037927937 (actor [2:108:2139]) tablet resolver refreshed! new actor is[2:185:2194] Leader for TabletID 72057594037927937 is [2:185:2194] sender: [2:265:2057] recipient: [2:14:2061] 2025-05-29T15:22:15.835399Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.835608Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 2 actor [2:176:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-05-29T15:22:15.835791Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:271:2256] 2025-05-29T15:22:15.836490Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:271:2256] 2025-05-29T15:22:15.836953Z node 2 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:272:2257] 2025-05-29T15:22:15.837485Z node 2 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:272:2257] 2025-05-29T15:22:15.839328Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3c422c51-acd3ccd9-d2647f6f-2ada2626_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:15.840576Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|384ab6be-2b3ac46d-485e239a-2f1c27ca_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:15.842520Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|21f04270-3eebe223-c6f06feb-e2cd93df_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:15.845795Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|77b3b34f-21e68a78-94fee4bc-f0cef416_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:15.846821Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie 
default|6c290cc6-663b648b-56ae82ca-48b21027_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-05-29T15:22:15.847758Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c7bfbbc6-407818de-6bcb901-89a299e7_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:104:2057] recipient: [3:102:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:104:2057] recipient: [3:102:2135] Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:109:2057] recipient: [3:102:2135] 2025-05-29T15:22:15.954052Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.954076Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:150:2057] recipient: [3:148:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:150:2057] recipient: [3:148:2170] Leader for TabletID 72057594037927938 is [3:154:2174] sender: [3:155:2057] recipient: [3:148:2170] Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:180:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:108:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:182:2057] recipient: [3:100:2134] Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:185:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:108:2139] sender: [3:186:2057] recipient: [3:184:2195] Leader for TabletID 72057594037927937 is [3:187:2196] sender: [3:188:2057] recipient: [3:184:2195] 2025-05-29T15:22:15.964023Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:15.964046Z node 3 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [3:108:2139]) rebooted! !Reboot 72057594037927937 (actor [3:108:2139]) tablet resolver refreshed! new actor is[3:187:2196] Leader for TabletID 72057594037927937 is [3:187:2196] sender: [3:267:2057] recipient: [3:14:2061] 2025-05-29T15:22:17.499974Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:22:17.500144Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 3 actor [3:178:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-05-29T15:22:17.500255Z node 3 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:273:225 ... 
1 2025-05-29T15:26:22.022677Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:563:2556] connected; active server actors: 1 2025-05-29T15:26:22.023330Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:568:2561] connected; active server actors: 1 2025-05-29T15:26:22.026554Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:573:2566] connected; active server actors: 1 2025-05-29T15:26:22.027248Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:578:2571] connected; active server actors: 1 2025-05-29T15:26:22.028048Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:583:2576] connected; active server actors: 1 2025-05-29T15:26:22.028525Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:588:2581] connected; active server actors: 1 2025-05-29T15:26:22.028865Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:593:2586] connected; active server actors: 1 2025-05-29T15:26:22.029216Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:598:2591] connected; active server actors: 1 2025-05-29T15:26:22.029543Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:603:2596] connected; active server actors: 1 2025-05-29T15:26:22.030096Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:608:2601] connected; active server actors: 1 2025-05-29T15:26:22.030481Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:613:2606] connected; active server actors: 1 2025-05-29T15:26:22.030849Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:618:2611] connected; active server actors: 1 2025-05-29T15:26:22.032025Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:623:2616] connected; active server actors: 1 2025-05-29T15:26:22.032439Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:628:2621] connected; active server actors: 1 2025-05-29T15:26:22.032824Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:633:2626] connected; active server actors: 1 2025-05-29T15:26:22.033638Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:638:2631] connected; active server actors: 1 2025-05-29T15:26:22.033986Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:643:2636] connected; active server actors: 1 2025-05-29T15:26:22.034302Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:648:2641] connected; active server actors: 1 2025-05-29T15:26:22.034581Z node 240 
:PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:653:2646] connected; active server actors: 1 2025-05-29T15:26:22.034905Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:658:2651] connected; active server actors: 1 2025-05-29T15:26:22.035233Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:663:2656] connected; active server actors: 1 2025-05-29T15:26:22.036162Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:668:2661] connected; active server actors: 1 2025-05-29T15:26:22.036596Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:673:2666] connected; active server actors: 1 2025-05-29T15:26:22.036916Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:678:2671] connected; active server actors: 1 2025-05-29T15:26:22.037196Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:683:2676] connected; active server actors: 1 2025-05-29T15:26:22.037488Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:688:2681] connected; active server actors: 1 2025-05-29T15:26:22.037976Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:693:2686] connected; active server actors: 1 2025-05-29T15:26:22.038367Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:698:2691] connected; active server actors: 1 2025-05-29T15:26:22.038853Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:703:2696] connected; active server actors: 1 2025-05-29T15:26:22.040144Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:708:2701] connected; active server actors: 1 2025-05-29T15:26:22.040559Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:713:2706] connected; active server actors: 1 2025-05-29T15:26:22.041240Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:718:2711] connected; active server actors: 1 2025-05-29T15:26:22.041880Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:723:2716] connected; active server actors: 1 2025-05-29T15:26:22.042313Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:728:2721] connected; active server actors: 1 2025-05-29T15:26:22.047288Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:733:2726] connected; active server actors: 1 2025-05-29T15:26:22.049789Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:738:2731] connected; active server actors: 1 2025-05-29T15:26:22.050574Z node 240 :PERSQUEUE_READ_BALANCER INFO: 
read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:743:2736] connected; active server actors: 1 2025-05-29T15:26:22.051065Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:748:2741] connected; active server actors: 1 2025-05-29T15:26:22.052224Z node 240 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [240:753:2746], now have 1 active actors on pipe 2025-05-29T15:26:22.052435Z node 240 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [240:756:2749], now have 1 active actors on pipe 2025-05-29T15:26:22.052579Z node 240 :PERSQUEUE DEBUG: pq_impl.cpp:2882: [PQ: 72057594037927937] server connected, pipe [240:759:2752], now have 1 active actors on pipe 2025-05-29T15:26:22.052761Z node 240 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1652: [72057594037927938][rt3.dc1--topic] pipe [240:762:2755] connected; active server actors: 1 2025-05-29T15:26:22.171430Z node 241 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:26:22.171459Z node 241 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:26:22.179317Z node 241 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:26:22.179345Z node 241 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:26:22.179925Z node 241 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:26:22.180161Z node 241 :PERSQUEUE INFO: pq_impl.cpp:1487: [PQ: 72057594037927937] Config applied version 353 actor [241:176:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 353 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 353 ReadRuleGenerations: 353 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 353 Important: false } Consumers { Name: "aaa" Generation: 353 Important: true } 2025-05-29T15:26:22.180301Z node 241 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [241:244:2243] 2025-05-29T15:26:22.180520Z node 241 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [241:244:2243] 2025-05-29T15:26:22.180746Z node 241 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [241:246:2245] 2025-05-29T15:26:22.180920Z node 241 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 3 [241:246:2245] 2025-05-29T15:26:22.187310Z node 241 
:PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:26:22.187334Z node 241 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:26:22.187444Z node 241 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [241:323:2305] 2025-05-29T15:26:22.187685Z node 241 :PERSQUEUE INFO: partition_init.cpp:880: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [241:325:2307] 2025-05-29T15:26:22.188405Z node 241 :PERSQUEUE INFO: partition_init.cpp:774: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:26:22.188415Z node 241 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 4 [241:323:2305] 2025-05-29T15:26:22.188496Z node 241 :PERSQUEUE INFO: partition_init.cpp:774: [topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-05-29T15:26:22.188502Z node 241 :PERSQUEUE INFO: partition.cpp:560: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 4 [241:325:2307] |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex >> THealthCheckTest::OnlyDiskIssueOnFaultyPDisks [GOOD] >> THealthCheckTest::NoStoragePools >> TPQCachingProxyTest::MultipleSessions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnFkAlreadyExist Test command err: Trying to start YDB, gRPC: 18645, MsgBus: 1444 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d89/r3tmp/tmpkd2ME8/pdisk_1.dat 2025-05-29T15:26:09.112582Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889341987005576:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:09.112699Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:26:09.224241Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18645, node 1 2025-05-29T15:26:09.242162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:09.242175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:09.242177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:09.242222Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1444 2025-05-29T15:26:09.263438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:09.263467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:09.264669Z node 
1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1444 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:26:09.324283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:09.327651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:26:09.332196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:26:09.381829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:26:09.423583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:09.457753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:09.676573Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889341987007045:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:09.676598Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:09.765009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:26:09.783975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:26:09.800616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:26:09.811743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:26:09.873266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:26:09.900586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:26:09.932915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:26:09.976493Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889341987007701:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:09.976516Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:09.976756Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889341987007706:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:09.977817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:26:09.981821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:26:09.982489Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889341987007708:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:26:10.060882Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889346281975055:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:26:10.180288Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889346281975064:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:26:10.182072Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2RjZjZmYTUtNTU5ODhmNTUtYjlkY2I3OTgtMWEzMjFkNTg=, ActorId: [1:7509889341987007042:2401], ActorState: ExecuteState, TraceId: 01jwead6hqd5xj06jx7g922f75, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:26:10.183056Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
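[editor's note] Before the issues are reprinted below, one note on the recurring pool-creation churn earlier in this run: the "Scheduled retry for error ... doublechecking" line followed by "path exist, request accepts it" looks like an idempotent ensure-exists loop for the default resource pool, where "already exists" counts as success. A self-contained sketch of that shape under those assumptions; CreatePool and the status values are invented for illustration, only the retry/accept pattern is taken from the log.

    #include <cstdio>
    #include <string>

    enum class EStatus { Ok, AlreadyExists, Retryable };

    // Stub for the schemeshard call: the first attempt completes the
    // create transaction but asks the caller to doublecheck; the retry
    // then finds the path already present.
    EStatus CreatePool(const std::string& path) {
        static int calls = 0;
        return ++calls == 1 ? EStatus::Retryable : EStatus::AlreadyExists;
    }

    // "Already exists" counts as success, so a concurrent creator racing
    // on the same path is harmless.
    bool EnsurePoolExists(const std::string& path) {
        for (int attempt = 0; attempt < 3; ++attempt) {
            switch (CreatePool(path)) {
                case EStatus::Ok:
                case EStatus::AlreadyExists:
                    return true;
                case EStatus::Retryable:
                    continue;  // retry after "doublechecking"
            }
        }
        return false;
    }

    int main() {
        const bool ok =
            EnsurePoolExists("/Root/.metadata/workload_manager/pools/default");
        std::printf("%s\n", ok ? "pool ok" : "pool failed");
    }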
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DDD325 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DD4326 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F75616 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13C2F4E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26431D72 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26452CCC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26452CCC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26452CCC 8. /-S/util/thread/pool.h:71: Process @ 0x26452CCC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE4AC9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DE34B9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DE34B9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDE92C 13. ??:0: ?? @ 0x7FA629BE9AC2 14. ??:0: ?? @ 0x7FA629C7B84F Trying to start YDB, gRPC: 19828, MsgBus: 7694 2025-05-29T15:26:16.535415Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889373078557704:2198];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:16.535453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d89/r3tmp/tmpk08PC6/pdisk_1.dat 2025-05-29T15:26:16.638997Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:16.640800Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889373078557545:2079] 1748532376532436 != 1748532376532439 TServer::EnableGrpc on GrpcPort 19828, node 1 2025-05-29T15:26:16.669020Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:16.669032Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:16.669035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:16.669074Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:26:16.695238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:16.695266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:7694 2025-05-29T15:26:16.699240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7694 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:26:16.765367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:16.768737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:26:16.783608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:16.897548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:16.956496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:17.025198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:17.203093Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889377373526480:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:17.203164Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:17.312944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:26:17.327442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:26:17.340425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:26:17.354930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:26:17.379151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:26:17.405253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:26:17.429168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:26:17.499536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889377373527139:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:17.499559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:17.499656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889377373527144:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:17.500538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:26:17.513393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:26:17.518859Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889377373527146:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:26:17.581373Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889377373527206:3400] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:26:17.752667Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889377373527215:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:26:17.754316Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OGYzYjBmMGYtMjgyNWNlNzYtOTFiZTkzYzItMjJkZDA2YzQ=, ActorId: [1:7509889377373526462:2401], ActorState: ExecuteState, TraceId: 01jweaddwtadptvzb9zaacwxzn, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:26:17.755173Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
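[editor's note] This third run fails the same way; the issue reprinted just below points back at yql_expr.h:1874, which by its message is a checked child accessor on an expression node. A minimal stand-in sketch, assuming the node owns a vector of children; TExprNodeLike is hypothetical, not the actual yql_expr.h code, and only illustrates how an out-of-range index surfaces as a hard error instead of undefined behavior.

    #include <cstddef>
    #include <memory>
    #include <stdexcept>
    #include <vector>

    struct TExprNodeLike {
        std::vector<std::shared_ptr<TExprNodeLike>> Children;

        // Checked access: a bad index raises the error seen in the log
        // ("index out of range, code: 1") rather than reading past the end.
        const TExprNodeLike& Child(size_t index) const {
            if (index >= Children.size()) {
                throw std::out_of_range("index out of range");
            }
            return *Children[index];
        }
    };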
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DDD325 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DD4326 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F75616 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13C2F4E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26431D72 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26452CCC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26452CCC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26452CCC 8. /-S/util/thread/pool.h:71: Process @ 0x26452CCC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE4AC9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DE34B9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DE34B9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDE92C 13. ??:0: ?? @ 0x7FEBA4F57AC2 14. ??:0: ?? @ 0x7FEBA4FE984F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] Test command err: 2025-05-29T15:26:23.309087Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:26:23.309118Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:26:23.312790Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:26:23.312817Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 2 2025-05-29T15:26:23.312833Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2025-05-29T15:26:23.312839Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 2 2025-05-29T15:26:23.312851Z node 1 :PQ_READ_PROXY INFO: caching_service.cpp:297: Direct read cache: attempted to register server session: session1:1 with stale generation 1, ignored 2025-05-29T15:26:23.312863Z node 1 :PQ_READ_PROXY ALERT: caching_service.cpp:159: Direct read cache: tried to stage direct read for session session1 with generation 1, previously had this session with generation 2. 
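[editor's note] The ALERT just above (its tail, "Data ignored", continues below) is exactly what TestWrongSessionOrGeneration exercises: the direct read cache keeps the newest generation per session, ignores a registration stamped with a stale one, and drops staged data whose generation no longer matches. A small sketch of that guard; the container layout and names are illustrative only.

    #include <cstdint>
    #include <map>
    #include <string>

    struct DirectReadCacheSketch {
        std::map<std::string, uint64_t> SessionGeneration;

        void RegisterSession(const std::string& session, uint64_t gen) {
            auto it = SessionGeneration.find(session);
            if (it != SessionGeneration.end() && it->second > gen) {
                // "attempted to register server session ... with stale
                // generation 1, ignored"
                return;
            }
            SessionGeneration[session] = gen;
        }

        // Staged reads carrying an outdated generation are dropped,
        // matching the "Data ignored" alert in the log.
        bool StageRead(const std::string& session, uint64_t gen) const {
            auto it = SessionGeneration.find(session);
            return it != SessionGeneration.end() && it->second == gen;
        }
    };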
Data ignored 2025-05-29T15:26:23.312870Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-05-29T15:26:23.312890Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:218: Direct read cache: forget read: 1 for session session1 >> Cdc::KeysOnlyLog[TopicRunner] [FAIL] >> Cdc::KeysOnlyLogDebezium >> TPQCachingProxyTest::MultipleSessions [GOOD] >> KqpIndexes::UpsertWithNullKeysComplex >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink [FAIL] >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink >> Cdc::UuidExchange[TopicRunner] [FAIL] >> Cdc::DocApi[TopicRunner] [FAIL] >> Cdc::HugeKey[PqRunner] >> Cdc::UpdatesLog[PqRunner] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 2025-05-29T15:26:23.975456Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:26:23.975485Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:26:23.979518Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:26:23.979548Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2025-05-29T15:26:23.979568Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2025-05-29T15:26:23.979580Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 2 for session: session1 2025-05-29T15:26:23.979588Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-05-29T15:26:23.979601Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 2 for session session1, Generation: 1 2025-05-29T15:26:23.979611Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session2:1 with generation 2 2025-05-29T15:26:23.979642Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 3 for session: session2 2025-05-29T15:26:23.979648Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 3 for session session2, Generation: 2 >> THiveTest::TestHiveBalancerIgnoreTablet [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts >> THiveTest::TestHiveNoBalancingWithLowResourceUsage [GOOD] >> THiveTest::TestLockTabletExecution >> THiveTest::TestFollowers_LocalNodeOnly [GOOD] >> THiveTest::TestFollowersCrossDC_Tight >> StoragePool::TestDistributionRandomMin7p [GOOD] >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce >> THealthCheckTest::NoStoragePools [GOOD] >> THealthCheckTest::NoBscResponse |67.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> KqpRanges::UpdateWhereInBigLiteralListPrefix >> TTablesWithReboots::AlterTableSchemaFreezeUnfreezeWithReboots >> TPQCachingProxyTest::TestPublishAndForget >> TPQCachingProxyTest::TestPublishAndForget [GOOD] |67.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> Cdc::KeysOnlyLogDebezium 
[FAIL] >> Cdc::NewAndOldImagesLog[PqRunner] >> THiveTest::TestLockTabletExecution [GOOD] >> THiveTest::TestLockTabletExecutionBadOwner ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] Test command err: (1,1): 1 on 2 (1,1): 1 on 1 RemoveNode 7 (1,1): 1 on 3 (1,3): 1 on 5 (1,2): 1 on 6 (1,1): 1 on 0 (1,2): 1 on 9 RemoveNode 0 (1,3): 1 on 9 RemoveNode 2 (1,3): 1 on 3 (1,3): 1 on 4 (1,1): -1 on 0 (1,2): 1 on 6 RemoveNode 1 (1,3): 1 on 6 (1,3): 1 on 2 (1,2): 1 on 8 (1,2): -1 on 6 (1,2): 1 on 6 (1,2): 1 on 8 (1,1): 1 on 1 (1,1): 1 on 3 AddNode 0 (1,1): 1 on 0 (1,3): 1 on 2 (1,1): 1 on 2 (1,1): 1 on 3 (1,2): 1 on 5 (1,3): 1 on 7 (1,2): 1 on 7 (1,3): 1 on 4 (1,1): 1 on 1 (1,3): 1 on 8 (1,2): -1 on 6 RemoveNode 6 (1,1): 1 on 3 (1,1): 1 on 4 (1,1): -1 on 3 AddNode 2 (1,2): 1 on 8 RemoveNode 8 (1,1): 1 on 3 (1,2): 1 on 7 (1,1): 1 on 0 (1,1): 1 on 3 RemoveNode 5 (1,3): 1 on 8 RemoveNode 9 (1,3): 1 on 5 (1,1): 1 on 0 AddNode 6 (1,2): -1 on 7 (1,1): 1 on 2 (1,2): 1 on 9 AddNode 1 (1,1): 1 on 3 RemoveNode 6 (1,3): 1 on 7 (1,1): 1 on 3 (1,3): 1 on 0 (1,3): -1 on 5 (1,3): 1 on 3 (1,3): -1 on 3 (1,1): -1 on 1 (1,1): 1 on 0 (1,2): 1 on 7 (1,1): 1 on 3 (1,2): 1 on 8 (1,3): 1 on 8 (1,3): 1 on 3 (1,3): 1 on 0 (1,2): -1 on 7 (1,1): -1 on 4 (1,2): 1 on 7 RemoveNode 0 (1,1): 1 on 4 (1,3): 1 on 2 (1,3): 1 on 2 AddNode 5 (1,2): 1 on 9 (1,3): 1 on 1 (1,1): 1 on 3 RemoveNode 2 (1,3): -1 on 0 (1,2): 1 on 9 (1,1): 1 on 4 (1,1): 1 on 2 RemoveNode 4 (1,3): 1 on 0 (1,1): 1 on 1 (1,2): 1 on 6 (1,2): 1 on 7 (1,1): 1 on 0 (1,1): 1 on 1 AddNode 8 (1,1): -1 on 3 (1,1): -1 on 3 (1,2): 1 on 8 (1,2): 1 on 6 AddNode 2 (1,1): 1 on 1 (1,2): 1 on 6 RemoveNode 5 (1,3): -1 on 5 (1,1): 1 on 3 (1,2): 1 on 9 (1,3): 1 on 6 (1,2): 1 on 8 (1,3): 1 on 2 AddNode 4 (1,2): 1 on 9 (1,1): 1 on 0 (1,2): 1 on 5 (1,1): 1 on 4 AddNode 9 (1,3): -1 on 4 RemoveNode 4 (1,3): 1 on 0 (1,1): -1 on 3 (1,1): 1 on 3 (1,3): 1 on 9 (1,2): -1 on 8 (1,2): 1 on 5 AddNode 7 (1,3): 1 on 1 (1,3): 1 on 3 (1,1): 1 on 0 (1,3): 1 on 0 RemoveNode 1 (1,1): 1 on 2 RemoveNode 9 (1,1): -1 on 1 (1,3): -1 on 8 (1,2): 1 on 9 (1,1): 1 on 4 AddNode 4 (1,1): 1 on 2 AddNode 1 (1,1): 1 on 4 (1,3): -1 on 8 (1,1): 1 on 3 RemoveNode 2 (1,1): 1 on 3 (1,1): 1 on 3 (1,3): -1 on 9 AddNode 2 (1,1): 1 on 3 (1,3): -1 on 8 (1,2): 1 on 6 (1,3): -1 on 1 (1,2): -1 on 8 RemoveNode 7 (1,2): 1 on 5 RemoveNode 4 (1,1): 1 on 4 (1,1): 1 on 0 (1,3): 1 on 8 (1,2): 1 on 8 (1,2): 1 on 7 RemoveNode 2 (1,1): 1 on 4 RemoveNode 3 (1,1): 1 on 0 (1,2): 1 on 5 AddNode 7 (1,1): 1 on 4 (1,1): 1 on 4 (1,1): -1 on 2 (1,3): 1 on 3 (1,1): 1 on 0 (1,3): 1 on 8 (1,2): 1 on 8 AddNode 9 (1,2): 1 on 6 AddNode 4 (1,1): 1 on 3 AddNode 0 (1,1): 1 on 0 (1,1): -1 on 4 (1,2): 1 on 7 (1,2): 1 on 6 (1,1): 1 on 4 (1,1): 1 on 1 (1,1): 1 on 1 (1,3): -1 on 0 AddNode 2 (1,3): -1 on 0 (1,1): 1 on 4 (1,1): 1 on 4 (1,3): 1 on 0 RemoveNode 0 (1,3): 1 on 3 (1,2): -1 on 8 (1,2): 1 on 6 (1,2): 1 on 8 (1,2): 1 on 7 (1,3): 1 on 6 (1,2): 1 on 7 (1,2): 1 on 6 (1,3): 1 on 2 (1,2): 1 on 5 AddNode 6 (1,2): 1 on 7 RemoveNode 9 (1,1): 1 on 1 (1,1): 1 on 1 (1,2): 1 on 7 RemoveNode 8 (1,3): -1 on 6 RemoveNode 7 (1,1): 1 on 2 (1,2): 1 on 9 (1,1): 1 on 1 RemoveNode 4 (1,2): 1 on 5 RemoveNode 1 (1,1): 1 on 1 AddNode 8 (1,3): -1 on 8 (1,3): 1 on 0 AddNode 0 (1,3): 1 on 3 (1,2): 1 on 6 (1,2): -1 on 9 RemoveNode 2 (1,3): 1 on 2 (1,3): 1 on 1 (1,3): 1 on 8 RemoveNode 6 (1,1): -1 on 0 (1,1): 1 on 0 (1,2): 1 on 7 AddNode 3 (1,3): 1 on 5 (1,1): 1 on 3 (1,2): 1 on 8 AddNode 
7 (1,3): 1 on 5 AddNode 5 (1,1): -1 on 3 RemoveNode 7 (1,3): -1 on 8 AddNode 7 (1,1): -1 on 3 (1,3): 1 on 3 RemoveNode 7 (1,2): 1 on 7 (1,3): 1 on 7 (1,1): 1 on 1 RemoveNode 0 (1,1): 1 on 3 (1,2): -1 on 9 (1,1): -1 on 2 (1,2): 1 on 9 AddNode 7 (1,2): -1 on 8 AddNode 0 (1,1): 1 on 2 (1,3): 1 on 0 (1,2): 1 on 9 AddNode 2 (1,3): 1 on 0 RemoveNode 7 (1,3): 1 on 8 RemoveNode 2 (1,1): 1 on 4 (1,2): 1 on 8 (1,2): 1 on 7 (1,3): 1 on 0 (1,3): -1 on 1 AddNode 2 (1,3): 1 on 2 (1,3): -1 on 7 (1,3): 1 on 0 (1,1): 1 on 0 (1,3): 1 on 0 (1,2): 1 on 9 RemoveNode 5 (1,1): -1 on 3 (1,3): 1 on 7 (1,1): 1 on 1 (1,2): 1 on 7 AddNode 9 (1,2): 1 on 6 (1,1): 1 on 1 (1,3): 1 on 3 (1,1): 1 on 1 (1,1): -1 on 2 (1,2): -1 on 7 AddNode 4 (1,2): 1 on 8 (1,3): 1 on 5 (1,1): 1 on 0 (1,1): 1 on 4 (1,1): 1 on 1 (1,2): 1 on 7 (1,3): -1 on 2 (1,2): 1 on 9 (1,3): -1 on 5 (1,1): 1 on 0 (1,2): 1 on 8 (1,3): -1 on 0 (1,3): 1 on 7 (1,1): 1 on 0 (1,3): 1 on 1 (1,2): 1 on 6 (1,2): -1 on 7 (1,1): 1 on 2 (1,2): 1 on 6 (1,2): -1 on 9 RemoveNode 2 (1,3): 1 on 7 (1,3): 1 on 2 (1,2): 1 on 7 (1,2): 1 on 7 (1,2): 1 on 9 (1,2): 1 on 6 (1,3): 1 on 2 (1,2): 1 on 5 (1,2): 1 on 6 RemoveNode 3 (1,1): 1 on 4 (1,2): 1 on 9 (1,2): -1 on 8 (1,3): -1 on 6 (1,3): 1 on 0 (1,1): 1 on 0 (1,3): 1 on 3 AddNode 1 (1,3): 1 on 4 (1,1): 1 on 1 RemoveNode 0 (1,1): 1 on 0 RemoveNode 4 (1,2): 1 on 9 (1,3): 1 on 7 (1,1): 1 on 1 (1,2): -1 on 6 AddNode 5 (1,3): -1 on 0 (1,2): 1 on 9 (1,1): 1 on 2 (1,2): 1 on 9 AddNode 3 (1,3): -1 on 2 (1,3): 1 on 1 RemoveNode 8 (1,1): 1 on 0 (1,2): -1 on 5 AddNode 4 (1,3): -1 on 5 (1,3): 1 on 0 (1,3): -1 on 0 (1,3): 1 on 7 (1,1): 1 on 0 RemoveNode 9 (1,1): -1 on 4 (1,3): 1 on 0 (1,2): 1 on 9 (1,2): 1 on 7 (1,2): 1 on 8 (1,2): 1 on 5 RemoveNode 1 (1,3): 1 on 4 (1,3): 1 on 6 (1,1): 1 on 0 (1,1): 1 on 1 AddNode 2 (1,2): -1 on 6 AddNode 1 (1,3): 1 on 6 (1,1): 1 on 4 (1,3): -1 on 8 (1,3): 1 on 3 RemoveNode 2 (1,1): 1 on 1 (1,1): 1 on 0 (1,3): 1 on 0 AddNode 9 (1,1): -1 on 4 (1,3): 1 on 7 (1,2): -1 on 9 (1,3): 1 on 7 RemoveNode 4 (1,3): -1 on 9 AddNode 8 (1,1): 1 on 1 (1,1): 1 on 0 (1,1): 1 on 4 (1,2): 1 on 5 (1,2): 1 on 9 RemoveNode 8 (1,2): 1 on 9 (1,3): 1 on 8 (1,2): 1 on 5 (1,3): 1 on 1 AddNode 7 (1,3): 1 on 4 AddNode 4 (1,1): 1 on 3 RemoveNode 7 (1,1): 1 on 4 (1,2): 1 on 7 (1,3): 1 on 7 (1,1): 1 on 4 (1,2): 1 on 8 AddNode 6 (1,1): 1 on 2 RemoveNode 6 (1,2): 1 on 6 (1,3): 1 on 1 (1,1): -1 on 3 AddNode 0 (1,1): 1 on 0 (1,1): -1 on 2 (1,3): 1 on 9 (1,2): -1 on 8 (1,1): 1 on 3 RemoveNode 3 (1,3): -1 on 0 (1,2): 1 on 5 RemoveNode 1 (1,2): 1 on 9 AddNode 3 (1,1): -1 on 3 (1,2): 1 on 7 (1,2): 1 on 6 AddNode 8 (1,3): 1 on 6 AddNode 1 (1,3): -1 on 3 (1,1): 1 on 3 (1,3): 1 on 4 (1,1): 1 on 4 (1,2): 1 on 6 (1,1): 1 on 3 (1,3): -1 on 7 (1,1): 1 on 4 (1,2): 1 on 8 RemoveNode 4 (1,2): 1 on 7 (1,2): 1 on 5 (1,1): -1 on 0 (1,1): 1 on 4 (1,1): 1 on 0 (1,2): 1 on 7 (1,2): 1 on 5 (1,1): 1 on 0 RemoveNode 5 (1,2): 1 on 8 (1,2): 1 on 8 RemoveNode 8 (1,1): -1 on 0 (1,3): 1 on 1 (1,2): 1 on 6 RemoveNode 0 (1,3): -1 on 2 (1,1): 1 on 0 (1,2): 1 on 8 (1,3): 1 on 6 (1,2): 1 on 6 (1,3): 1 on 8 RemoveNode 1 (1,2): 1 on 8 (1,1): 1 on 2 (1,1): 1 on 4 AddNode 2 (1,2): 1 on 6 (1,1): -1 on 2 (1,3): 1 on 5 (1,1): 1 on 4 (1,1): 1 on 3 (1,2): 1 on 5 (1,2): 1 on 9 (1,3): 1 on 1 RemoveNode 2 (1,3): 1 on 9 (1,1): 1 on 1 AddNode 2 (1,2): 1 on 8 (1,2): 1 on 6 AddNode 8 (1,2): 1 on 8 (1,3): 1 on 8 AddNode 0 (1,3): 1 on 8 (1,1): 1 on 4 (1,1): -1 on 2 RemoveNode 9 (1,1): 1 on 1 (1,1): 1 on 3 (1,1): -1 on 3 (1,3): 1 on 4 (1,3): 1 on 5 AddNode 1 (1,2): 1 on 6 (1,2): -1 on 9 (1,1): 
1 on 4 (1,3): 1 on 9 (1,3): 1 on 1 (1,3): 1 on 7 (1,2): -1 on 8 (1,2): 1 on 6 (1,1): 1 on 0 (1,2): -1 on 9 (1,1): 1 on 1 (1,2): 1 on 5 (1,1): 1 on 3 (1,1): 1 on 0 (1,1): -1 on 4 (1,2): -1 on 6 (1,1): 1 on 0 (1,1): 1 on 4 (1,2): 1 on 9 (1,3): 1 on 5 (1,3): 1 on 2 AddNode 5 (1,3): 1 on 8 (1,2): 1 on 9 (1,1): 1 on 0 RemoveNode 5 (1,2): -1 on 7 (1,2): 1 on 6 (1,2): 1 on 6 (1,2): -1 on 5 (1,1): 1 on 3 (1,3): 1 on 5 (1,3): 1 on 4 (1,3): 1 on 4 (1,3): -1 on 2 (1,2): -1 on 7 (1,1): 1 on 3 (1,3): -1 on 7 (1,2): 1 on 6 (1,1): 1 on 2 AddNode 6 (1,1): -1 on 0 (1,2): -1 on 5 (1,3): 1 on 6 (1,1): 1 on 1 AddNode 9 (1,1): 1 on 4 (1,1): 1 on 1 AddNode 7 (1,3): 1 on 3 (1,2): -1 on 7 (1,2): 1 on 9 (1,2): 1 on 5 AddNode 4 (1,3): 1 on 2 (1,2): 1 on 5 RemoveNode 8 (1,2): 1 on 7 (1,2): 1 on 9 RemoveNode 6 (1,2): 1 on 7 (1,3): 1 on 3 (1,3): 1 on 8 (1,1): 1 on 3 RemoveNode 4 (1,3): 1 on 4 (1,3): 1 on 5 (1,2): 1 on 6 (1,1): 1 on 2 (1,2): 1 on 8 AddNode 8 (1,3): 1 on 9 (1,1): 1 on 3 (1,2): 1 on 9 AddNode 5 (1,1): 1 on 3 RemoveNode 8 (1,2): 1 on 9 RemoveNode 7 (1,3): 1 on 0 (1,2): -1 on 9 RemoveNode 1 (1,1): -1 on 1 (1,1): 1 on 0 (1,2): 1 on 6 RemoveNode 2 (1,2): 1 on 7 (1,2): 1 on 8 (1,2): 1 on 9 (1,2): 1 on 7 (1,1): -1 on 4 (1,3): 1 on 1 (1,2): -1 on 5 (1,1): 1 on 3 (1,2): 1 on 9 (1,2): 1 on 5 AddNode 4 (1,2): 1 on 8 RemoveNode 3 (1,2): 1 on 9 AddNode 1 (1,3): -1 on 2 (1,2): -1 on 6 (1,2): 1 on 9 (1,3): -1 on 2 AddNode 2 (1,3): 1 on 0 RemoveNode 0 (1,1): -1 on 3 (1,2): 1 on 6 (1,2): 1 on 9 (1,2): 1 on 9 AddNode 6 (1,2): -1 on 7 RemoveNode 4 (1,2): 1 on 6 AddNode 4 (1,2): 1 on 6 (1,1): 1 on 4 AddNode 0 (1,3): 1 on 4 RemoveNode 9 (1,2): 1 on 8 (1,2): 1 on 7 (1,2): 1 on 6 AddNode 8 (1,1): 1 on 0 (1,1): 1 on 0 AddNode 7 (1,1): 1 on 3 (1,3): 1 on 5 (1,3): -1 on 7 (1,1): -1 on 4 RemoveNode 6 (1,3): 1 on 0 RemoveNode 7 (1,1): 1 on 4 (1,3): 1 on 3 (1,3): 1 on 2 (1,1): 1 on 4 AddNode 9 (1,2): 1 on 8 (1,1): 1 on 0 RemoveNode 0 (1,2): -1 on 8 (1,2): 1 on 6 AddNode 7 (1,2): 1 on 9 (1,2): 1 on 7 (1,2): 1 on 5 (1,2): 1 on 5 AddNode 3 (1,1): 1 on 3 RemoveNode 7 (1,1): 1 on 2 (1,3): 1 on 0 RemoveNode 4 (1,1): 1 on 4 (1,2): -1 on 8 (1,2): 1 on 7 RemoveNode 1 (1,2): 1 on 9 (1,2): 1 on 7 (1,2): 1 on 7 (1,3): -1 on 5 AddNode 6 (1,3): 1 on 8 RemoveNode 9 (1,2): 1 on 5 (1,3): 1 on 9 (1,1): 1 on 2 (1,1): 1 on 0 (1,2): 1 on 6 RemoveNode 8 (1,2): 1 on 9 (1,3): 1 on 6 (1,2): 1 on 8 (1,2): 1 on 7 (1,2): 1 on 9 (1,1): 1 on 4 (1,1): -1 on 2 RemoveNode 6 (1,2): 1 on 6 RemoveNode 2 (1,1): 1 on 2 (1,2): 1 on 6 (1,3): 1 on 3 (1,1): 1 on 1 (1,3): 1 on 8 AddNode 0 (1,2): 1 on 8 RemoveNode 5 (1,3): 1 on 6 AddNode 8 (1,1): -1 on 1 (1,1): 1 on 4 AddNode 2 (1,1): 1 on 2 RemoveNode 2 (1,1): -1 on 4 (1,1): 1 on 1 AddNode 9 (1,2): 1 on 6 (1,2): 1 on 5 RemoveNode 8 (1,3): 1 on 3 AddNode 7 (1,2): 1 on 8 (1,2): 1 on 6 (1,2): 1 on 5 RemoveNode 3 (1,3): 1 on 3 AddNode 4 (1,2): 1 on 5 (1,3): 1 on 1 (1,3): -1 on 6 (1,2): 1 on 5 RemoveNode 4 (1,3): -1 on 6 (1,3): 1 on 2 (1,1): -1 on 4 (1,3): 1 on 9 (1,1): -1 on 0 (1,2): 1 on 7 (1,1): 1 on 1 (1,1): -1 on 1 (1,1): 1 on 1 (1,3): 1 on 6 (1,2): 1 on 8 AddNode 2 (1,3): 1 on 0 (1,2): 1 on 8 RemoveNode 7 (1,1): 1 on 3 (1,1): 1 on 1 AddNode 1 (1,1): 1 on 2 (1,3): 1 on 5 (1,3): 1 on 1 (1,3): 1 on 7 AddNode 8 (1,1): 1 on 4 (1,3): -1 on 7 (1,2): 1 on 8 (1,1): 1 on 2 (1,2): 1 on 5 AddNode 7 (1,1): -1 on 3 (1,2): -1 on 7 (1,2): 1 on 5 AddNode 4 (1,2): -1 on 9 (1,2): -1 on 7 (1,1): -1 on 2 (1,2): 1 on 6 (1,1): 1 on 3 (1,2): 1 on 7 (1,2): 1 on 8 RemoveNode 1 (1,2): 1 on 7 RemoveNode 4 (1,2): 1 on 5 (1,1): -1 on 2 (1,1): 1 on 0 
(1,3): -1 on 2 (1,2): 1 on 6 AddNode 6 (1,2): 1 on 5 (1,3): 1 on 6 (1,2): 1 on 5 AddNode 4 (1,3): 1 on 8 (1,2): 1 on 8 (1,3): 1 on 1 (1,3): -1 on 6 (1,2): 1 on 8 (1,3): -1 on 3 (1,2): 1 on 6 (1,1): 1 on 2 (1,3): -1 on 8 (1,2): 1 on 5 (1,3): 1 on 3 AddNode 3 (1,1): 1 on 0 RemoveNode 8 (1,2): 1 on 7 AddNode 8 (1,3): 1 on 3 (1,1): -1 on 0 RemoveNode 0 (1,2): 1 on 8 (1,2): 1 on 9 RemoveNode 3 (1,1): -1 on 2 RemoveNode 8 (1,1): 1 on 0 RemoveNode 7 (1,1): 1 on 4 (1,2): 1 on 8 (1,3): 1 on 9 (1,1): 1 on 2 (1,3): 1 on 3 AddNode 3 (1,2): 1 on 8 AddNode 1 (1,2): 1 on 7 RemoveNode 6 (1,2): 1 on 5 (1,2): -1 on 6 RemoveNode 9 (1,1): 1 on 0 (1,2): 1 on 7 AddNode 0 (1,1): 1 on 4 AddNode 7 (1,3): 1 on 3 (1,1): 1 on 4 (1,3): 1 on 5 (1,1): 1 on 0 (1,3): 1 on 6 (1,3): -1 on 2 RemoveNode 0 (1,3): 1 on 7 AddNode 0 (1,1): 1 on 1 (1,1): -1 on 2 AddNode 6 (1,1): 1 on 3 (1,2): 1 on 5 RemoveNode 2 (1,3): 1 on 7 (1,2): 1 on 7 (1,3): 1 on 8 (1,1): 1 on 1 (1,1): 1 on 0 (1,2): 1 on 7 AddNode 2 (1,2): 1 on 6 (1,3): 1 on 0 RemoveNode 4 (1,1): 1 on 0 (1,1): 1 on 3 (1,3): 1 on 6 (1,2): 1 on 8 (1,3): -1 on 8 (1,2): -1 on 7 (1,1): -1 on 1 (1,1): 1 on 0 (1,1): 1 on 1 RemoveNode 0 (1,1): 1 on 4 RemoveNode 7 (1,2): 1 on 5 RemoveNode 1 (1,1): 1 on 2 (1,2): -1 on 7 (1,3): -1 on 9 (1,1): -1 on 4 (1,1): 1 on 4 (1,3): 1 on 5 (1,1): 1 on 4 RemoveNode 3 (1,1): 1 on 4 (1,1): 1 on 4 (1,1): 1 on 4 (1,2): 1 on 6 AddNode 0 (1,1): 1 on 3 (1,3): 1 on 0 (1,3): 1 on 1 (1,1): 1 on 2 (1,1): 1 on 1 (1,1): 1 on 0 AddNode 4 (1,2): 1 on 7 (1,2): 1 on 9 RemoveNode 4 (1,1): 1 on 3 (1,2): 1 on 5 (1,2): 1 on 6 AddNode 7 (1,3): 1 on 8 (1,1): 1 on 2 (1,3): 1 on 3 AddNode 8 (1,1 ... ,3): 1 on 4 (1,3): 1 on 3 (1,2): 1 on 8 AddNode 5 (1,2): 1 on 7 (1,2): -1 on 8 RemoveNode 9 (1,2): -1 on 6 (1,2): 1 on 6 (1,3): 1 on 9 RemoveNode 6 (1,1): 1 on 2 (1,3): -1 on 2 (1,1): -1 on 4 RemoveNode 5 (1,2): 1 on 8 (1,2): 1 on 5 AddNode 2 (1,1): 1 on 4 (1,3): 1 on 3 RemoveNode 2 (1,3): 1 on 6 (1,2): 1 on 6 (1,3): 1 on 2 RemoveNode 7 (1,1): 1 on 3 AddNode 2 (1,2): 1 on 6 (1,2): 1 on 9 (1,3): 1 on 0 (1,2): 1 on 7 RemoveNode 4 (1,2): 1 on 5 (1,2): 1 on 9 AddNode 4 (1,3): 1 on 2 (1,3): 1 on 5 (1,3): 1 on 3 (1,2): 1 on 7 (1,1): 1 on 4 (1,3): 1 on 6 (1,3): 1 on 4 (1,1): 1 on 1 (1,3): 1 on 5 (1,1): -1 on 0 RemoveNode 3 (1,1): 1 on 3 (1,1): 1 on 3 (1,3): -1 on 1 RemoveNode 4 (1,3): 1 on 1 (1,1): 1 on 3 (1,1): 1 on 0 AddNode 0 (1,3): 1 on 2 (1,2): 1 on 5 AddNode 1 (1,2): 1 on 7 (1,2): 1 on 9 AddNode 4 (1,1): 1 on 2 (1,3): 1 on 0 AddNode 9 (1,3): -1 on 6 AddNode 7 (1,2): 1 on 8 (1,1): 1 on 2 RemoveNode 4 (1,3): 1 on 6 (1,1): 1 on 2 (1,1): 1 on 3 (1,2): 1 on 8 (1,2): 1 on 6 (1,2): 1 on 9 RemoveNode 7 (1,3): 1 on 9 (1,3): 1 on 3 (1,3): 1 on 5 (1,3): 1 on 5 (1,2): 1 on 6 (1,3): 1 on 7 (1,3): -1 on 2 (1,2): -1 on 9 (1,1): -1 on 4 (1,2): 1 on 7 RemoveNode 9 (1,3): 1 on 0 RemoveNode 1 (1,1): 1 on 0 AddNode 7 (1,3): 1 on 8 (1,2): 1 on 6 (1,1): 1 on 1 RemoveNode 2 (1,3): -1 on 0 (1,2): -1 on 6 (1,3): 1 on 5 AddNode 3 (1,1): -1 on 3 AddNode 4 (1,3): 1 on 1 (1,1): 1 on 2 (1,2): 1 on 5 AddNode 9 (1,1): 1 on 4 (1,2): 1 on 6 RemoveNode 7 (1,3): -1 on 5 (1,1): 1 on 1 (1,3): 1 on 6 RemoveNode 9 (1,3): 1 on 9 RemoveNode 8 (1,1): 1 on 2 AddNode 6 (1,1): 1 on 2 (1,3): 1 on 7 (1,2): 1 on 7 AddNode 8 (1,2): 1 on 5 AddNode 5 (1,2): 1 on 7 (1,2): 1 on 6 (1,2): 1 on 5 (1,3): 1 on 5 (1,1): 1 on 4 (1,2): -1 on 5 RemoveNode 4 (1,2): 1 on 5 (1,3): 1 on 2 (1,1): 1 on 1 (1,3): 1 on 3 (1,2): -1 on 9 (1,2): -1 on 6 AddNode 4 (1,3): 1 on 9 RemoveNode 4 (1,3): -1 on 1 RemoveNode 0 (1,3): 1 on 8 (1,2): 1 on 7 AddNode 
2 (1,3): 1 on 1 (1,2): 1 on 6 AddNode 7 (1,2): 1 on 9 AddNode 1 (1,2): 1 on 9 (1,2): 1 on 8 (1,1): 1 on 0 (1,3): 1 on 9 RemoveNode 6 (1,2): 1 on 8 AddNode 6 (1,3): -1 on 7 (1,2): 1 on 8 (1,3): -1 on 5 (1,2): 1 on 8 AddNode 0 (1,1): 1 on 2 (1,1): 1 on 1 (1,2): 1 on 5 RemoveNode 0 (1,3): -1 on 9 (1,3): 1 on 0 AddNode 0 (1,3): 1 on 8 RemoveNode 7 (1,2): -1 on 5 (1,1): 1 on 1 (1,1): -1 on 3 RemoveNode 2 (1,1): 1 on 0 (1,2): -1 on 7 (1,3): 1 on 2 (1,1): 1 on 2 (1,3): 1 on 1 (1,1): -1 on 1 (1,2): 1 on 6 (1,3): 1 on 4 (1,2): 1 on 9 (1,3): -1 on 4 RemoveNode 3 (1,2): 1 on 6 (1,3): 1 on 4 RemoveNode 5 (1,1): 1 on 0 (1,3): 1 on 3 RemoveNode 1 (1,3): -1 on 0 (1,1): 1 on 2 (1,2): 1 on 6 (1,3): 1 on 0 (1,2): -1 on 5 AddNode 3 (1,2): 1 on 9 (1,1): 1 on 1 AddNode 2 (1,2): 1 on 8 RemoveNode 0 (1,3): 1 on 8 RemoveNode 2 (1,3): 1 on 0 RemoveNode 6 (1,2): 1 on 6 (1,3): 1 on 5 (1,1): 1 on 1 AddNode 7 (1,1): 1 on 0 (1,2): 1 on 6 (1,1): 1 on 3 (1,2): 1 on 5 RemoveNode 8 (1,2): 1 on 8 (1,1): 1 on 3 (1,1): 1 on 0 AddNode 4 (1,2): 1 on 8 AddNode 8 (1,2): 1 on 6 RemoveNode 8 (1,3): 1 on 9 AddNode 9 (1,2): 1 on 9 RemoveNode 9 (1,2): 1 on 9 AddNode 8 (1,2): 1 on 8 (1,3): -1 on 5 AddNode 6 (1,3): 1 on 3 RemoveNode 7 (1,3): 1 on 8 (1,2): 1 on 7 AddNode 0 (1,2): -1 on 9 (1,3): 1 on 4 (1,2): 1 on 7 (1,3): -1 on 5 (1,1): -1 on 1 (1,1): 1 on 2 (1,3): 1 on 6 AddNode 5 (1,3): 1 on 7 RemoveNode 3 (1,2): 1 on 8 (1,2): 1 on 5 (1,1): 1 on 3 (1,3): 1 on 5 (1,1): 1 on 4 (1,3): 1 on 8 (1,3): 1 on 7 (1,2): -1 on 8 AddNode 3 (1,1): 1 on 0 RemoveNode 0 (1,2): 1 on 6 (1,1): 1 on 2 (1,3): 1 on 8 RemoveNode 4 (1,3): 1 on 2 (1,2): -1 on 6 (1,3): 1 on 3 AddNode 2 (1,3): 1 on 5 (1,1): 1 on 2 (1,3): 1 on 2 RemoveNode 3 (1,3): 1 on 3 (1,2): 1 on 6 RemoveNode 5 (1,2): 1 on 9 (1,3): -1 on 9 (1,2): 1 on 6 (1,2): -1 on 6 AddNode 0 (1,2): 1 on 5 AddNode 3 (1,3): -1 on 4 (1,3): 1 on 7 RemoveNode 2 (1,1): 1 on 1 (1,3): 1 on 5 RemoveNode 8 (1,1): 1 on 2 (1,2): 1 on 7 (1,2): 1 on 9 RemoveNode 0 (1,1): -1 on 2 RemoveNode 6 (1,2): 1 on 6 AddNode 0 (1,1): 1 on 4 (1,1): 1 on 2 (1,1): 1 on 2 (1,2): 1 on 5 (1,1): 1 on 0 (1,2): 1 on 6 (1,3): -1 on 8 (1,3): 1 on 5 (1,3): 1 on 1 (1,1): 1 on 3 AddNode 4 (1,1): -1 on 3 (1,1): 1 on 2 (1,3): -1 on 5 RemoveNode 4 (1,3): 1 on 2 (1,1): 1 on 0 (1,3): -1 on 6 (1,1): 1 on 1 (1,2): 1 on 6 (1,2): -1 on 6 (1,2): 1 on 7 (1,3): -1 on 3 AddNode 7 (1,2): 1 on 7 RemoveNode 0 (1,3): 1 on 0 (1,1): 1 on 2 (1,2): 1 on 6 (1,3): 1 on 7 (1,1): 1 on 2 (1,2): 1 on 9 RemoveNode 7 (1,2): -1 on 6 AddNode 7 (1,2): 1 on 7 (1,2): 1 on 7 (1,2): -1 on 6 RemoveNode 3 (1,3): 1 on 9 (1,2): 1 on 5 (1,1): 1 on 3 (1,2): 1 on 5 AddNode 0 (1,1): 1 on 3 (1,1): 1 on 1 (1,2): 1 on 5 RemoveNode 7 (1,3): -1 on 7 AddNode 7 (1,1): 1 on 1 (1,2): 1 on 9 (1,2): -1 on 6 AddNode 5 (1,1): 1 on 1 (1,3): 1 on 2 (1,3): 1 on 2 (1,2): 1 on 9 AddNode 4 (1,3): 1 on 9 (1,3): -1 on 2 RemoveNode 4 (1,3): -1 on 4 RemoveNode 7 (1,1): -1 on 4 (1,3): 1 on 2 (1,3): -1 on 2 (1,1): 1 on 4 (1,1): 1 on 0 (1,2): 1 on 9 (1,3): 1 on 0 (1,2): -1 on 7 AddNode 9 (1,1): 1 on 4 (1,3): 1 on 7 (1,3): 1 on 8 (1,1): 1 on 3 AddNode 2 (1,3): 1 on 2 RemoveNode 0 (1,2): -1 on 7 (1,1): 1 on 2 (1,3): 1 on 7 (1,2): 1 on 6 (1,2): -1 on 6 AddNode 6 (1,1): 1 on 3 (1,1): 1 on 4 (1,2): -1 on 6 (1,3): 1 on 0 AddNode 7 (1,1): 1 on 0 (1,3): -1 on 8 RemoveNode 9 (1,2): 1 on 7 (1,2): 1 on 5 (1,1): 1 on 2 (1,1): 1 on 0 (1,3): 1 on 4 (1,1): 1 on 0 AddNode 0 (1,1): 1 on 3 RemoveNode 7 (1,3): 1 on 0 (1,2): 1 on 7 (1,2): 1 on 9 (1,2): 1 on 5 AddNode 7 (1,3): 1 on 8 (1,1): 1 on 1 RemoveNode 0 (1,2): 1 on 9 (1,2): -1 
on 5 AddNode 1 (1,2): 1 on 5 (1,2): 1 on 6 (1,1): 1 on 4 (1,1): 1 on 3 (1,3): 1 on 0 (1,2): 1 on 9 (1,1): 1 on 1 (1,1): 1 on 1 AddNode 9 (1,3): 1 on 2 RemoveNode 7 (1,2): 1 on 5 RemoveNode 1 (1,1): -1 on 1 (1,1): -1 on 3 (1,3): 1 on 2 AddNode 0 (1,2): 1 on 7 (1,3): -1 on 0 (1,1): 1 on 3 AddNode 8 (1,2): 1 on 7 (1,3): 1 on 5 (1,2): 1 on 6 (1,3): 1 on 2 (1,3): 1 on 2 RemoveNode 0 (1,2): 1 on 5 AddNode 0 (1,1): -1 on 1 RemoveNode 2 (1,1): 1 on 2 (1,1): -1 on 2 (1,3): 1 on 8 (1,2): 1 on 9 (1,3): -1 on 6 (1,3): -1 on 8 (1,1): 1 on 2 RemoveNode 0 (1,1): -1 on 3 (1,2): 1 on 9 (1,1): 1 on 4 (1,1): 1 on 0 (1,1): 1 on 0 (1,2): -1 on 8 (1,2): 1 on 5 (1,1): 1 on 3 (1,2): 1 on 7 (1,2): 1 on 6 (1,2): 1 on 9 (1,1): 1 on 2 (1,2): 1 on 8 (1,3): 1 on 9 RemoveNode 5 (1,2): 1 on 7 (1,2): 1 on 5 (1,2): -1 on 9 (1,3): 1 on 5 (1,2): 1 on 8 (1,3): 1 on 8 RemoveNode 6 (1,2): -1 on 6 (1,3): 1 on 6 (1,3): 1 on 3 (1,2): 1 on 8 (1,1): 1 on 1 (1,3): 1 on 1 (1,1): 1 on 1 AddNode 6 (1,1): 1 on 4 AddNode 3 (1,2): 1 on 8 (1,1): 1 on 2 RemoveNode 9 (1,3): 1 on 1 AddNode 2 (1,1): 1 on 0 (1,3): 1 on 7 AddNode 9 (1,1): -1 on 2 AddNode 1 (1,1): -1 on 1 (1,2): 1 on 8 RemoveNode 2 (1,1): 1 on 3 (1,2): 1 on 7 (1,2): 1 on 7 (1,2): 1 on 9 AddNode 2 (1,2): 1 on 8 (1,2): 1 on 9 (1,3): 1 on 3 RemoveNode 2 (1,1): 1 on 4 AddNode 7 (1,1): 1 on 1 RemoveNode 9 (1,2): 1 on 9 (1,3): 1 on 7 AddNode 4 (1,2): 1 on 6 (1,3): -1 on 7 (1,2): -1 on 6 (1,3): 1 on 5 (1,2): -1 on 8 (1,1): 1 on 3 AddNode 2 (1,1): 1 on 1 (1,2): 1 on 8 (1,3): 1 on 2 (1,1): 1 on 4 (1,3): -1 on 8 (1,1): 1 on 3 (1,1): 1 on 4 RemoveNode 8 (1,1): 1 on 3 RemoveNode 4 (1,2): 1 on 8 (1,2): 1 on 9 (1,3): -1 on 2 (1,1): -1 on 0 (1,2): 1 on 5 AddNode 0 (1,1): 1 on 3 (1,1): 1 on 3 (1,1): 1 on 4 (1,1): -1 on 1 (1,1): 1 on 0 (1,1): -1 on 4 (1,2): 1 on 9 (1,3): 1 on 7 (1,3): 1 on 8 (1,1): 1 on 1 (1,3): -1 on 4 (1,1): 1 on 0 (1,1): 1 on 3 (1,1): 1 on 3 RemoveNode 1 (1,3): 1 on 3 (1,3): 1 on 0 (1,1): 1 on 3 RemoveNode 2 (1,3): 1 on 5 (1,1): -1 on 2 (1,2): 1 on 8 (1,1): 1 on 1 RemoveNode 7 (1,3): -1 on 2 (1,1): 1 on 2 (1,1): 1 on 0 (1,1): 1 on 3 (1,1): 1 on 3 (1,1): 1 on 0 AddNode 9 (1,3): -1 on 7 (1,1): 1 on 1 RemoveNode 0 (1,3): -1 on 9 AddNode 2 (1,1): 1 on 3 (1,1): -1 on 0 (1,1): 1 on 0 (1,3): -1 on 1 (1,2): 1 on 8 (1,2): -1 on 8 (1,2): 1 on 9 (1,1): -1 on 4 RemoveNode 2 (1,3): 1 on 2 (1,3): 1 on 3 (1,2): 1 on 8 (1,3): 1 on 5 (1,2): 1 on 9 AddNode 2 (1,2): -1 on 8 RemoveNode 9 (1,3): -1 on 3 (1,3): -1 on 1 RemoveNode 3 (1,1): 1 on 0 AddNode 5 (1,3): 1 on 4 RemoveNode 6 (1,2): 1 on 7 (1,1): 1 on 2 AddNode 1 (1,1): 1 on 1 RemoveNode 2 (1,3): -1 on 1 AddNode 2 (1,3): -1 on 7 (1,2): -1 on 5 (1,1): -1 on 1 (1,1): -1 on 1 (1,1): 1 on 1 AddNode 8 (1,1): 1 on 3 AddNode 3 (1,2): 1 on 9 (1,3): 1 on 5 (1,1): 1 on 2 (1,2): 1 on 6 (1,2): -1 on 7 AddNode 6 (1,3): 1 on 9 (1,1): 1 on 0 AddNode 0 (1,2): 1 on 9 AddNode 7 (1,2): 1 on 7 (1,3): 1 on 1 (1,3): 1 on 1 (1,2): -1 on 9 (1,1): -1 on 3 RemoveNode 7 (1,2): 1 on 8 (1,3): 1 on 0 RemoveNode 0 (1,3): 1 on 1 (1,1): -1 on 3 RemoveNode 1 (1,1): 1 on 4 (1,3): 1 on 0 (1,3): 1 on 5 (1,1): 1 on 0 (1,3): 1 on 8 (1,1): 1 on 2 AddNode 9 (1,3): 1 on 6 (1,3): 1 on 6 (1,1): -1 on 1 (1,1): 1 on 4 AddNode 7 (1,2): 1 on 9 (1,2): -1 on 8 (1,2): 1 on 8 (1,2): -1 on 5 AddNode 4 (1,1): 1 on 0 (1,3): 1 on 5 (1,3): 1 on 2 RemoveNode 4 (1,3): -1 on 9 (1,1): 1 on 4 (1,2): 1 on 9 RemoveNode 9 (1,2): 1 on 7 (1,1): 1 on 2 (1,1): 1 on 3 (1,2): 1 on 7 RemoveNode 6 (1,3): 1 on 2 (1,1): 1 on 2 (1,1): 1 on 0 (1,1): 1 on 0 (1,2): -1 on 5 (1,1): 1 on 2 (1,2): 1 on 9 (1,1): -1 on 3 
(1,1): 1 on 1 RemoveNode 5 (1,1): 1 on 4 (1,2): 1 on 7 (1,2): 1 on 7 (1,3): 1 on 1 (1,2): 1 on 9 (1,1): 1 on 1 (1,3): 1 on 4 (1,3): 1 on 4 RemoveNode 2 (1,1): 1 on 0 (1,3): 1 on 0 AddNode 4 (1,1): 1 on 0 (1,1): 1 on 3 RemoveNode 4 (1,2): 1 on 8 (1,2): -1 on 9 (1,2): -1 on 7 AddNode 5 (1,1): 1 on 0 AddNode 0 (1,2): 1 on 7 (1,2): -1 on 5 (1,1): 1 on 2 (1,3): 1 on 8 (1,1): -1 on 2 RemoveNode 0 (1,2): 1 on 5 (1,2): -1 on 7 RemoveNode 5 (1,3): 1 on 5 AddNode 2 (1,2): 1 on 6 AddNode 0 (1,1): 1 on 3 (1,1): 1 on 4 (1,2): 1 on 5 (1,3): 1 on 3 AddNode 9 (1,1): 1 on 0 (1,2): 1 on 6 RemoveNode 0 (1,1): 1 on 1 (1,3): 1 on 2 (1,1): -1 on 4 (1,3): 1 on 3 AddNode 5 (1,2): 1 on 9 (1,3): 1 on 2 (1,2): -1 on 5 (1,2): 1 on 6 AddNode 1 (1,1): -1 on 0 RemoveNode 1 (1,2): -1 on 7 AddNode 4 (1,1): 1 on 0 (1,2): 1 on 9 (1,1): 1 on 2 RemoveNode 3 (1,3): -1 on 6 RemoveNode 8 (1,2): 1 on 8 (1,3): 1 on 7 (1,3): 1 on 0 RemoveNode 5 (1,2): -1 on 7 (1,3): 1 on 3 AddNode 6 (1,1): 1 on 2 AddNode 5 (1,2): 1 on 6 AddNode 3 (1,3): 1 on 2 RemoveNode 4 (1,3): 1 on 3 (1,2): 1 on 9 (1,3): 1 on 4 AddNode 0 (1,1): -1 on 4 RemoveNode 0 (1,2): 1 on 6 RemoveNode 5 (1,1): 1 on 0 (1,1): -1 on 4 (1,3): 1 on 1 (1,1): 1 on 0 AddNode 8 (1,1): -1 on 2 (1,3): -1 on 0 (1,3): 1 on 6 (1,1): 1 on 2 (1,2): 1 on 7 AddNode 5 (1,2): 1 on 9 (1,1): 1 on 2 (1,3): 1 on 5 (1,3): 1 on 1 RemoveNode 9 (1,2): 1 on 6 AddNode 0 (1,1): 1 on 1 (1,3): 1 on 3 (1,2): 1 on 6 (1,1): -1 on 0 (1,2): 1 on 9 (1,3): 1 on 1 (1,2): 1 on 8 (1,1): 1 on 3 (1,1): 1 on 3 (1,3): 1 on 4 RemoveNode 5 (1,2): -1 on 6 (1,3): 1 on 4 (1,1): -1 on 1 (1,1): 1 on 3 AddNode 5 (1,1): 1 on 4 (1,3): 1 on 3 (1,1): -1 on 2 (1,3): -1 on 1 (1,1): 1 on 1 (1,2): 1 on 9 (1,2): 1 on 7 (1,1): 1 on 0 (1,3): 1 on 1 RemoveNode 0 (1,2): 1 on 5 (1,3): -1 on 8 (1,2): 1 on 6 (1,1): 1 on 4 (1,1): -1 on 3 RemoveNode 6 (1,3): 1 on 9 AddNode 9 (1,1): 1 on 0 RemoveNode 5 (1,3): 1 on 0 RemoveNode 3 (1,3): -1 on 4 (1,2): 1 on 8 (1,2): 1 on 7 (1,2): -1 on 7 (1,3): -1 on 6 Final state: 403 387 397 417 400 0 0 0 0 0 0 0 0 0 0 359 427 442 433 410 192 199 174 233 198 205 200 154 185 175 - - + - - - - + + + Took 3.871844 seconds avg = 4800 min = 4800 max = 4800 std-dev = 0 ch.0 avg = 1600 ch.0 min = 1515 ch.0 max = 1715 ch.0 std-dev = 35.31939977 ch.1 avg = 1600 ch.1 min = 1511 ch.1 max = 1678 ch.1 std-dev = 32.16240041 ch.2 avg = 1600 ch.2 min = 1509 ch.2 max = 1687 ch.2 std-dev = 33.5925587 avg = 1250 std-dev = 0 avg = 4800 min = 4800 max = 4800 std-dev = 0 ch.0 avg = 1600 ch.0 min = 1600 ch.0 max = 1600 ch.0 std-dev = 0 ch.1 avg = 1600 ch.1 min = 1600 ch.1 max = 1600 ch.1 std-dev = 0 ch.2 avg = 1600 ch.2 min = 1600 ch.2 max = 1600 ch.2 std-dev = 0 avg = 1250 std-dev = 0 avg = 4800 min = 4799 max = 4801 std-dev = 0.2 ch.0 avg = 1600 ch.0 min = 1534 ch.0 max = 1660 ch.0 std-dev = 27.21874354 ch.1 avg = 1600 ch.1 min = 1507 ch.1 max = 1712 ch.1 std-dev = 34.6488095 ch.2 avg = 1600 ch.2 min = 1510 ch.2 max = 1679 ch.2 std-dev = 34.19649105 avg = 1250 std-dev = 0 >> Cdc::UpdatesLog[PqRunner] [FAIL] >> Cdc::UpdatesLog[YdsRunner] >> TReplicaTest::CommitWithoutHandshake ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2025-05-29T15:26:25.380061Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:26:25.380089Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 
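Aside: the "Final state" block above closes the StoragePool::TestDistributionRandomMin7pWithOverflow run — after the randomized AddNode/RemoveNode sequence it prints, for each storage channel, the average, minimum, maximum and standard deviation of tablet counts across the surviving nodes. A minimal C++ sketch of how such per-channel figures are computed follows; this is an illustrative reconstruction, not the test's actual code, and the per-node counts in main() are hypothetical sample values.

#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

struct TChannelStats {
    double Avg = 0;
    double StdDev = 0;
    int Min = 0;
    int Max = 0;
};

TChannelStats ComputeStats(const std::vector<int>& perNodeCounts) {
    TChannelStats s;
    if (perNodeCounts.empty())
        return s;
    s.Min = *std::min_element(perNodeCounts.begin(), perNodeCounts.end());
    s.Max = *std::max_element(perNodeCounts.begin(), perNodeCounts.end());
    double sum = 0;
    for (int c : perNodeCounts)
        sum += c;
    s.Avg = sum / perNodeCounts.size();
    // Population standard deviation, matching a "std-dev = 0" result
    // when every node carries exactly the average count.
    double var = 0;
    for (int c : perNodeCounts)
        var += (c - s.Avg) * (c - s.Avg);
    s.StdDev = std::sqrt(var / perNodeCounts.size());
    return s;
}

int main() {
    // Hypothetical per-node counts for one channel (the real test uses the
    // distribution produced by the AddNode/RemoveNode sequence above).
    std::vector<int> ch0 = {1515, 1715, 1600, 1585, 1625};
    TChannelStats s = ComputeStats(ch0);
    std::printf("ch.0 avg = %g ch.0 min = %d ch.0 max = %d ch.0 std-dev = %.8g\n",
                s.Avg, s.Min, s.Max, s.StdDev);
    return 0;
}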
2025-05-29T15:26:25.383592Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:26:25.383631Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2025-05-29T15:26:25.383653Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2025-05-29T15:26:25.383662Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-05-29T15:26:25.383687Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:218: Direct read cache: forget read: 1 for session session1 >> TReplicaTest::CommitWithoutHandshake [GOOD] >> TReplicaTest::CommitWithStaleGeneration >> Cdc::HugeKey[PqRunner] [FAIL] >> Cdc::HugeKey[YdsRunner] >> THiveTest::TestLockTabletExecutionBadOwner [GOOD] >> THiveTest::TestLockTabletExecutionDelete >> TReplicaTest::CommitWithStaleGeneration [GOOD] >> TReplicaTest::Delete >> KqpNewEngine::UpsertEmptyInput >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink [FAIL] >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink >> TReplicaTest::Delete [GOOD] >> TReplicaTest::UpdateWithoutHandshake >> TReplicaTest::Merge >> Cdc::NewAndOldImagesLog[PqRunner] [FAIL] >> Cdc::NewAndOldImagesLog[YdsRunner] >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce [FAIL] >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice >> TReplicaTest::UpdateWithoutHandshake [GOOD] >> TReplicaTest::UpdateWithStaleGeneration >> Cdc::UpdatesLog[YdsRunner] [FAIL] >> Cdc::UpdatesLog[TopicRunner] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::Delete [GOOD] Test command err: 2025-05-29T15:26:25.877579Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-05-29T15:26:25.877601Z node 1 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:969: [1:6:2053] Reject commit from unknown populator: sender# [1:7:2054], owner# 1, generation# 1 2025-05-29T15:26:25.877609Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-05-29T15:26:25.877613Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-05-29T15:26:26.088404Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 0 }: sender# [2:7:2054] 2025-05-29T15:26:26.088429Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [2:6:2053] Successful handshake: owner# 1, generation# 0 2025-05-29T15:26:26.088442Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-05-29T15:26:26.088447Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-05-29T15:26:26.088457Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [2:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-05-29T15:26:26.088463Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [2:6:2053] Commit generation: owner# 1, generation# 1 2025-05-29T15:26:26.088470Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [2:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration 
{ Owner: 1 Generation: 0 }: sender# [2:7:2054] 2025-05-29T15:26:26.088475Z node 2 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:979: [2:6:2053] Reject commit from stale populator: sender# [2:7:2054], owner# 1, generation# 0, pending generation# 1 2025-05-29T15:26:26.088481Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [2:7:2054] 2025-05-29T15:26:26.088485Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [2:6:2053] Successful handshake: owner# 1, generation# 2 2025-05-29T15:26:26.304564Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-05-29T15:26:26.304585Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-05-29T15:26:26.304645Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-05-29T15:26:26.304653Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [3:6:2053] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# false 2025-05-29T15:26:26.305819Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 42, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-05-29T15:26:26.305854Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-05-29T15:26:26.305877Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-05-29T15:26:26.305905Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:9:2056] 2025-05-29T15:26:26.305911Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-05-29T15:26:26.305927Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-05-29T15:26:26.305932Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [3:6:2053] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# true 2025-05-29T15:26:26.305936Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:575: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 42, LocalPathId: 1] 2025-05-29T15:26:26.305958Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:10:2057] 2025-05-29T15:26:26.305963Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:6:2053] Subscribe: subscriber# [3:10:2057], path# path, domainOwnerId# 0, capabilities# 2025-05-29T15:26:26.305976Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:11:2058] 2025-05-29T15:26:26.305981Z node 3 :SCHEME_BOARD_REPLICA 
INFO: replica.cpp:646: [3:6:2053] Subscribe: subscriber# [3:11:2058], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-05-29T15:26:26.305991Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:12:2059] 2025-05-29T15:26:26.305995Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:6:2053] Subscribe: subscriber# [3:12:2059], path# path, domainOwnerId# 0, capabilities# >> TReplicaTest::Merge [GOOD] >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers >> TReplicaTest::UpdateWithStaleGeneration [GOOD] >> TReplicaTest::Commit >> THiveTest::TestLockTabletExecutionDelete [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers [GOOD] >> TReplicaTest::StrongNotificationAfterCommit >> THiveTest::TestFollowersCrossDC_Tight [GOOD] >> THiveTest::TestFollowersCrossDC_MovingLeader >> TReplicaTest::StrongNotificationAfterCommit [GOOD] >> TReplicaTest::Commit [GOOD] >> TReplicaTest::AckNotifications >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot >> Cdc::HugeKey[YdsRunner] [FAIL] >> Cdc::HugeKey[TopicRunner] >> TReplicaTest::AckNotifications [GOOD] >> TReplicaTest::AckNotificationsUponPathRecreation ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin Test command err: Trying to start YDB, gRPC: 7792, MsgBus: 7824 2025-05-29T15:26:08.450390Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889340662926464:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:08.450729Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d8b/r3tmp/tmpzjdzTT/pdisk_1.dat 2025-05-29T15:26:08.538856Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889340662926299:2079] 1748532368367886 != 1748532368367889 2025-05-29T15:26:08.539453Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7792, node 1 2025-05-29T15:26:08.558928Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:08.558941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:08.558943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:08.558986Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:26:08.573059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:08.573085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:08.575223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7824 TClient is connected to server localhost:7824 WaitRootIsUp 'Root'... 
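Aside: the TPQCachingProxyTest::TestPublishAndForget trace above walks a direct read through its full cache lifecycle — the read is staged for a server session, then published under that session's generation, then forgotten. The following is a simplified, self-contained model of that state machine as visible in the log; it is not the actual caching_service.cpp implementation, and all names are illustrative.

#include <cstdio>
#include <map>
#include <set>
#include <string>

class TDirectReadCacheModel {
public:
    void RegisterSession(const std::string& session, unsigned generation) {
        Generations[session] = generation;
        std::printf("registered server session: %s with generation %u\n",
                    session.c_str(), generation);
    }
    void StageRead(const std::string& session, unsigned long long readId) {
        Staged[session].insert(readId);
        std::printf("staged direct read id %llu for session: %s\n", readId, session.c_str());
    }
    void PublishRead(const std::string& session, unsigned long long readId) {
        // Only a read that was previously staged can be published.
        if (Staged[session].erase(readId)) {
            Published[session].insert(readId);
            std::printf("publish read: %llu for session %s, Generation: %u\n",
                        readId, session.c_str(), Generations[session]);
        }
    }
    void ForgetRead(const std::string& session, unsigned long long readId) {
        // Forget removes the read regardless of which stage it reached.
        Staged[session].erase(readId);
        Published[session].erase(readId);
        std::printf("forget read: %llu for session %s\n", readId, session.c_str());
    }
private:
    std::map<std::string, unsigned> Generations;
    std::map<std::string, std::set<unsigned long long>> Staged;
    std::map<std::string, std::set<unsigned long long>> Published;
};

int main() {
    // Replays the sequence from the trace above: session1, generation 1.
    TDirectReadCacheModel cache;
    cache.RegisterSession("session1", 1);
    cache.StageRead("session1", 1);
    cache.PublishRead("session1", 1);
    cache.ForgetRead("session1", 1);
    return 0;
}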
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:26:08.667015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:08.672359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:26:08.676720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:08.749903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:08.788009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:08.820097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:08.911035Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889340662927944:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:08.911075Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:08.965885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:26:08.979894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:26:09.004740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:26:09.023260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:26:09.041111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:26:09.057498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:26:09.119453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:26:09.160509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889344957895895:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:09.160541Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:09.160764Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889344957895900:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:09.161816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:26:09.165348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:26:09.165453Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889344957895902:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:26:09.235858Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889344957895953:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:26:09.370476Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889344957895962:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:26:09.370867Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmMzZmFmYjUtZjNhYzUxYzgtNGU5YTM0NzYtYjIyYjgyOTc=, ActorId: [1:7509889340662927917:2400], ActorState: ExecuteState, TraceId: 01jwead5r77tt8hpk7b5qaczf9, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:26:09.371822Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DDD325 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DD4326 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F75616 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13C2F4E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26431D72 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26452CCC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26452CCC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26452CCC 8. /-S/util/thread/pool.h:71: Process @ 0x26452CCC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE4AC9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DE34B9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DE34B9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDE92C 13. ??:0: ?? @ 0x7F31FD912AC2 14. ??:0: ?? @ 0x7F31FD9A484F Trying to start YDB, gRPC: 7538, MsgBus: 62624 2025-05-29T15:26:16.210081Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889374158438368:2207];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:16.210927Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d8b/r3tmp/tmpRJBfAC/pdisk_1 ... n.h:375: AssertSuccessResult @ 0x13C2F4E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26431D72 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26452CCC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26452CCC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26452CCC 8. /-S/util/thread/pool.h:71: Process @ 0x26452CCC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE4AC9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DE34B9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DE34B9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDE92C 13. ??:0: ?? @ 0x7F909BA19AC2 14. ??:0: ?? 
@ 0x7F909BAAB84F Trying to start YDB, gRPC: 11840, MsgBus: 5405 2025-05-29T15:26:22.011442Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889398426966729:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:22.011463Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d8b/r3tmp/tmpESpN6D/pdisk_1.dat 2025-05-29T15:26:22.071775Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889398426966688:2079] 1748532382011210 != 1748532382011213 2025-05-29T15:26:22.074424Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11840, node 1 2025-05-29T15:26:22.090953Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:22.090965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:22.090967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:22.091011Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:26:22.111115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:22.111159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:5405 2025-05-29T15:26:22.115213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5405 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:26:22.178409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
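Aside: the failing KqpIndexes runs in this log all abort identically — query compilation fails with "yql_expr.h:1874: index out of range", and the test helper AssertSuccessResult (kqp_ut_common.h:375) turns the unsuccessful NYdb::TStatus into a VERIFY raised from a non-unittest thread, producing the stack traces above. A hedged sketch of that assertion pattern follows; TStatus here is a stand-in type rather than the real NYdb::TStatus, and the real helper goes through the unittest framework's VERIFY machinery rather than abort().

#include <cstdio>
#include <cstdlib>
#include <string>

struct TStatus {            // stand-in for NYdb::TStatus, not the real class
    bool Success;
    std::string Issues;     // flattened issue chain, e.g. ": Fatal: Execution, code: 1060"
    bool IsSuccess() const { return Success; }
};

void AssertSuccessResult(const TStatus& result) {
    if (!result.IsSuccess()) {
        // The real helper raises the framework's VERIFY; the visible effect is
        // the same -- the test process aborts and the issue chain is printed.
        std::fprintf(stderr, "assertion failed: (result.IsSuccess())\n%s\n",
                     result.Issues.c_str());
        std::abort();
    }
}

int main() {
    TStatus bad{false, ": Fatal: Execution, code: 1060"};
    AssertSuccessResult(bad); // aborts, mirroring the VERIFY seen in the log
    return 0;
}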
2025-05-29T15:26:22.184780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:26:22.196265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:22.267074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:22.286866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:22.308971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:22.563534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889398426968322:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:22.563588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:22.606107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:26:22.674629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:26:22.684653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:26:22.698031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:26:22.713338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:26:22.776393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:26:22.789334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:26:22.807590Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889398426968981:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:22.807610Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:22.807723Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889398426968986:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:22.808617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:26:22.817114Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889398426968988:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:26:22.896574Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889398426969039:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:26:23.001523Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889398426969048:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:26:23.003623Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2YyYTRmZjMtNDVhNWQ3YmMtZjBhZjllNWEtY2I5MDk4Njc=, ActorId: [1:7509889398426968294:2399], ActorState: ExecuteState, TraceId: 01jweadk2p15qe59f3ssxxmqkd, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:26:23.007282Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DDD325 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DD4326 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F75616 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13C2F4E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26431D72 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26452CCC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26452CCC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26452CCC 8. /-S/util/thread/pool.h:71: Process @ 0x26452CCC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE4AC9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DE34B9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DE34B9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDE92C 13. ??:0: ?? @ 0x7FDC18AE5AC2 14. ??:0: ?? @ 0x7FDC18B7784F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UpdateWithStaleGeneration [GOOD] Test command err: 2025-05-29T15:26:26.630529Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-05-29T15:26:26.630560Z node 1 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:797: [1:6:2053] Reject update from unknown populator: sender# [1:7:2054], owner# 1, generation# 1 2025-05-29T15:26:26.630579Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2025-05-29T15:26:26.630586Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [1:6:2053] Upsert description: path# path 2025-05-29T15:26:26.630617Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-05-29T15:26:26.630637Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:7:2054] 2025-05-29T15:26:26.630653Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:662: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# path 2025-05-29T15:26:26.630662Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:7:2054] 2025-05-29T15:26:26.630666Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [1:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-05-29T15:26:26.630672Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-05-29T15:26:26.630681Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:7:2054] 2025-05-29T15:26:26.630685Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:662: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1] 2025-05-29T15:26:26.841628Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 
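Aside: the TReplicaTest traces in this log (Delete, CommitWithStaleGeneration, UpdateWithStaleGeneration) all exercise the same gate in the scheme board replica: a populator must handshake to register its generation, and any update or commit carrying an unknown or stale generation is rejected. A minimal model of that check follows; it is illustrative only — the real replica.cpp additionally tracks path descriptions, subscribers and notifications.

#include <cstdio>
#include <map>

class TReplicaModel {
public:
    void Handshake(unsigned long long owner, unsigned long long generation) {
        PendingGen[owner] = generation;
        std::printf("Successful handshake: owner# %llu, generation# %llu\n", owner, generation);
    }
    bool Update(unsigned long long owner, unsigned long long generation) {
        auto it = PendingGen.find(owner);
        if (it == PendingGen.end()) {
            std::printf("Reject update from unknown populator: owner# %llu\n", owner);
            return false;
        }
        if (generation < it->second) {
            std::printf("Reject update from stale populator: generation# %llu, "
                        "pending generation# %llu\n", generation, it->second);
            return false;
        }
        return true; // accepted: the path description would be upserted here
    }
    bool CommitGeneration(unsigned long long owner, unsigned long long generation) {
        auto it = PendingGen.find(owner);
        if (it == PendingGen.end() || generation < it->second) {
            std::printf("Reject commit: owner# %llu, generation# %llu\n", owner, generation);
            return false;
        }
        CommittedGen[owner] = generation;
        std::printf("Commit generation: owner# %llu, generation# %llu\n", owner, generation);
        return true;
    }
private:
    std::map<unsigned long long, unsigned long long> PendingGen;   // set on handshake
    std::map<unsigned long long, unsigned long long> CommittedGen; // set on commit
};

int main() {
    TReplicaModel replica;
    replica.Update(1, 1);           // rejected: populator never handshook
    replica.Handshake(1, 1);
    replica.Update(1, 0);           // rejected: stale generation (0 < pending 1)
    replica.Update(1, 1);           // accepted
    replica.CommitGeneration(1, 1); // accepted
    return 0;
}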
2025-05-29T15:26:26.841650Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-05-29T15:26:26.841689Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 0 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-05-29T15:26:26.841697Z node 2 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:805: [2:6:2053] Reject update from stale populator: sender# [2:7:2054], owner# 1, generation# 0, pending generation# 1 2025-05-29T15:26:26.841712Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2025-05-29T15:26:26.841718Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [2:6:2053] Upsert description: path# path 2025-05-29T15:26:26.841745Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-05-29T15:26:26.841763Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] 2025-05-29T15:26:26.841770Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:662: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# path 2025-05-29T15:26:26.841779Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:7:2054] 2025-05-29T15:26:26.841784Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-05-29T15:26:26.841790Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-05-29T15:26:26.841799Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:7:2054] 2025-05-29T15:26:26.841803Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:662: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1] >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] >> Cdc::NewAndOldImagesLog[YdsRunner] [FAIL] >> Cdc::NewAndOldImagesLog[TopicRunner] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::StrongNotificationAfterCommit [GOOD] Test command err: 2025-05-29T15:26:26.770115Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2025-05-29T15:26:26.770144Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [1:6:2053] Upsert description: path# path 2025-05-29T15:26:26.770179Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:6:2053] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-05-29T15:26:26.770216Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:9:2056] 2025-05-29T15:26:26.770221Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [1:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-05-29T15:26:26.770227Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, 
capabilities# 2025-05-29T15:26:26.770247Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-05-29T15:26:26.770254Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-05-29T15:26:26.770298Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-05-29T15:26:26.770304Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-05-29T15:26:26.771424Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-05-29T15:26:26.771501Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-05-29T15:26:26.771507Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-05-29T15:26:26.771512Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:575: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-05-29T15:26:26.997409Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-05-29T15:26:26.997436Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-05-29T15:26:26.997465Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:8:2055] 2025-05-29T15:26:26.997471Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-05-29T15:26:26.997491Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-05-29T15:26:26.997535Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-05-29T15:26:26.997543Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-05-29T15:26:26.997567Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-05-29T15:26:26.997601Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-05-29T15:26:26.997606Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [2:6:2053] Update description: path# path, 
pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-05-29T15:26:26.997611Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:575: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-05-29T15:26:26.997624Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:8:2055] 2025-05-29T15:26:26.997645Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:662: [2:6:2053] Unsubscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1] 2025-05-29T15:26:26.997654Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-05-29T15:26:26.997658Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-05-29T15:26:26.997663Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:834: [2:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-05-29T15:26:26.997671Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-05-29T15:26:26.997675Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-05-29T15:26:26.997682Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-05-29T15:26:26.997693Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:9:2056] 2025-05-29T15:26:26.997703Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [2:6:2053] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2025-05-29T15:26:27.218359Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 1 }: sender# [3:8:2055] 2025-05-29T15:26:27.218385Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [3:6:2053] Upsert description: path# path 2025-05-29T15:26:27.218406Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 1, capabilities# 2025-05-29T15:26:27.218435Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-05-29T15:26:27.218442Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-05-29T15:26:27.218453Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-05-29T15:26:27.218458Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [3:6:2053] Commit generation: owner# 1, generation# 1 2025-05-29T15:26:27.218474Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:997: [3:6:2053] Handle 
NKikimr::NSchemeBoard::TReplica::TEvPrivate::TEvSendStrongNotifications { Owner: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] Test command err: 2025-05-29T15:26:27.050244Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-05-29T15:26:27.050271Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-05-29T15:26:27.050290Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-05-29T15:26:27.050296Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [1:6:2053] Commit generation: owner# 1, generation# 1 2025-05-29T15:26:27.050303Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:7:2054] 2025-05-29T15:26:27.050307Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:6:2053] Successful handshake: owner# 1, generation# 2 2025-05-29T15:26:27.263129Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2025-05-29T15:26:27.263153Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [2:6:2053] Upsert description: path# path 2025-05-29T15:26:27.263189Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-05-29T15:26:27.263222Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-05-29T15:26:27.263226Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-05-29T15:26:27.263264Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-05-29T15:26:27.263269Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-05-29T15:26:27.264420Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-05-29T15:26:27.264473Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [2:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:8:2055] 2025-05-29T15:26:27.264493Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-05-29T15:26:27.264498Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-05-29T15:26:27.264503Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:575: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-05-29T15:26:27.264513Z node 2 :SCHEME_BOARD_REPLICA DEBUG: 
replica.cpp:1089: [2:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [2:8:2055] 2025-05-29T15:26:27.476170Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-05-29T15:26:27.476193Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-05-29T15:26:27.476227Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-05-29T15:26:27.476235Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-05-29T15:26:27.476247Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-05-29T15:26:27.476273Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-05-29T15:26:27.476296Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-05-29T15:26:27.476311Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-05-29T15:26:27.476316Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-05-29T15:26:27.476322Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 3, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-05-29T15:26:27.476358Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-05-29T15:26:27.476363Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-05-29T15:26:27.476367Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:575: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-05-29T15:26:27.476377Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [3:6:2053] Upsert description: path# path 2025-05-29T15:26:27.476385Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-05-29T15:26:27.476391Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-05-29T15:26:27.476403Z node 3 
:SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [3:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 3 }: sender# [3:8:2055]
>> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables [GOOD]
>> Cdc::UpdatesLog[TopicRunner] [FAIL]
>> Cdc::VirtualTimestamps[PqRunner]
>> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink [FAIL]
>> THiveTest::TestLockTabletExecutionDeleteReboot [GOOD]
>> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink
>> THiveTest::TestLockTabletExecutionBadUnlock
>> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice [FAIL]
>> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex
>> Cdc::NewAndOldImagesLog[TopicRunner] [FAIL]
>> Cdc::NewAndOldImagesLogDebezium
>> TExportToS3Tests::RebootDuringCompletion
>> Cdc::HugeKey[TopicRunner] [FAIL]
>> Cdc::HugeKeyDebezium
>> THiveTest::TestLockTabletExecutionBadUnlock [GOOD]
>> THiveTest::TestLockTabletExecutionGoodUnlock
>> Cdc::VirtualTimestamps[PqRunner] [FAIL]
>> Cdc::VirtualTimestamps[YdsRunner]
>> TExportToS3Tests::ExportPartitioningSettings
>> TExportToS3Tests::RebootDuringCompletion [GOOD]
>> THiveTest::TestLockTabletExecutionGoodUnlock [GOOD]
>> THiveTest::TestLocalRegistrationInSharedHive
>> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed
>> TExportToS3Tests::RebootDuringAbortion
>> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed
>> TExportToS3Tests::ShouldSucceedOnConcurrentTxs
>> TExportToS3Tests::ExportPartitioningSettings [GOOD]
>> THiveTest::TestHiveBalancerNodeRestarts [GOOD]
>> THiveTest::TestHiveBalancerDifferentResources2
>> Cdc::NewAndOldImagesLogDebezium [FAIL]
>> Cdc::OldImageLogDebezium
>> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink [FAIL]
>> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink
>> TExportToS3Tests::ExportIndexTablePartitioningSettings
>> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD]
>> TExportToS3Tests::RebootDuringAbortion [GOOD]
>> TExportToS3Tests::ExportStartTime
>> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed
>> Cdc::HugeKeyDebezium [FAIL]
>> Cdc::Drop[PqRunner]
>> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD]
>> Cdc::VirtualTimestamps[YdsRunner] [FAIL]
>> Cdc::VirtualTimestamps[TopicRunner]
>> TExportToS3Tests::ShouldSucceedOnConcurrentExport
>> THiveTest::TestLocalRegistrationInSharedHive [GOOD]
>> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD]
>> TExportToS3Tests::ExportStartTime [GOOD]
>> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex [FAIL]
>> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable
>> TExportToS3Tests::EnableChecksumsPersistance
>> TExportToS3Tests::SchemaMapping
>> Cdc::Drop[PqRunner] [GOOD]
>> Cdc::Drop[YdsRunner]
>> KqpRanges::UpdateWhereInMultipleUpdate
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithNullKeysComplex
Test command err: Trying to start YDB, gRPC: 13630, MsgBus: 29405 2025-05-29T15:26:10.574938Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889347685528572:2196];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:10.583766Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d86/r3tmp/tmpabpPdL/pdisk_1.dat
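The `>> TestName [GOOD]` / `>> TestName [FAIL]` markers in the roll-up above are emitted as tests finish on different workers, so the same test can appear several times before reaching its final status. When triaging a log like this, a small filter that keeps only the last recorded status per test is handy; only the marker convention is taken from the output above, the tool itself is a hypothetical helper, not part of ya:

```cpp
// Reads a ya test log on stdin and prints the last recorded status per
// test, so retries like "X [FAIL] ... X [GOOD]" collapse into one row.
#include <iostream>
#include <map>
#include <string>
#include <vector>

int main() {
    std::vector<std::string> tokens;
    std::string tok;
    while (std::cin >> tok)
        tokens.push_back(tok);

    std::map<std::string, std::string> lastStatus;
    for (size_t i = 0; i + 1 < tokens.size(); ++i) {
        if (tokens[i] != ">>")
            continue;
        const std::string& name = tokens[i + 1];
        // A bracketed status like "[GOOD]" may follow the name;
        // a bare ">> Name" means the test just started.
        if (i + 2 < tokens.size() && tokens[i + 2].size() > 2 &&
            tokens[i + 2].front() == '[' && tokens[i + 2].back() == ']') {
            lastStatus[name] =
                tokens[i + 2].substr(1, tokens[i + 2].size() - 2);
            ++i; // skip the status token
        } else {
            lastStatus[name] = "RUNNING";
        }
        ++i; // skip the name token
    }
    for (const auto& [name, status] : lastStatus)
        std::cout << status << "\t" << name << "\n";
}
```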
2025-05-29T15:26:10.688261Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889347685528415:2079] 1748532370571095 != 1748532370571098 2025-05-29T15:26:10.688560Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13630, node 1 2025-05-29T15:26:10.705511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:10.705525Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:10.705527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:10.705567Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29405 2025-05-29T15:26:10.731707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:10.731735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:10.733150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29405 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:26:10.775436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:10.781011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:26:10.810681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:10.863734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:26:10.908910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:10.929277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:11.023015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889351980497341:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:11.023054Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:11.072519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:26:11.084139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:26:11.093807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:26:11.107893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:26:11.128767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:26:11.151822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:26:11.166416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:26:11.183040Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889351980497994:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:11.183061Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:11.183133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889351980497999:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:11.184000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:26:11.188868Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889351980498001:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:26:11.272107Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889351980498052:3394] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:26:11.416980Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889351980498061:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:26:11.418464Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjEwOWZlMzctMjU4YjMwMzAtMzQ3ZGRlODktZWVmMjJlNTc=, ActorId: [1:7509889351980497314:2400], ActorState: ExecuteState, TraceId: 01jwead7qe0b8g4wbv60q890d0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:26:11.419232Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DDD325 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DD4326 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F75616 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13C2F4E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26431D72 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26452CCC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26452CCC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26452CCC 8. /-S/util/thread/pool.h:71: Process @ 0x26452CCC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE4AC9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DE34B9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DE34B9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDE92C 13. ??:0: ?? @ 0x7F834C1B9AC2 14. ??:0: ?? @ 0x7F834C24B84F Trying to start YDB, gRPC: 32202, MsgBus: 32486 2025-05-29T15:26:17.627113Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889379434882402:2093];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:17.627479Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d86/r3tmp/tmpn5SXpf/pdisk_1.dat 2025-05-29T15:26:17.677399Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32202, node 1 2025-05-29T15:26:17.7069 ... mon/kqp_ut_common.h:375: AssertSuccessResult @ 0x13C2F4E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26431D72 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26452CCC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26452CCC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26452CCC 8. /-S/util/thread/pool.h:71: Process @ 0x26452CCC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE4AC9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DE34B9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DE34B9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDE92C 13. ??:0: ?? @ 0x7FED42346AC2 14. ??:0: ?? 
@ 0x7FED423D884F Trying to start YDB, gRPC: 1779, MsgBus: 1068 2025-05-29T15:26:24.435007Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889408345548409:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:24.437546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d86/r3tmp/tmpCaiuW1/pdisk_1.dat 2025-05-29T15:26:24.553536Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1779, node 1 2025-05-29T15:26:24.599575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:24.599604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:24.600632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:26:24.602871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:24.602880Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:24.602883Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:24.602926Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1068 TClient is connected to server localhost:1068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:26:24.700208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:24.708448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:26:24.716064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
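Every failed attempt in this block ends the same way: the KQP compiler reports `yql/essentials/ast/yql_expr.h:1874: index out of range`, the query status comes back non-successful, and `AssertSuccessResult` (kqp_ut_common.h:375 in the traces) escalates that into a unittest panic that aborts the worker thread. A rough sketch of that assert-on-status pattern, using a stand-in status type since the real argument is NYdb::TStatus and the real helper raises a test-framework panic rather than throwing:

```cpp
// Hedged sketch of an assert-on-status helper like the AssertSuccessResult
// named in the stack traces; TStatusSketch is a stand-in for NYdb::TStatus.
#include <iostream>
#include <stdexcept>
#include <string>

struct TStatusSketch {
    bool Success;
    std::string Issues;
    bool IsSuccess() const { return Success; }
    const std::string& GetIssues() const { return Issues; }
};

void AssertSuccessResultSketch(const TStatusSketch& result) {
    if (!result.IsSuccess()) {
        // Mirrors the log: the failing assertion text plus the query issues.
        throw std::runtime_error(
            "assertion failed: (result.IsSuccess())\n" + result.GetIssues());
    }
}

int main() {
    TStatusSketch failed{false,
        ": Fatal: Execution, code: 1060\n"
        ": Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1"};
    try {
        AssertSuccessResultSketch(failed);
    } catch (const std::exception& e) {
        std::cerr << e.what() << "\n"; // the test framework would abort here
    }
}
```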
2025-05-29T15:26:24.795581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:26:24.831627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:24.851661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-05-29T15:26:24.933039Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889408345549874:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:24.933057Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:24.984495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:26:24.994568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:26:25.011528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:26:25.067933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:26:25.082952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:26:25.094765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:26:25.122025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:26:25.143911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889412640517823:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:25.143939Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:25.144010Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889412640517828:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:25.144895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:26:25.152262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-05-29T15:26:25.152354Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889412640517830:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:26:25.246027Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889412640517881:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:26:25.411704Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889412640517890:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:26:25.413285Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjRhNzc5NzItY2U1YTg3MTQtNjhlZGNhOTMtZjcyYTA5MTE=, ActorId: [1:7509889408345549870:2401], ActorState: ExecuteState, TraceId: 01jweadnbqfa346tx91gpk8s67, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:26:25.418930Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DDD325 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DD4326 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F75616 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13C2F4E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x26431D72 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26452CCC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26452CCC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26452CCC 8. /-S/util/thread/pool.h:71: Process @ 0x26452CCC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE4AC9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DE34B9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DE34B9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDE92C 13. ??:0: ?? @ 0x7F737351DAC2 14. ??:0: ?? @ 0x7F73735AF84F >> Cdc::OldImageLogDebezium [FAIL] >> Cdc::NewImageLogDebezium >> TOlapReboots::CreateDropStore [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport >> TExportToS3Tests::EnableChecksumsPersistance [GOOD] >> TExportToS3Tests::EncryptedExport >> TExportToS3Tests::SchemaMapping [GOOD] >> Cdc::VirtualTimestamps[TopicRunner] [FAIL] >> Cdc::Write[PqRunner] >> Cdc::Drop[YdsRunner] [GOOD] >> Cdc::Drop[TopicRunner] >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink [FAIL] >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] Test command err: 2025-05-29T15:26:06.131232Z node 1 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:26:06.131267Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:26:06.132109Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:06.132176Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-05-29T15:26:06.132327Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:26:06.132571Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-05-29T15:26:06.132582Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:26:06.132774Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:29:2075] ControllerId# 72057594037932033 2025-05-29T15:26:06.132780Z node 1 
:BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:26:06.132806Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:26:06.132833Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:26:06.134415Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:26:06.134453Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:26:06.136831Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:26:06.136845Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:294: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:26:06.137219Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.137253Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.137287Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.137320Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.137353Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.137387Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:44:2087] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.137420Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:45:2088] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.137425Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:26:06.137439Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037932033] ::Bootstrap [1:29:2075] 2025-05-29T15:26:06.137445Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037932033] lookup [1:29:2075] 2025-05-29T15:26:06.137454Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:26:06.137483Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:29:2075] 2025-05-29T15:26:06.137493Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:06.137498Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:26:06.137506Z node 1 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:26:06.137658Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:06.137693Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:06.137699Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 
NumItemsRead# 0 2025-05-29T15:26:06.138589Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-05-29T15:26:06.139232Z node 1 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-05-29T15:26:06.139266Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-05-29T15:26:06.139280Z node 1 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:26:06.139285Z node 1 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:26:06.139328Z node 1 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:50:2091] 2025-05-29T15:26:06.172420Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:29:2075] 2025-05-29T15:26:06.172863Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037936129] ::Bootstrap [1:33:2063] 2025-05-29T15:26:06.172878Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037936129] lookup [1:33:2063] 2025-05-29T15:26:06.172937Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:06.172978Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:26:06.173006Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-05-29T15:26:06.173014Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-05-29T15:26:06.173023Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-05-29T15:26:06.173031Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-05-29T15:26:06.173090Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:06.173103Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037927937] ::Bootstrap [1:50:2091] 2025-05-29T15:26:06.173107Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037927937] lookup [1:50:2091] 2025-05-29T15:26:06.173126Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-05-29T15:26:06.173133Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:26:06.173163Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-05-29T15:26:06.173176Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-05-29T15:26:06.173236Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 
1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:06.173272Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:06.173282Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037927937] queue send [1:50:2091] 2025-05-29T15:26:06.173296Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-05-29T15:26:06.173324Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:610: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:24343667:0] : 2}, {[1:2199047599219:0] : 8}, {[1:1099535971443:0] : 5}}}} 2025-05-29T15:26:06.173336Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:361: DropEntry tabletId: 72057594037932033 followers: 0 2025-05-29T15:26:06.173770Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:172: TClient[72057594037932033] forward result error, check reconnect [1:29:2075] 2025-05-29T15:26:06.173783Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:562: TClient[72057594037932033] schedule retry [1:29:2075] 2025-05-29T15:26:06.173795Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-05-29T15:26:06.173805Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:361} StateFunc Type# 2146435075 Sender# [1:47:2090] SessionId# [0:0:0] Cookie# 0 2025-05-29T15:26:06.173814Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.036090s 2025-05-29T15:26:06.173861Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:361} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-05-29T15:26:06.174598Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:26:06.177560Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:29:2075] 2025-05-29T15:26:06.177714Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-05-29T15:26:06.177725Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-05-29T15:26:06.177750Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 720575 ... 
37]::SendEvent [38:549:2091] 2025-05-29T15:26:30.033853Z node 37 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-05-29T15:26:30.033885Z node 37 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-05-29T15:26:30.033900Z node 37 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-05-29T15:26:30.033921Z node 37 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [38:549:2091] 2025-05-29T15:26:30.033981Z node 38 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [37:460:2365] CurrentLeaderTablet: [37:475:2377] CurrentGeneration: 1 CurrentStep: 0} 2025-05-29T15:26:30.034024Z node 38 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [37:460:2365] CurrentLeaderTablet: [37:475:2377] CurrentGeneration: 1 CurrentStep: 0} 2025-05-29T15:26:30.034052Z node 38 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:610: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [37:460:2365] CurrentLeaderTablet: [37:475:2377] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 2 Signature: {{[37:24343667:0] : 3}, {[37:1099535971443:0] : 6}}}} 2025-05-29T15:26:30.034059Z node 38 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:354: ApplyEntry leader tabletId: 72075186224037888 followers: 0 2025-05-29T15:26:30.034066Z node 38 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 38 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [37:460:2365] 2025-05-29T15:26:30.034083Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:195: TClient[72075186224037888] forward result remote node 37 [38:550:2092] 2025-05-29T15:26:30.034161Z node 37 :HIVE TRACE: hive_impl.cpp:114: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([38:549:2091]) [37:558:2426] 2025-05-29T15:26:30.034177Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:229: TClient[72075186224037888] remote node connected [38:550:2092] 2025-05-29T15:26:30.034182Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72075186224037888]::SendEvent [38:550:2092] 2025-05-29T15:26:30.034234Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72057594037927937] connected with status OK role: Leader [38:549:2091] 2025-05-29T15:26:30.034241Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72057594037927937] send queued [38:549:2091] 2025-05-29T15:26:30.034246Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:629: TClient[72057594037927937] push event to server [38:549:2091] 2025-05-29T15:26:30.034260Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72057594037927937]::SendEvent [38:549:2091] 2025-05-29T15:26:30.034283Z node 37 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037888] Accept Connect Originator# [38:550:2092] 2025-05-29T15:26:30.034300Z node 38 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594037927937 Status=OK 
ClientId=[38:549:2091]} 2025-05-29T15:26:30.034360Z node 37 :HIVE TRACE: hive_impl.cpp:114: HIVE#72075186224037888 Handle TEvTabletPipe::TEvServerConnected([38:550:2092]) [37:559:2427] 2025-05-29T15:26:30.034417Z node 37 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594037927937] Push Sender# [38:546:2091] EventType# 268959744 2025-05-29T15:26:30.034447Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72075186224037888] connected with status OK role: Leader [38:550:2092] 2025-05-29T15:26:30.034452Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72075186224037888] send queued [38:550:2092] 2025-05-29T15:26:30.034456Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:629: TClient[72075186224037888] push event to server [38:550:2092] 2025-05-29T15:26:30.034464Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72075186224037888]::SendEvent [38:550:2092] 2025-05-29T15:26:30.034497Z node 37 :HIVE DEBUG: hive_impl.cpp:141: HIVE#72057594037927937 Handle TEvLocal::TEvRegisterNode from [38:546:2091] HiveId: 72057594037927937 ServicedDomains { SchemeShard: 72057594046678944 PathId: 2 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } 2025-05-29T15:26:30.034524Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-05-29T15:26:30.034533Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:26:30.034546Z node 37 :HIVE DEBUG: tx__register_node.cpp:21: HIVE#72057594037927937 THive::TTxRegisterNode(38)::Execute 2025-05-29T15:26:30.034586Z node 37 :HIVE DEBUG: hive_impl.cpp:361: HIVE#72057594037927937 ProcessWaitQueue (0) 2025-05-29T15:26:30.034593Z node 37 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72057594037927937 ProcessBootQueue (0) 2025-05-29T15:26:30.034597Z node 37 :HIVE TRACE: hive_impl.cpp:344: HIVE#72057594037927937 ProcessBootQueue - sending 2025-05-29T15:26:30.034603Z node 37 :HIVE DEBUG: hive_impl.cpp:361: HIVE#72057594037927937 ProcessWaitQueue (0) 2025-05-29T15:26:30.034607Z node 37 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72057594037927937 ProcessBootQueue (0) 2025-05-29T15:26:30.034620Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(38, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:30.034639Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{14, redo 208b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-05-29T15:26:30.034649Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:26:30.034704Z node 38 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72075186224037888 Status=OK ClientId=[38:550:2092]} 2025-05-29T15:26:30.034729Z node 37 :HIVE TRACE: hive_impl.cpp:328: HIVE#72057594037927937 ProcessBootQueue - executing 2025-05-29T15:26:30.034801Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-05-29T15:26:30.034810Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:26:30.034817Z node 37 :HIVE DEBUG: 
tx__process_boot_queue.cpp:18: HIVE#72057594037927937 THive::TTxProcessBootQueue()::Execute 2025-05-29T15:26:30.034823Z node 37 :HIVE DEBUG: hive_impl.cpp:222: HIVE#72057594037927937 Handle ProcessBootQueue (size: 0) 2025-05-29T15:26:30.034828Z node 37 :HIVE DEBUG: hive_impl.cpp:225: HIVE#72057594037927937 Handle ProcessWaitQueue (size: 0) 2025-05-29T15:26:30.034834Z node 37 :HIVE DEBUG: hive_impl.cpp:302: HIVE#72057594037927937 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-05-29T15:26:30.034843Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-05-29T15:26:30.034849Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:26:30.034875Z node 37 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72075186224037888] Push Sender# [38:547:2092] EventType# 268959744 2025-05-29T15:26:30.034903Z node 37 :HIVE DEBUG: hive_impl.cpp:798: HIVE#72057594037927937 TEvInterconnect::TEvNodeInfo NodeId 38 Location DataCenter: "2" Module: "2" Rack: "2" Unit: "2" 2025-05-29T15:26:30.034936Z node 37 :HIVE DEBUG: hive_impl.cpp:141: HIVE#72075186224037888 Handle TEvLocal::TEvRegisterNode from [38:547:2092] HiveId: 72075186224037888 ServicedDomains { SchemeShard: 72057594046678944 PathId: 2 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } 2025-05-29T15:26:30.034947Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:5} Tx{6, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-05-29T15:26:30.034953Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:5} Tx{6, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:26:30.034959Z node 37 :HIVE DEBUG: tx__register_node.cpp:21: HIVE#72075186224037888 THive::TTxRegisterNode(38)::Execute 2025-05-29T15:26:30.034980Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(38, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:30.034985Z node 37 :HIVE DEBUG: hive_impl.cpp:361: HIVE#72075186224037888 ProcessWaitQueue (0) 2025-05-29T15:26:30.034989Z node 37 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72075186224037888 ProcessBootQueue (0) 2025-05-29T15:26:30.034992Z node 37 :HIVE TRACE: hive_impl.cpp:344: HIVE#72075186224037888 ProcessBootQueue - sending 2025-05-29T15:26:30.034997Z node 37 :HIVE DEBUG: hive_impl.cpp:361: HIVE#72075186224037888 ProcessWaitQueue (0) 2025-05-29T15:26:30.035001Z node 37 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72075186224037888 ProcessBootQueue (0) 2025-05-29T15:26:30.035010Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(38, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:30.035020Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:5} Tx{6, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{6, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-05-29T15:26:30.035026Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:5} Tx{6, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:26:30.035057Z node 37 :HIVE TRACE: hive_impl.cpp:328: HIVE#72075186224037888 ProcessBootQueue - executing 2025-05-29T15:26:30.035063Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{7, NKikimr::NHive::TTxProcessBootQueue} queued, type 
NKikimr::NHive::TTxProcessBootQueue 2025-05-29T15:26:30.035068Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{7, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:26:30.035073Z node 37 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72075186224037888 THive::TTxProcessBootQueue()::Execute 2025-05-29T15:26:30.035078Z node 37 :HIVE DEBUG: hive_impl.cpp:199: HIVE#72075186224037888 ProcessBootQueue: 0 nodes connected out of 0 2025-05-29T15:26:30.035085Z node 37 :HIVE DEBUG: hive_impl.cpp:216: HIVE#72075186224037888 ProcessBootQueue - waiting until 586524-01-19T08:01:49.551615Z because of warmup, now: 1970-01-01T00:00:00.142448Z 2025-05-29T15:26:30.035091Z node 37 :HIVE DEBUG: hive_impl.cpp:353: HIVE#72075186224037888 PostponeProcessBootQueue (18446744073709.409167s) 2025-05-29T15:26:30.035100Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{7, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-05-29T15:26:30.035105Z node 37 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{7, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:26:30.035124Z node 37 :HIVE DEBUG: hive_impl.cpp:798: HIVE#72075186224037888 TEvInterconnect::TEvNodeInfo NodeId 38 Location DataCenter: "2" Module: "2" Rack: "2" Unit: "2" >> KqpNotNullColumns::AlterAddNotNullColumn >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath >> TExportToS3Tests::ShouldRetryAtFinalStage >> Cdc::NewImageLogDebezium [FAIL] >> Cdc::NaN[PqRunner] >> TExportToS3Tests::EncryptedExport [GOOD] >> Cdc::Write[PqRunner] [GOOD] >> Cdc::Write[YdsRunner] >> Cdc::Drop[TopicRunner] [GOOD] >> Cdc::DescribeStream ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::SchemaMapping [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:26:28.970984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:28.971030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:28.971036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:28.971041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:28.971060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:28.971064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
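A few entries above, the Hive log shows `ProcessBootQueue - waiting until 586524-01-19T08:01:49.551615Z because of warmup, now: 1970-01-01T00:00:00.142448Z` followed by `PostponeProcessBootQueue (18446744073709.409167s)`. The year-586524 deadline looks alarming but is arithmetically consistent with a saturated 64-bit microsecond timestamp (a `TInstant::Max()`-style sentinel) in the test's simulated clock. A minimal back-of-the-envelope check in plain C++17 (not YDB code) that reproduces both numbers:

```cpp
// Checks that the "586524-01-19T08:01:49.551615Z" warmup deadline is just
// a saturated 64-bit microsecond counter, i.e. the boot queue is parked
// "forever" while the warmup window is still open in simulated time.
#include <cstdint>
#include <cstdio>

int main() {
    const unsigned long long maxUs = UINT64_MAX;      // 2^64 - 1 microseconds
    const unsigned long long secs = maxUs / 1000000;  // 18446744073709
    const unsigned long long frac = maxUs % 1000000;  // 551615
    std::printf("sentinel = %llu.%06llu s after the epoch\n", secs, frac);

    // Convert to calendar years using the mean Gregorian year (365.2425 d):
    const double years = (double)secs / (365.2425 * 86400.0);
    std::printf("~%.0f years after 1970 -> year %.0f\n", years, 1970 + years);
    // Prints ~584554 years -> year 586524, matching the logged deadline.
    // The postpone delay in the next entry, 18446744073709.409167 s, is
    // exactly this sentinel minus the 0.142448 s of simulated time elapsed.
    return 0;
}
```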
2025-05-29T15:26:28.971074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:28.971087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:28.971182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:28.971280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:28.984688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:26:28.984707Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:28.987135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:28.987241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:28.987284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:28.990004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:28.990240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:28.990371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:28.990426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:28.991140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:28.991198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:28.991476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:28.991491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:28.991517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:28.991526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:28.991533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:28.991594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:26:28.994886Z node 1 :HIVE 
INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:26:29.016045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:29.016148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.016219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:29.016279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:29.016292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.017209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:29.017241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:29.017308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.017320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:29.017327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:29.017333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:29.017890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.017903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:29.017909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:29.018324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.018335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.018341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:29.018348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:29.019028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:29.019452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:29.019497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:29.019702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:29.019728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:29.019735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:29.019806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:26:29.019814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:29.019845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:26:29.019856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:26:29.020410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:29.020427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:29.020472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
15:26:31.262614Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-29T15:26:31.262618Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-05-29T15:26:31.262622Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:26:31.262635Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-05-29T15:26:31.262978Z node 4 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete 2025-05-29T15:26:31.263041Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-05-29T15:26:31.263048Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-05-29T15:26:31.263054Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-05-29T15:26:31.263286Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-05-29T15:26:31.263317Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:26:31.263379Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000010 2025-05-29T15:26:31.263462Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:31.263483Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 17179871339 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:31.263490Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:129: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000010, at schemeshard: 72057594046678944 2025-05-29T15:26:31.263515Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:180: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-05-29T15:26:31.263526Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-29T15:26:31.263530Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-29T15:26:31.263536Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-29T15:26:31.263540Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-29T15:26:31.263548Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:26:31.263556Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:26:31.263562Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-05-29T15:26:31.263568Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-29T15:26:31.263574Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710763:0 2025-05-29T15:26:31.263578Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710763:0 2025-05-29T15:26:31.263585Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:26:31.263591Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-05-29T15:26:31.263596Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-05-29T15:26:31.263602Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-29T15:26:31.263845Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:31.264134Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:31.264145Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:31.264177Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:26:31.264199Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:31.264204Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:208:2209], at 
schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-05-29T15:26:31.264210Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:208:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-05-29T15:26:31.264345Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:31.264356Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:31.264361Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-29T15:26:31.264365Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-29T15:26:31.264370Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:26:31.264548Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:31.264559Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:31.264564Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-29T15:26:31.264568Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:26:31.264578Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:26:31.264589Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-05-29T15:26:31.264593Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:126:2151] 2025-05-29T15:26:31.265026Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:31.265092Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:31.265106Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-05-29T15:26:31.265116Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710763 2025-05-29T15:26:31.265123Z node 4 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute 2025-05-29T15:26:31.265128Z node 4 :EXPORT DEBUG: schemeshard_export__create.cpp:1232: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-05-29T15:26:31.265133Z node 4 :EXPORT DEBUG: schemeshard_export__create.cpp:1263: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-05-29T15:26:31.265428Z node 4 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete 2025-05-29T15:26:31.265445Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:26:31.265452Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:853:2781] TestWaitNotification: OK eventTxId 103 >> THiveTest::TestHiveBalancerDifferentResources2 [GOOD] >> THiveTest::TestHiveBalancerUselessNeighbourMoves >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable [FAIL] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable >> THiveTest::TestDrainWithMaxTabletsScheduled [GOOD] >> THiveTest::TestDownAfterDrain >> Cdc::Write[YdsRunner] [GOOD] >> Cdc::Write[TopicRunner] >> TExportToS3Tests::UidAsIdempotencyKey >> Cdc::DescribeStream [GOOD] >> Cdc::DecimalKey >> Cdc::NaN[PqRunner] [FAIL] >> Cdc::NaN[YdsRunner] >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink [FAIL] >> DataShardVolatile::NotCachingAbortingDeletes+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::EncryptedExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:26:29.394927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:29.394956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:29.394963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:29.394969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:29.394976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:29.394980Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:29.394990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:29.395006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:29.395122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:29.395206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:29.408671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:26:29.408699Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:29.415078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:29.415215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:29.415254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:29.416694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:29.416859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:29.416973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:29.417022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:29.417490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:29.417545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:29.417849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:29.417860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:29.417881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:29.417890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:29.417897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:29.417937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.419481Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:26:29.442085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:29.442202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.442276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:29.442327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:29.442340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.444638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:29.444680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:29.444760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.444774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:29.444780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:29.444787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:29.445904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.445921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:29.445927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:29.446283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.446292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
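The EncryptedExport run above traces the same suboperation lifecycle as the earlier SchemaMapping run: `Change state for txid 1:0 2 -> 3`, then `3 -> 128`, and later `128 -> 240` once `TEvOperationPlan` is handled. A sketch of that progression as a tiny state machine; the numeric codes are copied verbatim from the log, while the state names are illustrative guesses, not YDB's real `ETxState` values:

```cpp
// Illustrative model of the suboperation lifecycle in the log above
// ("Change state for txid 1:0 2 -> 3", "3 -> 128", "128 -> 240").
#include <cstdio>
#include <map>

enum EOpState {
    CreateParts    = 2,   // "TCreateParts opId# 1:0 ProgressState"
    ConfigureParts = 3,   // "NSubDomainState::TConfigureParts ProgressState"
    Propose        = 128, // "NSubDomainState::TPropose ProgressState"
    Done           = 240, // set when TEvOperationPlan is handled
};

int main() {
    // Each TTxOperationProgress run advances the suboperation one step,
    // which is what produces the "Change state" lines in the log.
    const std::map<int, int> next = {
        {CreateParts, ConfigureParts},
        {ConfigureParts, Propose},
        {Propose, Done},
    };
    for (int s = CreateParts; next.count(s); s = next.at(s))
        std::printf("Change state for txid 1:0 %d -> %d\n", s, next.at(s));
    return 0;
}
```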
2025-05-29T15:26:29.446298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:29.446305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:29.447030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:29.451055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:29.451118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:29.451341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:29.451384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:29.451396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:29.451484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:26:29.451493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:29.451531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:26:29.451543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:26:29.452065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:29.452075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:29.452128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
5:26:32.044469Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-29T15:26:32.044474Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-05-29T15:26:32.044479Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:26:32.044496Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-05-29T15:26:32.044935Z node 4 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete 2025-05-29T15:26:32.045023Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-05-29T15:26:32.045031Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-05-29T15:26:32.045036Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-05-29T15:26:32.045527Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-05-29T15:26:32.045562Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:26:32.045597Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000010 2025-05-29T15:26:32.045718Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:32.045760Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 17179871339 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:32.045768Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:129: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000010, at schemeshard: 72057594046678944 2025-05-29T15:26:32.045802Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:180: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-05-29T15:26:32.045812Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-29T15:26:32.045816Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-29T15:26:32.045821Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-29T15:26:32.045824Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-29T15:26:32.045834Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:26:32.045844Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:26:32.045850Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-05-29T15:26:32.045857Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-29T15:26:32.045862Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710763:0 2025-05-29T15:26:32.045866Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710763:0 2025-05-29T15:26:32.045877Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:26:32.045882Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-05-29T15:26:32.045886Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-05-29T15:26:32.045890Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-29T15:26:32.046518Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:32.046906Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:32.046918Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:32.046956Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:26:32.046980Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:32.046985Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:208:2209], at 
schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-05-29T15:26:32.046995Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:208:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-05-29T15:26:32.047171Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:32.047185Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:32.047190Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-29T15:26:32.047196Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-29T15:26:32.047201Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:26:32.047294Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:32.047305Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:32.047310Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-29T15:26:32.047314Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:26:32.047319Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:26:32.047328Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-05-29T15:26:32.047334Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:126:2151] 2025-05-29T15:26:32.047896Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:32.048092Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:32.048110Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-05-29T15:26:32.048121Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710763 2025-05-29T15:26:32.048132Z node 4 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute 2025-05-29T15:26:32.048137Z node 4 :EXPORT DEBUG: schemeshard_export__create.cpp:1232: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-05-29T15:26:32.048143Z node 4 :EXPORT DEBUG: schemeshard_export__create.cpp:1263: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-05-29T15:26:32.048465Z node 4 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete 2025-05-29T15:26:32.048482Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:26:32.048489Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:1119:2992] TestWaitNotification: OK eventTxId 103 >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> TExportToS3Tests::CheckItemProgress >> TExportToS3Tests::UserSID >> Cdc::Write[TopicRunner] [GOOD] >> Cdc::UpdateStream >> TExportToS3Tests::UserSID [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateDropStore [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:24:39.673435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:39.673462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:39.673468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:39.673475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 
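Note the configuration contrast just above: this reboot test prints `StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0`, whereas the export tests earlier ran with `0.100000s` / `100`. A hypothetical sketch (not YDB's implementation) of what such knobs typically degenerate to; treating a zero batch size as "batch of one" is an assumption made purely for illustration:

```cpp
// Two StatsBatching configurations as printed in these logs: 0.1s/100
// buffers items, while 0s/0 degenerates to flushing every single item.
#include <algorithm>
#include <cstdio>
#include <vector>

struct TStatsBatcher {
    double timeoutSeconds; // flush at latest this long after the first item
    size_t maxBatchSize;   // flush once this many items are buffered
    std::vector<int> pending;

    // Returns true when the caller should flush the batch immediately.
    bool Add(int stat) {
        pending.push_back(stat);
        const size_t limit = std::max<size_t>(maxBatchSize, 1); // assumption
        return pending.size() >= limit || timeoutSeconds == 0.0;
    }
};

int main() {
    TStatsBatcher rebootTest{0.0, 0};   // 0s / 0   -> every item flushes
    TStatsBatcher exportTest{0.1, 100}; // 0.1s/100 -> items are buffered
    std::printf("reboot-test config flushes at once: %s\n",
                rebootTest.Add(42) ? "yes" : "no");
    std::printf("export-test config keeps buffering: %s\n",
                exportTest.Add(42) ? "yes" : "no");
    return 0;
}
```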
2025-05-29T15:24:39.673483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:39.673487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:39.673498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:39.673533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:39.673661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:39.673749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:39.693816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:39.693841Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:39.693966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:24:39.697460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:39.697492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:39.697551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:39.700988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:39.701076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:39.701226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.701486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:39.702293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:39.702346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:39.702651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:39.702665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:39.702707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
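The `TTxPublishToSchemeBoard` entries here, and the longer `TEvUpdateAck` exchanges in the export-test output further above, all follow one pattern: publish N path versions, count down `Publication in-flight, count: N` as acks arrive, and send `TEvNotifyTxCompletionResult` at zero. A minimal model of that bookkeeping (illustrative, not YDB code; the printed strings mirror the log, the structure is a guess):

```cpp
// Ack-counting publication tracker matching the schemeshard log pattern.
#include <cstdint>
#include <cstdio>
#include <map>

struct TPublicationTracker {
    std::map<uint64_t, uint64_t> pending; // pathId -> awaited version

    void Publish(uint64_t pathId, uint64_t version) { pending[pathId] = version; }

    // Returns true when the last outstanding path has been acknowledged.
    bool AckPublish(uint64_t pathId, uint64_t version) {
        auto it = pending.find(pathId);
        if (it == pending.end() || version < it->second)
            return false; // stale or duplicate ack, ignore
        pending.erase(it);
        std::printf("Publication in-flight, count: %zu\n", pending.size());
        return pending.empty();
    }
};

int main() {
    // Values mirror txId 281474976710763 above: path 1 at version 13, and
    // path 4 at 18446744073709551615 (the "path erased" version marker).
    TPublicationTracker tx;
    tx.Publish(1, 13);
    tx.Publish(4, 18446744073709551615ull);
    tx.AckPublish(1, 13);
    if (tx.AckPublish(4, 18446744073709551615ull))
        std::printf("Publication complete, notify & remove\n");
    return 0;
}
```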
2025-05-29T15:24:39.702716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:39.702723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:39.702763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:24:39.704292Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:24:39.728343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:39.728424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.728502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:39.728554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:39.728566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.729433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.729463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:39.729549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.729569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:39.729577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:39.729583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:39.730143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.730155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:39.730161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:39.730523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.730534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.730541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:39.730550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:39.731336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:39.731828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:39.731887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:39.732120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.732155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:39.732180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:39.732269Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
nd, to populator: [192:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2025-05-29T15:26:30.957217Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [192:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-05-29T15:26:30.957310Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:26:30.957318Z node 192 :FLAT_TX_SCHEMESHARD INFO: drop_store.cpp:182: TDropOlapStore TProposedWaitParts operationId# 1003:0 ProgressState at schemeshard: 72057594046678944 2025-05-29T15:26:30.957327Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: drop_store.cpp:202: TDropOlapStore TProposedWaitParts operationId# 1003:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-05-29T15:26:30.957400Z node 192 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:26:30.957430Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:26:30.957435Z node 192 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:26:30.957441Z node 192 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:26:30.957447Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:26:30.957516Z node 192 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:26:30.957527Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:26:30.957531Z node 192 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:26:30.957535Z node 192 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:26:30.957539Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:26:30.957548Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-05-29T15:26:30.958152Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-05-29T15:26:30.958182Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 0, tablet: 72075186233409546 2025-05-29T15:26:30.958379Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6146: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 2025-05-29T15:26:30.958387Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-05-29T15:26:30.958401Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 2025-05-29T15:26:30.958409Z node 192 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:0 129 -> 130 2025-05-29T15:26:30.958798Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:26:30.958843Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:26:30.958918Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:26:30.958949Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:26:30.958955Z node 192 :FLAT_TX_SCHEMESHARD INFO: drop_store.cpp:235: TDropOlapStore TProposedDeleteParts operationId# 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:30.958970Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:26:30.958996Z node 192 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:26:30.959001Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:26:30.959006Z node 192 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:26:30.959010Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:26:30.959014Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-05-29T15:26:30.959019Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:26:30.959024Z node 192 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:26:30.959029Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:26:30.959050Z node 192 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:26:30.959513Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:26:30.959600Z node 192 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:26:30.959675Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:30.959852Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:26:30.960035Z node 192 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186233409546;self_id=[192:335:2321];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; Forgetting tablet 72075186233409546 2025-05-29T15:26:30.961032Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:26:30.961044Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:26:30.961059Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:26:30.961971Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:26:30.961987Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:26:30.962124Z node 192 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2025-05-29T15:26:30.962167Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:26:30.962175Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:26:30.962240Z node 192 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:26:30.962257Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:26:30.962262Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [192:455:2425] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:26:30.962334Z node 192 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false 
BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:26:30.962362Z node 192 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 41us result status StatusPathDoesNotExist 2025-05-29T15:26:30.962395Z node 192 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/OlapStore" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TExportToS3Tests::Topics >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx5 [GOOD] >> TExportToS3Tests::Topics [GOOD] >> THealthCheckTest::NoBscResponse [GOOD] >> THealthCheckTest::LayoutIncorrect >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath [GOOD] >> TReplicaCombinationTest::MigratedPathRecreation >> TExportToS3Tests::TopicsWithPermissions >> Cdc::NaN[YdsRunner] [FAIL] >> Cdc::NaN[TopicRunner] >> Cdc::UpdateStream [GOOD] >> Cdc::UpdateShardCount >> TExportToS3Tests::TopicsWithPermissions [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx5 [GOOD] Test command err: iteration# 5 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 11 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 17 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 23 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 29 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 35 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 41 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 47 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 53 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 59 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 65 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 71 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 77 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 83 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 89 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 95 
BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 [iteration# 101 through iteration# 1961 omitted: every captured iteration reports the identical counters BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218; the captured output is itself truncated between iteration# 689 and iteration# 1367] iteration# 1967 BlobsWritten# 2041
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1973 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1979 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1985 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1991 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1997 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2003 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2009 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2015 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2021 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2027 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2033 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2039 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 >> Cdc::DecimalKey [FAIL] >> Cdc::DropColumn >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] >> TExportToS3Tests::ShouldCheckQuotasExportsLimited >> THiveTest::TestDownAfterDrain [GOOD] >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 >> TExportToS3Tests::CheckItemProgress [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] Test command err: 2025-05-29T15:26:27.357578Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:7:2054] 2025-05-29T15:26:27.357607Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:6:2053] Successful handshake: owner# 800, generation# 1 2025-05-29T15:26:27.357626Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:7:2054] 2025-05-29T15:26:27.357631Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-05-29T15:26:27.357641Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2025-05-29T15:26:27.357645Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:6:2053] Successful handshake: owner# 800, generation# 1 2025-05-29T15:26:27.357651Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2025-05-29T15:26:27.357655Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-05-29T15:26:27.357742Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 103 2025-05-29T15:26:27.357750Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [1:6:2053] Update description: path# /Root/Tenant, pathId# [OwnerId: 
800, LocalPathId: 2], deletion# false 2025-05-29T15:26:27.365460Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [1:6:2053] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-05-29T15:26:27.365558Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 103 2025-05-29T15:26:27.365565Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [1:6:2053] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-05-29T15:26:27.365573Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [1:6:2053] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-05-29T15:26:27.365609Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:9:2056] 2025-05-29T15:26:27.365637Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-05-29T15:26:27.383882Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:10:2057] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:11:2058] 2025-05-29T15:26:27.383906Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:10:2057] Successful handshake: owner# 800, generation# 1 2025-05-29T15:26:27.383921Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [1:10:2057] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:11:2058] 2025-05-29T15:26:27.383928Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [1:10:2057] Commit generation: owner# 800, generation# 1 2025-05-29T15:26:27.383937Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:10:2057] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:12:2059] 2025-05-29T15:26:27.383941Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:10:2057] Successful handshake: owner# 900, generation# 1 2025-05-29T15:26:27.383947Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [1:10:2057] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:12:2059] 2025-05-29T15:26:27.383951Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [1:10:2057] Commit generation: owner# 900, generation# 1 2025-05-29T15:26:27.383975Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:10:2057] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:11:2058], cookie# 0, event size# 103 2025-05-29T15:26:27.383982Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: 
[1:10:2057] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-05-29T15:26:27.383996Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [1:10:2057] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-05-29T15:26:27.384007Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:10:2057] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:12:2059], cookie# 0, event size# 103 2025-05-29T15:26:27.384011Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [1:10:2057] Update description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], deletion# false 2025-05-29T15:26:27.384018Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:884: [1:10:2057] Replace GSS by TSS description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], domainId# [OwnerId: 800, LocalPathId: 2], curPathId# [OwnerId: 800, LocalPathId: 2], curDomainId# [OwnerId: 800, LocalPathId: 2] 2025-05-29T15:26:27.384024Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [1:10:2057] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 900, LocalPathId: 1], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-05-29T15:26:27.384037Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:10:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:13:2060] 2025-05-29T15:26:27.384045Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:10:2057] Subscribe: subscriber# [1:13:2060], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2025-05-29T15:26:27.384096Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:14:2061] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:15:2062] 2025-05-29T15:26:27.384101Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:14:2061] Successful handshake: owner# 800, generation# 1 2025-05-29T15:26:27.384107Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [1:14:2061] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:15:2062] 2025-05-29T15:26:27.439620Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [1:14:2061] Commit generation: owner# 800, generation# 1 2025-05-29T15:26:27.439699Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:14:2061] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2025-05-29T15:26:27.439706Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:14:2061] Successful handshake: owner# 800, generation# 1 2025-05-29T15:26:27.439722Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [1:14:2061] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2025-05-29T15:26:27.439726Z node 
1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [1:14:2061] Commit generation: owner# 800, generation# 1 2025-05-29T15:26:27.439767Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:14:2061] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:15:2062], cookie# 0, event size# 103 2025-05-29T15:26:27.439775Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [1:14:2061] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-05-29T15:26:27.439788Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [1:14:2061] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-05-29T15:26:27.439804Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:14:2061] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:16:2063], cookie# 0, event size# 103 2025-05-29T15:26:27.439808Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [1:14:2061] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-05-29T15:26:27.439814Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [1:14:2061] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 2, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-05-29T15:26:27.439830Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:14:2061] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:17:2064] 2025-05-29T15:26:27.439849Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:14:2061] Subscribe: subscriber# [1:17:2064], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-05-29T15:26:27.439982Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:18:2065] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:19:2066] 2025-05-29T15:26:27.439987Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:18:2065] Successful handshake: owner# 800, generation# 1 2025-05-29T15:26:27.439993Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [1:18:2065] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:19:2066] 2025-05-29T15:26:27.439997Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [1:18:2065] Commit generation: owner# 800, generation# 1 2025-05-29T15:26:27.440004Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:18:2065] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generat ... 
{ DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-05-29T15:26:33.328978Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [2:398:2445] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:399:2446] 2025-05-29T15:26:33.328985Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [2:398:2445] Successful handshake: owner# 910, generation# 1 2025-05-29T15:26:33.328992Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [2:398:2445] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:399:2446] 2025-05-29T15:26:33.328996Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [2:398:2445] Commit generation: owner# 910, generation# 1 2025-05-29T15:26:33.329004Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [2:398:2445] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2025-05-29T15:26:33.329008Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [2:398:2445] Successful handshake: owner# 910, generation# 1 2025-05-29T15:26:33.329014Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [2:398:2445] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2025-05-29T15:26:33.427776Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [2:398:2445] Commit generation: owner# 910, generation# 1 2025-05-29T15:26:33.427861Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:398:2445] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:399:2446], cookie# 0, event size# 64 2025-05-29T15:26:33.427871Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [2:398:2445] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-05-29T15:26:33.427877Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [2:398:2445] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2025-05-29T15:26:33.427898Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:398:2445] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:400:2447], cookie# 0, event size# 130 2025-05-29T15:26:33.427904Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [2:398:2445] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# false 2025-05-29T15:26:33.427910Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:834: [2:398:2445] Path was explicitly deleted, ignoring: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2025-05-29T15:26:33.427943Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [2:398:2445] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:401:2448] 2025-05-29T15:26:33.427949Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [2:398:2445] Upsert description: path# /Root/Tenant/table_inside 2025-05-29T15:26:33.427967Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [2:398:2445] Subscribe: subscriber# [2:401:2448], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { 
DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Versions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-05-29T15:26:33.428650Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [2:402:2449] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:403:2450] 2025-05-29T15:26:33.428659Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [2:402:2449] Successful handshake: owner# 910, generation# 1 2025-05-29T15:26:33.428671Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [2:402:2449] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:403:2450] 2025-05-29T15:26:33.428676Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [2:402:2449] Commit generation: owner# 910, generation# 1 2025-05-29T15:26:33.428684Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [2:402:2449] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2025-05-29T15:26:33.428689Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [2:402:2449] Successful handshake: owner# 910, generation# 1 2025-05-29T15:26:33.428696Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [2:402:2449] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2025-05-29T15:26:33.428700Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [2:402:2449] Commit generation: owner# 910, generation# 1 2025-05-29T15:26:33.428712Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:402:2449] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:403:2450], cookie# 0, event size# 64 2025-05-29T15:26:33.428717Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [2:402:2449] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-05-29T15:26:33.428722Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [2:402:2449] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2025-05-29T15:26:33.428731Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:402:2449] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:404:2451], cookie# 0, event size# 64 2025-05-29T15:26:33.428735Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [2:402:2449] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-05-29T15:26:33.428746Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [2:402:2449] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:405:2452] 2025-05-29T15:26:33.428751Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [2:402:2449] Upsert description: path# /Root/Tenant/table_inside 2025-05-29T15:26:33.428758Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [2:402:2449] Subscribe: subscriber# [2:405:2452], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard:
800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Versions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-05-29T15:26:33.633722Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:7:2054] 2025-05-29T15:26:33.633755Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [3:6:2053] Successful handshake: owner# 800, generation# 1 2025-05-29T15:26:33.633770Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:7:2054] 2025-05-29T15:26:33.633776Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [3:6:2053] Commit generation: owner# 800, generation# 1 2025-05-29T15:26:33.633786Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:8:2055] 2025-05-29T15:26:33.633790Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [3:6:2053] Successful handshake: owner# 900, generation# 1 2025-05-29T15:26:33.633797Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:958: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:8:2055] 2025-05-29T15:26:33.633801Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:985: [3:6:2053] Commit generation: owner# 900, generation# 1 2025-05-29T15:26:33.633832Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 118 2025-05-29T15:26:33.633839Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [3:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2025-05-29T15:26:33.633851Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [3:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-05-29T15:26:33.633863Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 117 2025-05-29T15:26:33.633867Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [3:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2025-05-29T15:26:33.633874Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:884: [3:6:2053] Update description by newest path from tenant schemeshard: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], domainId# [OwnerId: 800, LocalPathId: 1], curPathId# [OwnerId: 800, LocalPathId: 1111], curDomainId# [OwnerId: 800, LocalPathId: 1] 2025-05-29T15:26:33.633880Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:575: [3:6:2053] Delete description: path#
/root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111] 2025-05-29T15:26:33.633887Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [3:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-05-29T15:26:33.633900Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 0 }: sender# [3:9:2056] 2025-05-29T15:26:33.633914Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# /root/db/dir_inside, domainOwnerId# 0, capabilities# =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 1111 PathOwnerId: 800 =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 11 PathOwnerId: 900 =========== DomainId: [OwnerId: 800, LocalPathId: 1] IsDeletion: 0 PathId: [OwnerId: 900, LocalPathId: 11] Versions: 1 >> TExportToS3Tests::CompletedExportEndTime >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable [FAIL] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex >> THiveTest::TestHiveBalancerUselessNeighbourMoves [GOOD] >> THiveTest::TestHiveBalancerWithImmovableTablets >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed >> Cdc::UpdateShardCount [GOOD] >> Cdc::UpdateRetentionPeriod >> TExportToS3Tests::ShouldCheckQuotasExportsLimited [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes+UseSink [FAIL] >> DataShardVolatile::NotCachingAbortingDeletes-UseSink >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings >> TExportToS3Tests::AuditCompletedExport >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited >> Cdc::NaN[TopicRunner] [FAIL] >> Cdc::RacyRebootAndSplitWithTxInflight ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TopicsWithPermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:26:32.790374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:32.790400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:32.790406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:32.790411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default 
configuration 2025-05-29T15:26:32.790417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:32.790421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:32.790429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:32.790441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:32.790535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:32.790613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:32.803610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:26:32.803634Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:32.806723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:32.806867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:32.806907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:32.809069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:32.809241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:32.809370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:32.809417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:32.809950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:32.810001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:32.810248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:32.810257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:32.810280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:32.810290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-05-29T15:26:32.810297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:32.810328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:26:32.811801Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:26:32.841890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:32.842012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:32.842089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:32.842139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:32.842152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:32.847181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:32.847224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:32.847316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:32.847330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:32.847336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:32.847342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:32.848076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:32.848092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:32.848098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:32.848702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 
1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:32.848728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:32.848737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:32.848746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:32.849540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:32.850060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:32.850104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:32.850311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:32.850338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:32.850346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:32.850424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:26:32.850433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:32.850467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:26:32.850479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:26:32.850876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:32.850884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-05-29T15:26:32.850934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:33.832999Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710757, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:33.833036Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710757, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:26:33.833050Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:33.833055Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:208:2209], at schemeshard: 72057594046678944, txId: 281474976710757, path id: 1 2025-05-29T15:26:33.833061Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:208:2209], at schemeshard: 72057594046678944, txId: 281474976710757, path id: 3 2025-05-29T15:26:33.833071Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2025-05-29T15:26:33.833077Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 281474976710757:0 ProgressState 2025-05-29T15:26:33.833090Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710757:0 progress is 1/1 2025-05-29T15:26:33.833094Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 1/1 2025-05-29T15:26:33.833100Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710757:0 progress is 1/1 2025-05-29T15:26:33.833107Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 1/1 2025-05-29T15:26:33.833112Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710757, ready parts: 1/1, is published: false 2025-05-29T15:26:33.833130Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 1/1 2025-05-29T15:26:33.833134Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710757:0 2025-05-29T15:26:33.833139Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710757:0 2025-05-29T15:26:33.833152Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:26:33.833158Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 281474976710757, publications: 
2, subscribers: 1 2025-05-29T15:26:33.833162Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710757, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-05-29T15:26:33.833166Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710757, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-05-29T15:26:33.833361Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710757 2025-05-29T15:26:33.833377Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710757 2025-05-29T15:26:33.833383Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710757 2025-05-29T15:26:33.833388Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710757, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:26:33.833393Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:26:33.833515Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710757 2025-05-29T15:26:33.833528Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710757 2025-05-29T15:26:33.833533Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710757 2025-05-29T15:26:33.833537Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710757, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-05-29T15:26:33.833541Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:26:33.833556Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710757, subscribers: 1 2025-05-29T15:26:33.833561Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:126:2151] 2025-05-29T15:26:33.842751Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2025-05-29T15:26:33.842809Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2025-05-29T15:26:33.842829Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710757 2025-05-29T15:26:33.842850Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710757 2025-05-29T15:26:33.844494Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:33.845933Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72057594046678944 describe pathId 2 took 78us result status StatusSuccess 2025-05-29T15:26:33.846053Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic" PathDescription { Self { Name: "Topic" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "Topic" PathId: 2 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 10 } YdbDatabasePath: "/MyRoot" } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409548 NextPartitionId: 2 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2025-05-29T15:26:33.849347Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:26:33.849363Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:26:33.849457Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:62: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:26:33.849465Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 REQUEST: PUT /create_topic.pb HTTP/1.1 HEADERS: Host: localhost:30179 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0FF7DE03-5003-42F5-8B46-58EB50C7F13A amz-sdk-request: attempt=1 content-length: 468 content-md5: 
eolrX6cGdcMGCBM8sb+6PQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /create_topic.pb / / 468 REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:30179 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BCC18DA9-6E31-41F6-8EC6-7145E1566B97 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:30179 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A976BD67-C7C6-4C51-9D9B-C01EED06BEF8 amz-sdk-request: attempt=1 content-length: 31 content-md5: NIbLWVScnysfZNPAOZgBoA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 31 2025-05-29T15:26:33.857395Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:26:33.857414Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:611:2539] TestWaitNotification: OK eventTxId 102 >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 [GOOD] >> THiveTest::TestDeleteTablet >> THealthCheckTest::LayoutIncorrect [GOOD] >> TExportToS3Tests::DropSourceTableBeforeTransferring >> Cdc::UpdateRetentionPeriod [GOOD] >> Cdc::SupportedTypes >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited [GOOD] >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> TExportToS3Tests::SchemaMappingEncryption >> THiveTest::TestDeleteTablet [GOOD] >> THiveTest::TestDeleteOwnerTablets ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::LayoutIncorrect [GOOD] Test command err: 2025-05-29T15:26:20.254694Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:701:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:26:20.255029Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:26:20.255076Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:26:20.255138Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:698:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:26:20.255221Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:26:20.255229Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00164d/r3tmp/tmpsTRxm2/pdisk_1.dat 2025-05-29T15:26:20.556787Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4836, node 1 TClient is connected to server localhost:23879 2025-05-29T15:26:20.719184Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:20.719206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:20.719210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:20.719302Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:26:22.685068Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:701:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:26:22.685206Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:26:22.685232Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:26:22.685697Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:698:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:26:22.685789Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:26:22.685845Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00164d/r3tmp/tmpoKWnvn/pdisk_1.dat 2025-05-29T15:26:22.935007Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26487, node 3 TClient is connected to server localhost:29653 2025-05-29T15:26:23.132171Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:23.132188Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:23.132192Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:23.132303Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-3" reason: "YELLOW-e9e2-1231c6b1-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-a594-3-3-42" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/ciyv/00164d/r3tmp/tmpoKWnvn/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a594-3-3-43" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-43" path: "/home/runner/.ya/build/build_root/ciyv/00164d/r3tmp/tmpoKWnvn/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a594-3-3-44" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/ciyv/00164d/r3tmp/tmpoKWnvn/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } location { id: 3 host: "::1" port: 12001 } 2025-05-29T15:26:24.307428Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:455:2415], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:26:24.307499Z node 5 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:26:24.307509Z node 5 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00164d/r3tmp/tmpjHkYAA/pdisk_1.dat 2025-05-29T15:26:24.453988Z node 5 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61265, node 5 TClient is connected to server localhost:4828 2025-05-29T15:26:24.637772Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:24.637789Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:24.637794Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:24.637920Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-be81" status: RED message: "Database has storage issues" reason: "RED-caea" type: "DATABASE" level: 1 } issue_log { id: "RED-caea" status: RED message: "There are no storage pools" type: "STORAGE" level: 2 } database_status { name: "/Root/database" overall: RED storage { overall: RED } compute { overall: GREEN nodes { id: "6" overall: GREEN load { overall: GREEN cores: 64 } } } } location { id: 5 host: "::1" port: 12001 } 2025-05-29T15:26:25.693452Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:26:25.693499Z node 7 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:26:25.693522Z node 7 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00164d/r3tmp/tmpJS1nTS/pdisk_1.dat 2025-05-29T15:26:25.804742Z node 7 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65235, node 7 TClient is connected to server localhost:23175 2025-05-29T15:26:25.947275Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:25.947291Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:25.947294Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:25.947464Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:26:26.033583Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:26.033618Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:26.046431Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: EMERGENCY issue_log { id: "RED-70fb-1231c6b1" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "RED-4e47-1231c6b1" reason: "RED-53b5-1231c6b1" reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-7" type: "COMPUTE" level: 2 } issue_log { id: "RED-4e47-1231c6b1" status: RED message: "Compute has issues with system tablets" location { database { name: "/Root" } } reason: "RED-c138-1231c6b1-BSController" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-c138-1231c6b1-BSController" status: RED message: "System tablet is unresponsive" location { compute { tablet { type: "BSController" id: "72057594037932033" } } database { name: "/Root" } } type: "SYSTEM_TABLET" level: 3 } issue_log { id: "RED-53b5-1231c6b1" status: RED message: "System tablet BSC didn\'t provide information" location { database { name: "/Root" } } type: "STORAGE" level: 2 } database_status { name: "/Root" overall: RED storage { overall: RED pools { id: "static" overall: GREEN groups { id: "0" overall: GREEN vdisks { id: "0-1-0-0-0" overall: GREEN pdisk { id: "7-1" overall: GREEN } } } } } compute { overall: RED nodes { id: "7" overall: YELLOW load { overall: YELLOW load: 233.228027 cores: 64 } } } } location { id: 7 host: "::1" port: 12001 } 
2025-05-29T15:26:34.153070Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:454:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:26:34.153165Z node 9 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:26:34.153174Z node 9 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00164d/r3tmp/tmp0UD19u/pdisk_1.dat 2025-05-29T15:26:34.251249Z node 9 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10108, node 9 TClient is connected to server localhost:5139 2025-05-29T15:26:34.376371Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:34.376390Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:34.376394Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:34.376485Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration >> TExportToS3Tests::ShouldPreserveIncrBackupFlag >> TExportToS3Tests::CompletedExportEndTime [GOOD] >> TExportToS3Tests::Checksums >> TExportToS3Tests::ShouldSucceedOnSingleShardTable >> TReplicaTest::Update >> Cdc::DropColumn [FAIL] >> Cdc::DropIndex >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> TExportToS3Tests::Checksums [GOOD] >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats >> TReplicaTest::Update [GOOD] >> TReplicaTest::UnsubscribeWithoutSubscribe >> TExportToS3Tests::SchemaMappingEncryption [GOOD] >> THiveTest::TestDeleteOwnerTablets [GOOD] >> THiveTest::TestDeleteOwnerTabletsMany >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex [FAIL] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild >> TExportToS3Tests::ChecksumsWithCompression >> TExportToS3Tests::DropCopiesBeforeTransferring1 >> TReplicaTest::HandshakeWithStaleGeneration >> DataShardVolatile::NotCachingAbortingDeletes-UseSink [FAIL] >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey >> Cdc::SupportedTypes [FAIL] >> Cdc::SplitTopicPartition_TopicAutoPartitioning >> TExportToS3Tests::ChecksumsWithCompression [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] >> TReplicaTest::HandshakeWithStaleGeneration [GOOD] >> TReplicaTest::IdempotencyUpdatesAliveSubscriber >> TExportToS3Tests::Changefeeds >> TExportToS3Tests::DropCopiesBeforeTransferring2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] Test command err: 2025-05-29T15:26:35.377212Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-05-29T15:26:35.377236Z node 1 
:SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:6:2053] Successful handshake: owner# 1, generation# 1
2025-05-29T15:26:35.377299Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72
2025-05-29T15:26:35.377307Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false
2025-05-29T15:26:35.378317Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30}
2025-05-29T15:26:35.378365Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054]
2025-05-29T15:26:35.378387Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities#
2025-05-29T15:26:35.378415Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:7:2054]
2025-05-29T15:26:35.378430Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:662: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# path
2025-05-29T15:26:35.378438Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:7:2054]
2025-05-29T15:26:35.378443Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities#
2025-05-29T15:26:35.378451Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:7:2054]
2025-05-29T15:26:35.378456Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:662: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1]
2025-05-29T15:26:35.586640Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054]
2025-05-29T15:26:35.586662Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [2:6:2053] Successful handshake: owner# 1, generation# 1
2025-05-29T15:26:35.586695Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72
2025-05-29T15:26:35.586703Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false
2025-05-29T15:26:35.586717Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30}
2025-05-29T15:26:35.586732Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054]
>> TReplicaTest::Handshake
>> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD]
>> TReplicaTest::IdempotencyUpdatesAliveSubscriber [GOOD]
>> TReplicaTest::IdempotencyUpdatesVariant2
>> KqpRanges::UpdateWhereInFullScan+UseSink
>> TExportToS3Tests::ShouldSucceedOnManyTables
>> TReplicaTest::Handshake [GOOD]
>> TReplicaTest::DoubleUnsubscribe
>> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD]
>> Cdc::RacyRebootAndSplitWithTxInflight [FAIL]
>> Cdc::RacyActivateAndEnqueue
>> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD]
>> TReplicaTest::DoubleUnsubscribe [GOOD]
>> TReplicaTest::DoubleDelete
>> KqpNotNullColumns::AlterAddNotNullColumnPg
>> TExportToS3Tests::Changefeeds [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD]
Test command err:
2025-05-29T15:26:35.815930Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:7:2054]
2025-05-29T15:26:35.815954Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:6:2053] Successful handshake: owner# 1, generation# 2
2025-05-29T15:26:35.815969Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054]
2025-05-29T15:26:35.815975Z node 1 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:763: [1:6:2053] Reject handshake from stale populator: sender# [1:7:2054], owner# 1, generation# 1, pending generation# 2
2025-05-29T15:26:36.024560Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054]
2025-05-29T15:26:36.024580Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [2:6:2053] Successful handshake: owner# 1, generation# 1
2025-05-29T15:26:36.024601Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:8:2055]
2025-05-29T15:26:36.024608Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1]
2025-05-29T15:26:36.024636Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities#
2025-05-29T15:26:36.024693Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72
2025-05-29T15:26:36.024700Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false
2025-05-29T15:26:36.025998Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30}
2025-05-29T15:26:36.026044Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40
2025-05-29T15:26:36.026050Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true
2025-05-29T15:26:36.026055Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:575: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1]
2025-05-29T15:26:36.026067Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72
2025-05-29T15:26:36.026072Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false
2025-05-29T15:26:36.026077Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:834: [2:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1]
2025-05-29T15:26:36.026085Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72
2025-05-29T15:26:36.026089Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false
2025-05-29T15:26:36.026097Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30}
2025-05-29T15:26:36.026111Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:9:2056]
2025-05-29T15:26:36.026120Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [2:6:2053] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities#
2025-05-29T15:26:36.235049Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054]
2025-05-29T15:26:36.235069Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [3:6:2053] Successful handshake: owner# 1, generation# 1
2025-05-29T15:26:36.235101Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72
2025-05-29T15:26:36.235109Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false
2025-05-29T15:26:36.235121Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30}
2025-05-29T15:26:36.235134Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72
2025-05-29T15:26:36.235139Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false
2025-05-29T15:26:36.235145Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:575: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1]
2025-05-29T15:26:36.235152Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30}
2025-05-29T15:26:36.235161Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40
2025-05-29T15:26:36.235166Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# true
2025-05-29T15:26:36.235170Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:575: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 2]
2025-05-29T15:26:36.235176Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72
2025-05-29T15:26:36.235181Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false
2025-05-29T15:26:36.235186Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:834: [3:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1]
2025-05-29T15:26:36.235194Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72
2025-05-29T15:26:36.235198Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false
2025-05-29T15:26:36.235203Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:834: [3:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 2]
>> TExportToS3Tests::CorruptedDyNumber
>> TReplicaTest::DoubleDelete [GOOD]
>> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD]
>> THiveTest::TestFollowersCrossDC_MovingLeader [GOOD]
>> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:26:34.066112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:26:34.066138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:26:34.066144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:26:34.066150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:26:34.066156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:26:34.066160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:26:34.066169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:26:34.066181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:26:34.066274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:26:34.066351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:26:34.081172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:26:34.081198Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:26:34.087107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:26:34.087266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:26:34.087308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:26:34.089395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:26:34.089649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:26:34.089789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:26:34.089839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:26:34.090383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:26:34.090438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:26:34.090708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:26:34.090722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:26:34.090766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:26:34.090775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:26:34.090782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:26:34.090817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:26:34.092276Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:26:34.114569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:26:34.114666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:34.114730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:26:34.114799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:26:34.114811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:34.118313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:26:34.118348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:26:34.118413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:34.118426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:26:34.118432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:26:34.118438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:26:34.118964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:34.118976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:26:34.118983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:26:34.120572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:34.120588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:34.120595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:26:34.120602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:26:34.121314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:26:34.121830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:26:34.121877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:26:34.122089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:26:34.122119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:26:34.122127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:26:34.122200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:26:34.122207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:26:34.122237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:26:34.122249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:26:34.122792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:26:34.122804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:26:34.122849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... hemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944
2025-05-29T15:26:36.051120Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816
2025-05-29T15:26:36.051146Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545
2025-05-29T15:26:36.051361Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758
FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005
FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005
2025-05-29T15:26:36.051490Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:26:36.051515Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 17179871341 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:26:36.051524Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:129: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000005, at schemeshard: 72057594046678944
2025-05-29T15:26:36.051553Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:180: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944
2025-05-29T15:26:36.051564Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710758:0 progress is 1/1
2025-05-29T15:26:36.051569Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1
2025-05-29T15:26:36.051575Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710758:0 progress is 1/1
2025-05-29T15:26:36.051578Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1
2025-05-29T15:26:36.051588Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-05-29T15:26:36.051599Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1
2025-05-29T15:26:36.051605Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false
2025-05-29T15:26:36.051612Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1
2025-05-29T15:26:36.051618Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710758:0
2025-05-29T15:26:36.051626Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710758:0
2025-05-29T15:26:36.051636Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2
2025-05-29T15:26:36.051642Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1
2025-05-29T15:26:36.051646Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 11
2025-05-29T15:26:36.051650Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615
2025-05-29T15:26:36.051771Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758
2025-05-29T15:26:36.052164Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:26:36.052176Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:26:36.052212Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4]
2025-05-29T15:26:36.052235Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:26:36.052241Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:208:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1
2025-05-29T15:26:36.052247Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:208:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4
FAKE_COORDINATOR: Erasing txId 281474976710758
2025-05-29T15:26:36.052387Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758
2025-05-29T15:26:36.052395Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758
2025-05-29T15:26:36.052399Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758
2025-05-29T15:26:36.052402Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11
2025-05-29T15:26:36.052406Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4
2025-05-29T15:26:36.052501Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758
2025-05-29T15:26:36.052510Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758
2025-05-29T15:26:36.052513Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758
2025-05-29T15:26:36.052516Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615
2025-05-29T15:26:36.052518Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1
2025-05-29T15:26:36.052526Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1
2025-05-29T15:26:36.052530Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:125:2150]
2025-05-29T15:26:36.052562Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-05-29T15:26:36.052565Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944
2025-05-29T15:26:36.052574Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-05-29T15:26:36.053160Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758
2025-05-29T15:26:36.053266Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758
2025-05-29T15:26:36.053281Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758
2025-05-29T15:26:36.053292Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710758
2025-05-29T15:26:36.053300Z node 4 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute
2025-05-29T15:26:36.053305Z node 4 :EXPORT DEBUG: schemeshard_export__create.cpp:1232: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758
2025-05-29T15:26:36.053310Z node 4 :EXPORT DEBUG: schemeshard_export__create.cpp:1263: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 103, itemIdx# 4294967295
2025-05-29T15:26:36.053358Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-05-29T15:26:36.053653Z node 4 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete
TestWaitNotification wait txId: 103
2025-05-29T15:26:36.053703Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion
2025-05-29T15:26:36.053712Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103
2025-05-29T15:26:36.053801Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944
2025-05-29T15:26:36.053818Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult
2025-05-29T15:26:36.053823Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:554:2512]
TestWaitNotification: OK eventTxId 103
>> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild [FAIL]
>> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge
>> TExportToS3Tests::TablePermissions
>> Cdc::DropIndex [FAIL]
>> Cdc::DisableStream
>> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [FAIL]
>> TExportToS3Tests::CorruptedDyNumber [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::DoubleDelete [GOOD]
Test command err:
2025-05-29T15:26:36.214501Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054]
2025-05-29T15:26:36.214525Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:6:2053] Successful handshake: owner# 1, generation# 1
2025-05-29T15:26:36.421364Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054]
2025-05-29T15:26:36.421386Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [2:6:2053] Successful handshake: owner# 1, generation# 1
2025-05-29T15:26:36.421440Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72
2025-05-29T15:26:36.421447Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false
2025-05-29T15:26:36.422668Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30}
2025-05-29T15:26:36.422710Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054]
2025-05-29T15:26:36.422733Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities#
2025-05-29T15:26:36.422770Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054]
2025-05-29T15:26:36.422784Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:662: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# path
2025-05-29T15:26:36.422788Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054]
2025-05-29T15:26:36.632639Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054]
2025-05-29T15:26:36.632661Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [3:6:2053] Successful handshake: owner# 1, generation# 1
2025-05-29T15:26:36.632680Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055]
2025-05-29T15:26:36.632686Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [3:6:2053] Upsert description: path# path
2025-05-29T15:26:36.632703Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities#
2025-05-29T15:26:36.632733Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72
2025-05-29T15:26:36.632741Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false
2025-05-29T15:26:36.632752Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30}
2025-05-29T15:26:36.632779Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40
2025-05-29T15:26:36.632784Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true
2025-05-29T15:26:36.632789Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:575: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1]
2025-05-29T15:26:36.632801Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056]
2025-05-29T15:26:36.632806Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities#
2025-05-29T15:26:36.632815Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40
2025-05-29T15:26:36.632820Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true
>> TReplicaTest::Subscribe
>> TSchemeShardTestExtSubdomainReboots::AlterForceDrop-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TExportToS3Tests::DisableAutoDropping
>> TExportToS3Tests::TablePermissions [GOOD]
>> TReplicaTest::Subscribe [GOOD]
>> TReplicaTest::SubscribeUnknownPath
>> THiveTest::TestHiveBalancerWithImmovableTablets [GOOD]
>> THiveTest::TestHiveBalancerHighUsage
>> TExportToS3Tests::DisableAutoDropping [GOOD]
>> Cdc::SplitTopicPartition_TopicAutoPartitioning [FAIL]
>> Cdc::ShouldDeliverChangesOnSplitMerge
>> TReplicaTest::SubscribeUnknownPath [GOOD]
>> TReplicaTest::SyncVersion
>> TReplicaTest::Unsubscribe
>> TReplicaTest::SyncVersion [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::Changefeeds [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:26:33.334511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:26:33.334537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:26:33.334543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:26:33.334549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:26:33.334555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:26:33.334559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:26:33.334568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:26:33.334581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:26:33.334697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:26:33.334815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:26:33.345930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:26:33.345953Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:26:33.349034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:26:33.349183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:26:33.349230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:26:33.352935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:26:33.353135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:26:33.353260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:26:33.353307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:26:33.353947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:26:33.354004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:26:33.354293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:26:33.354304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:26:33.354324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:26:33.354332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:26:33.354339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:26:33.354374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:26:33.355799Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:26:33.377378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:26:33.377474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:33.377541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:26:33.377590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:26:33.377602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:33.378394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:26:33.378424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:26:33.378483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:33.378494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:26:33.378499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:26:33.378505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:26:33.378983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:33.378996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:26:33.379002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:26:33.379367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:33.379379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:33.379385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:26:33.379392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:26:33.380126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:26:33.380546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:26:33.380585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:26:33.380779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:26:33.380804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:26:33.380814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:26:33.380884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:26:33.380892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:26:33.380924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:26:33.380936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:26:33.381337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:26:33.381347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:26:33.381386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... 15:26:36.757435Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761
2025-05-29T15:26:36.757441Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 7
2025-05-29T15:26:36.757445Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3
2025-05-29T15:26:36.757459Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true
2025-05-29T15:26:36.758181Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete
2025-05-29T15:26:36.758226Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944
2025-05-29T15:26:36.758233Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true
2025-05-29T15:26:36.758239Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944
2025-05-29T15:26:36.758298Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816
2025-05-29T15:26:36.758325Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000010
FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000010
2025-05-29T15:26:36.758546Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:26:36.758569Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 21474838637 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:26:36.758579Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:129: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000010, at schemeshard: 72057594046678944
2025-05-29T15:26:36.758608Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:180: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944
2025-05-29T15:26:36.758617Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710761:0 progress is 1/1
2025-05-29T15:26:36.758622Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1
2025-05-29T15:26:36.758628Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710761:0 progress is 1/1
2025-05-29T15:26:36.758631Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1
2025-05-29T15:26:36.758640Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-05-29T15:26:36.758650Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2
2025-05-29T15:26:36.758656Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false
2025-05-29T15:26:36.758662Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1
2025-05-29T15:26:36.758667Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710761:0
2025-05-29T15:26:36.758671Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710761:0
2025-05-29T15:26:36.758680Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3
2025-05-29T15:26:36.758686Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1
2025-05-29T15:26:36.758691Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 12
2025-05-29T15:26:36.758695Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 9], 18446744073709551615
2025-05-29T15:26:36.758865Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761
2025-05-29T15:26:36.758886Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761
2025-05-29T15:26:36.759262Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:26:36.759275Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:26:36.759312Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 9]
2025-05-29T15:26:36.759342Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:26:36.759348Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:208:2209], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1
2025-05-29T15:26:36.759353Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:208:2209], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 9
FAKE_COORDINATOR: Erasing txId 281474976710761
2025-05-29T15:26:36.759532Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-05-29T15:26:36.759548Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-05-29T15:26:36.759553Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761
2025-05-29T15:26:36.759558Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12
2025-05-29T15:26:36.759563Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-05-29T15:26:36.759657Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-05-29T15:26:36.759669Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-05-29T15:26:36.759673Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761
2025-05-29T15:26:36.759678Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615
2025-05-29T15:26:36.759682Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2
2025-05-29T15:26:36.759692Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1
2025-05-29T15:26:36.759698Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:126:2151]
2025-05-29T15:26:36.760244Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761
2025-05-29T15:26:36.760482Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761
2025-05-29T15:26:36.760506Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761
2025-05-29T15:26:36.760517Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710761
2025-05-29T15:26:36.760525Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute
2025-05-29T15:26:36.760530Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1232: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761
2025-05-29T15:26:36.760536Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1263: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295
2025-05-29T15:26:36.760890Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete
2025-05-29T15:26:36.760912Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult
2025-05-29T15:26:36.760920Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [5:1387:3175]
TestWaitNotification: OK eventTxId 105
>> TReplicaTest::Unsubscribe [GOOD]
>> TReplicaTest::UnsubscribeUnknownPath
>> TSchemeShardTestExtSubdomainReboots::SchemeLimits-AlterDatabaseCreateHiveFirst-false [GOOD]
|67.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::SyncVersion [GOOD]
Test command err:
2025-05-29T15:26:37.205507Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054]
2025-05-29T15:26:37.205532Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:6:2053] Successful handshake: owner# 1, generation# 1
2025-05-29T15:26:37.205591Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72
2025-05-29T15:26:37.205598Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false
2025-05-29T15:26:37.206671Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30}
2025-05-29T15:26:37.206713Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054]
2025-05-29T15:26:37.206734Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities#
2025-05-29T15:26:37.206775Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40
2025-05-29T15:26:37.206780Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true
2025-05-29T15:26:37.206784Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:575: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1]
2025-05-29T15:26:37.413514Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054]
2025-05-29T15:26:37.413535Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [2:6:2053] Upsert description: path# path
2025-05-29T15:26:37.413551Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities#
2025-05-29T15:26:37.623982Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054]
2025-05-29T15:26:37.624003Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [3:6:2053] Successful handshake: owner# 1, generation# 1
2025-05-29T15:26:37.624035Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 76
2025-05-29T15:26:37.624043Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false
2025-05-29T15:26:37.624055Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 100500, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 32}
2025-05-29T15:26:37.624071Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:7:2054]
2025-05-29T15:26:37.624083Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:6:2053] Subscribe: subscriber# [3:7:2054], path# path, domainOwnerId# 0, capabilities#
2025-05-29T15:26:37.624097Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [3:6:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:7:2054], cookie# 1
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::AlterForceDrop-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:25:49.790699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488:
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:25:49.790726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:49.790731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:25:49.790753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:25:49.790764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:25:49.790768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:25:49.790776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:49.790788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:25:49.790881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:25:49.790954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:25:49.831021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:25:49.831044Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:49.831139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:25:49.834833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:25:49.834866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:25:49.834893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:25:49.837897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:25:49.837966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:25:49.838070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:49.838206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
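The schemeshard trace entries above and below share a fixed shape: an ISO-8601 timestamp, a node id, a component tag such as FLAT_TX_SCHEMESHARD, a severity level, and a source-location prefix before the message. A minimal sketch of how such entries could be split back out of a run-together dump for inspection follows; the regex, function name, and usage are illustrative assumptions about the layout seen in this dump, not part of YDB or the ya tooling.

import re

# Each entry starts with a timestamp like 2025-05-29T15:26:36.758631Z, followed by
# "node <N> :<COMPONENT> <SEVERITY>: <message>". This pattern is an assumption
# inferred from the dump above, not an official YDB log format specification.
ENTRY_RE = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)\s+"
    r"node\s+(?P<node>\d+)\s+:(?P<component>\w+)\s+(?P<severity>\w+):"
)

def split_entries(raw):
    # Yield (timestamp, node, component, severity, message) tuples from a dump
    # where consecutive entries were concatenated onto long lines.
    matches = list(ENTRY_RE.finditer(raw))
    for i, m in enumerate(matches):
        end = matches[i + 1].start() if i + 1 < len(matches) else len(raw)
        message = raw[m.end():end].strip()
        yield m["ts"], int(m["node"]), m["component"], m["severity"], message

# Example use: count entries per severity for one node of the test cluster.
# from collections import Counter
# counts = Counter(sev for ts, node, comp, sev, msg in split_entries(dump) if node == 5)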
2025-05-29T15:25:49.838754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:49.838799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:25:49.839007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:25:49.839016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:49.839046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:25:49.839052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:25:49.839057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:25:49.839073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:25:49.841967Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:25:49.879454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:25:49.879532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.879588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:25:49.879636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:25:49.879648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.880356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:49.880383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: 
StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:25:49.880427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.880437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:25:49.880442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:25:49.880447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:25:49.880823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.880835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:25:49.880840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:25:49.881228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.881238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.881244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:49.881255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:25:49.881865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:25:49.882233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:25:49.882270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:25:49.882453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:49.882476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 
TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:25:49.882482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:49.882539Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... ablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:26:37.073873Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:26:37.073895Z node 134 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2025-05-29T15:26:37.074122Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-05-29T15:26:37.074148Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:26:37.074186Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:26:37.074203Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:26:37.074221Z node 134 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2025-05-29T15:26:37.074262Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:26:37.074281Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:26:37.074311Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:26:37.074317Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:26:37.074342Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:26:37.074397Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:26:37.074404Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:26:37.074414Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:26:37.074909Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:5 2025-05-29T15:26:37.074928Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:26:37.074940Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:26:37.074948Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:6 2025-05-29T15:26:37.075322Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:26:37.075336Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:26:37.075375Z node 134 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:26:37.075385Z node 134 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2025-05-29T15:26:37.075435Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:26:37.075441Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 TestWaitNotification wait txId: 1004 2025-05-29T15:26:37.075454Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:26:37.075458Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:26:37.075511Z node 134 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:26:37.075530Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:26:37.075535Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [134:380:2369] 2025-05-29T15:26:37.075559Z node 134 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:26:37.075570Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:26:37.075574Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [134:380:2369] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted 2025-05-29T15:26:37.075635Z node 134 :HIVE INFO: 
tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:26:37.075649Z node 134 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-05-29T15:26:37.075658Z node 134 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2025-05-29T15:26:37.075664Z node 134 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2025-05-29T15:26:37.075672Z node 134 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2025-05-29T15:26:37.075680Z node 134 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 2025-05-29T15:26:37.075754Z node 134 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:26:37.075778Z node 134 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 33us result status StatusPathDoesNotExist 2025-05-29T15:26:37.075809Z node 134 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:26:37.075846Z node 134 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:26:37.075861Z node 134 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 16us result status StatusSuccess 2025-05-29T15:26:37.075928Z node 134 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Waiting until shard idx 72057594046678944:3 is deleted Waiting until shard idx 72057594046678944:4 is deleted Waiting until shard idx 72057594046678944:5 is deleted Waiting until shard idx 72057594046678944:6 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 Deleted shard idx 72057594046678944:3 Deleted shard idx 72057594046678944:4 Deleted shard idx 72057594046678944:5 Deleted shard idx 72057594046678944:6 >> TReplicaTest::UnsubscribeUnknownPath [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower [GOOD] >> THiveTest::TestFollowerCompatability3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TablePermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:26:35.467316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:35.467338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:35.467344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:35.467349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:35.467354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:35.467358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:35.467366Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:35.467379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:35.467480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:35.467571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:35.481776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:26:35.481796Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:35.487782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:35.487922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:35.487961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:35.490093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:35.490292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:35.490414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:35.490460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:35.491062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:35.491112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:35.491368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:35.491382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:35.491403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:35.491412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:35.491418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:35.491447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:26:35.492960Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:26:35.514534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:35.514619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:35.514673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:35.514718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:35.514730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:35.515397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:35.515425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:35.515477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:35.515487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:35.515493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:35.515498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:35.515938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:35.515949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:35.515955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:35.516343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:35.516358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:35.516364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:35.516371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:35.517067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:35.517465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:35.517500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:35.517665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:35.517691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:35.517697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:35.517774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:26:35.517781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:35.517809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:26:35.517821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:26:35.518292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:35.518301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:35.518343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
0005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2025-05-29T15:26:37.315254Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:37.315285Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 17179871339 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:37.315294Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:412: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-05-29T15:26:37.315334Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976710759:0 128 -> 129 2025-05-29T15:26:37.315370Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:30240 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1CC4B948-510C-4026-8220-CCB0BB207E8D amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-05-29T15:26:37.324659Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:37.324683Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:26:37.324776Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:37.324785Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:208:2209], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2025-05-29T15:26:37.324959Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:26:37.324974Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2025-05-29T15:26:37.325157Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-05-29T15:26:37.325174Z node 4 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-05-29T15:26:37.325179Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-05-29T15:26:37.325185Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-05-29T15:26:37.325196Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:26:37.325218Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:30240 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 94DA2FF8-9E89-4D89-8FDF-FCB2DED70EAF amz-sdk-request: attempt=1 content-length: 137 content-md5: WeIr3D5bqIjvqMGEjx2JrA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 137 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:30240 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B83B6F23-B3A8-46FE-81B9-41344A82FE73 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 2025-05-29T15:26:37.330942Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:30240 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 64EC421E-F81E-49EE-8973-E9F76F1E5DB2 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2025-05-29T15:26:37.344719Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 459 RawX2: 17179871611 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-29T15:26:37.344756Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-05-29T15:26:37.344787Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 459 RawX2: 17179871611 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 
Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-29T15:26:37.344804Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 459 RawX2: 17179871611 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-29T15:26:37.344819Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:37.344824Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:26:37.344832Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-29T15:26:37.344841Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976710759:0 129 -> 240 2025-05-29T15:26:37.344897Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:37.345389Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:26:37.345473Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:26:37.345482Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-05-29T15:26:37.345496Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-29T15:26:37.345501Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-29T15:26:37.345506Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-29T15:26:37.345510Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-29T15:26:37.345516Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-05-29T15:26:37.345529Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:126:2151] message: TxId: 281474976710759 2025-05-29T15:26:37.345536Z node 4 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-29T15:26:37.345542Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710759:0 2025-05-29T15:26:37.345547Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710759:0 2025-05-29T15:26:37.345572Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:26:37.345964Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-05-29T15:26:37.345980Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710759 2025-05-29T15:26:37.346374Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:26:37.346387Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:490:2450] TestWaitNotification: OK eventTxId 103 >> Cdc::RacyActivateAndEnqueue [FAIL] >> Cdc::RacyCreateAndSend >> TTablesWithReboots::AlterTableFollowersWithReboots >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge [FAIL] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::DisableAutoDropping [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:26:35.088583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:35.088608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:35.088613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:35.088619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:35.088625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:35.088629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:35.088638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:35.088651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:35.088747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:35.088827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:35.102422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:26:35.102443Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:35.104857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:35.105005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:35.105053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:35.107486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:35.107744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:35.107891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:35.107948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:35.108477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:35.108526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:35.108772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:35.108783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:35.108802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:35.108810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:35.108816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:35.108847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:26:35.110175Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:26:35.133473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain 
{ Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:35.133562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:35.133630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:35.133677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:35.133688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:35.134517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:35.134547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:35.134605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:35.134614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:35.134619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:35.134625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:35.135216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:35.135234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:35.135241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:35.135771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:35.135788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:35.135794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:35.135802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:35.136451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:35.138160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:35.138207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:35.138402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:35.138434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:35.138444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:35.138516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:26:35.138526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:35.138558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:26:35.138571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:26:35.141282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:35.141296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:35.141347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
T15:26:37.589804Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-29T15:26:37.589809Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-05-29T15:26:37.589813Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:26:37.589826Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-29T15:26:37.590257Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete 2025-05-29T15:26:37.590295Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-29T15:26:37.590300Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-29T15:26:37.590305Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-29T15:26:37.590709Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-29T15:26:37.590756Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:26:37.590812Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007 2025-05-29T15:26:37.590895Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:37.590920Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 21474838637 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:37.590926Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:129: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000007, at schemeshard: 72057594046678944 2025-05-29T15:26:37.590953Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:180: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-29T15:26:37.590963Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-29T15:26:37.590967Z node 5 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:26:37.590972Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-29T15:26:37.590975Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:26:37.590983Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:26:37.590991Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:26:37.590996Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-29T15:26:37.591002Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:26:37.591006Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-29T15:26:37.591009Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710761:0 2025-05-29T15:26:37.591017Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:26:37.591022Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-29T15:26:37.591026Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-29T15:26:37.591029Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-05-29T15:26:37.591094Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:26:37.591555Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:37.591567Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:37.591597Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:26:37.591615Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:37.591618Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:208:2209], at 
schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-29T15:26:37.591622Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:208:2209], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-29T15:26:37.591734Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:26:37.591743Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:26:37.591746Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-29T15:26:37.591750Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-29T15:26:37.591753Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:26:37.591831Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:26:37.591837Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-29T15:26:37.591840Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-29T15:26:37.591843Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:26:37.591845Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:26:37.591851Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-29T15:26:37.591855Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:126:2151] 2025-05-29T15:26:37.592231Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:26:37.592443Z node 5 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:26:37.592464Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-29T15:26:37.592474Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710761 2025-05-29T15:26:37.592482Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute 2025-05-29T15:26:37.592487Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1232: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-29T15:26:37.592492Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1263: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2025-05-29T15:26:37.592810Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete 2025-05-29T15:26:37.592827Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:26:37.592834Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:615:2570] TestWaitNotification: OK eventTxId 102 >> TOlapReboots::AlterTtlSettings [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeUnknownPath [GOOD] Test command err: 2025-05-29T15:26:37.797181Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:751: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-05-29T15:26:37.797204Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:769: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-05-29T15:26:37.797224Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2025-05-29T15:26:37.797230Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [1:6:2053] Upsert description: path# path 2025-05-29T15:26:37.797256Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:6:2053] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-05-29T15:26:37.797277Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:9:2056] 2025-05-29T15:26:37.797282Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# path, domainOwnerId# 0, capabilities# 2025-05-29T15:26:37.797321Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-05-29T15:26:37.797328Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-05-29T15:26:37.798350Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:550: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-05-29T15:26:37.798404Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [1:6:2053] Handle 
NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:8:2055] 2025-05-29T15:26:37.798410Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:662: [1:6:2053] Unsubscribe: subscriber# [1:8:2055], path# path 2025-05-29T15:26:37.798425Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:782: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-05-29T15:26:37.798431Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:822: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-05-29T15:26:37.798436Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:575: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-05-29T15:26:38.008006Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] >> Cdc::DisableStream [FAIL] >> Cdc::InitialScan >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed >> TTablesWithReboots::AlterCopyWithReboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::SchemeLimits-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:25:52.681867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:25:52.681892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:52.681898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:25:52.681903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:25:52.681913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:25:52.681918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2025-05-29T15:25:52.681927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:52.681955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:25:52.682061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:25:52.682136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:25:52.697059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:25:52.697079Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:52.697180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:25:52.707021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:25:52.707051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:25:52.707079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:25:52.714258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:25:52.714330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:25:52.714441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:52.714582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:25:52.715163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:52.715206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:25:52.715413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:25:52.715421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:52.715450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:25:52.715456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
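Every schemeshard record in the traces above follows one shape: an ISO-8601 timestamp, "node N", a component and severity tag, a source location ("file.cpp:line"), then a free-form message — with many records flattened onto a single physical line. A minimal sketch for splitting such a flattened dump back into records, assuming only that layout (the regex and field names below are our own, not part of any YDB tooling; untimestamped fragments such as FAKE_COORDINATOR output simply remain attached to the preceding record's message):

import re
import sys

# Start of one schemeshard log record, per the layout observed above.
RECORD = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
    r"node (?P<node>\d+) "
    r":(?P<component>[A-Z_]+) (?P<severity>[A-Z]+): "
    r"(?P<source>[\w./]+:\d+): "
)

def split_records(text):
    """Yield (ts, node, component, severity, source, message) tuples.

    Records run together without newlines, so we split on the start of
    the next timestamp rather than on line boundaries."""
    starts = [m.start() for m in RECORD.finditer(text)]
    for begin, end in zip(starts, starts[1:] + [len(text)]):
        m = RECORD.match(text, begin)
        yield (m["ts"], int(m["node"]), m["component"], m["severity"],
               m["source"], text[m.end():end].strip())

if __name__ == "__main__":
    # e.g. feed a "Test command err:" dump on stdin and surface the noisy records
    for ts, node, comp, sev, src, msg in split_records(sys.stdin.read()):
        if sev in ("WARN", "ERROR"):
            print(ts, f"node {node}", comp, sev, src, msg[:120])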
2025-05-29T15:25:52.715462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:25:52.715478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:25:52.716606Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:25:52.739330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:25:52.739398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:52.739446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:25:52.739504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:25:52.739514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:52.740094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:52.740117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:25:52.740149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:52.740158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:25:52.740163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:25:52.740169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:25:52.740540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:52.740550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at 
schemeshard: 72057594046678944 2025-05-29T15:25:52.740556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:25:52.740841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:52.740848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:52.740855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:52.740862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:25:52.741541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:25:52.741927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:25:52.741962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:25:52.742142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:52.742164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:25:52.742171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:52.742233Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
ntPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-05-29T15:26:37.906394Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72075186233409546, cookie: 1005 2025-05-29T15:26:37.906410Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72075186233409546, cookie: 1005 2025-05-29T15:26:37.906416Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 1005 2025-05-29T15:26:37.906420Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 1005, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 2 2025-05-29T15:26:37.906425Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-05-29T15:26:37.906449Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2025-05-29T15:26:37.906786Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1005:4294967295 from tablet: 72075186233409546 to tablet: 72075186233409547 cookie: 0:1005 msg type: 269090816 2025-05-29T15:26:37.906828Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 4294967295, tablet: 72075186233409547 2025-05-29T15:26:37.907212Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1005 2025-05-29T15:26:37.907331Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestModificationResults wait txId: 1006 2025-05-29T15:26:37.907996Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpMkDir MkDir { Name: "B" } } TxId: 1006 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2025-05-29T15:26:37.908040Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/USER_0/B, operationId: 1006:0, at schemeshard: 72075186233409546 2025-05-29T15:26:37.908066Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72075186233409546, LocalPathId: 1], parent name: MyRoot/USER_0, child name: B, child id: [OwnerId: 72075186233409546, LocalPathId: 3], at schemeshard: 72075186233409546 2025-05-29T15:26:37.908079Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 0 2025-05-29T15:26:37.908090Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, 
opId: 1006:1, propose status:StatusAccepted, reason: , at schemeshard: 72075186233409546 2025-05-29T15:26:37.908138Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-05-29T15:26:37.908147Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 1 2025-05-29T15:26:37.908825Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1006, response: Status: StatusAccepted TxId: 1006 SchemeshardId: 72075186233409546 PathId: 3, at schemeshard: 72075186233409546 2025-05-29T15:26:37.908854Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1006, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/USER_0/B 2025-05-29T15:26:37.908887Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-05-29T15:26:37.908893Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 1006, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-05-29T15:26:37.908923Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 1006, path id: [OwnerId: 72075186233409546, LocalPathId: 3] 2025-05-29T15:26:37.908937Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-05-29T15:26:37.908942Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [96:547:2480], at schemeshard: 72075186233409546, txId: 1006, path id: 1 2025-05-29T15:26:37.908948Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [96:547:2480], at schemeshard: 72075186233409546, txId: 1006, path id: 3 2025-05-29T15:26:37.909012Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72075186233409546 2025-05-29T15:26:37.909020Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:63: MkDir::TPropose operationId# 1006:0 ProgressState, at schemeshard: 72075186233409546 2025-05-29T15:26:37.909031Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1006 ready parts: 1/1 2025-05-29T15:26:37.909056Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72075186233409547 message:Transaction { AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 1006 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409547 2025-05-29T15:26:37.909179Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 1006 2025-05-29T15:26:37.909191Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard 
Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 1006 2025-05-29T15:26:37.909195Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 1006 2025-05-29T15:26:37.909200Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 1006, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 6 2025-05-29T15:26:37.909204Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2025-05-29T15:26:37.909332Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72075186233409546, cookie: 1006 2025-05-29T15:26:37.909344Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72075186233409546, cookie: 1006 2025-05-29T15:26:37.909348Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 1006 2025-05-29T15:26:37.909353Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 1006, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], version: 2 2025-05-29T15:26:37.909357Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 2 2025-05-29T15:26:37.909367Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is published: true 2025-05-29T15:26:37.909869Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1006:4294967295 from tablet: 72075186233409546 to tablet: 72075186233409547 cookie: 0:1006 msg type: 269090816 2025-05-29T15:26:37.909918Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72075186233409547 2025-05-29T15:26:37.910333Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1006 2025-05-29T15:26:37.910359Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestModificationResults wait txId: 1007 2025-05-29T15:26:37.911026Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpMkDir MkDir { Name: "C" } } TxId: 1007 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2025-05-29T15:26:37.911070Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/USER_0/C, operationId: 1007:0, at schemeshard: 72075186233409546 
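The txId 1006 records above walk through the publish/ack bookkeeping: the MkDir proposal publishes a new version for each affected path ([LocalPathId: 1] at version 6, [LocalPathId: 3] at version 2), the operation stays in flight while acks are outstanding, and subscribers are notified once the last TEvUpdateAck arrives. A toy model of just that counting, assuming completion is decided purely by the set of un-acked paths (names are illustrative; the actual logic lives in C++ in schemeshard__publish_to_scheme_board.cpp):

from dataclasses import dataclass, field

@dataclass
class Publication:
    # (owner_id, local_path_id) -> version that must be acknowledged
    pending: dict
    subscribers: list = field(default_factory=list)

    def ack(self, path_id, version):
        """Mirror of AckPublish: retire a path once its acked version
        catches up; report completion when nothing is left pending."""
        want = self.pending.get(path_id)
        if want is not None and version >= want:
            del self.pending[path_id]
        if self.pending:
            print(f"publication in-flight, count: {len(self.pending)}")
            return False
        for s in self.subscribers:
            print(f"notify {s}: publication complete")
        return True

# txId 1006 published two path versions, as in the trace above
pub = Publication(pending={(72075186233409546, 1): 6,
                           (72075186233409546, 3): 2},
                  subscribers=["tx-waiter"])
pub.ack((72075186233409546, 1), 6)   # -> publication in-flight, count: 1
pub.ack((72075186233409546, 3), 2)   # -> notify tx-waiter: publication complete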
2025-05-29T15:26:37.911088Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1007:1, propose status:StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_0/C', error: paths count limit exceeded, limit: 2, paths: 2, delta: 1, source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155, at schemeshard: 72075186233409546 2025-05-29T15:26:37.911582Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1007, response: Status: StatusResourceExhausted Reason: "Check failed: path: \'/MyRoot/USER_0/C\', error: paths count limit exceeded, limit: 2, paths: 2, delta: 1, source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155" TxId: 1007 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-05-29T15:26:37.911612Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1007, database: /MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_0/C', error: paths count limit exceeded, limit: 2, paths: 2, delta: 1, source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155, operation: CREATE DIRECTORY, path: /MyRoot/USER_0/C TestModificationResult got TxId: 1007, wait until txId: 1007 >> THiveTest::TestHiveBalancerHighUsage [GOOD] >> THiveTest::TestHiveBalancerHighUsageAndColumnShards >> THiveTest::TestDeleteOwnerTabletsMany [GOOD] >> THiveTest::TestDeleteTabletWithFollowers >> Cdc::ShouldDeliverChangesOnSplitMerge [FAIL] >> Cdc::ResolvedTimestamps |67.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |67.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:23:41.436568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:23:41.436601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:41.436609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:23:41.436616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:23:41.436632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:23:41.436638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:23:41.436650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:23:41.436669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:23:41.436784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:23:41.436887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:23:41.452818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:23:41.452848Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:41.452967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:23:41.459802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:23:41.459848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:23:41.459886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:23:41.463861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:23:41.463938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:23:41.464031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:41.464180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:23:41.464763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:41.464810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:23:41.465044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:23:41.465057Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:23:41.465093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:23:41.465104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:23:41.465111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:23:41.465134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:23:41.466597Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:23:41.485115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:23:41.485199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:41.485267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:23:41.485333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:23:41.485345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:41.486227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:41.486256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:23:41.486309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:41.486321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:23:41.486329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:23:41.486335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:23:41.486812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:41.486826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:23:41.486831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:23:41.487193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:41.487205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:23:41.487211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:41.487219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:23:41.487931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:23:41.488417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:23:41.488460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:23:41.488687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:23:41.488715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:23:41.488724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:23:41.488798Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
25-05-29T15:26:27.691765Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-05-29T15:26:27.691791Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000011 2025-05-29T15:26:27.691909Z node 326 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000011, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:27.691933Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 1400159340651 } } Step: 5000011 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:27.691942Z node 326 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:129: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000011, at schemeshard: 72057594046678944 2025-05-29T15:26:27.691969Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:180: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-05-29T15:26:27.691979Z node 326 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-29T15:26:27.691984Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-29T15:26:27.691989Z node 326 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-29T15:26:27.691992Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-29T15:26:27.692001Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:26:27.692012Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:26:27.692018Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-05-29T15:26:27.692026Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-29T15:26:27.692034Z node 326 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710763:0 2025-05-29T15:26:27.692038Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710763:0 2025-05-29T15:26:27.692048Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:26:27.692054Z node 326 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-05-29T15:26:27.692058Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-05-29T15:26:27.692062Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-05-29T15:26:27.692177Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:27.692196Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:27.692515Z node 326 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:27.692527Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:27.692565Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:26:27.692588Z node 326 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:27.692592Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [326:208:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-05-29T15:26:27.692597Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [326:208:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-05-29T15:26:27.692744Z node 326 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:27.692755Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:27.692760Z node 326 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-29T15:26:27.692764Z node 326 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-05-29T15:26:27.692772Z node 326 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-05-29T15:26:27.692880Z node 326 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:27.692889Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:27.692893Z node 326 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-29T15:26:27.692897Z node 326 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:26:27.692901Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:26:27.692911Z node 326 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-05-29T15:26:27.692916Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [326:134:2156] 2025-05-29T15:26:27.692935Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:26:27.692939Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:26:27.692947Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:26:27.693357Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:27.693558Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-29T15:26:27.693579Z node 326 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-05-29T15:26:27.693589Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710763 2025-05-29T15:26:27.693597Z node 326 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute 2025-05-29T15:26:27.693602Z node 326 :EXPORT DEBUG: schemeshard_export__create.cpp:1232: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-05-29T15:26:27.693607Z node 326 :EXPORT DEBUG: 
schemeshard_export__create.cpp:1263: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 1004, itemIdx# 4294967295 2025-05-29T15:26:27.693706Z node 326 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:26:27.693984Z node 326 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-29T15:26:27.694036Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:26:27.694044Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:26:27.694113Z node 326 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:26:27.694127Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:26:27.694132Z node 326 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [326:997:2930] TestWaitNotification: OK eventTxId 1004 >> TTablesWithReboots::CopyTableAndDropWithReboots >> TTablesWithReboots::CopyTableAndDropWithReboots2 |67.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::SchemeLimits-AlterDatabaseCreateHiveFirst-true [GOOD] >> Cdc::RacyCreateAndSend [FAIL] >> Cdc::RacySplitAndDropTable >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount [FAIL] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize |67.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |68.0%| [TA] $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Cdc::InitialScan [FAIL] >> Cdc::InitialScanDebezium |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> THiveTest::TestDeleteTabletWithFollowers [GOOD] >> THiveTest::TestCreateTabletReboots >> THiveTest::TestFollowerCompatability3 [GOOD] >> THiveTest::TestGetStorageInfo >> TTablesWithReboots::TwiceRmDirWithReboots >> THiveTest::TestGetStorageInfo [GOOD] >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CopyIndexedTableWithReboots >> Cdc::InitialScanDebezium [FAIL] >> Cdc::InitialScanRacyCompleteAndRequest >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] >> KqpRanges::UpdateWhereInFullScan-UseSink >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize [FAIL] >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::SchemeLimits-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:25:50.700931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:25:50.700957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:50.700963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:25:50.700968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:25:50.700978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:25:50.700982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:25:50.700991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:50.701003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:25:50.701097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:25:50.701169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:25:50.739251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:25:50.739273Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:50.739371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:25:50.751113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:25:50.751149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:25:50.751185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:25:50.758151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:25:50.758230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:25:50.758341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:50.758515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:25:50.759100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:50.759142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:25:50.759364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:25:50.759373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:50.759405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:25:50.759412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:25:50.759418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 
2025-05-29T15:25:50.759435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:25:50.760651Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:25:50.781145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:25:50.781214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.781270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:25:50.781318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:25:50.781327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.782078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:50.782106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:25:50.782152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.782160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:25:50.782166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:25:50.782171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:25:50.782533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.782543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:25:50.782548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 
-> 128 2025-05-29T15:25:50.782869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.782881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.782888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:50.782896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:25:50.783532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:25:50.783945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:25:50.783984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:25:50.784175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:50.784199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:25:50.784207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:50.784269Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
ntPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-05-29T15:26:39.665546Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72075186233409546, cookie: 1005 2025-05-29T15:26:39.665561Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72075186233409546, cookie: 1005 2025-05-29T15:26:39.665565Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 1005 2025-05-29T15:26:39.665569Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 1005, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 2 2025-05-29T15:26:39.665573Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-05-29T15:26:39.665583Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2025-05-29T15:26:39.666073Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1005:4294967295 from tablet: 72075186233409546 to tablet: 72075186233409547 cookie: 0:1005 msg type: 269090816 2025-05-29T15:26:39.666106Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 4294967295, tablet: 72075186233409547 2025-05-29T15:26:39.666446Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1005 2025-05-29T15:26:39.666507Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestModificationResults wait txId: 1006 2025-05-29T15:26:39.667058Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpMkDir MkDir { Name: "B" } } TxId: 1006 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2025-05-29T15:26:39.667095Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/USER_0/B, operationId: 1006:0, at schemeshard: 72075186233409546 2025-05-29T15:26:39.667117Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72075186233409546, LocalPathId: 1], parent name: MyRoot/USER_0, child name: B, child id: [OwnerId: 72075186233409546, LocalPathId: 3], at schemeshard: 72075186233409546 2025-05-29T15:26:39.667128Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 0 2025-05-29T15:26:39.667139Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, 
opId: 1006:1, propose status:StatusAccepted, reason: , at schemeshard: 72075186233409546 2025-05-29T15:26:39.667181Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-05-29T15:26:39.667190Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 1 2025-05-29T15:26:39.667579Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1006, response: Status: StatusAccepted TxId: 1006 SchemeshardId: 72075186233409546 PathId: 3, at schemeshard: 72075186233409546 2025-05-29T15:26:39.667606Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1006, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/USER_0/B 2025-05-29T15:26:39.667640Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-05-29T15:26:39.667646Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 1006, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-05-29T15:26:39.667670Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 1006, path id: [OwnerId: 72075186233409546, LocalPathId: 3] 2025-05-29T15:26:39.667683Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-05-29T15:26:39.667688Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [96:543:2476], at schemeshard: 72075186233409546, txId: 1006, path id: 1 2025-05-29T15:26:39.667693Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [96:543:2476], at schemeshard: 72075186233409546, txId: 1006, path id: 3 2025-05-29T15:26:39.667735Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72075186233409546 2025-05-29T15:26:39.667742Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:63: MkDir::TPropose operationId# 1006:0 ProgressState, at schemeshard: 72075186233409546 2025-05-29T15:26:39.667750Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1006 ready parts: 1/1 2025-05-29T15:26:39.667771Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72075186233409547 message:Transaction { AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 1006 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409547 2025-05-29T15:26:39.667948Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 1006 2025-05-29T15:26:39.667961Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard 
Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 1006 2025-05-29T15:26:39.667966Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 1006 2025-05-29T15:26:39.667970Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 1006, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 6 2025-05-29T15:26:39.667975Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2025-05-29T15:26:39.668182Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72075186233409546, cookie: 1006 2025-05-29T15:26:39.668196Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72075186233409546, cookie: 1006 2025-05-29T15:26:39.668201Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 1006 2025-05-29T15:26:39.668206Z node 96 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 1006, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], version: 2 2025-05-29T15:26:39.668210Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 2 2025-05-29T15:26:39.668221Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is published: true 2025-05-29T15:26:39.668513Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1006:4294967295 from tablet: 72075186233409546 to tablet: 72075186233409547 cookie: 0:1006 msg type: 269090816 2025-05-29T15:26:39.668537Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72075186233409547 2025-05-29T15:26:39.668683Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1006 2025-05-29T15:26:39.668928Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestModificationResults wait txId: 1007 2025-05-29T15:26:39.669459Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpMkDir MkDir { Name: "C" } } TxId: 1007 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2025-05-29T15:26:39.669492Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/USER_0/C, operationId: 1007:0, at schemeshard: 72075186233409546 
2025-05-29T15:26:39.669508Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1007:1, propose status:StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_0/C', error: paths count limit exceeded, limit: 2, paths: 2, delta: 1, source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155, at schemeshard: 72075186233409546 2025-05-29T15:26:39.669917Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1007, response: Status: StatusResourceExhausted Reason: "Check failed: path: \'/MyRoot/USER_0/C\', error: paths count limit exceeded, limit: 2, paths: 2, delta: 1, source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155" TxId: 1007 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-05-29T15:26:39.669941Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1007, database: /MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_0/C', error: paths count limit exceeded, limit: 2, paths: 2, delta: 1, source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155, operation: CREATE DIRECTORY, path: /MyRoot/USER_0/C TestModificationResult got TxId: 1007, wait until txId: 1007 >> Cdc::ResolvedTimestamps [FAIL] >> Cdc::ResolvedTimestampsMultiplePartitions |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> Cdc::RacySplitAndDropTable [FAIL] >> Cdc::RenameTable |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::AlterTtlSettings [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:24:39.127283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:39.127307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:39.127313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:39.127319Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:39.127325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:39.127329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:39.127342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:39.127357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:39.127450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:39.127520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:39.142492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:24:39.142515Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:39.142617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:24:39.151360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:39.151390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:39.151418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:24:39.154345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:39.154419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:39.154536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.154696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:24:39.155325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:39.155366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:39.155603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:24:39.155612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:24:39.155646Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:39.155654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:24:39.155660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:39.155679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:24:39.156967Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:24:39.178702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:24:39.178792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.178854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:24:39.178898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:24:39.178909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.179800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.179830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:24:39.179882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.179900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:24:39.179906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:39.179912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state 
for txid 1:0 2 -> 3 2025-05-29T15:24:39.180491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.180505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:24:39.180511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:39.180849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.180860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:24:39.180867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:39.180874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:39.181657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:39.182083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:39.182123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:24:39.182335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:24:39.182365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:24:39.182384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:24:39.182455Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
5-29T15:26:38.298702Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:26:38.298995Z node 189 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1005;fline=tx_controller.cpp:214;event=finished_tx;tx_id=1005; FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000006 2025-05-29T15:26:38.299598Z node 189 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:38.299610Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:26:38.299655Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:26:38.299688Z node 189 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:38.299694Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [189:208:2209], at schemeshard: 72057594046678944, txId: 1005, path id: 3 2025-05-29T15:26:38.299701Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [189:208:2209], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2025-05-29T15:26:38.299765Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-05-29T15:26:38.299774Z node 189 :FLAT_TX_SCHEMESHARD INFO: alter_table.cpp:199: TAlterColumnTable TProposedWaitParts operationId# 1005:0 ProgressState at tablet: 72057594046678944 2025-05-29T15:26:38.299782Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: alter_table.cpp:222: TAlterColumnTable TProposedWaitParts operationId# 1005:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-05-29T15:26:38.299937Z node 189 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:26:38.299952Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:26:38.299957Z node 189 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:26:38.299962Z node 189 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-05-29T15:26:38.299968Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 
2025-05-29T15:26:38.300122Z node 189 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:26:38.300136Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:26:38.300140Z node 189 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:26:38.300145Z node 189 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-05-29T15:26:38.300150Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:26:38.300163Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2025-05-29T15:26:38.300702Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1005:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-05-29T15:26:38.301409Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:26:38.301521Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:26:38.312350Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6146: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2025-05-29T15:26:38.312373Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409546, partId: 0 2025-05-29T15:26:38.312396Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 FAKE_COORDINATOR: Erasing txId 1005 2025-05-29T15:26:38.313126Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2025-05-29T15:26:38.313169Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-05-29T15:26:38.313189Z node 189 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1005:0 ProgressState 2025-05-29T15:26:38.313210Z node 189 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:26:38.313215Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:26:38.313220Z node 189 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:26:38.313223Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:26:38.313229Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2025-05-29T15:26:38.313242Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [189:362:2339] message: TxId: 1005 2025-05-29T15:26:38.313249Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:26:38.313255Z node 189 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:0 2025-05-29T15:26:38.313259Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:0 2025-05-29T15:26:38.313292Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:26:38.313932Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-05-29T15:26:38.313946Z node 189 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [189:507:2476] TestWaitNotification: OK eventTxId 1005 2025-05-29T15:26:38.314086Z node 189 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:26:38.314161Z node 189 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 83us result status StatusSuccess 2025-05-29T15:26:38.314282Z node 189 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } TtlSettings { Disabled { } Version: 3 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 3 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpNotNullColumns::AlterDropNotNullColumn >> TTablesWithReboots::AlterTableSchemaWithReboots >> TTablesWithReboots::AlterTableConfigWithReboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] Test command err: 2025-05-29T15:26:09.171898Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:26:09.172667Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:09.172732Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:26:09.172912Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:71:2074] ControllerId# 72057594037932033 2025-05-29T15:26:09.172919Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:26:09.172947Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:26:09.172967Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:26:09.174312Z node 2 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:26:09.175374Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:26:09.175983Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } 
AvailabilityDomains: 0 } 2025-05-29T15:26:09.176028Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-05-29T15:26:09.176192Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:26:09.176445Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-05-29T15:26:09.176458Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:26:09.176639Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:85:2077] ControllerId# 72057594037932033 2025-05-29T15:26:09.176645Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:26:09.176663Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:26:09.176689Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:26:09.178118Z node 1 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:26:09.180883Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:26:09.180899Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:294: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:26:09.181242Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:84:2076] Create Queue# [1:93:2082] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.181274Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:84:2076] Create Queue# [1:94:2083] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.181305Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:84:2076] Create Queue# [1:95:2084] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.181339Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:84:2076] Create Queue# [1:96:2085] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.181367Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:84:2076] Create Queue# [1:97:2086] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.181418Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:84:2076] Create Queue# [1:98:2087] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.181444Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:84:2076] Create Queue# [1:99:2088] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.181450Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:26:09.181472Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037932033] ::Bootstrap [1:85:2077] 2025-05-29T15:26:09.181477Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037932033] lookup [1:85:2077] 2025-05-29T15:26:09.181486Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:26:09.181494Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:26:09.181666Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 
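The warden/proxy lines around here show the same bootstrap shape on every node: the group proxy creates one queue per target disk ("Create Queue# ... targetNodeId# 1"), enters an establishing-sessions state ("SetStateEstablishingSessions"), and only becomes usable once all sessions are up. The sketch below models that state machine in plain C++; `GroupProxy`, `ProxyState` and `OnQueueConnected` are hypothetical names used for illustration, not the real BS_PROXY interfaces.

```cpp
// Schematic model of the proxy session bring-up in the log. Not YDB code.
#include <cstddef>
#include <iostream>
#include <vector>

enum class ProxyState { Unconfigured, EstablishingSessions, Working };

class GroupProxy {
public:
    explicit GroupProxy(std::size_t queueCount) {
        for (std::size_t i = 0; i < queueCount; ++i) {
            Connected.push_back(false);
            std::cout << "Create Queue# " << i << "\n";
        }
        State = ProxyState::EstablishingSessions;
        std::cout << "SetStateEstablishingSessions\n";
    }

    // Called when one queue's session to its target disk comes up.
    void OnQueueConnected(std::size_t idx) {
        Connected.at(idx) = true;
        for (bool c : Connected) {
            if (!c) {
                return;  // still waiting for the remaining sessions
            }
        }
        State = ProxyState::Working;
        std::cout << "all sessions up, proxy is working\n";
    }

    ProxyState State = ProxyState::Unconfigured;

private:
    std::vector<bool> Connected;
};

int main() {
    GroupProxy proxy(7);  // the log shows 7 queues created per group proxy
    for (std::size_t i = 0; i < 7; ++i) {
        proxy.OnQueueConnected(i);
    }
}
```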
2025-05-29T15:26:09.181743Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:26:09.181749Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:294: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:26:09.182067Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:70:2073] Create Queue# [2:104:2079] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.182099Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:70:2073] Create Queue# [2:105:2080] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.182142Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:70:2073] Create Queue# [2:106:2081] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.182171Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:70:2073] Create Queue# [2:107:2082] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.182203Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:70:2073] Create Queue# [2:108:2083] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.182232Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:70:2073] Create Queue# [2:109:2084] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.182265Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:70:2073] Create Queue# [2:110:2085] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.182270Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:26:09.182281Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037932033] ::Bootstrap [2:71:2074] 2025-05-29T15:26:09.182285Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037932033] lookup [2:71:2074] 2025-05-29T15:26:09.182293Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:26:09.182299Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:26:09.182315Z node 2 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:26:09.182413Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:26:09.182437Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:26:09.184121Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:09.184154Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:26:09.184310Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:118:2074] ControllerId# 72057594037932033 2025-05-29T15:26:09.184315Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:26:09.184328Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:26:09.184344Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} 
StartRequestReportingThrottler 2025-05-29T15:26:09.184388Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:26:09.184396Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:26:09.184410Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:26:09.184447Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:09.227438Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:85:2077] 2025-05-29T15:26:09.227502Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:09.227509Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:26:09.227809Z node 1 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:26:09.227859Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:09.227905Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [2:71:2074] 2025-05-29T15:26:09.227914Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:09.227919Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:26:09.228681Z node 2 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-05-29T15:26:09.228723Z node 2 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:26:09.228728Z node 2 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:26:09.228770Z node 2 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:127:2087] 2025-05-29T15:26:09.230529Z node 3 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:26:09.230578Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:26:09.230586Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:294: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:26:09.231145Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:117:2073] Create Queue# [3:129:2079] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.231187Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:117:2073] Create Queue# [3:130:2080] targetNo ... 
are# 0.998955} 2025-05-29T15:26:40.761710Z node 61 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2025-05-29T15:26:40.761726Z node 61 :HIVE DEBUG: tx__create_tablet.cpp:503: HIVE#72057594037927937 THive::TTxCreateTablet::Complete (72057594037927937,0) TabletId: 72075186224037888 SideEffects: {Notifications: 0x10040201 [61:263:2259] {EvCreateTabletReply Status: OK Owner: 72057594037927937 OwnerIdx: 0 TabletID: 72075186224037888 Origin: 72057594037927937}} 2025-05-29T15:26:40.761796Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037927937] ::Bootstrap [61:311:2293] 2025-05-29T15:26:40.761801Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037927937] lookup [61:311:2293] 2025-05-29T15:26:40.761813Z node 61 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:40.761821Z node 61 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 61 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [61:267:2261] 2025-05-29T15:26:40.761832Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037927937] queue send [61:311:2293] 2025-05-29T15:26:40.761838Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:411: TClient[72057594037927937] received pending shutdown [61:311:2293] 2025-05-29T15:26:40.761845Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:190: TClient[72057594037927937] forward result local node, try to connect [61:311:2293] 2025-05-29T15:26:40.761849Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72057594037927937]::SendEvent [61:311:2293] 2025-05-29T15:26:40.761861Z node 61 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [61:311:2293] 2025-05-29T15:26:40.761874Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72057594037927937] connected with status OK role: Leader [61:311:2293] 2025-05-29T15:26:40.761879Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72057594037927937] send queued [61:311:2293] 2025-05-29T15:26:40.761883Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:629: TClient[72057594037927937] push event to server [61:311:2293] 2025-05-29T15:26:40.761887Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:332: TClient[72057594037927937] shutdown pipe due to pending shutdown request [61:311:2293] 2025-05-29T15:26:40.761891Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:505: TClient[72057594037927937] notify reset [61:311:2293] 2025-05-29T15:26:40.761897Z node 61 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [61:310:2292] EventType# 268697621 2025-05-29T15:26:40.761918Z node 61 :HIVE TRACE: hive_impl.cpp:114: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([61:311:2293]) [61:312:2294] 2025-05-29T15:26:40.761927Z node 61 :HIVE DEBUG: hive_impl.cpp:894: HIVE#72057594037927937 THive::Handle::TEvGetTabletStorageInfo TabletId=72075186224037888 2025-05-29T15:26:40.761957Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037927937] ::Bootstrap [61:314:2296] 2025-05-29T15:26:40.761961Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037927937] lookup [61:314:2296] 2025-05-29T15:26:40.761966Z node 61 :PIPE_CLIENT DEBUG: 
tablet_pipe_client.cpp:149: TClient[72057594037927937] queue send [61:314:2296] 2025-05-29T15:26:40.761971Z node 61 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:40.761976Z node 61 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 61 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [61:267:2261] 2025-05-29T15:26:40.761982Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:411: TClient[72057594037927937] received pending shutdown [61:314:2296] 2025-05-29T15:26:40.761987Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:190: TClient[72057594037927937] forward result local node, try to connect [61:314:2296] 2025-05-29T15:26:40.761991Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72057594037927937]::SendEvent [61:314:2296] 2025-05-29T15:26:40.762000Z node 61 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [61:314:2296] 2025-05-29T15:26:40.762011Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72057594037927937] connected with status OK role: Leader [61:314:2296] 2025-05-29T15:26:40.762015Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72057594037927937] send queued [61:314:2296] 2025-05-29T15:26:40.762019Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:629: TClient[72057594037927937] push event to server [61:314:2296] 2025-05-29T15:26:40.762024Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:332: TClient[72057594037927937] shutdown pipe due to pending shutdown request [61:314:2296] 2025-05-29T15:26:40.762028Z node 61 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:505: TClient[72057594037927937] notify reset [61:314:2296] 2025-05-29T15:26:40.762033Z node 61 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [61:313:2295] EventType# 268697615 2025-05-29T15:26:40.762044Z node 61 :HIVE TRACE: hive_impl.cpp:114: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([61:314:2296]) [61:315:2297] 2025-05-29T15:26:40.762055Z node 61 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxDeleteTablet} queued, type NKikimr::NHive::TTxDeleteTablet 2025-05-29T15:26:40.762060Z node 61 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxDeleteTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:26:40.762075Z node 61 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037927937 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594037927937 ShardLocalIdx: 0 TxId_Deprecated: 0 2025-05-29T15:26:40.762082Z node 61 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037927937 THive::TTxDeleteTablet::Execute Tablet 72075186224037888 2025-05-29T15:26:40.762103Z node 61 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037927937 Tablet(Dummy.72075186224037888.Leader.0) VolatileState: Unknown -> Stopped 2025-05-29T15:26:40.762115Z node 61 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037927937 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037927937 TxId_Deprecated: 0 ShardOwnerId: 72057594037927937 ShardLocalIdx: 0 2025-05-29T15:26:40.762127Z node 61 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxDeleteTablet} hope 1 -> done Change{5, redo 102b alter 0b annex 0, ~{ 1 } -{ }, 0 
gb} 2025-05-29T15:26:40.762133Z node 61 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxDeleteTablet} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:26:40.762155Z node 61 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} Tx{6, NKikimr::NHive::TTxDeleteTabletResult} queued, type NKikimr::NHive::TTxDeleteTabletResult 2025-05-29T15:26:40.762158Z node 61 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} Tx{6, NKikimr::NHive::TTxDeleteTabletResult} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:26:40.762163Z node 61 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037927937 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2025-05-29T15:26:40.762193Z node 61 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} Tx{6, NKikimr::NHive::TTxDeleteTabletResult} hope 1 -> done Change{6, redo 106b alter 0b annex 0, ~{ 16, 1 } -{ }, 0 gb} 2025-05-29T15:26:40.762198Z node 61 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} Tx{6, NKikimr::NHive::TTxDeleteTabletResult} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:26:40.772492Z node 61 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [185eac4b9c06d110] bootstrap ActorId# [61:317:2299] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:5:0:0:157:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-05-29T15:26:40.772543Z node 61 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [185eac4b9c06d110] Id# [72057594037927937:2:5:0:0:157:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:26:40.772551Z node 61 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [185eac4b9c06d110] restore Id# [72057594037927937:2:5:0:0:157:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:26:40.772561Z node 61 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:5:0:0:157:1] Marker# BPG33 2025-05-29T15:26:40.772569Z node 61 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [185eac4b9c06d110] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:5:0:0:157:1] Marker# BPG32 2025-05-29T15:26:40.772601Z node 61 :BS_PROXY DEBUG: group_sessions.h:165: Send to queueActorId# [61:35:2079] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:5:0:0:157:1] FDS# 157 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-05-29T15:26:40.772961Z node 61 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037927937:2:5:0:0:157:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 20 } Cost# 81236 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 21 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-05-29T15:26:40.772984Z node 61 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [185eac4b9c06d110] Result# TEvPutResult {Id# [72057594037927937:2:5:0:0:157:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-05-29T15:26:40.772992Z node 61 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [185eac4b9c06d110] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:5:0:0:157:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:26:40.773015Z node 61 :BS_PROXY_PUT DEBUG: 
{BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.146 sample PartId# [72057594037927937:2:5:0:0:157:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 61 } TEvVPutResult{ TimestampMs# 0.511 VDiskId# [0:1:0:0:0] NodeId# 61 Status# OK } ] } 2025-05-29T15:26:40.773040Z node 61 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594037927937:2:5:0:0:157:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-05-29T15:26:40.773064Z node 61 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} commited cookie 1 for step 5 2025-05-29T15:26:40.773087Z node 61 :HIVE DEBUG: tx__delete_tablet.cpp:136: HIVE#72057594037927937 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10040206 [61:313:2295] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037927937 TxId_Deprecated: 0 ShardOwnerId: 72057594037927937 ShardLocalIdx: 0} 2025-05-29T15:26:40.773109Z node 61 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037927937 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {Notifications: 0x1004020B [61:310:2292] NKikimrHive.TEvGetTabletStorageInfoResult TabletID: 72075186224037888 Status: ERROR StatusMessage: "Tablet deleted"} >> THiveTest::TestCreateTabletReboots [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk >> Cdc::InitialScanRacyCompleteAndRequest [GOOD] >> Cdc::InitialScanAndLimits >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk [GOOD] >> THiveTest::TestFollowerCompatability1 >> TTablesWithReboots::Fake [GOOD] >> KqpNotNullColumns::AlterDropNotNullColumn [GOOD] >> KqpNotNullColumns::AlterAddIndex |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CopyWithRebootsAtCommit >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace [FAIL] >> Cdc::AreJsonsEqualReturnsTrueOnEqual [GOOD] >> Cdc::AreJsonsEqualReturnsFalseOnDifferent [GOOD] >> Cdc::AreJsonsEqualFailsOnWildcardInArray [GOOD] >> Cdc::AlterViaTopicService >> TTablesWithReboots::DropTableWithReboots |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::Fake [GOOD] >> Cdc::RenameTable [FAIL] >> Cdc::InitialScan_WithTopicSchemeTx >> Cdc::ResolvedTimestampsMultiplePartitions [GOOD] >> Cdc::ResolvedTimestampsVolatileOutOfOrder |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> Cdc::AlterViaTopicService [GOOD] >> Cdc::Alter >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] >> Cdc::InitialScanAndLimits [FAIL] >> Cdc::InitialScanComplete >> TExportToS3Tests::CancelledExportEndTime >> Cdc::Alter [GOOD] >> Cdc::AddColumn |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::SimpleDropTableWithReboots2 >> THiveTest::TestHiveBalancerHighUsageAndColumnShards [GOOD] >> THiveTest::TestHiveBalancerOneTabletHighUsage >> Cdc::InitialScan_WithTopicSchemeTx [FAIL] >> Cdc::InitialScan_TopicAutoPartitioning >> TExportToS3Tests::CancelledExportEndTime [GOOD] |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> THiveTest::TestFollowerCompatability1 [GOOD] >> THiveTest::TestFollowerCompatability2 >> Cdc::InitialScanComplete [FAIL] >> 
Cdc::InitialScanEnqueuesZeroRecords |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> THiveTest::TestLockTabletExecutionRebootTimeout [GOOD] >> THiveTest::TestLockTabletExecutionReconnect >> Cdc::AddColumn [FAIL] >> Cdc::AddColumn_TopicAutoPartitioning >> Cdc::ResolvedTimestampsVolatileOutOfOrder [FAIL] >> Cdc::SequentialSplitMerge ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelledExportEndTime [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:110:2141] 2025-05-29T15:26:29.822485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:29.822512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:29.822518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:29.822524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:29.822530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:29.822535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:29.822546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:29.822559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:29.822658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:29.822734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:29.837050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:26:29.837070Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:29.853691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:29.853791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:29.853841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:29.860417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:29.860488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:29.860600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:29.860649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:29.861094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:29.861147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:29.861442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:29.861451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:29.861459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:29.861469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:29.861475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:29.861512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.862712Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:241:2058] recipient: [1:15:2062] 2025-05-29T15:26:29.905579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:29.905678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.905758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:29.905805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:29.905817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.911147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:29.911211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:29.911312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.911326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:29.911332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:29.911340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:29.913632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.913653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:29.913661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:29.914089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.914100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.914106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:29.914114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:29.914842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:29.923017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:29.923073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:29.923284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:29.923317Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:29.923327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:29.923401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:26:29.923409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:29.923450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:26:29.923463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:26:29.931109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:29.931126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:29.931192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
58:0 2025-05-29T15:26:44.199656Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:26:44.199662Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:26:44.200219Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-05-29T15:26:44.200239Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710758 2025-05-29T15:26:44.200705Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2025-05-29T15:26:44.212080Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:26:44.212099Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:26:44.213055Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/export-102" OperationType: ESchemeOpBackup Backup { TableName: "0" NumberOfRetries: 0 S3Settings { Endpoint: "localhost:8194" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" StorageClass: STORAGE_CLASS_UNSPECIFIED UseVirtualAddressing: true } Table { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } NeedToBill: true SnapshotStep: 0 SnapshotTxId: 0 EnableChecksums: false EnablePermissions: false } Internal: true } TxId: 281474976710759 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:44.213135Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_backup_restore_common.h:586: TBackup Propose, path: /MyRoot/export-102/0, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:26:44.213169Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:26:44.213290Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976710759:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:44.213299Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpBackup, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:26:44.213591Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:62: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:26:44.213600Z node 4 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:26:44.214000Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976710759, response: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:44.214046Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710759, database: /MyRoot, subject: , status: StatusAccepted, operation: BACKUP TABLE, path: /MyRoot/export-102/0 2025-05-29T15:26:44.214095Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6697: Handle: TEvModifySchemeTransactionResult: txId# 281474976710759, status# StatusAccepted 2025-05-29T15:26:44.214102Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6699: Message: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944 2025-05-29T15:26:44.214161Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:26:44.214171Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 281474976710759:0 ProgressState, operation type: TxBackup, at tablet# 72057594046678944 2025-05-29T15:26:44.214178Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 281474976710759:0 ProgressState no shards to create, do next state 2025-05-29T15:26:44.214183Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976710759:0 2 -> 3 2025-05-29T15:26:44.214775Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:66: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710759 TxId: 102 2025-05-29T15:26:44.214785Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_cancel_tx.cpp:37: Execute cancel tx: opId# 102:0, target opId# 281474976710759:0 2025-05-29T15:26:44.214884Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:26:44.214891Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:58: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:26:44.214915Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_backup.cpp:41: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-05-29T15:26:44.215521Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:83: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2025-05-29T15:26:44.215550Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:26:44.215555Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:58: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:26:44.215569Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_backup.cpp:41: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-05-29T15:26:44.215649Z node 4 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6784: Handle: TEvCancelTxResult: Cookie: 102, at schemeshard: 72057594046678944 2025-05-29T15:26:44.215666Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6786: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710759 TxId: 102 2025-05-29T15:26:44.215766Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-05-29T15:26:44.215795Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 0, tablet: 72075186233409547 2025-05-29T15:26:44.216340Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-05-29T15:26:44.216439Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:26:44.216448Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:558:2515] TestWaitNotification: OK eventTxId 102 >> TTablesWithReboots::CreateTableWithReboots >> Cdc::InitialScan_TopicAutoPartitioning [FAIL] >> Cdc::InitialScanUpdatedRows >> THiveTest::TestFollowerCompatability2 [GOOD] >> THiveTest::TestCreateTabletChangeToExternal >> KqpRanges::ScanKeyPrefix >> TTablesWithReboots::SimultaneousDropForceDrop >> Cdc::InitialScanEnqueuesZeroRecords [FAIL] >> Cdc::InitialScanRacyProgressAndDrop >> THiveTest::TestCreateTabletChangeToExternal [GOOD] >> THiveTest::TestExternalBoot |68.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |68.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |68.0%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... 
results_accumulator.log} |68.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots >> THiveTest::TestExternalBoot [GOOD] >> THiveTest::TestExternalBootWhenLocked >> TTablesWithReboots::CreateWithRebootsAtCommit >> Cdc::AddColumn_TopicAutoPartitioning [FAIL] >> Cdc::AddIndex >> THiveTest::TestLockTabletExecutionReconnect [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect >> Cdc::InitialScanUpdatedRows [FAIL] >> Cdc::MustNotLoseSchemaSnapshot >> Cdc::SequentialSplitMerge [FAIL] >> Cdc::ShouldBreakLocksOnConcurrentSchemeTx >> THiveTest::TestExternalBootWhenLocked [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::AlterAddIndex Test command err: Trying to start YDB, gRPC: 10000, MsgBus: 30085 2025-05-29T15:26:11.339145Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889353584828158:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:11.339163Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012b6/r3tmp/tmpEJ5Omc/pdisk_1.dat 2025-05-29T15:26:11.442995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:11.443023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:11.443453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10000, node 1 2025-05-29T15:26:11.453900Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:11.455489Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:26:11.455499Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:26:11.490925Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:11.490941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:11.490943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:11.490985Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30085 TClient is connected to server localhost:30085 WaitRootIsUp 'Root'... 
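The unittest harness output that follows shows the usual bring-up sequence for these KQP tests: start the test server, then poll with TClient::Ls until the root path is visible ("WaitRootIsUp 'Root'..." ... "WaitRootIsUp 'Root' success.") before issuing schema operations. A rough, self-contained sketch of that polling idiom is below; `FakeLs` is a hypothetical stub standing in for the real client call, which the actual tests perform through the YDB test client.

```cpp
// Sketch of the WaitRootIsUp polling idiom seen in the harness output.
#include <iostream>
#include <string>

// Stand-in for TClient::Ls: pretend the root becomes visible on the 3rd poll.
bool FakeLs(const std::string& path, int attempt) {
    return path == "Root" && attempt >= 3;
}

bool WaitRootIsUp(const std::string& root, int maxAttempts = 10) {
    std::cout << "WaitRootIsUp '" << root << "'...\n";
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        if (FakeLs(root, attempt)) {
            std::cout << "WaitRootIsUp '" << root << "' success.\n";
            return true;
        }
        // A real implementation would sleep or back off between polls.
    }
    return false;
}

int main() {
    if (!WaitRootIsUp("Root")) {
        std::cerr << "root did not come up\n";
        return 1;
    }
}
```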
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:26:11.672873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:11.684312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:26:11.701674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:11.768476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:26:11.798183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:11.815309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-05-29T15:26:11.923337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889353584829759:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:11.923361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:11.973119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:26:11.982833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:26:11.998853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.014037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.037193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.060433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.092400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.129068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889357879797706:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:12.129097Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:12.129211Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889357879797711:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:12.130266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:26:12.133489Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889357879797713:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:26:12.229544Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889357879797764:3399] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:26:12.528115Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889357879797773:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 VERIFY failed (2025-05-29T15:26:12.532468Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:26:12.531271Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzhjODExMzEtM2I1YzAxYWUtNDEwMDA5MjUtZGVmZDRhNTc=, ActorId: [1:7509889353584829732:2400], ActorState: ExecuteState, TraceId: 01jwead8n01rjrde1zd2qrgzdy, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:26:16.340886Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7509889353584828158:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:16.340942Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x161A2C15 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16199C16 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x1633B0E6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D46F92 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x287C0C82 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x287E1B1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x287E1B1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x287E1B1C 8. /-S/util/thread/pool.h:71: Process @ 0x287E1B1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x161AA599 10. /-S/util/thread/factory.h:15: Execute @ 0x161A8F89 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x161A8F89 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x161A43FC 13. ??:0: ?? @ 0x7F8E3FD78AC2 14. ??:0: ?? @ 0x7F8E3FE0A84F Trying to start YDB, gRPC: 1289, MsgBus: 3244 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012b6/r3tmp/tmpj0TxK2/pdisk_1.dat 2025-05-29T15:26:19.882877Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initializatio ... nsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:26:42.146810Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889483765502218:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:42.146849Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:42.152884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21124, MsgBus: 16440 2025-05-29T15:26:42.363873Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889485299364973:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:42.364114Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012b6/r3tmp/tmpGA7lUp/pdisk_1.dat 2025-05-29T15:26:42.381592Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21124, node 2 2025-05-29T15:26:42.406973Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:42.406988Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:42.406990Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:42.407053Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16440 TClient is connected to server localhost:16440 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:26:42.468734Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:42.468765Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:42.469185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:26:42.469645Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:26:42.470997Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:26:42.474247Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:42.491197Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:42.510950Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:42.527726Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:42.724900Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889485299366556:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:42.724926Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:42.738973Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:26:42.748382Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:26:42.763975Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:26:42.781790Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:26:42.796788Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:26:42.816647Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:26:42.834098Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:26:42.848187Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889485299367210:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:42.848213Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:42.849651Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889485299367215:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:42.850466Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:26:42.856327Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509889485299367217:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:26:42.954008Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509889485299367268:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } VERIFY failed (2025-05-29T15:26:43.068653Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:26:43.067593Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509889485299367284:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:26:43.067718Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=YTkxNjEwZTUtZjMyMzdhNDEtODM0ZmE4NDQtZDliODliOWQ=, ActorId: [2:7509889485299366538:2401], ActorState: ExecuteState, TraceId: 01jweae6mz1avn97ddhkd2w0ay, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x161A2C15 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16199C16 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x1633B0E6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D46F92 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x287C0C82 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x287E1B1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x287E1B1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x287E1B1C 8. /-S/util/thread/pool.h:71: Process @ 0x287E1B1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x161AA599 10. /-S/util/thread/factory.h:15: Execute @ 0x161A8F89 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x161A8F89 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x161A43FC 13. ??:0: ?? @ 0x7FF7E213AAC2 14. ??:0: ?? @ 0x7FF7E21CC84F >> Cdc::InitialScanRacyProgressAndDrop [FAIL] >> Cdc::EnqueueRequestProcessSend >> THiveTest::TestLockTabletExecutionRebootReconnect [GOOD] >> THiveTest::TestLockTabletExecutionReconnectExpire >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-true [GOOD] >> THiveTest::TestHiveBalancerOneTabletHighUsage [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes >> Cdc::AddIndex [FAIL] >> Cdc::AddStream |68.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> Cdc::MustNotLoseSchemaSnapshot [FAIL] >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx >> Cdc::ShouldBreakLocksOnConcurrentSchemeTx [FAIL] >> Cdc::ResolvedTimestampsContinueAfterMerge >> Cdc::EnqueueRequestProcessSend [FAIL] >> Cdc::InitialScanAndResolvedTimestamps >> THiveTest::TestLockTabletExecutionReconnectExpire [GOOD] >> THiveTest::TestLockTabletExecutionStealLock ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootWhenLocked [GOOD] Test command err: 2025-05-29T15:26:09.758207Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:26:09.771288Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:09.771377Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:26:09.771584Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:71:2074] ControllerId# 72057594037932033 2025-05-29T15:26:09.771591Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:26:09.771627Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} 
StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:26:09.771651Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:26:09.773264Z node 2 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:26:09.773410Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:26:09.774046Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:09.774098Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-05-29T15:26:09.774289Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:26:09.774587Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-05-29T15:26:09.774604Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:26:09.774824Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:85:2077] ControllerId# 72057594037932033 2025-05-29T15:26:09.774830Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:26:09.774854Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:26:09.774878Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:26:09.776356Z node 1 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:26:09.785091Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:26:09.785118Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:294: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:26:09.785543Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:84:2076] Create Queue# [1:93:2082] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.785588Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:84:2076] Create Queue# [1:94:2083] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.785624Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:84:2076] Create Queue# [1:95:2084] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.785671Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:84:2076] Create Queue# [1:96:2085] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.785723Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:84:2076] Create Queue# [1:97:2086] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.785774Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:84:2076] Create Queue# [1:98:2087] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.785805Z node 1 :BS_PROXY DEBUG: 
group_sessions.cpp:83: Group# 0 Actor# [1:84:2076] Create Queue# [1:99:2088] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.785812Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:26:09.785840Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037932033] ::Bootstrap [1:85:2077] 2025-05-29T15:26:09.785847Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037932033] lookup [1:85:2077] 2025-05-29T15:26:09.785857Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:26:09.785867Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:26:09.786110Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:26:09.786195Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:26:09.786204Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:294: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:26:09.790905Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:70:2073] Create Queue# [2:104:2079] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.790974Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:70:2073] Create Queue# [2:105:2080] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.791013Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:70:2073] Create Queue# [2:106:2081] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.791044Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:70:2073] Create Queue# [2:107:2082] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.791085Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:70:2073] Create Queue# [2:108:2083] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.791117Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:70:2073] Create Queue# [2:109:2084] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.791154Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:70:2073] Create Queue# [2:110:2085] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.791161Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:26:09.791185Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037932033] ::Bootstrap [2:71:2074] 2025-05-29T15:26:09.791191Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037932033] lookup [2:71:2074] 2025-05-29T15:26:09.791203Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:26:09.791214Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:26:09.791248Z node 2 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:26:09.791388Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:26:09.791419Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:26:09.792163Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 
VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:09.792215Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:26:09.792467Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:118:2074] ControllerId# 72057594037932033 2025-05-29T15:26:09.792474Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:26:09.792494Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:26:09.792516Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:26:09.792579Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:26:09.792589Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:26:09.792610Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:26:09.792667Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:09.815821Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:85:2077] 2025-05-29T15:26:09.815904Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:09.815915Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:26:09.816312Z node 1 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:26:09.816371Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:09.816422Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [2:71:2074] 2025-05-29T15:26:09.816432Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:09.816438Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:26:09.817453Z node 2 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-05-29T15:26:09.817503Z node 2 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:26:09.817511Z node 2 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:26:09.817578Z node 2 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:127:2087] 2025-05-29T15:26:09.819637Z node 3 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:26:09.819696Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:26:09.819705Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:294: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:26:09.820151Z node 3 :BS_PROXY DEBUG: 
group_sessions.cpp:83: Group# 0 Actor# [3:117:2073] Create Queue# [3:129:2079] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:09.820194Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:117:2073] Create Queue# [3:130:2080] targetNo ... 4304b of static, Memory{0 dyn 0} 2025-05-29T15:26:46.867821Z node 30 :HIVE DEBUG: tx__process_boot_queue.cpp:26: HIVE#72057594037927937 THive::TTxProcessBootQueue()::Complete 2025-05-29T15:26:46.867887Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72075186224037888] ::Bootstrap [30:451:2358] 2025-05-29T15:26:46.867892Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72075186224037888] lookup [30:451:2358] 2025-05-29T15:26:46.867907Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:46.867918Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 30 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [30:371:2299] 2025-05-29T15:26:46.867929Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:190: TClient[72075186224037888] forward result local node, try to connect [30:451:2358] 2025-05-29T15:26:46.867934Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72075186224037888]::SendEvent [30:451:2358] 2025-05-29T15:26:46.867946Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:349: TClient[72075186224037888] connect request undelivered [30:451:2358] 2025-05-29T15:26:46.867951Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:498: TClient[72075186224037888] connect failed [30:451:2358] 2025-05-29T15:26:46.867962Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:536: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2025-05-29T15:26:46.867985Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:26:46.868002Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-05-29T15:26:46.868010Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-05-29T15:26:46.868016Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-05-29T15:26:46.868025Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [30:371:2299] CurrentLeaderTablet: [30:386:2311] CurrentGeneration: 1 CurrentStep: 0} 2025-05-29T15:26:46.868036Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [30:371:2299] CurrentLeaderTablet: [30:386:2311] CurrentGeneration: 1 CurrentStep: 0} 2025-05-29T15:26:46.868051Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:610: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [30:371:2299] CurrentLeaderTablet: [30:386:2311] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 
Signature: { Size: 2 Signature: {{[30:1099535971443:0] : 6}, {[30:24343667:0] : 3}}}} 2025-05-29T15:26:46.868068Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:361: DropEntry tabletId: 72075186224037888 followers: 0 2025-05-29T15:26:46.868101Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037927937] ::Bootstrap [31:453:2093] 2025-05-29T15:26:46.868107Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037927937] lookup [31:453:2093] 2025-05-29T15:26:46.868116Z node 31 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:46.868123Z node 31 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 31 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [30:321:2263] 2025-05-29T15:26:46.868130Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037927937] queue send [31:453:2093] 2025-05-29T15:26:46.868137Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:411: TClient[72057594037927937] received pending shutdown [31:453:2093] 2025-05-29T15:26:46.868144Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:195: TClient[72057594037927937] forward result remote node 30 [31:453:2093] 2025-05-29T15:26:46.868158Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:229: TClient[72057594037927937] remote node connected [31:453:2093] 2025-05-29T15:26:46.868163Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72057594037927937]::SendEvent [31:453:2093] 2025-05-29T15:26:46.868191Z node 30 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [31:453:2093] 2025-05-29T15:26:46.868219Z node 30 :HIVE TRACE: hive_impl.cpp:114: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([31:453:2093]) [30:454:2359] 2025-05-29T15:26:46.868234Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72057594037927937] connected with status OK role: Leader [31:453:2093] 2025-05-29T15:26:46.868239Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72057594037927937] send queued [31:453:2093] 2025-05-29T15:26:46.868243Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:629: TClient[72057594037927937] push event to server [31:453:2093] 2025-05-29T15:26:46.868251Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72057594037927937]::SendEvent [31:453:2093] 2025-05-29T15:26:46.868256Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:332: TClient[72057594037927937] shutdown pipe due to pending shutdown request [31:453:2093] 2025-05-29T15:26:46.868260Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:505: TClient[72057594037927937] notify reset [31:453:2093] 2025-05-29T15:26:46.868296Z node 30 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594037927937] Push Sender# [31:441:2088] EventType# 268697624 2025-05-29T15:26:46.868313Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2025-05-29T15:26:46.868318Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:26:46.868323Z node 30 :HIVE DEBUG: tx__start_tablet.cpp:29: HIVE#72057594037927937 THive::TTxStartTablet::Execute Tablet 
(72075186224037888,0) 2025-05-29T15:26:46.868361Z node 30 :HIVE DEBUG: tx__start_tablet.cpp:70: HIVE#72057594037927937 THive::TTxStartTablet::Execute, Sending TEvBootTablet(Dummy.72075186224037888.Leader.2) to node 31 storage {Version# 1 TabletID# 72075186224037888 TabletType# Dummy Channels# {0:{Channel# 0 Type# none StoragePool# def1 History# {0:{FromGeneration# 0 GroupID# 2147483648 Timestamp# 1970-01-01T00:00:00.059536Z}}, 1:{Channel# 1 Type# none StoragePool# def2 History# {0:{FromGeneration# 0 GroupID# 2147483649 Timestamp# 1970-01-01T00:00:00.059536Z}}, 2:{Channel# 2 Type# none StoragePool# def3 History# {0:{FromGeneration# 0 GroupID# 2147483650 Timestamp# 1970-01-01T00:00:00.059536Z}}} Tenant: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:46.868377Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{13, redo 144b alter 0b annex 0, ~{ 1, 16 } -{ }, 0 gb} 2025-05-29T15:26:46.868383Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:26:46.878688Z node 30 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [db158bc7997c188e] bootstrap ActorId# [30:456:2361] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:127:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-05-29T15:26:46.878734Z node 30 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [db158bc7997c188e] Id# [72057594037927937:2:9:0:0:127:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:26:46.878755Z node 30 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [db158bc7997c188e] restore Id# [72057594037927937:2:9:0:0:127:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:26:46.878764Z node 30 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [db158bc7997c188e] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:127:1] Marker# BPG33 2025-05-29T15:26:46.878770Z node 30 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [db158bc7997c188e] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:127:1] Marker# BPG32 2025-05-29T15:26:46.878801Z node 30 :BS_PROXY DEBUG: group_sessions.h:165: Send to queueActorId# [30:81:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:127:1] FDS# 127 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-05-29T15:26:46.879147Z node 30 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [db158bc7997c188e] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:127:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 24 } Cost# 81000 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 25 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-05-29T15:26:46.879171Z node 30 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [db158bc7997c188e] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:127:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-05-29T15:26:46.879180Z node 30 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [db158bc7997c188e] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:127:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:26:46.879205Z node 
30 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.152 sample PartId# [72057594037927937:2:9:0:0:127:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 30 } TEvVPutResult{ TimestampMs# 0.507 VDiskId# [0:1:0:0:0] NodeId# 30 Status# OK } ] } 2025-05-29T15:26:46.879230Z node 30 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:127:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-05-29T15:26:46.879258Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for step 9 2025-05-29T15:26:46.879322Z node 30 :HIVE DEBUG: tx__start_tablet.cpp:119: HIVE#72057594037927937 THive::TTxStartTablet::Complete Tablet (72075186224037888,0) SideEffects: {Notifications: 0x10080002 [31:441:2088] NKikimrLocal.TEvBootTablet Info { TabletID: 72075186224037888 Channels { Channel: 0 ChannelType: 0 History { FromGeneration: 0 GroupID: 2147483648 } StoragePool: "def1" } Channels { Channel: 1 ChannelType: 0 History { FromGeneration: 0 GroupID: 2147483649 } StoragePool: "def2" } Channels { Channel: 2 ChannelType: 0 History { FromGeneration: 0 GroupID: 2147483650 } StoragePool: "def3" } TabletType: Dummy Version: 1 TenantIdOwner: 72057594046678944 TenantIdLocalId: 1 } SuggestedGeneration: 2 BootMode: BOOT_MODE_LEADER FollowerId: 0} 2025-05-29T15:26:46.879373Z node 30 :HIVE TRACE: hive_impl.cpp:775: HIVE#72057594037927937 Handle TEvInterconnect::TEvNodeConnected (duplicate), NodeId 31 Cookie 0 >> TTablesWithReboots::TwiceRmDirWithReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:25:50.079466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:25:50.079491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:50.079496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 
0.010000s 2025-05-29T15:25:50.079502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:25:50.079513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:25:50.079517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:25:50.079528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:50.079543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:25:50.079641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:25:50.079729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:25:50.095200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:25:50.095224Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:50.095333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:25:50.100010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:25:50.100046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:25:50.100080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:25:50.104511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:25:50.104607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:25:50.104750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:50.105016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:25:50.106079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:50.106134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:25:50.106417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:25:50.106435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-05-29T15:25:50.106472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:25:50.106483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:25:50.106490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:25:50.106514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:25:50.115470Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:25:50.141179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:25:50.141271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.141345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:25:50.141405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:25:50.141418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.146132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:50.146176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:25:50.146244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.146258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:25:50.146265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:25:50.146271Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:25:50.148739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.148760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:25:50.148769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:25:50.149285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.149301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.149308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:50.149316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:25:50.150117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:25:50.150581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:25:50.150626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:25:50.150856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:50.150885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:25:50.150893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:50.150963Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
ator: [144:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-05-29T15:26:47.522548Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2025-05-29T15:26:47.522556Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:787: [72057594046678944] TSyncHive, operationId 1003:1, ProgressState, NeedSyncHive: 0 2025-05-29T15:26:47.522560Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:1 240 -> 240 2025-05-29T15:26:47.522645Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:26:47.522657Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:26:47.522661Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:26:47.522666Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:26:47.522672Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 11 2025-05-29T15:26:47.522686Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/2, is published: true 2025-05-29T15:26:47.524733Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2025-05-29T15:26:47.524750Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:1 ProgressState 2025-05-29T15:26:47.524764Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:1 progress is 2/2 2025-05-29T15:26:47.524769Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 2/2 2025-05-29T15:26:47.524774Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:1 progress is 2/2 2025-05-29T15:26:47.524777Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 2/2 2025-05-29T15:26:47.524782Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/2, is published: true 2025-05-29T15:26:47.524788Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 2/2 2025-05-29T15:26:47.524796Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:26:47.524800Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:26:47.524828Z node 144 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 10 2025-05-29T15:26:47.524837Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:26:47.524841Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:26:47.524856Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2025-05-29T15:26:47.524985Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:26:47.525481Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:26:47.525493Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:26:47.525560Z node 144 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:26:47.525580Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:26:47.525585Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [144:697:2596] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:26:47.525667Z node 144 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:26:47.525705Z node 144 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 49us result status StatusSuccess 2025-05-29T15:26:47.525809Z node 144 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186234409547 Coordinators: 72075186234409548 Coordinators: 72075186234409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409550 Mediators: 72075186234409551 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 
ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:47.525907Z node 144 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2025-05-29T15:26:47.525939Z node 144 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186234409546 describe path "/MyRoot/USER_0" took 35us result status StatusSuccess 2025-05-29T15:26:47.525992Z node 144 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186234409547 Coordinators: 72075186234409548 Coordinators: 72075186234409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409550 Mediators: 72075186234409551 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 2025-05-29T15:26:47.526060Z node 144 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:26:47.526083Z node 144 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 26us result status StatusSuccess 2025-05-29T15:26:47.526140Z node 144 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 
5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |68.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> THiveTest::TestLockTabletExecutionStealLock [GOOD] >> THiveTest::TestLockTabletExecutionLocalGone |68.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |68.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |68.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql >> Cdc::AddStream [FAIL] >> Cdc::AwsRegion >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx4 [GOOD] >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx [FAIL] >> Cdc::ResolvedTimestampForDisplacedUpsert ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::TwiceRmDirWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:26:40.514540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2025-05-29T15:26:40.514560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:40.514566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:40.514571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:40.514577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:40.514581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:40.514590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:40.514604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:40.514706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:40.514789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:40.527741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:40.527757Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:40.527854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:40.530443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:40.530468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:40.530504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:40.533648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:40.533723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:40.533861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:40.534068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:40.535046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:40.535093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:40.535350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:40.535362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:40.535399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:40.535408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:40.535415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:40.535434Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:40.537024Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:40.557210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:40.557278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:40.557336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:40.557391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:40.557403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:40.558200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:40.558236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:40.558292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-05-29T15:26:40.558303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:40.558309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:40.558315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:40.558879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:40.558895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:40.558901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:40.560999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:40.561016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:40.561023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:40.561030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:40.561795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:40.562465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:40.562510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:40.562728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:40.562777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:40.562785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:40.562853Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... parts: 1/1, is published: false 2025-05-29T15:26:49.265532Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:26:49.265537Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:26:49.265540Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:26:49.265549Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:26:49.265554Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:26:49.265559Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-05-29T15:26:49.265563Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-05-29T15:26:49.265977Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:26:49.266023Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:26:49.266091Z node 35 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:49.266098Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:49.266122Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:26:49.266145Z node 35 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:49.266150Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [35:208:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2025-05-29T15:26:49.266155Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [35:208:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 3 FAKE_COORDINATOR: Erasing txId 1003 2025-05-29T15:26:49.266247Z node 35 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:26:49.266259Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:26:49.266264Z node 35 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:26:49.266273Z node 35 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:26:49.266277Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:26:49.266341Z node 35 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:26:49.266352Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:26:49.266356Z node 35 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:26:49.266361Z node 35 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:26:49.266366Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:26:49.266377Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:26:49.266397Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:26:49.266403Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:26:49.266413Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:26:49.266787Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:26:49.267108Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:26:49.267138Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2025-05-29T15:26:49.267194Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:26:49.267202Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 TestWaitNotification wait txId: 1004 2025-05-29T15:26:49.267217Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:26:49.267221Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:26:49.267795Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "Victim" } } TxId: 1004 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:49.267831Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:29: TRmDir Propose, path: /MyRoot/Victim, pathId: 0, opId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:26:49.267851Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1004:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/Victim', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37, at schemeshard: 72057594046678944 2025-05-29T15:26:49.268019Z node 35 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:26:49.268088Z node 35 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:26:49.268441Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1004, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Victim\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37" TxId: 1004 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:49.268472Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1004, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/Victim', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37, operation: DROP DIRECTORY, path: /MyRoot/Victim 2025-05-29T15:26:49.268499Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:26:49.268504Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [35:355:2345] 2025-05-29T15:26:49.268532Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:26:49.268536Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [35:355:2345] TestWaitNotification: OK eventTxId 1003 
TestWaitNotification: OK eventTxId 1004 2025-05-29T15:26:49.268619Z node 35 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Victim" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:26:49.268644Z node 35 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Victim" took 28us result status StatusPathDoesNotExist 2025-05-29T15:26:49.268673Z node 35 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Victim\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Victim" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TTablesWithReboots::ParallelCreateDrop >> Cdc::InitialScanAndResolvedTimestamps [FAIL] |68.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |68.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |68.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx4 [GOOD] Test command err: iteration# 4 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 10 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 16 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 22 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 28 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 34 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 40 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 46 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 52 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 58 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 64 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 70 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 76 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 82 
BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 88 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 94 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 100 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 106 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 112 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 118 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 124 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 130 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 136 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 142 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 148 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 154 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 160 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 166 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 172 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 178 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 184 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 190 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 196 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 202 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 208 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 214 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 220 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 226 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 232 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 238 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 244 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 250 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 256 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 262 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 268 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 274 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 280 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 
iteration# 286 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 292 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 298 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 304 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 310 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 316 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 322 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 328 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 334 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 340 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 346 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 352 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 358 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 364 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 370 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 376 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 382 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 388 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 394 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 400 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 406 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 412 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 418 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 424 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 430 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 436 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 442 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 448 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 454 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 460 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 466 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 472 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 478 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 484 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 iteration# 490 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 496 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 502 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 508 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 514 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 520 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 526 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 532 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 538 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 544 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 550 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 556 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 562 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 568 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 574 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 580 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 586 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 592 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 598 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 604 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 610 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 616 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 622 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 628 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 634 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 640 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 646 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 652 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 658 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 664 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 670 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 676 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 682 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 688 BlobsWritten# 2041 blobsWrittenF ... 
blobsUnwritten# 1218 iteration# 1366 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1372 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1378 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1384 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1390 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1396 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1402 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1408 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1414 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1420 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1426 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1432 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1438 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1444 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1450 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1456 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1462 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1468 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1474 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1480 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1486 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1492 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1498 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1504 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1510 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1516 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1522 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1528 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1534 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1540 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1546 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1552 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1558 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1564 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1570 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1576 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1582 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1588 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1594 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1600 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1606 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1612 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1618 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1624 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1630 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1636 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1642 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1648 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1654 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1660 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1666 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1672 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1678 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1684 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1690 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1696 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1702 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1708 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1714 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1720 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1726 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1732 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1738 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1744 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1750 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1756 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1762 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 [iteration# 1768 through iteration# 2038, step 6: all 46 records identical, BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218; 157 + 666 + 1218 = 2041] >> TTablesWithReboots::ChainedCopyTableAndDropWithReboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpRanges::ScanKeyPrefix Test command err: Trying to start YDB, gRPC: 29652, MsgBus: 26795 2025-05-29T15:26:11.859680Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889350897720403:2143];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:11.868162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00126c/r3tmp/tmpPZ8IOt/pdisk_1.dat 2025-05-29T15:26:12.007732Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:12.007838Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889350897720285:2079] 1748532371852870 != 1748532371852873 TServer::EnableGrpc on GrpcPort 29652, node 1 2025-05-29T15:26:12.061209Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:12.061257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:12.064675Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:26:12.072239Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:12.072251Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:12.072253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:12.072296Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26795 TClient is connected to server
localhost:26795 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:26:12.152021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:12.159246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:26:12.171845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:12.260338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:12.330405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:26:12.350820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.600216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889355192689230:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:12.600275Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:12.658764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.676061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.691096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.707103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.719906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.740782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.801201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:26:12.879730Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889355192689897:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:12.879768Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:12.880062Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889355192689902:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:12.881177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:26:12.885162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:26:12.885262Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889355192689904:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:26:12.948701Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889355192689955:3400] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:26:13.128413Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889355192689964:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:26:13.130537Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjkxNzRkZGQtN2UyZTkzMjUtMmVhYzg0Y2QtZDgyYWM1ZGE=, ActorId: [1:7509889355192689203:2400], ActorState: ExecuteState, TraceId: 01jwead9cfdsbtv7e879cj9j3p, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:26:13.131542Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x161A2C15 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16199C16 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x1633B0E6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D46F92 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x287C0C82 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x287E1B1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x287E1B1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x287E1B1C 8. /-S/util/thread/pool.h:71: Process @ 0x287E1B1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x161AA599 10. /-S/util/thread/factory.h:15: Execute @ 0x161A8F89 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x161A8F89 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x161A43FC 13. ??:0: ?? @ 0x7F0E2AF84AC2 14. ??:0: ?? @ 0x7F0E2B01684F Trying to start YDB, gRPC: 7827, MsgBus: 15078 2025-05-29T15:26:18.491499Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889380278766222:2198];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:18.491610Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00126c/r3tmp/tmpbUh4Wa/pd ... lPanicImpl @ 0x161A2C15 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16199C16 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x1633B0E6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D46F92 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x287C0C82 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x287E1B1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x287E1B1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x287E1B1C 8. /-S/util/thread/pool.h:71: Process @ 0x287E1B1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x161AA599 10. /-S/util/thread/factory.h:15: Execute @ 0x161A8F89 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x161A8F89 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x161A43FC 13. ??:0: ?? @ 0x7FEB8E1A2AC2 14. ??:0: ?? 
@ 0x7FEB8E23484F Trying to start YDB, gRPC: 2123, MsgBus: 27165 2025-05-29T15:26:45.728860Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889498833130266:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:45.730318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00126c/r3tmp/tmpC3HW0B/pdisk_1.dat 2025-05-29T15:26:45.787575Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2123, node 1 2025-05-29T15:26:45.812040Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:45.812053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:45.812054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:45.812099Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:26:45.828547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:45.828576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:45.829739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27165 TClient is connected to server localhost:27165 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:26:45.892432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:45.896259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:26:45.908474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
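Both stack traces above terminate in the same helper, NKikimr::NKqp::AssertSuccessResult at ydb/core/kqp/ut/common/kqp_ut_common.h:375, with the printed condition (result.IsSuccess()). A minimal sketch of what that helper plausibly looks like, reconstructed only from the signature and frames in the trace; the UNIT_ASSERT_C form, the SDK include path, and the GetIssues() call are assumptions, not the verbatim source:

```cpp
// Hedged reconstruction, not the actual YDB source. Grounded in the trace:
// the signature, the checked condition, and the RaiseError()/Panic() frames
// from library/cpp/testing/unittest/registar.cpp. The include path and the
// GetIssues()/ToString() usage are assumptions made for illustration.
#include <library/cpp/testing/unittest/registar.h>
#include <ydb/public/sdk/cpp/client/ydb_types/status/status.h>  // NYdb::TStatus (assumed path)

namespace NKikimr::NKqp {

inline void AssertSuccessResult(const NYdb::TStatus& result) {
    // When this fails on a worker thread (CreateSampleTables runs through a
    // thread pool, per frames 5..12 above), RaiseError() trips the
    // "requirement UnittestThread failed" check and the whole process
    // panics instead of failing the test gracefully.
    UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
}

} // namespace NKikimr::NKqp
```

Note that the root cause here is upstream of the assertion: compilation of the setup query dies with "yql/essentials/ast/yql_expr.h:1874: index out of range", so CreateSampleTables receives INTERNAL_ERROR and the assert fires off the unittest thread, turning an ordinary test failure into a VERIFY crash.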
2025-05-29T15:26:45.977428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:45.998518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:46.013527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:26:46.189103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889503128099148:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:46.189132Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:46.234065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:26:46.242276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:26:46.252190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:26:46.266687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:26:46.336112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:26:46.351499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:26:46.366938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:26:46.425434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889503128099809:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:46.425457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889503128099814:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:46.425464Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:46.426303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:26:46.432984Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889503128099816:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:26:46.514223Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889503128099867:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:26:46.610668Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889503128099883:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:26:46.610814Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzBiOTNlNTEtNGY4MGUxNzktYzMwNzk5MDctMjY2N2Q3NDc=, ActorId: [1:7509889503128099130:2401], ActorState: ExecuteState, TraceId: 01jweaea4r37ms4fh2q6xh17kz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:26:46.614800Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x161A2C15 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16199C16 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x1633B0E6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D46F92 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x287C0C82 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x287E1B1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x287E1B1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x287E1B1C 8. /-S/util/thread/pool.h:71: Process @ 0x287E1B1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x161AA599 10. /-S/util/thread/factory.h:15: Execute @ 0x161A8F89 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x161A8F89 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x161A43FC 13. ??:0: ?? @ 0x7FB53C972AC2 14. ??:0: ?? @ 0x7FB53CA0484F |68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> THiveTest::TestLockTabletExecutionLocalGone [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] >> TTablesWithReboots::LostBorrowAckWithReboots >> Cdc::AwsRegion [FAIL] |68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] Test command err: 2025-05-29T15:26:01.174652Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:26:01.175544Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:01.175606Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:26:01.175777Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:150:2076] ControllerId# 72057594037932033 2025-05-29T15:26:01.175782Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:26:01.175803Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:26:01.175824Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:26:01.177357Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:26:01.177367Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:294: EnsureMonitoring 
Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:26:01.177729Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:149:2075] Create Queue# [2:156:2080] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.177765Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:149:2075] Create Queue# [2:157:2081] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.177794Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:149:2075] Create Queue# [2:158:2082] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.177825Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:149:2075] Create Queue# [2:159:2083] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.177873Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:149:2075] Create Queue# [2:160:2084] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.177904Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:149:2075] Create Queue# [2:161:2085] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.177935Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:149:2075] Create Queue# [2:162:2086] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.177940Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:26:01.177952Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037932033] ::Bootstrap [2:150:2076] 2025-05-29T15:26:01.177957Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037932033] lookup [2:150:2076] 2025-05-29T15:26:01.177965Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:26:01.177971Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:26:01.178098Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:26:01.178119Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:26:01.178654Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:01.178686Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:26:01.178908Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:170:2077] ControllerId# 72057594037932033 2025-05-29T15:26:01.178914Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:26:01.178927Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:26:01.178942Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:26:01.180352Z node 3 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:26:01.180393Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:26:01.180399Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:294: 
EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:26:01.180728Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:169:2076] Create Queue# [3:176:2081] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.180758Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:169:2076] Create Queue# [3:177:2082] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.180787Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:169:2076] Create Queue# [3:178:2083] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.180819Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:169:2076] Create Queue# [3:179:2084] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.180851Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:169:2076] Create Queue# [3:180:2085] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.180883Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:169:2076] Create Queue# [3:181:2086] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.180911Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:169:2076] Create Queue# [3:182:2087] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.180915Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:26:01.180925Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037932033] ::Bootstrap [3:170:2077] 2025-05-29T15:26:01.180931Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037932033] lookup [3:170:2077] 2025-05-29T15:26:01.180937Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:26:01.180943Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:26:01.181068Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:26:01.181087Z node 4 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:26:01.184176Z node 4 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:01.184207Z node 4 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:26:01.184377Z node 4 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [4:190:2077] ControllerId# 72057594037932033 2025-05-29T15:26:01.184381Z node 4 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:26:01.184394Z node 4 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:26:01.184414Z node 4 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:26:01.185839Z node 4 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:26:01.185884Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:26:01.185890Z node 4 :BS_PROXY NOTICE: 
dsproxy_state.cpp:294: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:26:01.186237Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [4:189:2076] Create Queue# [4:196:2081] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.186267Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [4:189:2076] Create Queue# [4:197:2082] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.186302Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [4:189:2076] Create Queue# [4:198:2083] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.186333Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [4:189:2076] Create Queue# [4:199:2084] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.186360Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [4:189:2076] Create Queue# [4:200:2085] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.186399Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [4:189:2076] Create Queue# [4:201:2086] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.186429Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [4:189:2076] Create Queue# [4:202:2087] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:01.186433Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:26:01.186443Z node 4 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037932033] ::Bootstrap [4:190:2077] 2025-05-29T15:26:01.186448Z node 4 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037932033] lookup [4:190:2077] 2025-05-29T15:26:01.186455Z node 4 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:26:01.186461Z node 4 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:26:01.188298Z node 4 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:26:01.188345Z node 5 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:26:01.189609Z node 5 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:01.189637Z node 5 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:26:01.189859Z node 5 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [5:210:2077] ControllerId# 72057594037932033 2025-05-29T15:26:01.189865Z node 5 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:26:01.189876Z node 5 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:26:01.189891Z node 5 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:26:01.191862Z node 5 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:26:01.191910Z node 5 :B ... 
R DEBUG: tablet_resolver.cpp:279: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1305:2098] 2025-05-29T15:26:49.943565Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:195: TClient[72075186224037893] forward result remote node 61 [56:2093:2739] 2025-05-29T15:26:49.943581Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:229: TClient[72075186224037893] remote node connected [56:2093:2739] 2025-05-29T15:26:49.943585Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72075186224037893]::SendEvent [56:2093:2739] 2025-05-29T15:26:49.943627Z node 61 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037893] Accept Connect Originator# [56:2093:2739] 2025-05-29T15:26:49.943723Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72075186224037893] connected with status OK role: Leader [56:2093:2739] 2025-05-29T15:26:49.943729Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72075186224037893] send queued [56:2093:2739] 2025-05-29T15:26:49.943840Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72075186224037894] ::Bootstrap [56:2096:2741] 2025-05-29T15:26:49.943845Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72075186224037894] lookup [56:2096:2741] 2025-05-29T15:26:49.943852Z node 56 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72075186224037894 entry.State: StNormal ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:49.943857Z node 56 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [60:1304:2099] 2025-05-29T15:26:49.943872Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:195: TClient[72075186224037894] forward result remote node 60 [56:2096:2741] 2025-05-29T15:26:49.943892Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:229: TClient[72075186224037894] remote node connected [56:2096:2741] 2025-05-29T15:26:49.943897Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72075186224037894]::SendEvent [56:2096:2741] 2025-05-29T15:26:49.943988Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:349: TClient[72075186224037894] connect request undelivered [56:2096:2741] 2025-05-29T15:26:49.943994Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:559: TClient[72075186224037894] immediate retry [56:2096:2741] 2025-05-29T15:26:49.943997Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72075186224037894] lookup [56:2096:2741] 2025-05-29T15:26:49.944003Z node 56 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:536: Handle TEvTabletProblem tabletId: 72075186224037894 entry.State: StNormal 2025-05-29T15:26:49.944024Z node 56 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72075186224037894 entry.State: StProblemResolve ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:49.944042Z node 56 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:26:49.944062Z node 56 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2025-05-29T15:26:49.944074Z node 56 :STATESTORAGE DEBUG: 
statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2025-05-29T15:26:49.944080Z node 56 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2025-05-29T15:26:49.944088Z node 56 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [61:1962:2265] CurrentLeaderTablet: [61:1967:2268] CurrentGeneration: 3 CurrentStep: 0} 2025-05-29T15:26:49.944108Z node 56 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [61:1962:2265] CurrentLeaderTablet: [61:1967:2268] CurrentGeneration: 3 CurrentStep: 0} 2025-05-29T15:26:49.944120Z node 56 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:610: Handle TEvInfo tabletId: 72075186224037894 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037894 Cookie: 0 CurrentLeader: [61:1962:2265] CurrentLeaderTablet: [61:1967:2268] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 2 Signature: {{[56:1099535971443:0] : 10}, {[56:24343667:0] : 7}}}} 2025-05-29T15:26:49.944125Z node 56 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:354: ApplyEntry leader tabletId: 72075186224037894 followers: 0 2025-05-29T15:26:49.944131Z node 56 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1962:2265] 2025-05-29T15:26:49.944149Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:195: TClient[72075186224037894] forward result remote node 61 [56:2096:2741] 2025-05-29T15:26:49.944173Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:229: TClient[72075186224037894] remote node connected [56:2096:2741] 2025-05-29T15:26:49.944177Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72075186224037894]::SendEvent [56:2096:2741] 2025-05-29T15:26:49.944221Z node 61 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037894] Accept Connect Originator# [56:2096:2741] 2025-05-29T15:26:49.944300Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72075186224037894] connected with status OK role: Leader [56:2096:2741] 2025-05-29T15:26:49.944305Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72075186224037894] send queued [56:2096:2741] 2025-05-29T15:26:49.944424Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72075186224037895] ::Bootstrap [56:2100:2743] 2025-05-29T15:26:49.944429Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72075186224037895] lookup [56:2100:2743] 2025-05-29T15:26:49.944436Z node 56 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72075186224037895 entry.State: StNormal ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:49.944441Z node 56 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1805:2193] 2025-05-29T15:26:49.944460Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:195: TClient[72075186224037895] forward result remote node 61 [56:2100:2743] 2025-05-29T15:26:49.944478Z node 56 :PIPE_CLIENT 
DEBUG: tablet_pipe_client.cpp:229: TClient[72075186224037895] remote node connected [56:2100:2743] 2025-05-29T15:26:49.944483Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72075186224037895]::SendEvent [56:2100:2743] 2025-05-29T15:26:49.944522Z node 61 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037895] Accept Connect Originator# [56:2100:2743] 2025-05-29T15:26:49.944590Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72075186224037895] connected with status OK role: Leader [56:2100:2743] 2025-05-29T15:26:49.944596Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72075186224037895] send queued [56:2100:2743] 2025-05-29T15:26:49.944704Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72075186224037896] ::Bootstrap [56:2103:2745] 2025-05-29T15:26:49.944709Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72075186224037896] lookup [56:2103:2745] 2025-05-29T15:26:49.944716Z node 56 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72075186224037896 entry.State: StNormal ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:49.944722Z node 56 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1809:2195] 2025-05-29T15:26:49.944736Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:195: TClient[72075186224037896] forward result remote node 61 [56:2103:2745] 2025-05-29T15:26:49.944755Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:229: TClient[72075186224037896] remote node connected [56:2103:2745] 2025-05-29T15:26:49.944762Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72075186224037896]::SendEvent [56:2103:2745] 2025-05-29T15:26:49.944792Z node 61 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037896] Accept Connect Originator# [56:2103:2745] 2025-05-29T15:26:49.944880Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72075186224037896] connected with status OK role: Leader [56:2103:2745] 2025-05-29T15:26:49.944886Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72075186224037896] send queued [56:2103:2745] 2025-05-29T15:26:49.945000Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037927937] ::Bootstrap [56:2105:2746] 2025-05-29T15:26:49.945005Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037927937] lookup [56:2105:2746] 2025-05-29T15:26:49.945011Z node 56 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:49.945017Z node 56 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 56 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [56:599:2274] 2025-05-29T15:26:49.945030Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037927937] queue send [56:2105:2746] 2025-05-29T15:26:49.945047Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:411: TClient[72057594037927937] received pending shutdown [56:2105:2746] 2025-05-29T15:26:49.945056Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:190: TClient[72057594037927937] forward result local node, try to connect [56:2105:2746] 
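The PIPE_CLIENT/TABLET_RESOLVER/STATESTORAGE records above trace one full re-resolve cycle for tablet 72075186224037894: a cached StNormal entry points at a dead leader, the connect request comes back undelivered, TEvTabletProblem flips the entry to StProblemResolve, EvReplicaLookup fans out to three statestorage replicas (cookies 0..2), two matching EvReplicaInfo replies name the generation-3 leader [61:1962:2265], and ApplyEntry/SelectForward retry against it. A compact sketch of that state machine under simplified types; the plain integer IDs and the 2-of-3 quorum rule are assumptions for illustration, the real resolver is an actor:

```cpp
// Hedged model of the resolver cycle above; state names mirror the log,
// everything else is a simplifying assumption.
#include <cstdint>
#include <optional>
#include <vector>

enum class EState { StInit, StNormal, StProblemResolve };

struct TLeaderInfo {
    uint64_t LeaderId;    // stands in for an actor id like [61:1962:2265]
    uint32_t Generation;  // CurrentGeneration carried by EvReplicaInfo
};

struct TResolverEntry {
    EState State = EState::StInit;
    std::optional<TLeaderInfo> Leader;

    // Pipe to the cached leader failed ("connect request undelivered"):
    // drop the cached location and start a statestorage lookup.
    void OnTabletProblem() {
        State = EState::StProblemResolve;
        Leader.reset();
    }

    // EvReplicaInfo replies from the statestorage replicas. In the log,
    // two matching replies out of three EvReplicaLookup requests sufficed.
    bool OnReplicaInfo(const std::vector<TLeaderInfo>& replies) {
        if (replies.size() < 2 || replies[0].LeaderId != replies[1].LeaderId)
            return false;           // keep waiting / resolve failed
        Leader = replies[0];        // "ApplyEntry leader tabletId: ..."
        State = EState::StNormal;   // next TEvForward does SelectForward
        return true;
    }
};

int main() {
    TResolverEntry e;
    e.State = EState::StNormal;                  // stale cache: old leader
    e.OnTabletProblem();                         // connect undelivered
    e.OnReplicaInfo({{1962, 3}, {1962, 3}});     // two matching replies
    return e.State == EState::StNormal ? 0 : 1;  // resolved; client retries
}
```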
2025-05-29T15:26:49.945061Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72057594037927937]::SendEvent [56:2105:2746] 2025-05-29T15:26:49.945078Z node 56 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [56:2105:2746] 2025-05-29T15:26:49.945117Z node 56 :HIVE TRACE: hive_impl.cpp:114: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([56:2105:2746]) [56:2106:2747] 2025-05-29T15:26:49.945127Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72057594037927937] connected with status OK role: Leader [56:2105:2746] 2025-05-29T15:26:49.945131Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72057594037927937] send queued [56:2105:2746] 2025-05-29T15:26:49.945135Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:629: TClient[72057594037927937] push event to server [56:2105:2746] 2025-05-29T15:26:49.945141Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:332: TClient[72057594037927937] shutdown pipe due to pending shutdown request [56:2105:2746] 2025-05-29T15:26:49.945145Z node 56 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:505: TClient[72057594037927937] notify reset [56:2105:2746] 2025-05-29T15:26:49.945153Z node 56 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [56:571:2269] EventType# 268697616 |68.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |68.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |68.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |68.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |68.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |68.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream >> TTablesWithReboots::CreateDroppedTableWithReboots |68.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |68.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |68.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] Test command err: 2025-05-29T15:26:06.869349Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:26:06.870191Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:06.870255Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:26:06.870444Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:48:2073] ControllerId# 72057594037932033 
2025-05-29T15:26:06.870449Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:26:06.870476Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:26:06.870496Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:26:06.883268Z node 2 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:26:06.883361Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:26:06.883372Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:294: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:26:06.883740Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:47:2072] Create Queue# [2:56:2077] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.883772Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:47:2072] Create Queue# [2:57:2078] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.883799Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:47:2072] Create Queue# [2:58:2079] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.883823Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:47:2072] Create Queue# [2:59:2080] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.883855Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:47:2072] Create Queue# [2:60:2081] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.883880Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:47:2072] Create Queue# [2:61:2082] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.883909Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:47:2072] Create Queue# [2:62:2083] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.883914Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:26:06.883932Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037932033] ::Bootstrap [2:48:2073] 2025-05-29T15:26:06.883938Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037932033] lookup [2:48:2073] 2025-05-29T15:26:06.883954Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:26:06.883965Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:26:06.884063Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:26:06.884627Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:06.884668Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-05-29T15:26:06.884815Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false 
PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:26:06.885040Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-05-29T15:26:06.885050Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:26:06.885219Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:71:2076] ControllerId# 72057594037932033 2025-05-29T15:26:06.885223Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:26:06.885239Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:26:06.885255Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:26:06.885312Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [2:48:2073] 2025-05-29T15:26:06.885323Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:06.885327Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:26:06.885346Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:26:06.895084Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037936129] ::Bootstrap [1:41:2064] 2025-05-29T15:26:06.895107Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037936129] lookup [1:41:2064] 2025-05-29T15:26:06.895122Z node 1 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:26:06.897176Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:26:06.897195Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:294: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:26:06.897516Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:70:2075] Create Queue# [1:78:2081] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.897547Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:70:2075] Create Queue# [1:79:2082] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.897571Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:70:2075] Create Queue# [1:80:2083] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.897595Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:70:2075] Create Queue# [1:81:2084] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.897622Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:70:2075] Create Queue# [1:82:2085] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.897647Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:70:2075] Create Queue# [1:83:2086] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.897691Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:70:2075] Create Queue# [1:84:2087] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:06.897698Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:26:06.897714Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037932033] ::Bootstrap [1:71:2076] 2025-05-29T15:26:06.897719Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037932033] lookup [1:71:2076] 
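The NW markers in the bootstrap above encode a fixed per-node startup order for the warden: ApplyServiceSet (NW18), then the local PDisk (NW04) and VDisk (NW23/NW24) actors, then the group-0 DS proxy (NW12), the pipe to BS controller 72057594037932033 (NW21), node registration (NW20), and finally the proxy's per-target queues (DSP01). A trivial sketch of that ordering with placeholder bodies; nothing here is the real node-warden code, only the sequence as logged for node 1:

```cpp
// Order only; every body is a stub standing in for the NW/DSP-marked steps.
#include <cstdio>

static void ApplyServiceSet()  { std::puts("NW18 ApplyServiceSet (static config)"); }
static void StartLocalPDisk()  { std::puts("NW04 StartLocalPDisk 1:1 /tmp/pdisk.dat"); }
static void StartLocalVDisk()  { std::puts("NW23/NW24 StartLocalVDiskActor [0:1:0:0:0]"); }
static void StartLocalProxy()  { std::puts("NW12 StartLocalProxy GroupId# 0"); }
static void EstablishPipe()    { std::puts("NW21 EstablishPipe -> 72057594037932033"); }
static void SendRegisterNode() { std::puts("NW20 SendRegisterNode"); }
static void CreateQueues()     { std::puts("DSP01 Create Queue# x7, then DSP03"); }

int main() {
    // Sequence mirrors the node 1 timestamps in the trace above: storage
    // actors first, then the proxy, then controller pipe and registration,
    // with the proxy's queues configured once sessions are established.
    ApplyServiceSet();
    StartLocalPDisk();
    StartLocalVDisk();
    StartLocalProxy();
    EstablishPipe();
    SendRegisterNode();
    CreateQueues();
}
```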
2025-05-29T15:26:06.897727Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:26:06.897736Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:26:06.897921Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:26:06.897951Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:06.898013Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:06.947687Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:71:2076] 2025-05-29T15:26:06.947714Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:06.947721Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:26:06.948021Z node 1 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:26:06.948100Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:06.948109Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-05-29T15:26:06.949021Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-05-29T15:26:06.949073Z node 2 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:26:06.949083Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-05-29T15:26:06.949124Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:71:2076] 2025-05-29T15:26:06.949133Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:06.949138Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-05-29T15:26:06.949152Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-05-29T15:26:06.949818Z node 1 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-05-29T15:26:06.949848Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:26:06.949873Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-05-29T15:26:06.949883Z node 1 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:26:06.949888Z node 1 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:26:06.949927Z node 1 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:97:2093] 2025-05-29T15:26:06.949954Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 
2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:06.950016Z node 2 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-05-29T15:26:06.950023Z node 2 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:26:06.950027Z node 2 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05 ... 2176] CurrentLeaderTablet: [25:684:2179] CurrentGeneration: 1 CurrentStep: 0} 2025-05-29T15:26:51.922387Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [25:678:2176] CurrentLeaderTablet: [25:684:2179] CurrentGeneration: 1 CurrentStep: 0} 2025-05-29T15:26:51.922397Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:610: Handle TEvInfo tabletId: 72075186224037894 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037894 Cookie: 0 CurrentLeader: [25:678:2176] CurrentLeaderTablet: [25:684:2179] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 2 Signature: {{[24:24343667:0] : 3}, {[24:1099535971443:0] : 6}}}} 2025-05-29T15:26:51.922401Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:354: ApplyEntry leader tabletId: 72075186224037894 followers: 0 2025-05-29T15:26:51.922406Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 25 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [25:678:2176] 2025-05-29T15:26:51.922418Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:190: TClient[72075186224037894] forward result local node, try to connect [25:1082:2359] 2025-05-29T15:26:51.922422Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72075186224037894]::SendEvent [25:1082:2359] 2025-05-29T15:26:51.922437Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037894] Accept Connect Originator# [25:1082:2359] 2025-05-29T15:26:51.922456Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72075186224037894] connected with status OK role: Leader [25:1082:2359] 2025-05-29T15:26:51.922460Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72075186224037894] send queued [25:1082:2359] 2025-05-29T15:26:51.922490Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72075186224037895] ::Bootstrap [25:1086:2362] 2025-05-29T15:26:51.922494Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72075186224037895] lookup [25:1086:2362] 2025-05-29T15:26:51.922500Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72075186224037895 entry.State: StInit ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:51.922524Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037895 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:26:51.922569Z node 24 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037895 Cookie: 0} 2025-05-29T15:26:51.922576Z node 24 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037895 Cookie: 1} 2025-05-29T15:26:51.922582Z node 24 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup 
TabletID: 72075186224037895 Cookie: 2} 2025-05-29T15:26:51.922616Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037895 CurrentLeader: [25:932:2270] CurrentLeaderTablet: [25:934:2271] CurrentGeneration: 2 CurrentStep: 0} 2025-05-29T15:26:51.922627Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037895 CurrentLeader: [25:932:2270] CurrentLeaderTablet: [25:934:2271] CurrentGeneration: 2 CurrentStep: 0} 2025-05-29T15:26:51.922637Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:610: Handle TEvInfo tabletId: 72075186224037895 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037895 Cookie: 0 CurrentLeader: [25:932:2270] CurrentLeaderTablet: [25:934:2271] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 2 Signature: {{[24:24343667:0] : 3}, {[24:1099535971443:0] : 6}}}} 2025-05-29T15:26:51.922641Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:354: ApplyEntry leader tabletId: 72075186224037895 followers: 0 2025-05-29T15:26:51.922646Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 25 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [25:932:2270] 2025-05-29T15:26:51.922657Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:190: TClient[72075186224037895] forward result local node, try to connect [25:1086:2362] 2025-05-29T15:26:51.922661Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72075186224037895]::SendEvent [25:1086:2362] 2025-05-29T15:26:51.922678Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037895] Accept Connect Originator# [25:1086:2362] 2025-05-29T15:26:51.922699Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72075186224037895] connected with status OK role: Leader [25:1086:2362] 2025-05-29T15:26:51.922704Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72075186224037895] send queued [25:1086:2362] 2025-05-29T15:26:51.922761Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72075186224037896] ::Bootstrap [25:1090:2365] 2025-05-29T15:26:51.922766Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72075186224037896] lookup [25:1090:2365] 2025-05-29T15:26:51.922774Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72075186224037896 entry.State: StInit ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:51.922800Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037896 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:26:51.922849Z node 24 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037896 Cookie: 0} 2025-05-29T15:26:51.922865Z node 24 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037896 Cookie: 1} 2025-05-29T15:26:51.922871Z node 24 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037896 Cookie: 2} 2025-05-29T15:26:51.922908Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 
72075186224037896 CurrentLeader: [25:760:2198] CurrentLeaderTablet: [25:766:2201] CurrentGeneration: 1 CurrentStep: 0} 2025-05-29T15:26:51.922919Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037896 CurrentLeader: [25:760:2198] CurrentLeaderTablet: [25:766:2201] CurrentGeneration: 1 CurrentStep: 0} 2025-05-29T15:26:51.922931Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:610: Handle TEvInfo tabletId: 72075186224037896 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037896 Cookie: 0 CurrentLeader: [25:760:2198] CurrentLeaderTablet: [25:766:2201] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 2 Signature: {{[24:24343667:0] : 3}, {[24:1099535971443:0] : 6}}}} 2025-05-29T15:26:51.922936Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:354: ApplyEntry leader tabletId: 72075186224037896 followers: 0 2025-05-29T15:26:51.922941Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 25 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [25:760:2198] 2025-05-29T15:26:51.922951Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:190: TClient[72075186224037896] forward result local node, try to connect [25:1090:2365] 2025-05-29T15:26:51.922955Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72075186224037896]::SendEvent [25:1090:2365] 2025-05-29T15:26:51.922974Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037896] Accept Connect Originator# [25:1090:2365] 2025-05-29T15:26:51.922996Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72075186224037896] connected with status OK role: Leader [25:1090:2365] 2025-05-29T15:26:51.923000Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72075186224037896] send queued [25:1090:2365] 2025-05-29T15:26:51.923039Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72075186224037897] ::Bootstrap [25:1094:2368] 2025-05-29T15:26:51.923044Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72075186224037897] lookup [25:1094:2368] 2025-05-29T15:26:51.923051Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72075186224037897 entry.State: StInit ev: {EvForward TabletID: 72075186224037897 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:51.923073Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037897 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:26:51.923117Z node 24 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037897 Cookie: 0} 2025-05-29T15:26:51.923124Z node 24 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037897 Cookie: 1} 2025-05-29T15:26:51.923129Z node 24 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037897 Cookie: 2} 2025-05-29T15:26:51.923163Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037897 CurrentLeader: [25:843:2222] CurrentLeaderTablet: [25:845:2223] CurrentGeneration: 2 CurrentStep: 0} 2025-05-29T15:26:51.923176Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: 
ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037897 CurrentLeader: [25:843:2222] CurrentLeaderTablet: [25:845:2223] CurrentGeneration: 2 CurrentStep: 0} 2025-05-29T15:26:51.923187Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:610: Handle TEvInfo tabletId: 72075186224037897 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037897 Cookie: 0 CurrentLeader: [25:843:2222] CurrentLeaderTablet: [25:845:2223] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 2 Signature: {{[24:24343667:0] : 3}, {[24:1099535971443:0] : 6}}}} 2025-05-29T15:26:51.923192Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:354: ApplyEntry leader tabletId: 72075186224037897 followers: 0 2025-05-29T15:26:51.923197Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 25 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037897 followers: 0 countLeader 1 allowFollowers 0 winner: [25:843:2222] 2025-05-29T15:26:51.923209Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:190: TClient[72075186224037897] forward result local node, try to connect [25:1094:2368] 2025-05-29T15:26:51.923215Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72075186224037897]::SendEvent [25:1094:2368] 2025-05-29T15:26:51.923230Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037897] Accept Connect Originator# [25:1094:2368] 2025-05-29T15:26:51.923248Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72075186224037897] connected with status OK role: Leader [25:1094:2368] 2025-05-29T15:26:51.923252Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72075186224037897] send queued [25:1094:2368]
>> SubDomainWithReboots::Delete
>> Cdc::ResolvedTimestampForDisplacedUpsert [FAIL]
|68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest
>> SubDomainWithReboots::RootWithStoragePools
|68.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut
|68.3%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut
|68.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut
|68.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest
|68.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings
|68.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings
|68.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings
>> SubDomainWithReboots::CreateWithStoragePools
|68.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/ut/ydb-core-security-ut
|68.3%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut
|68.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut
>> TTxDataShardMiniKQL::Write
|68.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut
|68.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut
|68.3%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut
|68.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb
|68.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb
|68.3%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb
|68.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut
|68.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut
|68.4%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut
>> TTxDataShardMiniKQL::Write [GOOD]
>> TTxDataShardMiniKQL::TableStats
|68.4%| [TA] $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD]
Test command err:
2025-05-29T15:26:20.647178Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889389389178671:2081];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:20.647452Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001298/r3tmp/tmpXVAe4U/pdisk_1.dat 2025-05-29T15:26:20.953450Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6290, node 1 2025-05-29T15:26:21.027087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:21.027101Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:21.027103Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:21.027153Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:26:21.067096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:21.067126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:21.068994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:26:21.075815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:21.084693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-29T15:26:21.107157Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7509889393684146527:2308] 2025-05-29T15:26:21.107237Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:26:21.112290Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:26:21.112329Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48:
TDataShard::TTxInit::Execute 2025-05-29T15:26:21.112521Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:26:21.112526Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:26:21.112531Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:26:21.112608Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:26:21.112617Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:26:21.112624Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:7509889393684146541:2308] in generation 1 2025-05-29T15:26:21.114407Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:26:21.127095Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:26:21.127184Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:26:21.127201Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:7509889393684146543:2309] 2025-05-29T15:26:21.127205Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:26:21.127211Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:26:21.127215Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:21.127285Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:26:21.127312Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:26:21.127317Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:26:21.127322Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:26:21.127332Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:26:21.127335Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:21.137480Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:7509889393684146524:2296], serverId# [1:7509889393684146546:2306], sessionId# [0:0:0] 2025-05-29T15:26:21.137536Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:26:21.137629Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:26:21.137663Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-05-29T15:26:21.138162Z node 1 
:TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:21.143116Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:26:21.143153Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:26:21.144610Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:7509889393684146559:2313], serverId# [1:7509889393684146560:2314], sessionId# [0:0:0] 2025-05-29T15:26:21.145431Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1748532381190 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1748532381190 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:26:21.145441Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:21.145478Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:21.145490Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:26:21.145497Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:26:21.145507Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1748532381190:281474976710657] in PlanQueue unit at 72075186224037888 2025-05-29T15:26:21.145580Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1748532381190:281474976710657 keys extracted: 0 2025-05-29T15:26:21.145614Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:26:21.145642Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:26:21.145653Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:26:21.146077Z node 1 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:26:21.146207Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:26:21.147034Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1748532381189 2025-05-29T15:26:21.147045Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:21.147058Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1748532381190} 2025-05-29T15:26:21.147069Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:21.147079Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:21.147084Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:26:21.147087Z node 1 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:26:21.147103Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1748532381190 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7509889389389179033:2185], exec latency: 0 ms, propose latency: 1 ms 2025-05-29T15:26:21.147113Z node 1 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-05-29T15:26:21.147120Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:21.147130Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1748532381190 2025-05-29T15:26:21.147364Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:153: [ChangeSender][72075186224037888:1][1:7509889393684146543:2309][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-05-29T15:26:21.150143Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-05-29T15:26:21.150157Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:26:21.154043Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:21.154083Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:26:21.154124Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose sc ... 89, Partition: 0, State: StateIdle] m0000000000p72075186224037893 2025-05-29T15:26:51.723005Z node 24 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:26:51.723009Z node 24 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:26:51.723014Z node 24 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:26:51.723042Z node 24 :PERSQUEUE DEBUG: read.h:262: CacheProxy. Passthrough write request to KV 2025-05-29T15:26:51.723313Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 ... 
wait for final heartbeat >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-05-29T15:26:51.723647Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'streamImpl' requestId: 2025-05-29T15:26:51.723654Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-05-29T15:26:51.723751Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:736: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 2 max time lag 0ms effective offset 0 2025-05-29T15:26:51.723759Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:936: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 2 2025-05-29T15:26:51.723769Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:953: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-05-29T15:26:51.723775Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:420: FormAnswer for 0 blobs 2025-05-29T15:26:51.723866Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-05-29T15:26:51.735365Z node 24 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:26:51.735438Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-05-29T15:26:51.735486Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'streamImpl' requestId: 2025-05-29T15:26:51.735494Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-05-29T15:26:51.735518Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-05-29T15:26:51.735569Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:61: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1297:3021] Handle NKikimr::NPQ::TEvPartitionWriter::TEvInitResult { SessionId: TxId: Success { OwnerCookie: 72075186224037893|cfa458bb-9e788fae-89a11a6e-904c321d_0 SourceIdInfo: SourceId: "\00072075186224037893" SeqNo: 0 Offset: 2 WriteTimestampMS: 0 Explicit: true State: STATE_REGISTERED } } 2025-05-29T15:26:51.735587Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:643: [CdcChangeSenderMain][72075186224037893:1][24:1294:3021] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-05-29T15:26:51.735612Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:111: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1297:3021] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:26:51.735659Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'streamImpl' requestId: 2025-05-29T15:26:51.735663Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72075186224037889] got client 
message batch for topic 'Table/Stream/streamImpl' partition 0 2025-05-29T15:26:51.735685Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 1 2025-05-29T15:26:51.735704Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'streamImpl' requestId: 2025-05-29T15:26:51.735708Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-05-29T15:26:51.735720Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037893' SeqNo: 1 partNo : 0 messageNo: 1 size 26 offset: -1 2025-05-29T15:26:51.735753Z node 24 :PERSQUEUE DEBUG: partition_write.cpp:1162: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037893' version v6000/0 2025-05-29T15:26:51.735769Z node 24 :PERSQUEUE INFO: partition_write.cpp:1658: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v6000/0 2025-05-29T15:26:51.735786Z node 24 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-05-29T15:26:51.735836Z node 24 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 107 count 1 nextOffset 3 batches 1 2025-05-29T15:26:51.735881Z node 24 :PERSQUEUE DEBUG: partition_write.cpp:1623: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 93 WTime 6505 2025-05-29T15:26:51.735901Z node 24 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:26:51.735905Z node 24 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:26:51.735910Z node 24 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72075186224037889, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-05-29T15:26:51.735914Z node 24 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:26:51.735918Z node 24 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037889 2025-05-29T15:26:51.735922Z node 24 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72075186224037889, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-05-29T15:26:51.735926Z node 24 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:26:51.735929Z node 24 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:26:51.735934Z node 24 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 
2025-05-29T15:26:51.735947Z node 24 :PERSQUEUE DEBUG: read.h:262: CacheProxy. Passthrough write request to KV 2025-05-29T15:26:51.735957Z node 24 :PERSQUEUE DEBUG: read.h:300: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 93 2025-05-29T15:26:51.736259Z node 24 :PERSQUEUE DEBUG: cache_eviction.h:315: Caching head blob in L1. Partition 0 offset 2 count 1 size 93 actorID [24:1266:3001] 2025-05-29T15:26:51.736293Z node 24 :PERSQUEUE DEBUG: pq_l2_cache.cpp:120: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 2 partno 0 count 1 parts 0 size 93 2025-05-29T15:26:51.746505Z node 24 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-05-29T15:26:51.746548Z node 24 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-05-29T15:26:51.746568Z node 24 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037893', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 1, partNo: 0, Offset: 2 is stored on disk 2025-05-29T15:26:51.746628Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-05-29T15:26:51.746727Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:160: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1297:3021] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037893" SeqNo: 1 Offset: 2 WriteTimestampMS: 6505 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-05-29T15:26:51.746764Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:643: [CdcChangeSenderMain][72075186224037893:1][24:1294:3021] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-05-29T15:26:51.746815Z node 24 :TX_DATASHARD INFO: datashard_change_sending.cpp:310: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037893 2025-05-29T15:26:51.746826Z node 24 :TX_DATASHARD DEBUG: datashard.cpp:1087: RemoveChangeRecord: order: 1, at tablet: 72075186224037893 2025-05-29T15:26:51.757277Z node 24 :TX_DATASHARD INFO: datashard_change_sending.cpp:335: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037893 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-05-29T15:26:52.202384Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'streamImpl' requestId: 2025-05-29T15:26:52.202410Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-05-29T15:26:52.202479Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:736: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 3 max time lag 0ms effective offset 0 2025-05-29T15:26:52.202488Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:936: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 3 
2025-05-29T15:26:52.202509Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:953: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 2025-05-29T15:26:52.202518Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:420: FormAnswer for 0 blobs 2025-05-29T15:26:52.202569Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::AwsRegion [FAIL]
Test command err:
2025-05-29T15:26:21.768040Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:26:21.768089Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:26:21.768110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001254/r3tmp/tmpAzf4RI/pdisk_1.dat 2025-05-29T15:26:21.914640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:21.932226Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:21.937306Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532381381446 != 1748532381381450 2025-05-29T15:26:21.991422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:21.991465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:22.003231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:26:22.100264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:26:22.180112Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:26:22.181548Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:26:22.192976Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:26:22.193036Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:26:22.193244Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:26:22.193256Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:26:22.193265Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:26:22.193339Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:26:22.193361Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:26:22.193377Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:26:22.206226Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:26:22.211900Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:26:22.212020Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast 
registration request in state WaitScheme: missing processing params 2025-05-29T15:26:22.212056Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:26:22.212062Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:26:22.212067Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:26:22.212074Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:22.212256Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:26:22.212286Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:26:22.212304Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:26:22.212314Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:26:22.212324Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:26:22.212329Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:22.212452Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:26:22.212482Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:26:22.212562Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:26:22.212583Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:26:22.212971Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:22.223321Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:26:22.223392Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:26:22.371342Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:697:2587], serverId# [1:699:2589], sessionId# [0:0:0] 2025-05-29T15:26:22.372493Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:26:22.372533Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:22.381246Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 
72075186224037888 2025-05-29T15:26:22.381290Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:26:22.381306Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:26:22.381406Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:26:22.381467Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:26:22.381651Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:26:22.381668Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:26:22.382280Z node 1 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:26:22.382395Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:26:22.382888Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:26:22.382904Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:22.383167Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:26:22.383185Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:22.383461Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:22.383470Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:26:22.383478Z node 1 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:26:22.383498Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:411:2405], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:26:22.383510Z node 1 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:26:22.383522Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:22.383862Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:153: [ChangeSender][72075186224037888:1][1:683:2579][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-05-29T15:26:22.384906Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:22.385209Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 
datashard 72075186224037888 state Ready 2025-05-29T15:26:22.385221Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:26:22.385409Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:26:22.984084Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:310:2353], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:26:22.984152Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:26:22.984181Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; tes ... artitionKeySchema { Name: "__Hash" TypeId: 4 } PartitionKeySchema { Name: "id_shard" TypeId: 4608 } PartitionKeySchema { Name: "id_sort" TypeId: 4608 } Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { ExplicitMessageGroups { Id: "\00072075186224037888" } } SourceActor { RawX1: 411 RawX2: 90194315621 } Partitions { Partition { PartitionId: 0 } } 2025-05-29T15:26:51.069449Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:3635: [PQ: 72075186224037891] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-05-29T15:26:51.070537Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:1231: [PQ: 72075186224037891] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-05-29T15:26:51.070556Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72075186224037891] Try execute txs with state EXECUTED 2025-05-29T15:26:51.070561Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:4338: [PQ: 72075186224037891] TxId 281474976715659, State EXECUTED 2025-05-29T15:26:51.070566Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:4285: [PQ: 72075186224037891] TxId 281474976715659 State EXECUTED FrontTxId 281474976715659 2025-05-29T15:26:51.070571Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:3989: [PQ: 72075186224037891] TPersQueue::SendEvReadSetAckToSenders 2025-05-29T15:26:51.070576Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72075186224037891] TxId 281474976715659, NewState WAIT_RS_ACKS 2025-05-29T15:26:51.070580Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:4263: [PQ: 72075186224037891] TxId 281474976715659 moved from EXECUTED to WAIT_RS_ACKS 2025-05-29T15:26:51.070586Z node 21 :PERSQUEUE DEBUG: transaction.cpp:366: [TxId: 281474976715659] PredicateAcks: 0/0 2025-05-29T15:26:51.070590Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:4539: [PQ: 72075186224037891] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-05-29T15:26:51.070594Z node 21 :PERSQUEUE DEBUG: transaction.cpp:366: [TxId: 281474976715659] PredicateAcks: 0/0 2025-05-29T15:26:51.070599Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:4600: [PQ: 72075186224037891] add an TxId 281474976715659 to the list for deletion 2025-05-29T15:26:51.070605Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:4228: [PQ: 72075186224037891] TxId 281474976715659, NewState DELETING 2025-05-29T15:26:51.070611Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:3834: [PQ: 72075186224037891] delete key for TxId 281474976715659 2025-05-29T15:26:51.070621Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:3635: [PQ: 72075186224037891] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-05-29T15:26:51.070974Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:1231: [PQ: 72075186224037891] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-05-29T15:26:51.070987Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:4293: [PQ: 72075186224037891] Try execute txs with state DELETING 2025-05-29T15:26:51.070992Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:4338: 
[PQ: 72075186224037891] TxId 281474976715659, State DELETING 2025-05-29T15:26:51.070997Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:4550: [PQ: 72075186224037891] delete TxId 281474976715659 2025-05-29T15:26:51.081529Z node 21 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-05-29T15:26:51.081567Z node 21 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:51.081575Z node 21 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:51.081591Z node 21 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:51.081615Z node 21 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [2000 : 281474976715659] from 72075186224037888 at tablet 72075186224037888 send result to client [21:411:2405], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:26:51.081630Z node 21 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715659 state Ready TxInFly 0 2025-05-29T15:26:51.081649Z node 21 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:51.081688Z node 21 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:104: [ChangeSender][72075186224037888:1][21:683:2579] Handle NKikimr::NDataShard::TEvChangeExchange::TEvAddSender { UserTableId: [72057594046644480:2:0] Type: CdcStream PathId: [OwnerId: 72057594046644480, LocalPathId: 5] } 2025-05-29T15:26:51.081697Z node 21 :CHANGE_EXCHANGE NOTICE: change_sender.cpp:122: [ChangeSender][72075186224037888:1][21:683:2579] Add sender: userTableId# [72057594046644480:2:0], type# CdcStream, pathId# [OwnerId: 72057594046644480, LocalPathId: 5] 2025-05-29T15:26:51.082632Z node 21 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715659 datashard 72075186224037888 state Ready 2025-05-29T15:26:51.082651Z node 21 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:26:51.082957Z node 21 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:468: [CdcChangeSenderMain][72075186224037888:1][21:1021:2803] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table/Stream2 TableId: [72057594046644480:5:0] RequestType: ByTableId Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindCdcStream DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [streamImpl] }] } 2025-05-29T15:26:51.083388Z node 21 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:538: [CdcChangeSenderMain][72075186224037888:1][21:1021:2803] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table/Stream2/streamImpl TableId: [72057594046644480:6:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok 
Kind: KindTopic DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:26:51.084459Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:1059:2817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:51.084481Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:1070:2822], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:51.084492Z node 21 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:51.085536Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:26:51.087807Z node 21 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:51.087849Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:3091: [PQ: 72075186224037889] Registered with mediator time cast 2025-05-29T15:26:51.087865Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:3091: [PQ: 72075186224037891] Registered with mediator time cast 2025-05-29T15:26:51.228052Z node 21 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:51.228195Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:3091: [PQ: 72075186224037891] Registered with mediator time cast 2025-05-29T15:26:51.228277Z node 21 :PERSQUEUE DEBUG: pq_impl.cpp:3091: [PQ: 72075186224037889] Registered with mediator time cast 2025-05-29T15:26:51.228575Z node 21 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [21:1073:2825], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:26:51.250119Z node 21 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [21:1129:2862] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:26:51.263108Z node 21 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [21:1139:2871], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:26:51.263632Z node 21 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=21&id=MTc0MTZmYTAtZDFlOWExNmUtNGZhYWFkZi1iYWJhYTZkZA==, ActorId: [21:1057:2815], ActorState: ExecuteState, TraceId: 01jweaeepc4peefeac3ecfqvqp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x160E81AC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x1629BEB9) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x289DCC14) NKikimr::NTestSuiteCdc::TTestCaseAwsRegion::Execute_(NUnitTest::TTestContext&)+3099 (0x15F5A11B) NKikimr::NTestSuiteCdc::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x15F86457) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1629DD6E) NKikimr::NTestSuiteCdc::TCurrentTest::Execute()+411 (0x15F85E1B) NUnitTest::TTestFactory::Execute()+803 (0x1629E4E3) NUnitTest::RunMain(int, char**)+3021 (0x162B008D) ??+0 (0x7F7A03AE9D90) __libc_start_main+128 (0x7F7A03AE9E40) _start+41 (0x14F5B029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::InitialScanAndResolvedTimestamps [FAIL] Test command err: 2025-05-29T15:26:20.399208Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889391095276973:2081];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:20.399452Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001290/r3tmp/tmp26Bifj/pdisk_1.dat 2025-05-29T15:26:20.492464Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:20.501785Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 8389, node 1 2025-05-29T15:26:20.506854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:20.508066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:26:20.522491Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:20.522511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:20.522514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:26:20.522556Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2025-05-29T15:26:20.543596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:20.547869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046644480 2025-05-29T15:26:20.549142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:26:20.564447Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7509889391095277540:2308] 2025-05-29T15:26:20.564523Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:26:20.567848Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:26:20.567870Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:26:20.568119Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:26:20.568124Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:26:20.568130Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:26:20.568196Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:26:20.568204Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:26:20.568211Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:7509889391095277554:2308] in generation 1 2025-05-29T15:26:20.568777Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:26:20.573797Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:26:20.586772Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:26:20.586816Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:7509889391095277557:2309] 2025-05-29T15:26:20.586838Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:26:20.586842Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:26:20.586846Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:20.586920Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:26:20.586943Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:26:20.586947Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:26:20.586953Z node 1 :TX_DATASHARD DEBUG: 
datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:26:20.586962Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:26:20.586966Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:20.618821Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:7509889391095277536:2295], serverId# [1:7509889391095277559:2306], sessionId# [0:0:0] 2025-05-29T15:26:20.618886Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:26:20.618961Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:26:20.618998Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:26:20.619481Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:20.623935Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:26:20.623970Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:26:20.625288Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:7509889391095277572:2313], serverId# [1:7509889391095277573:2314], sessionId# [0:0:0] 2025-05-29T15:26:20.626172Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1748532380672 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1748532380672 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:26:20.626178Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:20.626202Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:20.626212Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:26:20.626219Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:26:20.626227Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1748532380672:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:26:20.626296Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1748532380672:281474976715657 keys extracted: 0 2025-05-29T15:26:20.626334Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:26:20.626346Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 
2025-05-29T15:26:20.626355Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:26:20.626768Z node 1 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:26:20.626921Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:26:20.628631Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1748532380671 2025-05-29T15:26:20.628642Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:20.628658Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1748532380672} 2025-05-29T15:26:20.628674Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:20.628688Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:20.628693Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:26:20.628697Z node 1 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:26:20.628716Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1748532380672 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7509889391095277334:2183], exec latency: 0 ms, propose latency: 2 ms 2025-05-29T15:26:20.628728Z node 1 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:26:20.628738Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:20.628757Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1748532380672 2025-05-29T15:26:20.629065Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:153: [ChangeSender][72075186224037888:1][1:7509889391095277557:2309][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-05-29T15:26:20.638808Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:26:20.638835Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:26:20.638891Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:20.638921Z node 1 :TX_DATASHARD DEBUG: datashard__p ... 
72075186224037888, actorId: [24:682:2578] 2025-05-29T15:26:49.601276Z node 24 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:26:49.601281Z node 24 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:26:49.601287Z node 24 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:49.601433Z node 24 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:26:49.601462Z node 24 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:26:49.601577Z node 24 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:26:49.601589Z node 24 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:26:49.601600Z node 24 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:26:49.601607Z node 24 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:49.601625Z node 24 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [24:661:2566], serverId# [24:671:2572], sessionId# [0:0:0] 2025-05-29T15:26:49.601666Z node 24 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:26:49.601736Z node 24 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:26:49.601760Z node 24 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:26:49.602130Z node 24 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:49.615240Z node 24 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:26:49.615304Z node 24 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:26:49.775376Z node 24 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [24:698:2588], serverId# [24:699:2589], sessionId# [0:0:0] 2025-05-29T15:26:49.775823Z node 24 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:26:49.775839Z node 24 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:49.775913Z node 24 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:26:49.775921Z node 24 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 1 2025-05-29T15:26:49.775930Z node 24 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:26:49.775995Z node 24 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-05-29T15:26:49.776026Z node 24 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:26:49.776148Z node 24 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:26:49.776165Z node 24 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:26:49.776272Z node 24 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:26:49.776365Z node 24 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:26:49.776597Z node 24 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-05-29T15:26:49.776604Z node 24 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:49.776701Z node 24 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-05-29T15:26:49.776711Z node 24 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:49.776935Z node 24 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:49.776953Z node 24 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:49.776958Z node 24 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:26:49.776962Z node 24 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:26:49.776975Z node 24 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [24:410:2404], exec latency: 0 ms, propose latency: 0 ms 2025-05-29T15:26:49.776982Z node 24 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:26:49.776991Z node 24 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:49.777013Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:153: [ChangeSender][72075186224037888:1][24:682:2578][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-05-29T15:26:49.777456Z node 24 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:26:49.777470Z node 24 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 
72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:26:49.777494Z node 24 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-05-29T15:26:49.780563Z node 24 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [24:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:49.780583Z node 24 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [24:741:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:49.780593Z node 24 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:26:49.781520Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:26:49.782569Z node 24 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:49.932828Z node 24 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:49.933272Z node 24 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [24:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:26:49.965235Z node 24 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [24:815:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:26:49.974002Z node 24 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [24:825:2669], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:26:49.974470Z node 24 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=24&id=MWI5Y2YyMzItYzQxNDBhMGYtZTE4NThiMS1kYjkwMjE3Mg==, ActorId: [24:729:2611], ActorState: ExecuteState, TraceId: 01jweaeddmb1sy6dt1bxcstjt7, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x160E81AC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x1629BEB9) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x289DCC14) NKikimr::NTestSuiteCdc::TTestCaseInitialScanAndResolvedTimestamps::Execute_(NUnitTest::TTestContext&)+1376 (0x15F60140) NKikimr::NTestSuiteCdc::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x15F86457) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1629DD6E) NKikimr::NTestSuiteCdc::TCurrentTest::Execute()+411 (0x15F85E1B) NUnitTest::TTestFactory::Execute()+803 (0x1629E4E3) NUnitTest::RunMain(int, char**)+3021 (0x162B008D) ??+0 (0x7FB6DC411D90) __libc_start_main+128 (0x7FB6DC411E40) _start+41 (0x14F5B029) >> TTxDataShardMiniKQL::TableStats [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampForDisplacedUpsert [FAIL] Test command err: 2025-05-29T15:26:20.981891Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889392061575628:2080];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:26:20.982134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001244/r3tmp/tmpiz9BSK/pdisk_1.dat 2025-05-29T15:26:21.132336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:26:21.132370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:26:21.133319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12346, node 1 2025-05-29T15:26:21.152786Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:21.160543Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:26:21.160557Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:26:21.160561Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty 
maybe) 2025-05-29T15:26:21.160610Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:26:21.173938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:21.201862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:26:21.211660Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7509889396356543498:2308] 2025-05-29T15:26:21.211741Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:26:21.218855Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:26:21.218885Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:26:21.219094Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:26:21.219101Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:26:21.219109Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:26:21.219186Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:26:21.219198Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:26:21.219207Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:7509889396356543512:2308] in generation 1 2025-05-29T15:26:21.223315Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:26:21.229724Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:26:21.229804Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:26:21.229820Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:7509889396356543514:2309] 2025-05-29T15:26:21.229823Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:26:21.229825Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:26:21.229829Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:21.229882Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:26:21.229905Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:26:21.229909Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:26:21.229914Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 0 2025-05-29T15:26:21.229921Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:26:21.229924Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:21.259372Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:7509889396356543495:2296], serverId# [1:7509889396356543517:2306], sessionId# [0:0:0] 2025-05-29T15:26:21.259421Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:26:21.259487Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:26:21.259521Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:26:21.260223Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:21.260832Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-29T15:26:21.260861Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-05-29T15:26:21.262345Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:7509889396356543530:2313], serverId# [1:7509889396356543532:2315], sessionId# [0:0:0] 2025-05-29T15:26:21.263182Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1748532381309 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1748532381309 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-29T15:26:21.263190Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:21.263218Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:21.263227Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:26:21.263232Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:26:21.263239Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1748532381309:281474976715657] in PlanQueue unit at 72075186224037888 2025-05-29T15:26:21.263295Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1748532381309:281474976715657 keys extracted: 0 2025-05-29T15:26:21.263318Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-29T15:26:21.263328Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:26:21.263336Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying 
to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-29T15:26:21.263628Z node 1 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:26:21.263702Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:26:21.268558Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3742: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1748532381308 2025-05-29T15:26:21.268573Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:21.268588Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1748532381309} 2025-05-29T15:26:21.268605Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:21.268618Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:26:21.268623Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:26:21.268627Z node 1 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037888 2025-05-29T15:26:21.268647Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [1748532381309 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7509889396356543306:2190], exec latency: 0 ms, propose latency: 5 ms 2025-05-29T15:26:21.268662Z node 1 :TX_DATASHARD INFO: datashard.cpp:1590: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-05-29T15:26:21.268669Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:21.268683Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3760: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1748532381316 2025-05-29T15:26:21.269002Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:153: [ChangeSender][72075186224037888:1][1:7509889396356543514:2309][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-05-29T15:26:21.276757Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-05-29T15:26:21.276784Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-05-29T15:26:21.276837Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:26:21.276867Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:26:21.276919Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose s ... 
eChangeRecords: at tablet: 72075186224037888, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 } 2025-05-29T15:26:52.913275Z node 24 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:26:52.913286Z node 24 :TX_DATASHARD DEBUG: datashard.cpp:3812: Waiting for PlanStep# 12000 from mediator time cast 2025-05-29T15:26:52.913318Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:71: [ChangeSender][72075186224037888:1][24:682:2578] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 }] } 2025-05-29T15:26:52.913335Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:628: [CdcChangeSenderMain][72075186224037888:1][24:842:2685] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 }] } 2025-05-29T15:26:52.913387Z node 24 :TX_DATASHARD INFO: datashard_change_sending.cpp:215: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 2025-05-29T15:26:52.913444Z node 24 :TX_DATASHARD DEBUG: datashard_change_sending.cpp:235: Send 1 change records: to# [24:842:2685], at tablet# 72075186224037888 2025-05-29T15:26:52.913453Z node 24 :TX_DATASHARD INFO: datashard_change_sending.cpp:260: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2025-05-29T15:26:52.913472Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:633: [CdcChangeSenderMain][72075186224037888:1][24:842:2685] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:26:52.913511Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:111: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][24:928:2685] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:26:52.913583Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'streamImpl' requestId: 2025-05-29T15:26:52.913593Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-05-29T15:26:52.913633Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'streamImpl' partition: 0 messageNo: 4 requestId: cookie: 3 2025-05-29T15:26:52.913669Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'streamImpl' requestId: 2025-05-29T15:26:52.913674Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-05-29T15:26:52.913687Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2198: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 3 partNo : 0 messageNo: 5 size 26 offset: -1 2025-05-29T15:26:52.913738Z node 24 
:PERSQUEUE DEBUG: partition_write.cpp:1162: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2025-05-29T15:26:52.913754Z node 24 :PERSQUEUE INFO: partition_write.cpp:1658: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2025-05-29T15:26:52.913775Z node 24 :PERSQUEUE DEBUG: partition_write.cpp:1233: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-05-29T15:26:52.913845Z node 24 :PERSQUEUE DEBUG: partition_write.cpp:1333: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 107 count 1 nextOffset 3 batches 1 2025-05-29T15:26:52.913894Z node 24 :PERSQUEUE DEBUG: partition_write.cpp:1623: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 93 WTime 8950 2025-05-29T15:26:52.913917Z node 24 :PERSQUEUE DEBUG: partition.cpp:2185: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-05-29T15:26:52.913922Z node 24 :PERSQUEUE DEBUG: partition.cpp:2186: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-05-29T15:26:52.913927Z node 24 :PERSQUEUE DEBUG: partition.cpp:2192: [PQ: 72075186224037889, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-05-29T15:26:52.913932Z node 24 :PERSQUEUE DEBUG: partition.cpp:2194: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-05-29T15:26:52.913937Z node 24 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037889 2025-05-29T15:26:52.913941Z node 24 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72075186224037889, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-05-29T15:26:52.913944Z node 24 :PERSQUEUE DEBUG: partition.cpp:2197: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-05-29T15:26:52.913948Z node 24 :PERSQUEUE DEBUG: partition.cpp:2199: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-05-29T15:26:52.913953Z node 24 :PERSQUEUE DEBUG: partition.cpp:2204: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-05-29T15:26:52.913972Z node 24 :PERSQUEUE DEBUG: read.h:262: CacheProxy. Passthrough write request to KV 2025-05-29T15:26:52.913983Z node 24 :PERSQUEUE DEBUG: read.h:300: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 93 2025-05-29T15:26:52.914228Z node 24 :PERSQUEUE DEBUG: cache_eviction.h:315: Caching head blob in L1. Partition 0 offset 2 count 1 size 93 actorID [24:790:2655] 2025-05-29T15:26:52.914264Z node 24 :PERSQUEUE DEBUG: pq_l2_cache.cpp:120: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 2 partno 0 count 1 parts 0 size 93 2025-05-29T15:26:52.914282Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:1372: [PQ: 72075186224037889] Topic 'Table/Stream/streamImpl' counters. 
CacheSize 279 CachedBlobs 3
2025-05-29T15:26:52.924832Z node 24 :PERSQUEUE DEBUG: partition_write.cpp:524: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0
2025-05-29T15:26:52.924878Z node 24 :PERSQUEUE DEBUG: partition_write.cpp:58: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0
2025-05-29T15:26:52.924900Z node 24 :PERSQUEUE DEBUG: partition_write.cpp:324: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk
2025-05-29T15:26:52.924968Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'streamImpl' partition: 0 messageNo: 5 requestId: cookie: 3
2025-05-29T15:26:52.925067Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:160: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][24:928:2685] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 3 Offset: 2 WriteTimestampMS: 8950 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 3 } } }
2025-05-29T15:26:52.925090Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:643: [CdcChangeSenderMain][72075186224037888:1][24:842:2685] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 }
2025-05-29T15:26:52.925142Z node 24 :TX_DATASHARD INFO: datashard_change_sending.cpp:310: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888
2025-05-29T15:26:52.925151Z node 24 :TX_DATASHARD DEBUG: datashard.cpp:1087: RemoveChangeRecord: order: 3, at tablet: 72075186224037888
2025-05-29T15:26:52.925342Z node 24 :TX_DATASHARD INFO: datashard_change_sending.cpp:335: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888
>>>>> GetRecords path=/Root/Table/Stream partitionId=0
2025-05-29T15:26:53.059991Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:347: Handle TEvRequest topic: 'streamImpl' requestId:
2025-05-29T15:26:53.060021Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2796: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0
2025-05-29T15:26:53.060081Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:736: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 10 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 3 max time lag 0ms effective offset 0
2025-05-29T15:26:53.060091Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:936: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 10 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 3
2025-05-29T15:26:53.060121Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:953: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 10. All data is from uncompacted head.
2025-05-29T15:26:53.060130Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:420: FormAnswer for 0 blobs
2025-05-29T15:26:53.060182Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:382: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0
assertion failed at ydb/core/tx/datashard/datashard_ut_change_exchange.cpp:1078, void NKikimr::NTestSuiteCdc::AssertJsonsEqual(const TString &, const TString &): (AreJsonsEqual(actual, expected)) Jsons are different: {"resolved":[6000,0]} != {"update":{"value":10},"key":[1],"ts":"***"}
TBackTrace::Capture()+28 (0x160E81AC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x1629BEB9)
??+0 (0x15F34242)
NKikimr::NTestSuiteCdc::WaitForContent(TIntrusivePtr>, NActors::TActorId const&, TBasicString> const&, TVector>, std::__y1::allocator>>> const&)+725 (0x15F33455)
NKikimr::NTestSuiteCdc::TTestCaseResolvedTimestampForDisplacedUpsert::Execute_(NUnitTest::TTestContext&)+4054 (0x15F746E6)
NKikimr::NTestSuiteCdc::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x15F86457)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1629DD6E)
NKikimr::NTestSuiteCdc::TCurrentTest::Execute()+411 (0x15F85E1B)
NUnitTest::TTestFactory::Execute()+803 (0x1629E4E3)
NUnitTest::RunMain(int, char**)+3021 (0x162B008D)
??+0 (0x7FDC972E4D90)
__libc_start_main+128 (0x7FDC972E4E40)
_start+41 (0x14F5B029)
>> IncrementalBackup::BackupRestore
>> IncrementalBackup::SimpleRestore
|68.4%| [TA] $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log}
|68.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/test-results/unittest/{meta.json ... results_accumulator.log}
>> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental
>> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD]
>> TExportToS3Tests::ShouldRestartOnScanErrors
>> IncrementalBackup::SimpleBackup
>> YdbTableSplit::SplitByLoadWithReads [GOOD]
|68.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/control/ut/ydb-core-control-ut
|68.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut
>> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD]
>> TExportToS3Tests::AuditCompletedExport [GOOD]
>> TExportToS3Tests::AuditCancelledExport
>> IncrementalBackup::SimpleRestore [FAIL]
>> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental
>> IncrementalBackup::BackupRestore [FAIL]
>> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental
|68.4%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental [FAIL]
>> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental
|68.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log}
>> TTablesWithReboots::AlterTableSchemaFreezeUnfreezeWithReboots [GOOD]
>> IncrementalBackup::SimpleBackup [FAIL]
>> IncrementalBackup::MultiRestore
>> TExportToS3Tests::AuditCancelledExport [GOOD]
>> TExportToS3Tests::AutoDropping
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReads [GOOD]
Test command err:
2025-05-29T15:25:47.774471Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889246418827880:2212];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:25:47.774536Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026d2/r3tmp/tmpPVSLo4/pdisk_1.dat
2025-05-29T15:25:47.887585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:25:47.887612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:25:47.892932Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:25:47.898345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 31229, node 1
2025-05-29T15:25:47.933211Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:25:47.933222Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:25:47.933224Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:25:47.933269Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:32108
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:25:48.011929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
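The Cdc::ResolvedTimestampForDisplacedUpsert failure above is an AssertJsonsEqual mismatch: the change stream yielded a resolved-timestamp record ({"resolved":[6000,0]}) where the test expected the upsert's update record, with the volatile "ts" field already masked to "***" in the expectation. As a reading aid only, here is a minimal sketch of such a masked JSON comparison; it assumes Arcadia's library/cpp/json and is an illustration, not the test's actual helper:

    #include <library/cpp/json/json_reader.h>
    #include <util/generic/string.h>

    // Illustrative only: mask the unstable write-timestamp field, then compare
    // the parsed trees structurally, so key order and whitespace do not matter.
    bool JsonsEqualWithMaskedTs(const TString& actualRaw, const TString& expectedRaw) {
        NJson::TJsonValue actual;
        NJson::TJsonValue expected;
        if (!NJson::ReadJsonTree(actualRaw, &actual) || !NJson::ReadJsonTree(expectedRaw, &expected)) {
            return false; // unparsable payloads never match
        }
        if (actual.Has("ts")) {
            // Mirrors the {"ts":"***"} placeholder in the expected record above.
            actual["ts"] = NJson::TJsonValue("***");
        }
        // A resolved record like {"resolved":[6000,0]} has no "ts"/"update"/"key"
        // keys, so it still fails this comparison -- exactly the failure logged.
        return actual == expected;
    }

Parsing before comparing is what lets the expectation carry a literal "***" placeholder instead of a real timestamp.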
2025-05-29T15:25:48.023556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:32108 2025-05-29T15:25:48.359087Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889250713796008:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.359124Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.404940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:25:48.502397Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889250713796174:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.502427Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.508414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532348521 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532348521 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-05-29T15:25:48.530915Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889250713796261:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.530992Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.532809Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889250713796285:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.532822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889250713796283:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.532828Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889250713796284:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.532834Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889250713796294:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.532838Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889250713796295:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.532844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889250713796282:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.532868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.534212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:25:48.534272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715661:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:25:48.534281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715661:1, at schemeshard: 72057594046644480 2025-05-29T15:25:48.534296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715661:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:25:48.534304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715661:2, at schemeshard: 72057594046644480 2025-05-29T15:25:48.534314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715661:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:25:48.534327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715661:3, path# /Root/.metadata/workload_manager/pools/default 2025-05-29T15:25:48.534378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976715661:3 1 -> 128 2025-05-29T15:25:48.534448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715661:4, propose status:S ... t, SessionId: ydb://session/3?node_id=1&id=ZDcxNjNmZWYtNDE1MWJkNDItOTA2MzRiYjctZjczODM4YWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:26:53.623398Z node 1 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-05-29T15:26:53.623688Z node 1 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037888 2025-05-29T15:26:53.623725Z node 1 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037888 2025-05-29T15:26:53.623994Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-05-29T15:26:53.624124Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821193, task: 1, CA Id [1:7509889529887728427:2360]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-05-29T15:26:53.624142Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821195, task: 1, CA Id [1:7509889529887728430:2391]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-05-29T15:26:53.624148Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821194, task: 1, CA Id [1:7509889529887728437:2392]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-05-29T15:26:53.624153Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821190, task: 1, CA Id [1:7509889529887728392:2381]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-05-29T15:26:53.625092Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976821206. Ctx: { TraceId: 01jweaeh5q4tjjfp0crrtrrve3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODFkY2I5MGEtN2ExYzVhOWMtYmNkMGRiODItNzExYzlhYjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:26:53.625131Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976821207. Ctx: { TraceId: 01jweaeh5qfdwvs1yne8k5skws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmUzNjM3MDItMzY0ZTZhOWQtNWExNDNjY2ItYWEwNTUxYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532348521 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-05-29T15:26:53.627843Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976821208. Ctx: { TraceId: 01jweaeh5v6w4msffbn2v1tx8h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGE4N2YxN2EtOGEzMzMyODctODdlM2Y0ZDAtMWE4YTVhZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:26:53.627851Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976821209. Ctx: { TraceId: 01jweaeh5v85dmtrsxzn6hdva4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzJmNDhiN2MtMWRlZTVhNjQtNTBkNGVlYzAtNzcwOTRkOWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:26:53.662316Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821194, task: 1, CA Id [1:7509889529887728437:2392]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:53.662325Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821190, task: 1, CA Id [1:7509889529887728392:2381]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:53.662343Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821195, task: 1, CA Id [1:7509889529887728430:2391]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:53.662350Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821193, task: 1, CA Id [1:7509889529887728427:2360]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:53.742086Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821193, task: 1, CA Id [1:7509889529887728427:2360]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:53.742123Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821194, task: 1, CA Id [1:7509889529887728437:2392]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:53.742133Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821190, task: 1, CA Id [1:7509889529887728392:2381]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:53.742140Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821195, task: 1, CA Id [1:7509889529887728430:2391]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:53.902934Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821193, task: 1, CA Id [1:7509889529887728427:2360]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:53.902935Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821194, task: 1, CA Id [1:7509889529887728437:2392]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:53.902951Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821190, task: 1, CA Id [1:7509889529887728392:2381]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:53.902960Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821195, task: 1, CA Id [1:7509889529887728430:2391]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:54.095082Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821194, task: 1, CA Id [1:7509889529887728437:2392]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:54.095121Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821193, task: 1, CA Id [1:7509889529887728427:2360]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:54.197932Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821195, task: 1, CA Id [1:7509889529887728430:2391]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:54.197974Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821190, task: 1, CA Id [1:7509889529887728392:2381]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:54.502958Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821194, task: 1, CA Id [1:7509889529887728437:2392]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:54.718422Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821193, task: 1, CA Id [1:7509889529887728427:2360]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:54.778971Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821190, task: 1, CA Id [1:7509889529887728392:2381]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:54.846792Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821195, task: 1, CA Id [1:7509889529887728430:2391]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:55.422710Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821194, task: 1, CA Id [1:7509889529887728437:2392]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:55.545434Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821193, task: 1, CA Id [1:7509889529887728427:2360]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:55.617360Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821190, task: 1, CA Id [1:7509889529887728392:2381]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:55.783532Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821195, task: 1, CA Id [1:7509889529887728430:2391]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:56.036271Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821194, task: 1, CA Id [1:7509889529887728437:2392]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:56.216442Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821190, task: 1, CA Id [1:7509889529887728392:2381]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:56.487520Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821195, task: 1, CA Id [1:7509889529887728430:2391]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:56.547879Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821193, task: 1, CA Id [1:7509889529887728427:2360]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:56.657507Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821194, task: 1, CA Id [1:7509889529887728437:2392]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:56.855627Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821190, task: 1, CA Id [1:7509889529887728392:2381]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:57.145613Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821195, task: 1, CA Id [1:7509889529887728430:2391]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:26:57.503524Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976821193, task: 1, CA Id [1:7509889529887728427:2360]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532348521 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED)
Table has 2 shards
>> TExportToS3Tests::AutoDropping [GOOD]
>> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental [FAIL]
>> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental
>> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [FAIL]
>> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:26:34.737069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:26:34.737095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:26:34.737101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:26:34.737106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:26:34.737112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:26:34.737116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:26:34.737125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:26:34.737138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:26:34.737244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:26:34.737325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:26:34.750636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:26:34.750656Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:26:34.753206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:26:34.753327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:26:34.753360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:26:34.755129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:34.755264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:34.755353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:34.755382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:34.755719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:34.755761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:34.755976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:34.755984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:34.755997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:34.756003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:34.756007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:34.756028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:26:34.757275Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:26:34.773452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:34.777813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:34.777894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:34.777956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:34.777977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:34.778678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:34.778712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:34.778790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:34.778802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:34.778809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:34.778815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:34.779349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:34.779363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:34.779370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:34.779787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:34.779798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:34.779804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:34.779811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:34.780649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:34.781089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:34.781127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:34.781316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:34.781345Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:34.781353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:34.781429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:26:34.781437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:34.781470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:26:34.781483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:26:34.782016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:34.782028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:34.782075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
id 281474976710759:0 128 -> 129 2025-05-29T15:26:59.404490Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:15431 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9E8E9B10-6DE5-4175-A2B5-08AC74BFC3A9 amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:15431 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 49C95EF0-ABA4-4FFC-8A11-89BEB2873894 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-05-29T15:26:59.413928Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:59.413953Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:26:59.414081Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:59.414089Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:208:2209], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2025-05-29T15:26:59.414344Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:26:59.414358Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:59.414598Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-05-29T15:26:59.414618Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-05-29T15:26:59.414624Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-05-29T15:26:59.414631Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-05-29T15:26:59.414639Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:26:59.414661Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710759 2025-05-29T15:26:59.415518Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 TestWaitNotification wait txId: 102 2025-05-29T15:26:59.415590Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:26:59.415600Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:26:59.417169Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:62: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:26:59.417184Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:15431 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BD3F5845-10D6-4FC3-9AFE-69F6F8C5773B amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:15431 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9854751E-2B1A-4D4F-9A18-53EB0B29ADE1 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 357 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:15431 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1C16E650-762E-42AA-A5C1-7CE0C6CFF17F amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-05-29T15:27:00.003487Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 447 RawX2: 17179871599 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-29T15:27:00.003529Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-05-29T15:27:00.003570Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 447 RawX2: 17179871599 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 
Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-29T15:27:00.003588Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 447 RawX2: 17179871599 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-29T15:27:00.003607Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:00.003612Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:27:00.003619Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-29T15:27:00.003628Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976710759:0 129 -> 240 2025-05-29T15:27:00.003699Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:00.009386Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:27:00.009600Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:27:00.009613Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-05-29T15:27:00.009640Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-29T15:27:00.009646Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-29T15:27:00.009653Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-29T15:27:00.009657Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-29T15:27:00.009663Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-05-29T15:27:00.009696Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:126:2151] message: TxId: 281474976710759 2025-05-29T15:27:00.009707Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1
2025-05-29T15:27:00.009713Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710759:0
2025-05-29T15:27:00.009718Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710759:0
2025-05-29T15:27:00.009760Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
2025-05-29T15:27:00.015182Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759
2025-05-29T15:27:00.015240Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710759
2025-05-29T15:27:00.023168Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-05-29T15:27:00.023208Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:587:2542]
TestWaitNotification: OK eventTxId 102
|68.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut
>> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::AlterTableSchemaFreezeUnfreezeWithReboots [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:26:25.365273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:26:25.365300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:26:25.365306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:26:25.365312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:26:25.365319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config:
type TxMergeTablePartition, limit 10000 2025-05-29T15:26:25.365323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:25.365332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:25.365347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:25.365460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:25.365540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:25.379049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:25.379074Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:25.379185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:25.382532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:25.382569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:25.382611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:25.385578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:25.385657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:25.385771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:25.385903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:25.386393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:25.386428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:25.386646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:25.386656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:25.386695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:25.386703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:25.386710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:25.386728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:25.388358Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:25.409191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:25.409271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:25.409345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:25.409402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:25.409413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:25.411061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:25.411094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:25.411160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:25.411172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:25.411180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:25.411186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:25.411839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
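The schemeshard traces in this dump, and in the export dump above, always pair a numeric state hop ("Change state for txid 1:0 2 -> 3", "3 -> 128", "128 -> 240" here; "128 -> 129" and "129 -> 240" earlier) with the name of the phase that runs next. A hedged reading aid, reconstructed from these log lines alone rather than from the actual NKikimr sources:

    // Hypothetical reading aid: the numeric operation states as this log pairs
    // them with phase names. Not the real NKikimr::NSchemeShard definitions.
    enum class ETxStateSeenInLog {
        CreateParts       = 2,   // "TCreateParts opId# ... ProgressState"
        ConfigureParts    = 3,   // "NSubDomainState::TConfigureParts ... ProgressState"
        Propose           = 128, // "NSubDomainState::TPropose ProgressState"
        ProposedWaitParts = 129, // "TBackup TProposedWaitParts ... HandleReply TEvSchemaChanged"
        Done              = 240, // "TDone opId# ... ProgressState"
    };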
2025-05-29T15:26:25.411862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:25.411870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:25.412600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:25.412615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:25.412622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:25.412631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:25.413357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:25.414351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:25.414400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:25.414622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:25.414654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:25.414663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:25.414732Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
1, at schemeshard: 72057594046678944 2025-05-29T15:27:00.903008Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1014: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-05-29T15:27:00.903060Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 352 RawX2: 532575947037 } Origin: 72075186233409547 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:27:00.903066Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409547, partId: 0 2025-05-29T15:27:00.903078Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 352 RawX2: 532575947037 } Origin: 72075186233409547 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:27:00.903082Z node 124 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:27:00.903090Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 352 RawX2: 532575947037 } Origin: 72075186233409547 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:27:00.903099Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:00.903104Z node 124 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:00.903109Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:27:00.903114Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1004:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-29T15:27:00.903118Z node 124 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1004:0 129 -> 240 2025-05-29T15:27:00.903596Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:27:00.904075Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:00.904102Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:00.904113Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:00.904123Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: 
TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:00.904198Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:00.904204Z node 124 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:27:00.904218Z node 124 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:27:00.904221Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:27:00.904225Z node 124 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:27:00.904227Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:27:00.904230Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2025-05-29T15:27:00.904235Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:27:00.904239Z node 124 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:27:00.904242Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:27:00.904262Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:27:00.904804Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:27:00.904814Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:27:00.904875Z node 124 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:27:00.904896Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:27:00.904901Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [124:585:2545] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:27:00.904962Z node 124 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:00.905000Z node 124 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "MyRoot/Table" took 46us result status StatusSuccess 2025-05-29T15:27:00.905158Z node 124 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "MyRoot/Table" PathDescription { Self { Name: 
"Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key2" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key1" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnIds: 2 KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } ExecutorCacheSize: 42 TxReadSizeLimit: 100 PartitioningPolicy { MinPartitionsCount: 2 } FreezeState: Unfreeze } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\010\000\000\000\000\000\000\000\000\000\000\200\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|68.4%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut
|68.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut
|68.4%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut
|68.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/test-results/unittest/{meta.json ... results_accumulator.log}
>> SubDomainWithReboots::RootWithStoragePools [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AutoDropping [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:110:2141]
2025-05-29T15:26:29.719462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:26:29.719492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:26:29.719498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:26:29.719503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:26:29.719510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:26:29.719514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:26:29.719525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:26:29.719540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10,
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:29.719646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:29.719733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:29.733900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:26:29.733922Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:29.746817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:29.746901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:29.746943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:29.763399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:29.763473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:29.763614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:29.763661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:29.764212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:29.764288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:29.764635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:29.764653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:29.764663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:29.764672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:29.764679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:29.764715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.766475Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:241:2058] recipient: [1:15:2062] 2025-05-29T15:26:29.792924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain 
{ Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:29.793026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.793100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:29.793153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:29.793165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.794518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:29.794573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:29.794654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.794667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:29.794674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:29.794680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:29.795367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.795384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:29.795391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:29.795786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.795799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.795805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:29.795813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:29.796501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:29.796917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:29.796956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:29.797107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:29.797129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:29.797136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:29.797193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:26:29.797199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:29.797226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:26:29.797235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:26:29.797544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:29.797551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:29.797591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
-29T15:27:01.780994Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-29T15:27:01.781012Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-29T15:27:01.781026Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710761 2025-05-29T15:27:01.781037Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute 2025-05-29T15:27:01.781043Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1232: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-29T15:27:01.781049Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1263: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2025-05-29T15:27:01.781551Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete 2025-05-29T15:27:01.781578Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:27:01.781586Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:473:2433] TestWaitNotification: OK eventTxId 102 2025-05-29T15:27:01.781787Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:01.781844Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 70us result status StatusSuccess 2025-05-29T15:27:01.781969Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 
72057594046678944, at schemeshard: 72057594046678944 desc: 1 2025-05-29T15:27:01.782052Z node 5 :EXPORT DEBUG: schemeshard_export__forget.cpp:79: TExport::TTxForget, dropping export tables, info: { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-05-29T15:27:01.782571Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute 2025-05-29T15:27:01.782588Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:732: TExport::TTxProgress: Resume: id# 102 2025-05-29T15:27:01.782599Z node 5 :EXPORT INFO: schemeshard_export__create.cpp:530: TExport::TTxProgress: Allocate txId: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-05-29T15:27:01.782609Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete 2025-05-29T15:27:01.782630Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 2025-05-29T15:27:01.782636Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute 2025-05-29T15:27:01.782641Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:852: TExport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 102 2025-05-29T15:27:01.782648Z node 5 :EXPORT INFO: schemeshard_export__create.cpp:522: TExport::TTxProgress: Drop propose: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710762 2025-05-29T15:27:01.782664Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete 2025-05-29T15:27:01.783376Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-102" } Internal: true } TxId: 281474976710762 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:27:01.783409Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:29: TRmDir Propose, path: /MyRoot/export-102, pathId: 0, opId: 281474976710762:0, at schemeshard: 72057594046678944 2025-05-29T15:27:01.783440Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976710762:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37, at schemeshard: 72057594046678944 2025-05-29T15:27:01.784086Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976710762, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 
72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 3 PathDropTxId: 281474976710761, at schemeshard: 72057594046678944 2025-05-29T15:27:01.784137Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710762, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37, operation: DROP DIRECTORY, path: /MyRoot/export-102 2025-05-29T15:27:01.784169Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6697: Handle: TEvModifySchemeTransactionResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-05-29T15:27:01.784179Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6699: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 3 PathDropTxId: 281474976710761 2025-05-29T15:27:01.784189Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute 2025-05-29T15:27:01.784194Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:913: TExport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-05-29T15:27:01.784202Z node 5 :EXPORT TRACE: schemeshard_export__create.cpp:914: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 3 PathDropTxId: 281474976710761 2025-05-29T15:27:01.784230Z node 5 :EXPORT INFO: schemeshard_export__create.cpp:1095: TExport::TTxProgress: Wait for completion: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710761 2025-05-29T15:27:01.784583Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete 2025-05-29T15:27:01.784614Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-29T15:27:01.784632Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-29T15:27:01.784643Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710761 2025-05-29T15:27:01.784648Z node 5 :EXPORT DEBUG: 
schemeshard_export__create.cpp:309: TExport::TTxProgress: DoExecute
2025-05-29T15:27:01.784653Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1232: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761
2025-05-29T15:27:01.784658Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1263: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295
2025-05-29T15:27:01.785020Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:329: TExport::TTxProgress: DoComplete
TestWaitNotification wait txId: 102
2025-05-29T15:27:01.785083Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion
2025-05-29T15:27:01.785092Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102
2025-05-29T15:27:01.785172Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944
2025-05-29T15:27:01.785193Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-05-29T15:27:01.785199Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:693:2648]
TestWaitNotification: OK eventTxId 102
>> IncrementalBackup::MultiRestore [FAIL]
>> IncrementalBackup::E2EBackupCollection
>> TTxDataShardMiniKQL::WriteEraseRead
>> TTxDataShardMiniKQL::WriteEraseRead [GOOD]
>> TTxDataShardMiniKQL::WriteAndReadMultipleShards
>> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::RootWithStoragePools [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:26:53.877594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:26:53.877614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:26:53.877618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:26:53.877621Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:53.877625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:53.877628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:53.877635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:53.877644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:53.877756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:53.877820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:53.887266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:53.887291Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:53.887386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:53.889572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:53.889596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:53.889624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:53.891811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:53.891878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:53.891987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:53.892150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:53.892729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:53.892765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:53.892969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:53.892977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:53.893001Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:53.893009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:53.893014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:53.893035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:53.894101Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:53.908641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:53.908705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.908775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:53.908831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:53.908859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.909681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:53.909722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:53.909789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.909811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:53.909817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:53.909823Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:53.910385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.910402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:53.910408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:53.910782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.910794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.910801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:53.910808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:53.911412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:53.911828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:53.911869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:53.912105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:53.912132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:53.912139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:53.912210Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
eshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:27:02.552552Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1002:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:27:02.552557Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1002:0 ProgressState no shards to create, do next state 2025-05-29T15:27:02.552561Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1002:0 2 -> 3 2025-05-29T15:27:02.552970Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:27:02.552982Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1002:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:27:02.552989Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1002:0 3 -> 128 2025-05-29T15:27:02.553376Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:27:02.553392Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:27:02.553397Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1002:0, at tablet# 72057594046678944 2025-05-29T15:27:02.553403Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2025-05-29T15:27:02.553431Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:27:02.553773Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2025-05-29T15:27:02.553799Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000003 2025-05-29T15:27:02.553881Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:02.553903Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 133143988331 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:02.553909Z node 31 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1002:0, at tablet# 72057594046678944 2025-05-29T15:27:02.553977Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1002:0 128 -> 240 2025-05-29T15:27:02.553985Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1002:0, at tablet# 72057594046678944 2025-05-29T15:27:02.554008Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:27:02.554024Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1002 2025-05-29T15:27:02.554427Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:02.554437Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:02.554473Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:02.554478Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [31:206:2207], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2025-05-29T15:27:02.554552Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:27:02.554560Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1002:0 ProgressState 2025-05-29T15:27:02.554571Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1 2025-05-29T15:27:02.554576Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:27:02.554581Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1 2025-05-29T15:27:02.554584Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:27:02.554589Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2025-05-29T15:27:02.554594Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:27:02.554598Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1002:0 2025-05-29T15:27:02.554602Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1002:0 2025-05-29T15:27:02.554616Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 3 2025-05-29T15:27:02.554622Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1002, publications: 1, subscribers: 1 2025-05-29T15:27:02.554626Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-05-29T15:27:02.554701Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:27:02.554719Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:27:02.554724Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2025-05-29T15:27:02.554729Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-05-29T15:27:02.554734Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:27:02.554770Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2025-05-29T15:27:02.554776Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [31:302:2292] 2025-05-29T15:27:02.555420Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-05-29T15:27:02.555445Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-05-29T15:27:02.555452Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [31:303:2293] TestWaitNotification: OK eventTxId 1002 2025-05-29T15:27:02.555555Z node 31 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:02.555591Z node 31 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 45us result status StatusSuccess 2025-05-29T15:27:02.555702Z node 31 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |68.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |68.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |68.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table >> TTxDataShardMiniKQL::CrossShard_5_AllToAll >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [FAIL] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx0 [GOOD] >> IncrementalBackup::E2EBackupCollection [FAIL] >> TTxDataShardMiniKQL::WriteAndReadMultipleShards [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx0 [GOOD] Test command err: iteration# 0 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 6 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 12 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 18 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 24 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 30 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 36 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 42 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 48 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 54 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 60 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 ... iteration# 1950 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666
blobsUnwritten# 1218 iteration# 1956 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1962 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1968 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1974 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1980 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1986 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1992 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1998 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2004 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2010 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2016 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2022 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2028 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2034 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2040 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 |68.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageMultiShard |68.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |68.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |68.5%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |68.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] >> BasicUsage::WriteSessionCloseWaitsForWrites >> ColumnStatistics::CountMinSketchServerlessStatistics >> TTablesWithReboots::CreateTableWithReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] Test command err: 2025-05-29T15:27:04.488775Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:110:2140], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:27:04.519959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:04.519982Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:04.520530Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:110:2140], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:27:04.520622Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-05-29T15:27:04.520674Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:27:04.521652Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received 
event# 268828684, Sender [1:110:2140], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:27:04.563296Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:27:04.563396Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:27:04.563565Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-05-29T15:27:04.563575Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 9437184 2025-05-29T15:27:04.563583Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 9437184 2025-05-29T15:27:04.563643Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:27:04.563657Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:27:04.563668Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 9437184 persisting started state actor id [1:197:2153] in generation 2 2025-05-29T15:27:04.598115Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:27:04.605501Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 9437184 2025-05-29T15:27:04.605572Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:27:04.605592Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 9437184, actorId: [1:215:2212] 2025-05-29T15:27:04.605598Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 9437184 2025-05-29T15:27:04.605604Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-05-29T15:27:04.605608Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-05-29T15:27:04.605652Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:04.605668Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:04.605729Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-05-29T15:27:04.605748Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-05-29T15:27:04.605758Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-05-29T15:27:04.605763Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:27:04.605769Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-05-29T15:27:04.605774Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-05-29T15:27:04.605778Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-05-29T15:27:04.605783Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-05-29T15:27:04.605788Z node 1 :TX_DATASHARD 
DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-05-29T15:27:04.605799Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:211:2209], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:04.605804Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:04.605809Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 9437184, clientId# [1:209:2208], serverId# [1:211:2209], sessionId# [0:0:0] 2025-05-29T15:27:04.606294Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:100:2134], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 100 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-05-29T15:27:04.606307Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:27:04.606316Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:27:04.606342Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-05-29T15:27:04.606351Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-05-29T15:27:04.606359Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 1 at tablet 9437184 2025-05-29T15:27:04.606383Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-05-29T15:27:04.606388Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-05-29T15:27:04.606393Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-05-29T15:27:04.606398Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-05-29T15:27:04.606459Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-05-29T15:27:04.606465Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-05-29T15:27:04.606469Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1] at 9437184 to execution unit FinishPropose 2025-05-29T15:27:04.606473Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-05-29T15:27:04.606482Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1] at 9437184 is DelayComplete 2025-05-29T15:27:04.606486Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-05-29T15:27:04.606490Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-05-29T15:27:04.606494Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: 
Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-05-29T15:27:04.606499Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1832: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-05-29T15:27:04.622992Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-05-29T15:27:04.623018Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-05-29T15:27:04.623026Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-05-29T15:27:04.623039Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-05-29T15:27:04.623065Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 9437184 not sending time cast registration request in state WaitScheme 2025-05-29T15:27:04.623185Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:221:2218], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:04.623196Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:04.623205Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 9437184, clientId# [1:220:2217], serverId# [1:221:2218], sessionId# [0:0:0] 2025-05-29T15:27:04.623226Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269287424, Sender [1:100:2134], Recipient [1:130:2153]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-05-29T15:27:04.623232Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3147: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-05-29T15:27:04.623273Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1790: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-05-29T15:27:04.623282Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1805: Execution status for [2:1] at 9437184 is Executed 2025-05-29T15:27:04.623286Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-05-29T15:27:04.623291Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [2:1] at 9437184 to execution unit PlanQueue 2025-05-29T15:27:04.624086Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 100 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-05-29T15:27:04.624097Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-05-29T15:27:04.624144Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:04.624150Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:04.624159Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-05-29T15:27:04.624166Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:27:04.624171Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-05-29T15:27:04.624178Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-05-29T15:27:04.624184Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-05-29T15:27:04. ... imr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:368:2312]) to queue queue_transaction 2025-05-29T15:27:06.168164Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_transaction from 16.936776 to 33.873553 (insert task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:368:2312])) 2025-05-29T15:27:06.168173Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2025-05-29T15:27:06.168180Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2025-05-29T15:27:06.168188Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2025-05-29T15:27:06.168192Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [6:5] at 9437185 on unit ExecuteDataTx 2025-05-29T15:27:06.168301Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 5 at 9437185 restored its data 2025-05-29T15:27:06.237357Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:306: Executed operation [6:5] at tablet 9437185 with status COMPLETE 2025-05-29T15:27:06.237410Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:312: Datashard execution counters for [6:5] at 9437185: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-05-29T15:27:06.237439Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [6:5] at 9437185 is ExecutedNoMoreRestarts 2025-05-29T15:27:06.237448Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [6:5] at 9437185 executing on unit ExecuteDataTx 2025-05-29T15:27:06.237456Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [6:5] at 9437185 to execution unit CompleteOperation 2025-05-29T15:27:06.237462Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [6:5] at 9437185 on unit CompleteOperation 2025-05-29T15:27:06.237565Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [6:5] at 9437185 is DelayComplete 2025-05-29T15:27:06.237571Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [6:5] at 9437185 executing on unit CompleteOperation 2025-05-29T15:27:06.237575Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [6:5] at 9437185 to execution unit CompletedOperations 2025-05-29T15:27:06.237580Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [6:5] at 9437185 on unit CompletedOperations 2025-05-29T15:27:06.237588Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [6:5] at 9437185 is Executed 2025-05-29T15:27:06.237592Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution 
plan for [6:5] at 9437185 executing on unit CompletedOperations 2025-05-29T15:27:06.237596Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [6:5] at 9437185 has finished 2025-05-29T15:27:06.237605Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:27:06.237610Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2025-05-29T15:27:06.237616Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2025-05-29T15:27:06.237621Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2025-05-29T15:27:06.237661Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-05-29T15:27:06.237677Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2025-05-29T15:27:06.237746Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-05-29T15:27:06.237752Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [6:5] at 9437184 on unit ExecuteDataTx 2025-05-29T15:27:06.238114Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 5 at 9437184 restored its data 2025-05-29T15:27:06.299591Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:306: Executed operation [6:5] at tablet 9437184 with status COMPLETE 2025-05-29T15:27:06.299639Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:312: Datashard execution counters for [6:5] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-05-29T15:27:06.299669Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [6:5] at 9437184 is ExecutedNoMoreRestarts 2025-05-29T15:27:06.299683Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [6:5] at 9437184 executing on unit ExecuteDataTx 2025-05-29T15:27:06.299691Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [6:5] at 9437184 to execution unit CompleteOperation 2025-05-29T15:27:06.299697Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [6:5] at 9437184 on unit CompleteOperation 2025-05-29T15:27:06.299821Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [6:5] at 9437184 is DelayComplete 2025-05-29T15:27:06.299827Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [6:5] at 9437184 executing on unit CompleteOperation 2025-05-29T15:27:06.299831Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [6:5] at 9437184 to execution unit CompletedOperations 2025-05-29T15:27:06.299836Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [6:5] at 9437184 on unit CompletedOperations 2025-05-29T15:27:06.299843Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [6:5] at 9437184 is Executed 2025-05-29T15:27:06.299847Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [6:5] at 
9437184 executing on unit CompletedOperations 2025-05-29T15:27:06.299851Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [6:5] at 9437184 has finished 2025-05-29T15:27:06.299860Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:27:06.299865Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-05-29T15:27:06.299871Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-05-29T15:27:06.299876Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2025-05-29T15:27:06.299917Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-05-29T15:27:06.299933Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2025-05-29T15:27:06.300012Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:368:2312]) (release resources {0, 96990534}) 2025-05-29T15:27:06.300032Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_transaction from 33.873553 to 16.936776 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:368:2312])) 2025-05-29T15:27:06.300066Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:256:2226]) (release resources {0, 96990534}) 2025-05-29T15:27:06.300072Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_transaction from 16.936776 to 0.000000 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:256:2226])) 2025-05-29T15:27:06.317727Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:10} commited cookie 1 for step 9 2025-05-29T15:27:06.317773Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2025-05-29T15:27:06.317787Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [6:5] at 9437185 on unit CompleteOperation 2025-05-29T15:27:06.317816Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [6 : 5] from 9437185 at tablet 9437185 send result to client [3:100:2134], exec latency: 2 ms, propose latency: 4 ms 2025-05-29T15:27:06.317857Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:563: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-05-29T15:27:06.317868Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-05-29T15:27:06.317970Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:10} commited cookie 1 for step 9 2025-05-29T15:27:06.317978Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-05-29T15:27:06.317984Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [6:5] at 9437184 on unit 
CompleteOperation 2025-05-29T15:27:06.317993Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:801: Complete [6 : 5] from 9437184 at tablet 9437184 send result to client [3:100:2134], exec latency: 2 ms, propose latency: 4 ms 2025-05-29T15:27:06.318000Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:563: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-05-29T15:27:06.318005Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-05-29T15:27:06.318057Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269287938, Sender [3:346:2312], Recipient [3:458:2398]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-05-29T15:27:06.318067Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3149: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-05-29T15:27:06.318074Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437185 consumer 9437185 txId 5 2025-05-29T15:27:06.318092Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269287938, Sender [3:234:2226], Recipient [3:458:2398]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-05-29T15:27:06.318096Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3149: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-05-29T15:27:06.318101Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 5 |68.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CreateTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:26:45.501359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:45.501390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:45.501396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:45.501402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:45.501408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:45.501413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:45.501421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:45.501437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:45.501547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:45.501630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:45.513675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:45.513695Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:45.513759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:45.516223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:45.516257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:45.516300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:45.519112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:45.519194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:45.519303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:45.519565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:45.520386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:45.520432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:45.520692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:45.520702Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:45.520728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:45.520734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:45.520738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:45.520753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:45.521960Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:45.546061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:45.546125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:45.546181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:45.546234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:45.546247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:45.547044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:45.547070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:45.547117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:45.547126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:45.547131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:45.547136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:45.547610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:45.547628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:45.547635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:45.549031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:45.549045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:45.549051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:45.549058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:45.549792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:45.550304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:45.550337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:45.550509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:45.550535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:45.550541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:45.550594Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
ToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:27:06.494823Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-05-29T15:27:06.494828Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-05-29T15:27:06.494835Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 1002 2025-05-29T15:27:06.494930Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:27:06.494941Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:27:06.494946Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-05-29T15:27:06.494951Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-05-29T15:27:06.494955Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:27:06.494966Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2025-05-29T15:27:06.496395Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6285: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 243 } } CommitVersion { Step: 5000003 TxId: 1002 } 2025-05-29T15:27:06.496411Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-05-29T15:27:06.496434Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 243 } } CommitVersion { Step: 5000003 TxId: 1002 } 2025-05-29T15:27:06.496462Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE 
TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 243 } } CommitVersion { Step: 5000003 TxId: 1002 } 2025-05-29T15:27:06.496695Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 338 RawX2: 309237647636 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-05-29T15:27:06.496705Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-05-29T15:27:06.496722Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 338 RawX2: 309237647636 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-05-29T15:27:06.496731Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:27:06.496741Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 338 RawX2: 309237647636 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-05-29T15:27:06.496752Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:06.496756Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:27:06.496761Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:27:06.496767Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1002:0 129 -> 240 2025-05-29T15:27:06.497255Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-05-29T15:27:06.497275Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-05-29T15:27:06.497633Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:27:06.497670Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:27:06.497691Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:27:06.497699Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1002:0 ProgressState 
2025-05-29T15:27:06.497713Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1 2025-05-29T15:27:06.497717Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:27:06.497723Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1 2025-05-29T15:27:06.497727Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:27:06.497731Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2025-05-29T15:27:06.497746Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [72:305:2295] message: TxId: 1002 2025-05-29T15:27:06.497754Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:27:06.497762Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1002:0 2025-05-29T15:27:06.497767Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1002:0 2025-05-29T15:27:06.497800Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:27:06.498921Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-05-29T15:27:06.498935Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:306:2296] TestWaitNotification: OK eventTxId 1002 2025-05-29T15:27:06.499062Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:06.499113Z node 72 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/Table1" took 60us result status StatusSuccess 2025-05-29T15:27:06.499287Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/Table1" PathDescription { Self { Name: "Table1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 
IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3dc [GOOD] >> TTablesWithReboots::AlterTableConfigWithReboots [GOOD] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx2 [GOOD] >> BasicUsage::WriteSessionCloseWaitsForWrites [FAIL] >> BasicUsage::WriteSessionCloseIgnoresWrites |68.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3dc [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx2 [GOOD] Test command err: iteration# 2 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 8 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 14 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 20 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 26 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 32 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 38 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 44 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 50 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 56 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 62 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 68 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 74 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 80 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 86 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 92 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 98 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 [iteration# 104 through iteration# 2036, reported every 6 iterations, all repeat the identical counters: BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 (the three counts sum to 2041); the captured output is truncated in the source between iteration# 686 and iteration# 1364] |68.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |68.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |68.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [FAIL] Test command err: 2025-05-29T15:26:59.210626Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:26:59.210665Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:26:59.210680Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012a4/r3tmp/tmpJxxV5D/pdisk_1.dat 2025-05-29T15:26:59.335200Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:595:2520], Recipient [1:411:2405]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:26:59.335236Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:26:59.335254Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:26:59.335270Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [1:592:2518], Recipient [1:411:2405]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-05-29T15:26:59.335274Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:26:59.355028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-05-29T15:26:59.355110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:59.355176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-05-29T15:26:59.355255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:26:59.355268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:59.355285Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:26:59.355517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-05-29T15:26:59.355540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-05-29T15:26:59.355547Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 
72057594046644480 2025-05-29T15:26:59.355553Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 1:0 2025-05-29T15:26:59.355594Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [1:411:2405], Recipient [1:411:2405]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.355601Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.355615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:59.355624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-05-29T15:26:59.355630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:59.355636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:59.355662Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:26:59.355736Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:26:59.355742Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 1:0 2025-05-29T15:26:59.355760Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [1:411:2405], Recipient [1:411:2405]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.355764Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.355770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:59.355775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:26:59.355780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:59.355789Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:26:59.355831Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:26:59.355836Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 1:0 2025-05-29T15:26:59.355849Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [1:411:2405], Recipient [1:411:2405]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.355853Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.355858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046644480 2025-05-29T15:26:59.355862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:59.355868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-05-29T15:26:59.355872Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:26:59.355877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:59.356577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:59.356723Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:26:59.356734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:59.356787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:26:59.357055Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877760, Sender [1:600:2525], Recipient [1:411:2405]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:602:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:26:59.357067Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4972: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:26:59.357074Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5703: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-05-29T15:26:59.357094Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269091328, Sender [1:407:2401], Recipient [1:411:2405]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-05-29T15:26:59.357165Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:604:2528], Recipient [1:411:2405]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:26:59.357171Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:26:59.357176Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:26:59.357191Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [1:592:2518], Recipient [1:411:2405]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-05-29T15:26:59.357195Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:26:59.357209Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-05-29T15:26:59.357215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-05-29T15:26:59.357220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-05-29T15:26:59.372616Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:411:2405]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-05-29T15:26:59.372655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-05-29T15:26:59.372663Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-2 ... d: [OwnerId: 72057594046644480, LocalPathId: 10] Version: 2 } 2025-05-29T15:27:01.933643Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:27:01.933650Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-05-29T15:27:01.933658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-05-29T15:27:01.933661Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2025-05-29T15:27:01.933666Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 2 2025-05-29T15:27:01.933670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 2 2025-05-29T15:27:01.933678Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-05-29T15:27:01.933683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [2:828:2665] 2025-05-29T15:27:01.933689Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:27:01.933720Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2025-05-29T15:27:01.933724Z node 2 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:01.933744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2025-05-29T15:27:01.933747Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:01.933768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2025-05-29T15:27:01.933774Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:01.933788Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [2:828:2665] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 at schemeshard: 72057594046644480 2025-05-29T15:27:01.933842Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:828:2665], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:27:01.933875Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [2:857:2675], Recipient [2:410:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:01.933880Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:01.933884Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480 2025-05-29T15:27:01.965665Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [2:888:2705], Recipient [2:410:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:01.965687Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:01.965692Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:27:01.965703Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [2:884:2702], Recipient [2:410:2404]: {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-05-29T15:27:01.965706Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:27:01.966365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } TxId: 281474976715660 TabletId: 72057594046644480 Owner: "metadata@system" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:27:01.966434Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2025-05-29T15:27:01.966460Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92, at schemeshard: 72057594046644480 2025-05-29T15:27:01.966511Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects 
ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:27:01.966673Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976715660, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" TxId: 281474976715660 SchemeshardId: 72057594046644480 PathId: 10 PathCreateTxId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:27:01.966707Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-05-29T15:27:01.966715Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:01.966780Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:884:2702] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:01.966829Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [2:888:2705], Recipient [2:410:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:01.966833Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:01.966836Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480 2025-05-29T15:27:01.975935Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:893:2710], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:01.976573Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=ZDcyY2FhOC04Y2FiMTc2OC1iODk4MTdkZS0xNmNkODRjYg==, ActorId: [2:819:2656], ActorState: ExecuteState, TraceId: 01jweaes5f6mvkkeyg7qa4w5de, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13ACEA4C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C82379) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x2630FFE4) NKikimr::NTestSuiteIncrementalBackup::TTestCaseSimpleRestoreBackupCollection::Execute_(NUnitTest::TTestContext&)+1983 (0x139C6EBF) NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139AF5F7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C8422E) NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()+421 (0x139AEE55) NUnitTest::TTestFactory::Execute()+803 (0x13C849A3) NUnitTest::RunMain(int, char**)+3021 (0x13C9654D) ??+0 (0x7F9EF6EE1D90) __libc_start_main+128 (0x7F9EF6EE1E40) _start+41 (0x12A02029) >> HttpRequest::Probe >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::AlterTableConfigWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:26:41.849974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:41.849997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2025-05-29T15:26:41.850003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:41.850008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:41.850014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:41.850018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:41.850027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:41.850041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:41.850139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:41.850203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:41.861953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:41.861969Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:41.862046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:41.864487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:41.864509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:41.864537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:41.867082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:41.867138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:41.867237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:41.867382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:41.867847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:41.867876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:41.868046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard 
DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:41.868052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:41.868073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:41.868078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:41.868083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:41.868095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:41.869053Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:41.884875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:41.884934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:41.885013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:41.885064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:41.885076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:41.885900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:41.885927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:41.885972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:41.885982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-05-29T15:26:41.885988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:41.885993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:41.886493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:41.886509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:41.886516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:41.887502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:41.887515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:41.887522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:41.887529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:41.888245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:41.888735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:41.888770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:41.888959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:41.888986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:41.888993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:41.889051Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
d: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 1004 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:27:07.582524Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2025-05-29T15:27:07.582547Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 1004 at step: 5000005 2025-05-29T15:27:07.582626Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:07.582647Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 369367189612 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:07.582654Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_table.cpp:359: TAlterTable TPropose operationId# 1004:0 HandleReply TEvOperationPlan, operationId: 1004:0, stepId: 5000005, at schemeshard: 72057594046678944 2025-05-29T15:27:07.582722Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1004:0 128 -> 129 2025-05-29T15:27:07.582757Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-05-29T15:27:07.583749Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:07.583758Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:27:07.583799Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:07.583807Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [86:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2025-05-29T15:27:07.584013Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:07.584023Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 1004:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1004 2025-05-29T15:27:07.584106Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 
PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:27:07.584115Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:27:07.584118Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:27:07.584122Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:27:07.584125Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:27:07.584138Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2025-05-29T15:27:07.584269Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6285: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 191 } } CommitVersion { Step: 5000005 TxId: 1004 } 2025-05-29T15:27:07.584274Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2025-05-29T15:27:07.584287Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 191 } } CommitVersion { Step: 5000005 TxId: 1004 } 2025-05-29T15:27:07.584295Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 191 } } CommitVersion { Step: 5000005 TxId: 1004 } 2025-05-29T15:27:07.584349Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 332 RawX2: 369367189774 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:27:07.584352Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2025-05-29T15:27:07.584360Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 332 RawX2: 369367189774 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 
2025-05-29T15:27:07.584364Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:27:07.584369Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 332 RawX2: 369367189774 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:27:07.584376Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:07.584379Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:07.584381Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:27:07.584385Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1004:0 129 -> 240 2025-05-29T15:27:07.584819Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:27:07.584878Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:07.584892Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:07.584938Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:07.584945Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:27:07.584956Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:27:07.584960Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:27:07.584964Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:27:07.584970Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:27:07.584974Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2025-05-29T15:27:07.584979Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:27:07.584983Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:27:07.584987Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx 
for txid 1004:0 2025-05-29T15:27:07.585009Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:27:07.585445Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:27:07.585452Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:27:07.585492Z node 86 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:27:07.585502Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:27:07.585506Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [86:501:2473] TestWaitNotification: OK eventTxId 1004 |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [FAIL] Test command err: 2025-05-29T15:26:59.088111Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:26:59.088151Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:26:59.088167Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001293/r3tmp/tmpePHIme/pdisk_1.dat 2025-05-29T15:26:59.210369Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:595:2520], Recipient [1:411:2405]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:26:59.210404Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:26:59.210410Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:26:59.210424Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [1:592:2518], Recipient [1:411:2405]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-05-29T15:26:59.210430Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:26:59.231587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-05-29T15:26:59.231667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:59.231732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-05-29T15:26:59.231810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:26:59.231820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:59.231837Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:26:59.232060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-05-29T15:26:59.232080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-05-29T15:26:59.232087Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 
72057594046644480 2025-05-29T15:26:59.232093Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 1:0 2025-05-29T15:26:59.232131Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [1:411:2405], Recipient [1:411:2405]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.232139Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.232152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:59.232160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-05-29T15:26:59.232166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:59.232171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:59.232192Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:26:59.232246Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:26:59.232250Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 1:0 2025-05-29T15:26:59.232267Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [1:411:2405], Recipient [1:411:2405]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.232271Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.232276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:59.232281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:26:59.232286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:59.232294Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:26:59.232332Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:26:59.232335Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 1:0 2025-05-29T15:26:59.232348Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [1:411:2405], Recipient [1:411:2405]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.232352Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.232357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046644480 2025-05-29T15:26:59.232362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:59.232368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-05-29T15:26:59.232372Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:26:59.232377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:59.233120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:59.233232Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:26:59.233241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:59.233284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:26:59.233579Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877760, Sender [1:600:2525], Recipient [1:411:2405]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:602:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:26:59.233590Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4972: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:26:59.233596Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5703: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-05-29T15:26:59.233615Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269091328, Sender [1:407:2401], Recipient [1:411:2405]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-05-29T15:26:59.233683Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:604:2528], Recipient [1:411:2405]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:26:59.233688Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:26:59.233693Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:26:59.233708Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [1:592:2518], Recipient [1:411:2405]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-05-29T15:26:59.233720Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:26:59.233735Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-05-29T15:26:59.233740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-05-29T15:26:59.233745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-05-29T15:26:59.247397Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:411:2405]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-05-29T15:26:59.247431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-05-29T15:26:59.247438Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-2 ... :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:03.163742Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 274137603, Sender [3:571:2499], Recipient [3:413:2406]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 6] Version: 2 } 2025-05-29T15:27:03.163746Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:27:03.163754Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-05-29T15:27:03.163764Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-05-29T15:27:03.163768Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-05-29T15:27:03.163772Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2025-05-29T15:27:03.163777Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 2 2025-05-29T15:27:03.163786Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-05-29T15:27:03.163793Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:745:2621] 2025-05-29T15:27:03.163800Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:27:03.163851Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2025-05-29T15:27:03.163856Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:03.163876Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2025-05-29T15:27:03.163880Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:03.163894Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [3:745:2621] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715658 at schemeshard: 72057594046644480 2025-05-29T15:27:03.163953Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:27:03.163989Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [3:774:2631], Recipient [3:413:2406]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:03.163997Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:03.164002Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480 2025-05-29T15:27:03.203561Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [3:819:2663], Recipient [3:413:2406]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:03.203591Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:03.203599Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:27:03.203617Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [3:815:2660], Recipient [3:413:2406]: {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-05-29T15:27:03.203623Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:27:03.204485Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } TxId: 281474976715659 TabletId: 72057594046644480 Owner: "metadata@system" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:27:03.204589Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2025-05-29T15:27:03.204628Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92, at schemeshard: 72057594046644480 2025-05-29T15:27:03.204707Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects 
ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:27:03.204930Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976715659, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" TxId: 281474976715659 SchemeshardId: 72057594046644480 PathId: 6 PathCreateTxId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:27:03.204980Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-05-29T15:27:03.204989Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:03.205086Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:815:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:03.205162Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [3:819:2663], Recipient [3:413:2406]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:03.205175Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:03.205180Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480 2025-05-29T15:27:03.222848Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:825:2669], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:03.223454Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=MTFiMzE0YTMtNGZjZWNjMTQtZGQxNzFlMmEtYWUyYjlmMDI=, ActorId: [3:729:2611], ActorState: ExecuteState, TraceId: 01jweaeta2an0t3f3t3d6n3xf5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13ACEA4C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C82379) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x2630FFE4) NKikimr::NTestSuiteIncrementalBackup::TTestCaseSimpleBackupBackupCollection::Execute_(NUnitTest::TTestContext&)+1432 (0x139C2118) NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139AF5F7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C8422E) NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()+421 (0x139AEE55) NUnitTest::TTestFactory::Execute()+803 (0x13C849A3) NUnitTest::RunMain(int, char**)+3021 (0x13C9654D) ??+0 (0x7FBDE996FD90) __libc_start_main+128 (0x7FBDE996FE40) _start+41 (0x12A02029) |68.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |68.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |68.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime >> SubDomainWithReboots::Delete [GOOD] >> TKeyValueTest::TestWriteReadPatchRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [FAIL] Test command err: 2025-05-29T15:26:59.065934Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:26:59.065973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:26:59.065988Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00125f/r3tmp/tmpQaT1Iv/pdisk_1.dat 2025-05-29T15:26:59.177250Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:595:2520], Recipient [1:411:2405]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:26:59.177287Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:26:59.177293Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:26:59.177307Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [1:592:2518], Recipient [1:411:2405]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-05-29T15:26:59.177311Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:26:59.197053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-05-29T15:26:59.197153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:59.197225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-05-29T15:26:59.197312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:26:59.197324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:59.197342Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:26:59.197597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-05-29T15:26:59.197622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-05-29T15:26:59.197629Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 
72057594046644480 2025-05-29T15:26:59.197636Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 1:0 2025-05-29T15:26:59.197681Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [1:411:2405], Recipient [1:411:2405]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.197689Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.197705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:59.197714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-05-29T15:26:59.197719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:59.197725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:59.197752Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:26:59.197830Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:26:59.197834Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 1:0 2025-05-29T15:26:59.197852Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [1:411:2405], Recipient [1:411:2405]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.197856Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.197862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:59.197867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:26:59.197872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:59.197881Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:26:59.197919Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:26:59.197923Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 1:0 2025-05-29T15:26:59.197936Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [1:411:2405], Recipient [1:411:2405]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.197940Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:26:59.197944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046644480 2025-05-29T15:26:59.197949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:26:59.197955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-05-29T15:26:59.197959Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:26:59.197965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:59.198621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:59.203894Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:26:59.203922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:59.204013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:26:59.204349Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877760, Sender [1:600:2525], Recipient [1:411:2405]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:602:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:26:59.204360Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4972: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:26:59.204367Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5703: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-05-29T15:26:59.204393Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269091328, Sender [1:407:2401], Recipient [1:411:2405]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-05-29T15:26:59.204461Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:604:2528], Recipient [1:411:2405]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:26:59.204466Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:26:59.204470Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:26:59.204484Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [1:592:2518], Recipient [1:411:2405]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-05-29T15:26:59.204488Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:26:59.204507Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-05-29T15:26:59.204513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-05-29T15:26:59.204518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-05-29T15:26:59.221727Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:411:2405]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-05-29T15:26:59.221771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-05-29T15:26:59.221778Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-2 ... : [OwnerId: 72057594046644480, LocalPathId: 10] Version: 2 } 2025-05-29T15:27:03.695247Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:27:03.695254Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-05-29T15:27:03.695261Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2025-05-29T15:27:03.695265Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2025-05-29T15:27:03.695269Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 2 2025-05-29T15:27:03.695273Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 2 2025-05-29T15:27:03.695281Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-05-29T15:27:03.695287Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:828:2665] 2025-05-29T15:27:03.695293Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:27:03.695316Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2025-05-29T15:27:03.695320Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:03.695338Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2025-05-29T15:27:03.695342Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:03.695360Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2025-05-29T15:27:03.695367Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:03.695380Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [3:828:2665] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 at schemeshard: 72057594046644480 2025-05-29T15:27:03.695426Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:828:2665], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:27:03.695462Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [3:857:2675], Recipient [3:413:2406]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:03.695468Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:03.695473Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480 2025-05-29T15:27:03.729588Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [3:888:2705], Recipient [3:413:2406]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:03.729614Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:03.729621Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:27:03.729645Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [3:884:2702], Recipient [3:413:2406]: {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-05-29T15:27:03.729649Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:27:03.730409Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } TxId: 281474976715660 TabletId: 72057594046644480 Owner: "metadata@system" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:27:03.730495Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2025-05-29T15:27:03.730533Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92, at schemeshard: 72057594046644480 2025-05-29T15:27:03.730621Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects 
ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:27:03.730802Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976715660, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" TxId: 281474976715660 SchemeshardId: 72057594046644480 PathId: 10 PathCreateTxId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:27:03.730849Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-05-29T15:27:03.730857Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:03.730915Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:884:2702] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:03.730978Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [3:888:2705], Recipient [3:413:2406]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:03.730983Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:03.730988Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480 2025-05-29T15:27:03.750281Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:893:2710], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:03.755044Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=NDhjZGNmNmYtMmVhNWZkMWYtNGNjMWRlMGUtNmMwYjVkYzA=, ActorId: [3:819:2656], ActorState: ExecuteState, TraceId: 01jweaetw29h3gskwczy8p7cv9, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13ACEA4C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C82379) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x2630FFE4) NKikimr::NTestSuiteIncrementalBackup::TTestCaseComplexRestoreBackupCollection::Execute_(NUnitTest::TTestContext&)+1999 (0x139CEB5F) NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139AF5F7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C8422E) NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()+421 (0x139AEE55) NUnitTest::TTestFactory::Execute()+803 (0x13C849A3) NUnitTest::RunMain(int, char**)+3021 (0x13C9654D) ??+0 (0x7F9FC54ACD90) __libc_start_main+128 (0x7F9FC54ACE40) _start+41 (0x12A02029) >> BasicUsage::WriteSessionCloseIgnoresWrites [FAIL] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks |68.6%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/test-results/unittest/{meta.json ... 
results_accumulator.log} |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:26:29.864257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:29.864282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:29.864288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:29.864293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:29.864299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:29.864304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:29.864313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:29.864343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:29.864452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:29.864536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:29.877392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:26:29.877420Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:29.880484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:29.880634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:29.880690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:29.882447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:29.882588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:29.882711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:29.882787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:29.886983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:29.887066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:29.887425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:29.887442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:29.887467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:29.887478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:29.887485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:29.887544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.889301Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:26:29.913187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:29.913293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.913359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:29.913414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:29.913428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.914334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:29.914366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: 
txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:29.914452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.914464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:29.914480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:29.914485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:29.914959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.914972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:29.914990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:29.915447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.915459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:29.915465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:29.915472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:29.916232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:29.916691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:29.916731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:29.916929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:29.916955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-05-29T15:26:29.916962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:29.917052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:26:29.917060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:29.917092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:26:29.917106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:26:29.917545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:29.917556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:29.917607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... d: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0009 2025-05-29T15:27:01.656786Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-05-29T15:27:01.656900Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-05-29T15:27:01.656931Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0, RowCount 2, DataSize 70 2025-05-29T15:27:01.656978Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:485: Do not want to split tablet 72075186233409546 2025-05-29T15:27:01.656994Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-05-29T15:27:01.657003Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0, RowCount 0, DataSize 0, with borrowed parts 2025-05-29T15:27:01.657022Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:485: Do not want to split tablet 72075186233409547 2025-05-29T15:27:01.667470Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-05-29T15:27:05.468059Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got 
periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0009 2025-05-29T15:27:05.491248Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0008 2025-05-29T15:27:05.532212Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-05-29T15:27:05.532295Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-05-29T15:27:05.532326Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0, RowCount 2, DataSize 70 2025-05-29T15:27:05.532369Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:485: Do not want to split tablet 72075186233409546 2025-05-29T15:27:05.532384Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-05-29T15:27:05.532393Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0, RowCount 0, DataSize 0, with borrowed parts 2025-05-29T15:27:05.532402Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:485: Do not want to split tablet 72075186233409547 2025-05-29T15:27:05.543368Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-05-29T15:27:08.338001Z node 4 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:783: [Export] [s3] Bootstrap: self# [4:576:2533], attempt# 1 2025-05-29T15:27:08.342869Z node 4 :DATASHARD_BACKUP DEBUG: export_scan.cpp:118: [Export] [scanner] Handle TEvExportScan::TEvReset: self# [4:575:2532] 2025-05-29T15:27:08.344344Z node 4 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:427: [Export] [s3] Handle TEvExportScan::TEvReady: self# [4:576:2533], sender# [4:575:2532] 2025-05-29T15:27:08.344369Z node 4 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [4:575:2532] 2025-05-29T15:27:08.344395Z node 4 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:445: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [4:576:2533], sender# [4:575:2532], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } 2025-05-29T15:27:08.344459Z node 4 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:512: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [4:576:2533], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [6e3e0a41fdab8add833862f1bd2954c3,1d8dd09e584ce6a47582a31b591900e2,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:19457 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c 
HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B2DF72E4-D7DF-4682-B783-A6130662C546 amz-sdk-request: attempt=1 content-length: 459 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-05-29T15:27:08.355173Z node 4 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:609: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [4:576:2533], result# 2025-05-29T15:27:08.355266Z node 4 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [4:575:2532], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-29T15:27:08.359600Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 451 RawX2: 17179871603 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-29T15:27:08.359627Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-05-29T15:27:08.359657Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 451 RawX2: 17179871603 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-29T15:27:08.359690Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 451 RawX2: 17179871603 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-29T15:27:08.359706Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:08.359712Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:27:08.359722Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-29T15:27:08.359730Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976710759:0 129 -> 240 2025-05-29T15:27:08.359790Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-05-29T15:27:08.360396Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:27:08.360491Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-29T15:27:08.360502Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-05-29T15:27:08.360515Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-29T15:27:08.360520Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-29T15:27:08.360525Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-29T15:27:08.360529Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-29T15:27:08.360535Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-05-29T15:27:08.360550Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:126:2151] message: TxId: 281474976710759 2025-05-29T15:27:08.360556Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-29T15:27:08.360563Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710759:0 2025-05-29T15:27:08.360567Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710759:0 2025-05-29T15:27:08.360594Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:27:08.361214Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-05-29T15:27:08.361232Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710759 2025-05-29T15:27:08.361666Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:27:08.361678Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:597:2550] TestWaitNotification: OK eventTxId 102 >> TKeyValueTest::TestWriteReadPatchRead [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::E2EBackupCollection [FAIL] Test command err: 2025-05-29T15:26:59.976373Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:26:59.976410Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:26:59.976448Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001247/r3tmp/tmpjRtxZQ/pdisk_1.dat 2025-05-29T15:27:00.106958Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:595:2520], Recipient [1:411:2405]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:00.106992Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:00.106999Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:27:00.107014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [1:592:2518], Recipient [1:411:2405]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-05-29T15:27:00.107019Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:27:00.127275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-05-29T15:27:00.127356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:27:00.127419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-05-29T15:27:00.127516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:27:00.127529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:27:00.127545Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:27:00.127768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-05-29T15:27:00.127787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-05-29T15:27:00.127793Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 
72057594046644480 2025-05-29T15:27:00.127799Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 1:0 2025-05-29T15:27:00.127836Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [1:411:2405], Recipient [1:411:2405]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:27:00.127843Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:27:00.127856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:27:00.127864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-05-29T15:27:00.127870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:27:00.127875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:27:00.127896Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:27:00.127947Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:00.127951Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 1:0 2025-05-29T15:27:00.127966Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [1:411:2405], Recipient [1:411:2405]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:27:00.127971Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:27:00.127975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:27:00.127981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:27:00.127985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:27:00.127993Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:27:00.128030Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:00.128034Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 1:0 2025-05-29T15:27:00.128046Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [1:411:2405], Recipient [1:411:2405]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:27:00.128051Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:27:00.128055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046644480 2025-05-29T15:27:00.128060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:27:00.128065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-05-29T15:27:00.128069Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:27:00.128075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:27:00.128753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:27:00.130169Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:00.130191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:27:00.130239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:27:00.130546Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877760, Sender [1:600:2525], Recipient [1:411:2405]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:602:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:27:00.130556Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4972: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:27:00.130562Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5703: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-05-29T15:27:00.130585Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269091328, Sender [1:407:2401], Recipient [1:411:2405]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-05-29T15:27:00.130657Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:604:2528], Recipient [1:411:2405]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:00.130662Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:00.130667Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:27:00.130681Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [1:592:2518], Recipient [1:411:2405]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-05-29T15:27:00.130686Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:27:00.130701Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-05-29T15:27:00.130707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-05-29T15:27:00.130712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-05-29T15:27:00.150499Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:411:2405]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-05-29T15:27:00.150540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-05-29T15:27:00.150548Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-2 ... .104494Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:04.104509Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 274137603, Sender [3:571:2499], Recipient [3:413:2406]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 6] Version: 2 } 2025-05-29T15:27:04.104513Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:27:04.104519Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-05-29T15:27:04.104526Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-05-29T15:27:04.104530Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2025-05-29T15:27:04.104533Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2025-05-29T15:27:04.104537Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 2 2025-05-29T15:27:04.104545Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-05-29T15:27:04.104550Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:745:2621] 2025-05-29T15:27:04.104556Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:27:04.104595Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2025-05-29T15:27:04.104598Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:04.104617Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2025-05-29T15:27:04.104620Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:04.104633Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [3:745:2621] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715658 at schemeshard: 72057594046644480 2025-05-29T15:27:04.104675Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:27:04.104709Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [3:774:2631], Recipient [3:413:2406]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:04.104715Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:04.104719Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480 2025-05-29T15:27:04.137410Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [3:819:2663], Recipient [3:413:2406]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:04.137441Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:04.137448Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:27:04.137467Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [3:815:2660], Recipient [3:413:2406]: {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-05-29T15:27:04.137472Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:27:04.138418Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } TxId: 281474976715659 TabletId: 72057594046644480 Owner: "metadata@system" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:27:04.138531Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2025-05-29T15:27:04.138581Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92, at schemeshard: 72057594046644480 2025-05-29T15:27:04.138674Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects 
ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:27:04.138954Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976715659, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" TxId: 281474976715659 SchemeshardId: 72057594046644480 PathId: 6 PathCreateTxId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:27:04.139014Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-05-29T15:27:04.139024Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:27:04.139099Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:815:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:04.139178Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [3:819:2663], Recipient [3:413:2406]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:04.139186Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:04.139191Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480 2025-05-29T15:27:04.159263Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:825:2669], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060 <main>
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:04.159950Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=YTI1MjMxNGYtYjc2MDY1M2QtNmNmN2JkNzQtODc1OTEwM2Y=, ActorId: [3:729:2611], ActorState: ExecuteState, TraceId: 01jweaev7qcynrmwr2pm8azemt, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13ACEA4C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C82379) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x2630FFE4) NKikimr::NTestSuiteIncrementalBackup::TTestCaseE2EBackupCollection::Execute_(NUnitTest::TTestContext&)+1432 (0x139AABF8) NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139AF5F7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C8422E) NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()+421 (0x139AEE55) NUnitTest::TTestFactory::Execute()+803 (0x13C849A3) NUnitTest::RunMain(int, char**)+3021 (0x13C9654D) ??+0 (0x7F79D8EA5D90) __libc_start_main+128 (0x7F79D8EA5E40) _start+41 (0x12A02029) >> TKeyValueTest::TestObtainLockNewApi >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMany >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::Delete [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:26:53.451716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:53.451742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, 
Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:53.451749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:53.451755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:53.451761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:53.451765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:53.451775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:53.451787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:53.451887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:53.451959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:53.466577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:53.466600Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:53.466690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:53.469438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:53.469467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:53.469496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:53.472314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:53.472394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:53.472514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:53.472683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:53.473325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:53.473367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:53.473641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:53.473652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:53.473685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:53.473694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:53.473700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:53.473721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:53.475131Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:53.496018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:53.496086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.496151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:53.496208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:53.496219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.497004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:53.497031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:53.497095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.497106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at 
tablet# 72057594046678944 2025-05-29T15:26:53.497112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:53.497118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:53.497645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.497658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:53.497664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:53.498038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.498048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.498055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:53.498062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:53.498753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:53.499163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:53.499204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:53.499411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:53.499437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:53.499445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:53.499508Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
LAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2025-05-29T15:27:08.898676Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-05-29T15:27:08.898859Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:27:08.898876Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:27:08.898882Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:27:08.898888Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-05-29T15:27:08.898894Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:27:08.899017Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:27:08.899031Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:27:08.899036Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:27:08.899041Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:27:08.899045Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:27:08.899056Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2025-05-29T15:27:08.899061Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [50:435:2401] 2025-05-29T15:27:08.899699Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:27:08.899712Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 
72057594046678944 2025-05-29T15:27:08.899910Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:27:08.900071Z node 50 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:27:08.900129Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:27:08.900183Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:08.900255Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409546 2025-05-29T15:27:08.900563Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:27:08.900573Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [50:440:2406] 2025-05-29T15:27:08.900632Z node 50 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-05-29T15:27:08.900722Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:27:08.900768Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409547 2025-05-29T15:27:08.901065Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:27:08.901075Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:27:08.901104Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:27:08.901215Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:27:08.901222Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:27:08.901235Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:27:08.901830Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:27:08.901856Z node 50 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:27:08.901955Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:27:08.901962Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:27:08.902058Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:27:08.902294Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-05-29T15:27:08.902382Z node 50 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:27:08.902392Z node 50 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 2025-05-29T15:27:08.902474Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:08.902524Z node 50 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 62us result status StatusPathDoesNotExist 2025-05-29T15:27:08.902569Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:27:08.902626Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:08.902649Z node 50 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 24us result status StatusSuccess 2025-05-29T15:27:08.902717Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 |68.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |68.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |68.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/test-results/unittest/{meta.json ... results_accumulator.log} |68.7%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut >> DataShardReadTableSnapshots::ReadTableDropColumn >> TKeyValueCollectorTest::TestKeyValueCollectorMany [GOOD] >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] Test command err: 2025-05-29T15:27:09.860665Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2025-05-29T15:27:09.861036Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 >> DataShardReadTableSnapshots::ReadTableSplitBefore >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain >> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |68.7%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKesusTest::TestRegisterProxy >> TKesusTest::TestQuoterResourceDescribe >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestRegisterProxy [GOOD] >> TKesusTest::TestRegisterProxyBadGeneration >> TKesusTest::TestQuoterResourceDescribe [GOOD] >> TKesusTest::TestQuoterResourceCreation |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestRegisterProxyBadGeneration [GOOD] >> TKesusTest::TestRegisterProxyFromDeadActor >> TKesusTest::TestQuoterResourceCreation [GOOD] >> TKesusTest::TestQuoterResourceModification |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose [FAIL] >> DataShardReadTableSnapshots::ReadTableMaxRows >> TKesusTest::TestRegisterProxyFromDeadActor [GOOD] >> TKesusTest::TestRegisterProxyLinkFailure >> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease >> DataShardReadTableSnapshots::ReadTableDropColumn [FAIL] >> DataShardReadTableSnapshots::CorruptedDyNumber >> TKesusTest::TestRegisterProxyLinkFailure [GOOD] |68.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |68.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> TKesusTest::TestQuoterResourceModification [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitBefore [FAIL] >> TKesusTest::TestRegisterProxyLinkFailureRace >> TKesusTest::TestQuoterResourceDeletion >> DataShardReadTableSnapshots::ReadTableSplitFinished >> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange >> TSchemeShardSysViewTest::CreateSysView >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] >> TKesusTest::TestQuoterResourceDeletion [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource >> TSchemeShardSysViewTest::DropSysView >> TSchemeShardSysViewTest::CreateSysView [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows [FAIL] |68.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |68.7%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] Test command err: 2025-05-29T15:27:11.390424Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:11.390464Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:11.400874Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:11.400920Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:11.412546Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:11.412746Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2156], cookie=11561761728306400987, session=0, seqNo=0) 2025-05-29T15:27:11.412800Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:11.433688Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2156], cookie=11561761728306400987, session=1) 2025-05-29T15:27:11.433812Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2157], cookie=4363992507437580712, session=0, seqNo=0) 2025-05-29T15:27:11.433861Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:27:11.444823Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2157], cookie=4363992507437580712, session=2) 2025-05-29T15:27:11.445058Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-05-29T15:27:11.445110Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-05-29T15:27:11.445126Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-05-29T15:27:11.462971Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2156], cookie=111) 2025-05-29T15:27:11.463066Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2156], cookie=112, session=1, semaphore="Lock2" count=1) 2025-05-29T15:27:11.463106Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-05-29T15:27:11.463121Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-05-29T15:27:11.482205Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2156], cookie=112) 2025-05-29T15:27:11.482294Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:131:2156], cookie=333, name="Lock1") 2025-05-29T15:27:11.482319Z node 1 
:KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-05-29T15:27:11.482364Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2157], cookie=222, session=2, semaphore="Lock1" count=1) 2025-05-29T15:27:11.482377Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 3 "Lock1" 2025-05-29T15:27:11.482386Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-05-29T15:27:11.482398Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2157], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-05-29T15:27:11.501485Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:131:2156], cookie=333) 2025-05-29T15:27:11.501522Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2157], cookie=222) 2025-05-29T15:27:11.501531Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2157], cookie=223) 2025-05-29T15:27:11.501612Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:131:2156], cookie=334, name="Lock2") 2025-05-29T15:27:11.501643Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-05-29T15:27:11.501654Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-05-29T15:27:11.519818Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:131:2156], cookie=334) 2025-05-29T15:27:11.519983Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:160:2183], cookie=722924259601381496, name="Lock1") 2025-05-29T15:27:11.520008Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:160:2183], cookie=722924259601381496) 2025-05-29T15:27:11.520060Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:163:2186], cookie=13672844038122967971, name="Lock2") 2025-05-29T15:27:11.520066Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:163:2186], cookie=13672844038122967971) 2025-05-29T15:27:11.529597Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:11.529639Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:11.529706Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:11.529805Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:11.587283Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:11.587340Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-05-29T15:27:11.587350Z node 
1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-05-29T15:27:11.587465Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:203:2216], cookie=752590956100556063, name="Lock1") 2025-05-29T15:27:11.587489Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:203:2216], cookie=752590956100556063) 2025-05-29T15:27:11.587596Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:211:2223], cookie=16907567067183161415, name="Lock2") 2025-05-29T15:27:11.587603Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:211:2223], cookie=16907567067183161415) 2025-05-29T15:27:11.864364Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:11.864405Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:11.875168Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:11.875244Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:11.903184Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:11.903417Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2158], cookie=9074165651791335369, session=0, seqNo=0) 2025-05-29T15:27:11.903461Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:11.915087Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2158], cookie=9074165651791335369, session=1) 2025-05-29T15:27:11.915190Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:134:2159], cookie=7542501206531065790, session=0, seqNo=0) 2025-05-29T15:27:11.915228Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:27:11.926002Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:134:2159], cookie=7542501206531065790, session=2) 2025-05-29T15:27:11.926210Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-05-29T15:27:11.926254Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-05-29T15:27:11.926270Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-05-29T15:27:11.937194Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2158], cookie=111) 2025-05-29T15:27:11.937283Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2158], cookie=112, session=1, semaphore="Lock2" count=1) 2025-05-29T15:27:11.937329Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 
"Lock2" 2025-05-29T15:27:11.937342Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-05-29T15:27:11.948261Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2158], cookie=112) 2025-05-29T15:27:11.948378Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2158], cookie=333, session=1, semaphore="Lock1" count=1) 2025-05-29T15:27:11.948459Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:134:2159], cookie=222, session=2, semaphore="Lock1" count=1) 2025-05-29T15:27:11.948479Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-05-29T15:27:11.948495Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:134:2159], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-05-29T15:27:11.959413Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2158], cookie=333) 2025-05-29T15:27:11.959444Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2159], cookie=222) 2025-05-29T15:27:11.959448Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2159], cookie=223) 2025-05-29T15:27:11.959558Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:160:2183], cookie=805109728424485041, name="Lock1") 2025-05-29T15:27:11.959575Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:160:2183], cookie=805109728424485041) 2025-05-29T15:27:11.959632Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:163:2186], cookie=234038993416263957, name="Lock2") 2025-05-29T15:27:11.959638Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:163:2186], cookie=234038993416263957) 2025-05-29T15:27:11.959680Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:166:2189], cookie=17881218421957945835, name="Lock1") 2025-05-29T15:27:11.959684Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:166:2189], cookie=17881218421957945835) 2025-05-29T15:27:11.959717Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:169:2192], cookie=14870037109392984250, name="Lock2") 2025-05-29T15:27:11.959731Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:169:2192], cookie=14870037109392984250) 2025-05-29T15:27:11.959767Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:134:2159], cookie=444, session=2, semaphore="Lock2" count=1) 2025-05-29T15:27:11.959799Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 
2025-05-29T15:27:11.970762Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2159], cookie=444)
2025-05-29T15:27:11.970945Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:174:2197], cookie=3639149770410546972, name="Lock2")
2025-05-29T15:27:11.970969Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:174:2197], cookie=3639149770410546972)
2025-05-29T15:27:11.971038Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:177:2200], cookie=8414791862876859672, name="Lock2")
2025-05-29T15:27:11.971044Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:177:2200], cookie=8414791862876859672)
2025-05-29T15:27:11.974451Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937
2025-05-29T15:27:11.974486Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute
2025-05-29T15:27:11.974552Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete
2025-05-29T15:27:11.974704Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute
2025-05-29T15:27:12.019832Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete
2025-05-29T15:27:12.019892Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-05-29T15:27:12.019898Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2
2025-05-29T15:27:12.019902Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1
2025-05-29T15:27:12.019905Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2
2025-05-29T15:27:12.019995Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:217:2230], cookie=8832710665334979729, name="Lock1")
2025-05-29T15:27:12.020012Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:217:2230], cookie=8832710665334979729)
2025-05-29T15:27:12.020102Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:225:2237], cookie=4512509698225494726, name="Lock2")
2025-05-29T15:27:12.020106Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:225:2237], cookie=4512509698225494726)
>> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD]
>> TSchemeShardSysViewTest::DropSysView [GOOD]
>> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD]
>> YdbTableSplit::RenameTablesAndSplit [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::CreateSysView [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:27:12.183814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:27:12.183843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:27:12.183848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:27:12.183853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:27:12.183868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:27:12.183871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:27:12.183880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:27:12.183896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:27:12.184018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:27:12.184103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:27:12.196548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:27:12.196576Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:27:12.199798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:27:12.200015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:27:12.200105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:27:12.202927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:27:12.203192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:27:12.203348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:27:12.203403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:27:12.204081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:27:12.204147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:27:12.204458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:27:12.204471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:27:12.204491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:27:12.204500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:27:12.204507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:27:12.204544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.206177Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:27:12.226906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:27:12.226997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.227086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:27:12.227138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:27:12.227153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.228424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:27:12.228461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:27:12.228536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.228550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:27:12.228557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:27:12.228563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:27:12.229096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.229107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:27:12.229113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:27:12.229504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.229514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.229521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:27:12.229530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:27:12.230251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:27:12.230657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:27:12.230701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:27:12.230934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:27:12.230963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:27:12.230973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:27:12.231035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:27:12.231042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:27:12.231077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:27:12.231090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:27:12.231610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:27:12.231621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:27:12.231669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... Inside: 0 PQPartitionsLimit: 1000000 } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:27:12.245624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:336:2058] recipient: [1:103:2137]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:338:2058] recipient: [1:15:2062]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:340:2058] recipient: [1:339:2326]
Leader for TabletID 72057594046678944 is [1:341:2327] sender: [1:342:2058] recipient: [1:339:2326]
2025-05-29T15:27:12.252184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:27:12.252210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:27:12.252216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:27:12.252221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:27:12.252228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:27:12.252232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:27:12.252242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:27:12.252257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:27:12.252345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:27:12.252413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:27:12.253737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:27:12.254128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:27:12.254182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:27:12.254210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:27:12.254216Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:27:12.254248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:27:12.254331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: .sys, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-05-29T15:27:12.254359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: .sys, child name: new_sys_view, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-05-29T15:27:12.254368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1457: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1483: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1785: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_data_erasure_manager.cpp:452: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0
2025-05-29T15:27:12.254479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0
2025-05-29T15:27:12.254501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2033: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2151: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2237: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2303: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2453: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2832: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2911: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3409: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3445: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3659: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3804: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3821: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3981: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3997: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4282: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4587: IndexBuild , records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4645: KMeansTreeSample records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4740: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4767: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.254955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4794: LongLocks: records: 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.257009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:27:12.257391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:27:12.257408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:27:12.257543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:27:12.257556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:27:12.257564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:27:12.258001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594046678944 is [1:341:2327] sender: [1:402:2058] recipient: [1:15:2062]
2025-05-29T15:27:12.323301Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:27:12.323391Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 117us result status StatusSuccess
2025-05-29T15:27:12.323469Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD]
Test command err: 2025-05-29T15:27:10.627917Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937
2025-05-29T15:27:10.627956Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute
2025-05-29T15:27:10.632822Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete
2025-05-29T15:27:10.632863Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute
2025-05-29T15:27:10.644507Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete
2025-05-29T15:27:10.645748Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:131:2156], cookie=13140471568603118591, path="/Root", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 })
2025-05-29T15:27:10.645813Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root"
2025-05-29T15:27:10.666893Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:131:2156], cookie=13140471568603118591)
2025-05-29T15:27:10.667109Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:140:2163], cookie=8321169001926493714, path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 })
2025-05-29T15:27:10.667173Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Folder"
2025-05-29T15:27:10.677931Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:140:2163], cookie=8321169001926493714)
2025-05-29T15:27:10.678066Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:145:2168], cookie=13668578913337438375, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 })
2025-05-29T15:27:10.678111Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Q1"
2025-05-29T15:27:10.688887Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:145:2168], cookie=13668578913337438375)
2025-05-29T15:27:10.689068Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:150:2173], cookie=4438889968634636171, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 })
2025-05-29T15:27:10.689130Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1"
2025-05-29T15:27:10.699948Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:150:2173], cookie=4438889968634636171)
2025-05-29T15:27:10.700099Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:155:2178], cookie=17908457645455536156, path="/Root/Folder/Q2", config={ MaxUnitsPerSecond: 10 })
2025-05-29T15:27:10.700144Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2"
2025-05-29T15:27:10.710973Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:155:2178], cookie=17908457645455536156)
2025-05-29T15:27:10.711143Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:160:2183], cookie=15785468205867074717, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 })
2025-05-29T15:27:10.711209Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3"
2025-05-29T15:27:10.722130Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:160:2183], cookie=15785468205867074717)
2025-05-29T15:27:10.722325Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:165:2188], cookie=5021425067837112357, path="/Root2", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 })
2025-05-29T15:27:10.722383Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 7 "Root2"
2025-05-29T15:27:10.733190Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:165:2188], cookie=5021425067837112357)
2025-05-29T15:27:10.733365Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:170:2193], cookie=1314368679233582942, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 })
2025-05-29T15:27:10.733419Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 8 "Root2/Q"
2025-05-29T15:27:10.744186Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:170:2193], cookie=1314368679233582942)
2025-05-29T15:27:10.744317Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:175:2198], cookie=9383781989233095557, ids=[100], paths=[], recursive=0)
2025-05-29T15:27:10.744338Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:175:2198], cookie=9383781989233095557)
2025-05-29T15:27:10.744401Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:178:2201], cookie=4531833645346566136, ids=[], paths=[Nonexistent/Path], recursive=0)
2025-05-29T15:27:10.744414Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:178:2201], cookie=4531833645346566136)
2025-05-29T15:27:10.744454Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:181:2204], cookie=8917150656759814951, ids=[], paths=[/Root, ], recursive=0)
2025-05-29T15:27:10.744464Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:181:2204], cookie=8917150656759814951)
2025-05-29T15:27:10.744538Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:184:2207], cookie=16645909053044733813, ids=[1, 1], paths=[], recursive=0)
2025-05-29T15:27:10.744547Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:184:2207], cookie=16645909053044733813)
2025-05-29T15:27:10.744600Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:187:2210], cookie=17956244117585133679, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0)
2025-05-29T15:27:10.744609Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:187:2210], cookie=17956244117585133679)
2025-05-29T15:27:10.744656Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:190:2213], cookie=1461134931335689747, ids=[], paths=[], recursive=1)
2025-05-29T15:27:10.744667Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:190:2213], cookie=1461134931335689747)
2025-05-29T15:27:10.744767Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:193:2216], cookie=2941450024527782412, ids=[], paths=[], recursive=0)
2025-05-29T15:27:10.744774Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:193:2216], cookie=2941450024527782412)
2025-05-29T15:27:10.744850Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:196:2219], cookie=17773281516976234094, ids=[3, 2], paths=[], recursive=1)
2025-05-29T15:27:10.744859Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:196:2219], cookie=17773281516976234094)
2025-05-29T15:27:10.744923Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:199:2222], cookie=17747287718507568048, ids=[3, 2], paths=[], recursive=0)
2025-05-29T15:27:10.744929Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:199:2222], cookie=17747287718507568048)
2025-05-29T15:27:10.744989Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:202:2225], cookie=4538181149390854432, ids=[], paths=[Root2/], recursive=1)
2025-05-29T15:27:10.744996Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:202:2225], cookie=4538181149390854432)
2025-05-29T15:27:10.745059Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:205:2228], cookie=4119046264650851539, ids=[], paths=[Root2/], recursive=0)
2025-05-29T15:27:10.745067Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:205:2228], cookie=4119046264650851539)
2025-05-29T15:27:10.747937Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937
2025-05-29T15:27:10.747973Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute
2025-05-29T15:27:10.748034Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete
2025-05-29T15:27:10.748226Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute
2025-05-29T15:27:10.790621Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete
2025-05-29T15:27:10.790770Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:245:2258], cookie=14203285788407318490, ids=[100], paths=[], recursive=0)
2025-05-29T15:27:10.790797Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:245:2258], cookie=14203285788407318490)
2025-05-29T15:27:10.790930Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:251:2263], cookie=17862413994330987219, ids=[], paths=[Nonexistent/Path], recursive=0)
2025-05-29T15:27:10.790945Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:251:2263], cookie=17862413994330987219)
2025-05-29T15:27:10.791014Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:254:2266], cookie=9559635019436829945, ids=[], paths=[/Root, ], recursive=0)
2025-05-29T15:27:10.791028Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:254:2266], cookie=9559635019436829945)
2025-05-29T15:27:10.791106Z n ... UG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1"
2025-05-29T15:27:11.907336Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:152:2175], cookie=12356573423513473689)
2025-05-29T15:27:11.907515Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:157:2180], cookie=6661138214865353127, ids=[], paths=[], recursive=1)
2025-05-29T15:27:11.907543Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:157:2180], cookie=6661138214865353127)
2025-05-29T15:27:11.907672Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:163:2186], cookie=12926243495546856429, ids=[], paths=[], recursive=1)
2025-05-29T15:27:11.907685Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:163:2186], cookie=12926243495546856429)
2025-05-29T15:27:11.907823Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:169:2192], cookie=8659108415565022642, ids=[], paths=[], recursive=1)
2025-05-29T15:27:11.907834Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:169:2192], cookie=8659108415565022642)
2025-05-29T15:27:11.907901Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:172:2195], cookie=2656980584876890756, id=0, path="/Root/Folder/NonexistingRes")
2025-05-29T15:27:11.907917Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:172:2195], cookie=2656980584876890756)
2025-05-29T15:27:11.907974Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:175:2198], cookie=12401217254816292622, ids=[], paths=[], recursive=1)
2025-05-29T15:27:11.907986Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:175:2198], cookie=12401217254816292622)
2025-05-29T15:27:11.908050Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:178:2201], cookie=4926369736060579424, id=100, path="")
2025-05-29T15:27:11.908057Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:178:2201], cookie=4926369736060579424)
2025-05-29T15:27:11.908119Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:181:2204], cookie=4826276682172767889, ids=[], paths=[], recursive=1)
2025-05-29T15:27:11.908125Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:181:2204], cookie=4826276682172767889)
2025-05-29T15:27:11.908197Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:184:2207], cookie=12667560169676501223, id=3, path="")
2025-05-29T15:27:11.908205Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:184:2207], cookie=12667560169676501223)
2025-05-29T15:27:11.908254Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:187:2210], cookie=15630511481957571613, ids=[], paths=[], recursive=1)
2025-05-29T15:27:11.908261Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:187:2210], cookie=15630511481957571613)
2025-05-29T15:27:11.908314Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:190:2213], cookie=16148484201543372083, id=0, path="/Root/Folder/Q1")
2025-05-29T15:27:11.908355Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:61: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1"
2025-05-29T15:27:11.919480Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:190:2213], cookie=16148484201543372083)
2025-05-29T15:27:11.919653Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:195:2218], cookie=16150278209132585263, ids=[], paths=[], recursive=1)
2025-05-29T15:27:11.919672Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:195:2218], cookie=16150278209132585263)
2025-05-29T15:27:11.922116Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937
2025-05-29T15:27:11.922144Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute
2025-05-29T15:27:11.922187Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete
2025-05-29T15:27:11.922301Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute
2025-05-29T15:27:11.964900Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete
2025-05-29T15:27:11.964994Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:235:2248], cookie=8195579590579721685, ids=[], paths=[], recursive=1)
2025-05-29T15:27:11.965012Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:235:2248], cookie=8195579590579721685)
2025-05-29T15:27:11.965137Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:241:2253], cookie=13177566656403523344, id=3, path="")
2025-05-29T15:27:11.965177Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:61: [72057594037927937] Deleted quoter resource 3 "Root/Folder"
2025-05-29T15:27:11.975865Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:241:2253], cookie=13177566656403523344)
2025-05-29T15:27:11.976048Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:246:2258], cookie=11216642255694097881, ids=[], paths=[], recursive=1)
2025-05-29T15:27:11.976065Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:246:2258], cookie=11216642255694097881)
2025-05-29T15:27:11.978750Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937
2025-05-29T15:27:11.978786Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute
2025-05-29T15:27:11.978845Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete
2025-05-29T15:27:11.978987Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute
2025-05-29T15:27:12.022999Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete
2025-05-29T15:27:12.023116Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:286:2288], cookie=16060632233046909359, ids=[], paths=[], recursive=1)
2025-05-29T15:27:12.023134Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:286:2288], cookie=16060632233046909359)
2025-05-29T15:27:12.320596Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937
2025-05-29T15:27:12.320627Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute
2025-05-29T15:27:12.323940Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete
2025-05-29T15:27:12.323976Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute
2025-05-29T15:27:12.346229Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete
2025-05-29T15:27:12.346411Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:131:2156], cookie=10918499844365219146, path="/Q1", config={ MaxUnitsPerSecond: 10 })
2025-05-29T15:27:12.346474Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Q1"
2025-05-29T15:27:12.357463Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:131:2156], cookie=10918499844365219146)
2025-05-29T15:27:12.357665Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:140:2163], cookie=12148169739237890208, path="/Q2", config={ MaxUnitsPerSecond: 10 })
2025-05-29T15:27:12.357723Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Q2"
2025-05-29T15:27:12.371308Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:140:2163], cookie=12148169739237890208)
2025-05-29T15:27:12.371857Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:145:2168]. Cookie: 4907897338537607223. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 }
2025-05-29T15:27:12.371875Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:145:2168], cookie=4907897338537607223)
2025-05-29T15:27:12.372023Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:145:2168]. Cookie: 10331740964707227171. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." } } } ProtocolVersion: 1 }
2025-05-29T15:27:12.372031Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:145:2168], cookie=10331740964707227171)
|68.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD]
Test command err: 2025-05-29T15:27:10.557113Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937
2025-05-29T15:27:10.557149Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute
2025-05-29T15:27:10.561208Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete
2025-05-29T15:27:10.561245Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute
2025-05-29T15:27:10.572564Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete
2025-05-29T15:27:10.796196Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937
2025-05-29T15:27:10.796237Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute
2025-05-29T15:27:10.800510Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete
2025-05-29T15:27:10.800566Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute
2025-05-29T15:27:10.824264Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete
2025-05-29T15:27:11.041349Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937
2025-05-29T15:27:11.041389Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute
2025-05-29T15:27:11.045095Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete
2025-05-29T15:27:11.045294Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute
2025-05-29T15:27:11.066636Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete
2025-05-29T15:27:11.305772Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937
2025-05-29T15:27:11.305809Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute
2025-05-29T15:27:11.310297Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete
2025-05-29T15:27:11.310491Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute
2025-05-29T15:27:11.334142Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete
2025-05-29T15:27:11.334576Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 5
2025-05-29T15:27:11.334807Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([4:189:2159])
2025-05-29T15:27:11.777102Z node 6 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937
2025-05-29T15:27:11.777142Z node 6 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute
2025-05-29T15:27:11.781288Z node 6 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete
2025-05-29T15:27:11.781331Z node 6 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute
... waiting for register request
2025-05-29T15:27:11.792891Z node 6 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete
... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR cookie 6463654989720387713
... waiting for register request (done)
... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR
2025-05-29T15:27:11.793110Z node 6 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 7
2025-05-29T15:27:11.793286Z node 6 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([6:187:2157])
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::DropSysView [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:27:12.512498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:27:12.512527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:27:12.512532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:27:12.512538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:27:12.512555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:27:12.512560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:27:12.512570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:27:12.512584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:27:12.512712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:27:12.512781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:27:12.527268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:27:12.527294Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:27:12.530320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:27:12.530465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:27:12.530519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:27:12.532138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:27:12.532298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:27:12.532427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:27:12.532483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:27:12.532971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:27:12.533027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:27:12.533313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:27:12.533325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:27:12.533346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:27:12.533358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:27:12.533364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:27:12.533408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.534836Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:27:12.556209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:27:12.556311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.556381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:27:12.556427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:27:12.556440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.557315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:27:12.557346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:27:12.557419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.557431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:27:12.557438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:27:12.557444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:27:12.557951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.557967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:27:12.557974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:27:12.558365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.558377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:12.558386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:27:12.558395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:27:12.559117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:27:12.559580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:27:12.559628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:27:12.559827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:27:12.559857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680:
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:12.559865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:12.559932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:27:12.559940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:12.559978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:27:12.559992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:27:12.560440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:12.560451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:12.560504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
SCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 43us result status StatusPathDoesNotExist 2025-05-29T15:27:12.579440Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.sys\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/.sys/new_sys_view" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:27:12.579640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:363:2058] recipient: [1:103:2137] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:366:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:367:2058] recipient: [1:365:2353] Leader for TabletID 72057594046678944 is [1:368:2354] sender: [1:369:2058] recipient: [1:365:2353] 2025-05-29T15:27:12.587014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:27:12.587044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:12.587050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:27:12.587056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:27:12.587063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:27:12.587068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:27:12.587080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:12.587094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:27:12.587184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 
2025-05-29T15:27:12.587249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:27:12.588587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:27:12.589016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:27:12.589068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:27:12.589099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:12.589105Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:12.589146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:27:12.589248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: .sys, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:27:12.589282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1457: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1483: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1785: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_data_erasure_manager.cpp:452: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-05-29T15:27:12.589418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2033: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2151: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2237: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2303: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2453: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2832: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2911: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:3409: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3445: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3659: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3804: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3821: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3981: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3997: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4282: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4587: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4645: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4740: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4767: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.589798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4794: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-05-29T15:27:12.591199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:27:12.591686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:12.591703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:12.592025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:27:12.592042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:12.592052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:27:12.592488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:368:2354] sender: [1:429:2058] recipient: [1:15:2062] 2025-05-29T15:27:12.659583Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:12.659656Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 94us result status StatusPathDoesNotExist 2025-05-29T15:27:12.659708Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.sys\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/.sys/new_sys_view" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> DataShardReadTableSnapshots::ReadTableSplitFinished [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD] Test command err: 2025-05-29T15:25:48.575126Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889251253232039:2078];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:48.575232Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026c4/r3tmp/tmpwzH57P/pdisk_1.dat 2025-05-29T15:25:48.675054Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:48.676818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:48.676839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:48.678926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4315, node 1 2025-05-29T15:25:48.698566Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:48.698584Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:48.698587Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:48.698624Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20595 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:48.763657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:49.063729Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889255548200327:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.063755Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.115335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:426: TCreateTable Propose, path: /Root/Foo, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:25:49.115672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:25:49.115678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:25:49.119158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2025-05-29T15:25:49.188443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 1748532349235, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-29T15:25:49.210188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715658:0 2025-05-29T15:25:49.215111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889255548200548:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.215132Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.217452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:508: TAlterTable Propose, path: /Root/Foo, pathId: , opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:25:49.217584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:25:49.217591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:25:49.218225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo 2025-05-29T15:25:49.237046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 1748532349284, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-29T15:25:49.241357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715659:0 Fast forward 1m partitions 2 Fast forward 1m 2025-05-29T15:25:53.578581Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7509889251253232039:2078];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:53.578624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; partitions 2 Fast forward 1m partitions 2 Fast forward 1m 2025-05-29T15:25:59.282298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:798: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976710657:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037888 SourceTabletId: 72075186224037889 SchemeshardId: 72057594046644480 2025-05-29T15:25:59.282455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1077: TSplitMerge Propose accepted, tableStr: /Root/Foo, tableId: , opId: 281474976710657:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" TabletID: 72075186224037888 ShardIdx: 1 } SourceRanges { KeyRangeBegin: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037889 ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037888 SourceTabletId: 72075186224037889 SchemeshardId: 72057594046644480 2025-05-29T15:25:59.282463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:25:59.334136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710657:0 2025-05-29T15:25:59.344359Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-05-29T15:25:59.344518Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found partitions 1 2025-05-29T15:26:01.268079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_move_table.cpp:569: TMoveTable Propose, from: /Root/Foo, to: /Root/Bar, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:26:01.268148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:26:01.271179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715660, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE RENAME, dst path: /Root/Foo, dst path: /Root/Bar 2025-05-29T15:26:01.281313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 1748532841328, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-29T15:26:01.283836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1103: All parts have reached barrier, tx: 281474976715660, done: 0, blocked: 1 2025-05-29T15:26:01.287090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715660:0 2025-05-29T15:26:01.287704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 Fast forward 1m 2025-05-29T15:26:03.674977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:26:03.674997Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:04.309423Z node 1 :TX_DATASHARD DEBUG: datashard_impl.h:3344: SendPeriodicTableStats register new pipe at datashard 72075186224037890 FollowerId 0, TableInfos size = 1 2025-05-29T15:26:04.309862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.3979 2025-05-29T15:26:04.410069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-05-29T15:26:04.410127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 3 shard idx 72057594046644480:3 data size 0 row count 0 2025-05-29T15:26:04.410156Z node 1 :FLAT_ ... 
rver disconnected at leader tablet# 72075186224037890, clientId# [1:7509889612030490002:2725], serverId# [1:7509889612030490007:4670], sessionId# [0:0:0] 2025-05-29T15:27:12.108794Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3728: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7509889612030489995:2723], serverId# [1:7509889612030489998:4663], sessionId# [0:0:0] 2025-05-29T15:27:12.108902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509889298497873922 RawX2: 4503603922340179 } TabletId: 72075186224037890 State: 4 2025-05-29T15:27:12.108919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:27:12.108968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509889298497873922 RawX2: 4503603922340179 } TabletId: 72075186224037890 State: 4 2025-05-29T15:27:12.108980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:27:12.109512Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3306: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-05-29T15:27:12.109527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:27:12.109528Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3306: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-05-29T15:27:12.109549Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3306: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-05-29T15:27:12.109550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:27:12.109553Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3306: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-05-29T15:27:12.109563Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-29T15:27:12.109566Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-29T15:27:12.109718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509889599145587666 RawX2: 4503603922340483 } TabletId: 72075186224037891 State: 4 2025-05-29T15:27:12.109722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:27:12.109744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509889599145587666 RawX2: 4503603922340483 } TabletId: 72075186224037891 State: 4 2025-05-29T15:27:12.109746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: 
TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:27:12.109757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509889599145587665 RawX2: 4503603922340482 } TabletId: 72075186224037892 State: 4 2025-05-29T15:27:12.109760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:27:12.109771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7509889599145587665 RawX2: 4503603922340482 } TabletId: 72075186224037892 State: 4 2025-05-29T15:27:12.109774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:27:12.110281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:27:12.110293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:27:12.110296Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-05-29T15:27:12.110307Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-05-29T15:27:12.110364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:27:12.110371Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-05-29T15:27:12.110372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:27:12.110378Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-05-29T15:27:12.111041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-29T15:27:12.111120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-29T15:27:12.111172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-29T15:27:12.111192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-29T15:27:12.111210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount 
reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-29T15:27:12.111231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-29T15:27:12.111244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-05-29T15:27:12.111260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-29T15:27:12.111275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-05-29T15:27:12.111287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:27:12.111290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-29T15:27:12.111301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-05-29T15:27:12.111380Z node 1 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-29T15:27:12.111387Z node 1 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-05-29T15:27:12.111391Z node 1 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-05-29T15:27:12.111469Z node 1 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037891 2025-05-29T15:27:12.111492Z node 1 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037891 2025-05-29T15:27:12.111693Z node 1 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037890 2025-05-29T15:27:12.111711Z node 1 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037890 2025-05-29T15:27:12.111803Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-05-29T15:27:12.111807Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-05-29T15:27:12.111809Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-05-29T15:27:12.111901Z node 1 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037892 2025-05-29T15:27:12.111910Z node 1 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037892 2025-05-29T15:27:12.114001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:3 2025-05-29T15:27:12.114013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 
72057594046644480:3 tabletId 72075186224037890 2025-05-29T15:27:12.114025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:3 2025-05-29T15:27:12.114029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:4 2025-05-29T15:27:12.114030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-29T15:27:12.114035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:4 2025-05-29T15:27:12.114039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:5 2025-05-29T15:27:12.114041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-05-29T15:27:12.114044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:5 2025-05-29T15:27:12.114052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> TSchemeShardSysViewTest::AsyncDropSameSysView ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [FAIL] Test command err: 2025-05-29T15:27:06.486368Z :WriteSessionCloseWaitsForWrites INFO: Random seed for debugging is 1748532426486357 2025-05-29T15:27:06.651121Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889587532528318:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:06.651152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:27:06.674364Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889589797502754:2222];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002481/r3tmp/tmpvXi1Ir/pdisk_1.dat 2025-05-29T15:27:06.721268Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:27:06.721381Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:27:06.731016Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:27:06.772869Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:06.772901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:06.776868Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:06.777752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24818, node 1 2025-05-29T15:27:06.811244Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/002481/r3tmp/yandexZWMobi.tmp 2025-05-29T15:27:06.811259Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/002481/r3tmp/yandexZWMobi.tmp 2025-05-29T15:27:06.811355Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/002481/r3tmp/yandexZWMobi.tmp 2025-05-29T15:27:06.811412Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:27:06.816950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:06.816978Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:06.819208Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:27:06.819593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:06.830344Z INFO: TTestServer started on Port 25784 GrpcPort 24818 TClient is connected to server localhost:25784 PQClient connected to localhost:24818 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:27:06.875015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-05-29T15:27:07.106858Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889591827496595:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:07.106874Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889591827496584:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:07.106892Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:07.107878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2025-05-29T15:27:07.108797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889591827496627:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:07.108823Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:07.118386Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889591827496598:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-05-29T15:27:07.155717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-05-29T15:27:07.160822Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509889594092470209:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:27:07.160961Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=NGU0YmMyOTUtOThkZGMwMzAtMTI2NWM5ZDctNDYwZWEzYjM=, ActorId: [2:7509889594092470169:2305], ActorState: ExecuteState, TraceId: 01jweaeyc52k4ngxc7zxqpw9s0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:27:07.161605Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:27:07.196400Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889591827496736:2696] txid# 281474976720663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:07.203777Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889591827496747:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:27:07.203883Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Zjk3MjUyMjMtMzk5MGI2YzAtNGViODM0LWJlMjUxZGVk, ActorId: [1:7509889591827496581:2333], ActorState: ExecuteState, TraceId: 01jweaeyb18rg8t0jnnd6qzd8y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:27:07.204049Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:27:07.231045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-05-29T15:27:07.267619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:24818", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-05-29T15:27:07.353749Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889591827497039:2377], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:07.354490Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjA2ZDY5NTAtMWUxZjRmZGEtMWI0NmVjZjgtY2Y1NzE5ZTQ=, ActorId: [1:7509889591827497036:2375], ActorState: ExecuteState, TraceId: 01jweaeyj990w1v80ssqzspx48, ReplyQueryCompileError, s ... shardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:27:08.121911Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:08.121945Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:08.127459Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:08.130097Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:08.130123Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:08.130776Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:27:08.131119Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/002481/r3tmp/yandexcyreyn.tmp 2025-05-29T15:27:08.131206Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:27:08.131482Z node 3 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-05-29T15:27:08.131856Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:27:08.139033Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:27:08.469463Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509889596946275335:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:08.469493Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:08.469580Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509889596946275362:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:08.470523Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-05-29T15:27:08.471794Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509889596946275389:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:08.471840Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:08.476678Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:27:08.480844Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7509889596946275364:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715661 completed, doublechecking } 2025-05-29T15:27:08.501383Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [4:7509889595422584753:2314], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:27:08.501464Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=4&id=N2IyMmE2NzItNzE4MGU5ZWItNjk0OGJhZjgtNWM5NjQ1ZTc=, ActorId: [4:7509889595422584713:2308], ActorState: ExecuteState, TraceId: 01jweaezpf55q1chvs44yx7prw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:27:08.501629Z node 4 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:27:08.544650Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:27:08.544852Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:7509889596946275548:2725] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:08.549791Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:7509889596946275580:2353], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
<main>:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:27:08.550261Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=MzRlYjdmMDAtNjU3NjRiYy05YjYyODY0Ni03ZTkwOGY1MA==, ActorId: [3:7509889596946275332:2333], ActorState: ExecuteState, TraceId: 01jweaeznmbckz8ct2wfmzjp1m, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:27:08.550370Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:27:08.614117Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:23940", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-05-29T15:27:08.674227Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:7509889596946275817:2378], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:08.674350Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=MTk4MDljN2ItNGYxZGU0NWYtYjMzMTVkNS0xMzdiOWY2Mg==, ActorId: [3:7509889596946275814:2376], ActorState: ExecuteState, TraceId: 01jweaezvmfpe47fww2a56hvnq, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13940ACC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13AF4859) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x13824994) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x13823768) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x1381DD92) NPersQueue::SDKTestSetup::Start(bool, bool)+1450 (0x137F1C8A) NYdb::NFederatedTopic::NTests::NTestSuiteBasicUsage::TTestCaseWriteSessionCloseIgnoresWrites::Execute_(NUnitTest::TTestContext&)+169 (0x13806D79) NYdb::NFederatedTopic::NTests::NTestSuiteBasicUsage::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1381C7C7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13AF670E) NYdb::NFederatedTopic::NTests::NTestSuiteBasicUsage::TCurrentTest::Execute()+428 (0x1381C02C) NUnitTest::TTestFactory::Execute()+803 (0x13AF6E83) NUnitTest::RunMain(int, char**)+3021 (0x13B08A2D) ??+0 (0x7F0A48A19D90) __libc_start_main+128 (0x7F0A48A19E40) _start+41 (0x1294D029) >> TSchemeShardSysViewTest::AsyncDropSameSysView [GOOD] >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> KqpScanSpilling::HandleErrorsCorrectly >> KqpScanSpilling::SelfJoin >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncDropSameSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:27:13.902456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:27:13.902484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:13.902490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:27:13.902495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:27:13.902510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:27:13.902514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:27:13.902523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0,
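[Editor's note] The INTERNAL_ERROR and backtrace above originate in a child-index bounds check inside the YQL expression AST (yql/essentials/ast/yql_expr.h:1874). A rough illustration of that failure class (hypothetical code, not the actual yql_expr.h implementation):

#include <cstddef>
#include <stdexcept>
#include <vector>

// Hypothetical sketch only: an AST node whose child accessor fails fast
// when asked for a slot it does not have, instead of reading out of bounds.
struct TExprNodeSketch {
    std::vector<const TExprNodeSketch*> Children;

    const TExprNodeSketch* Child(std::size_t index) const {
        if (index >= Children.size()) {
            throw std::out_of_range("index out of range");
        }
        return Children[index];
    }
};

int main() {
    TExprNodeSketch leaf;
    TExprNodeSketch root{{&leaf}};
    return root.Child(0) == &leaf ? 0 : 1; // root.Child(1) would throw
}

A query whose compiled expression tree references a child that was never attached trips exactly this kind of check, which KQP then surfaces as the "Fatal: ... index out of range" issue seen in the session log.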
WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:13.902539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:27:13.902659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:27:13.902728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:27:13.915883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:13.915914Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:13.918505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:27:13.918622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:27:13.918680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:27:13.920078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:27:13.920209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:27:13.920334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:13.920376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:27:13.920796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:13.920846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:27:13.921099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:13.921109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:13.921127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:27:13.921138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:13.921144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:27:13.921177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:27:13.922391Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] 
recipient: [1:15:2062] 2025-05-29T15:27:13.941888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:27:13.941983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:13.942057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:27:13.942102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:27:13.942114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:13.942936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:13.942970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:27:13.943031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:13.943041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:27:13.943047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:27:13.943054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:27:13.943462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:13.943474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:27:13.943478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:27:13.943801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:13.943812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:13.943820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:13.943828Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:27:13.944466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:27:13.945115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:27:13.945170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:27:13.945405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:13.945436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:13.945445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:13.945504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:27:13.945512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:13.945546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:27:13.945557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:27:13.946233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:13.946245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:13.946302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
594046678944, LocalPathId: 3] 2025-05-29T15:27:13.968558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:13.968563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-05-29T15:27:13.968569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-05-29T15:27:13.968572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-05-29T15:27:13.968626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:27:13.968633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 103:0 ProgressState 2025-05-29T15:27:13.968646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:27:13.968651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:27:13.968656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:27:13.968660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:27:13.968664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-05-29T15:27:13.968669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:27:13.968674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:27:13.968679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:27:13.968693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:27:13.968702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2025-05-29T15:27:13.968706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-05-29T15:27:13.968710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-05-29T15:27:13.968713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-05-29T15:27:13.968844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:27:13.968856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:27:13.968861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:27:13.968866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:27:13.968870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:27:13.969263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:27:13.969278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:27:13.969282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:27:13.969287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-05-29T15:27:13.969291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:27:13.969432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:27:13.969442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:27:13.969447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:27:13.969452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:27:13.969459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:27:13.969470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & 
remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-05-29T15:27:13.969494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:27:13.969499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:27:13.969508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:27:13.969919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:27:13.970458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:27:13.970527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:27:13.970762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-05-29T15:27:13.970835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:27:13.970843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2025-05-29T15:27:13.970863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-05-29T15:27:13.970866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-05-29T15:27:13.970955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:27:13.970977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-05-29T15:27:13.970986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:27:13.970991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:362:2352] 2025-05-29T15:27:13.971010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-29T15:27:13.971014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:362:2352] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 2025-05-29T15:27:13.971096Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:13.971138Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 56us result status StatusPathDoesNotExist 2025-05-29T15:27:13.971183Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.sys\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/.sys/new_sys_view" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpScanSpilling::SpillingPragmaParseError >> KqpScanLogs::WideCombine+EnabledLogs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:27:14.221448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:27:14.221478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:14.221484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:27:14.221490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:27:14.221508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:27:14.221513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:27:14.221523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:14.221539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:27:14.221670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:27:14.221754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:27:14.236138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:14.236167Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:14.239148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:27:14.239322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:27:14.239379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:27:14.241302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:27:14.241475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:27:14.241633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:14.241690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:27:14.242284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:14.242344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:27:14.242662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:14.242674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:14.242697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:27:14.242710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:14.242716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:27:14.242779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:27:14.244250Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:27:14.262239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { 
Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:27:14.262338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:14.262405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:27:14.262452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:27:14.262465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:14.263353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:14.263378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:27:14.263437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:14.263445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:27:14.263449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:27:14.263454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:27:14.263800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:14.263827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:27:14.263833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:27:14.264113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:14.264123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:14.264132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:14.264142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:27:14.264768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:27:14.265166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:27:14.265205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:27:14.265400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:14.265425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:14.265433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:14.265510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:27:14.265518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:14.265552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:27:14.265564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:27:14.265961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:14.265967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:14.266003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
7:14.275948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:27:14.275961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:14.275964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-29T15:27:14.275969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 3 FAKE_COORDINATOR: Erasing txId 102 2025-05-29T15:27:14.276026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:27:14.276031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-29T15:27:14.276041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:27:14.276044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:27:14.276048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:27:14.276050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:27:14.276053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-05-29T15:27:14.276056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:27:14.276059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:27:14.276062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:27:14.276071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:27:14.276075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-05-29T15:27:14.276078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-05-29T15:27:14.276081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:27:14.276158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:27:14.276166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:27:14.276169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:27:14.276174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-05-29T15:27:14.276177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:27:14.276269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:27:14.276276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:27:14.276293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:27:14.276298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:27:14.276302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:27:14.276313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-05-29T15:27:14.276941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:27:14.276969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-05-29T15:27:14.277016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:27:14.277024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-05-29T15:27:14.277040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:27:14.277044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:27:14.277116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, 
unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:27:14.277137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:27:14.277141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:324:2314] 2025-05-29T15:27:14.277170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:27:14.277179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:27:14.277182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:324:2314] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-05-29T15:27:14.277233Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:14.277257Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys" took 32us result status StatusSuccess 2025-05-29T15:27:14.277334Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys" PathDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: true } Children { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:14.277382Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:14.277398Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 19us result status StatusSuccess 
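[Editor's note] The TTxAckPublishToSchemeBoard records above count in-flight path publications down ("Publication in-flight, count: 2", then 1) before reporting "Publication complete, notify & remove". A minimal sketch of that ack bookkeeping, with invented names:

#include <cstdio>
#include <map>

// Hypothetical sketch: each ack decrements a transaction's in-flight
// publication count; only the final ack completes the transaction.
class TPublicationTrackerSketch {
public:
    void StartPublication(unsigned txId, unsigned pathCount) {
        InFlight[txId] = pathCount;
    }

    // Returns true only when the last expected ack for txId arrives.
    bool Ack(unsigned txId) {
        auto it = InFlight.find(txId);
        if (it == InFlight.end() || it->second == 0) {
            return false; // unknown or already-completed transaction
        }
        if (--it->second == 0) {
            InFlight.erase(it); // "notify & remove"
            return true;
        }
        return false;
    }

private:
    std::map<unsigned, unsigned> InFlight;
};

int main() {
    TPublicationTrackerSketch tracker;
    tracker.StartPublication(103, 3); // tx 103 publishes 3 paths
    std::printf("%d %d %d\n",
        tracker.Ack(103), tracker.Ack(103), tracker.Ack(103));
    // prints: 0 0 1
}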
2025-05-29T15:27:14.277431Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] Test command err: 2025-05-29T15:25:47.128840Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889247375080254:2231];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:47.128887Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002707/r3tmp/tmpTnp5FI/pdisk_1.dat 2025-05-29T15:25:47.243518Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:47.243543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:47.246945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:47.256972Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24500, node 1 2025-05-29T15:25:47.289330Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:47.289342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:47.289345Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:47.289389Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12740 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:47.351778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:25:47.377740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 Triggering split by load TClient is connected to server localhost:12740 2025-05-29T15:25:47.963433Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889247375081084:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:47.963462Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.035189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:25:48.097374Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889251670048544:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.097394Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.100639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532348115 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532348115 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-05-29T15:25:48.149229Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889251670048634:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.149374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.149825Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889251670048651:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.150550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:25:48.150597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:25:48.150601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715660:1, at schemeshard: 72057594046644480 2025-05-29T15:25:48.150614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:25:48.150616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715660:2, at schemeshard: 72057594046644480 2025-05-29T15:25:48.150632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:25:48.150640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:3, path# /Root/.metadata/workload_manager/pools/default 2025-05-29T15:25:48.150682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976715660:3 1 -> 128 2025-05-29T15:25:48.150756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715660:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:25:48.150760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:25:48.151512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-05-29T15:25:48.151575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-29T15:25:48.151699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-29T15:25:48.151717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:63: MkDir::TPropose operationId# 281474976715660:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:25:48.151756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:63: MkDir::TPropose operationId# 281474976715660:2 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:25:48.151772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_resource_pool.cpp:45: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715660:3, 
ProgressState 2025-05-29T15:25:48.151783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:63: MkDir::TPropose operationId# 281474976715660:1 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:25:48.152797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_im ... 5 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-05-29T15:27:08.298910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:450: Propose merge request : Transaction { WorkingDir: "/Root" OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 } Internal: true FailOnExist: false } TxId: 281474976710658 TabletId: 72057594046644480, reason: shard with tabletId: 72075186224037890 merge by load (shardLoad: 0.02), shardToMergeCount: 2, totalSize: 0, sizeToMerge: 0, totalLoad: 0.04, loadThreshold: 0.07 2025-05-29T15:27:08.298968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:798: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976710658:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 2025-05-29T15:27:08.299127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1077: TSplitMerge Propose accepted, tableStr: /Root/Foo, tableId: , opId: 281474976710658:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000K\230u~\000\000\000\200" TabletID: 72075186224037889 ShardIdx: 2 } SourceRanges { KeyRangeBegin: "\002\000\004\000\000\000K\230u~\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037890 ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" ShardIdx: 4 }, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 2025-05-29T15:27:08.299142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:27:08.304786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-05-29T15:27:08.312634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:175: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-05-29T15:27:08.312689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976710658:0 2 -> 3 2025-05-29T15:27:08.315019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:83: TSplitMerge TConfigureDestination ProgressState, 
operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:27:08.315911Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7509889595268353897:10067] 2025-05-29T15:27:08.321913Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037891 2025-05-29T15:27:08.321971Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-05-29T15:27:08.322043Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2025-05-29T15:27:08.323941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:37: TSplitMerge TConfigureDestination operationId# 281474976710658:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710658:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710658 TabletId: 72075186224037891 2025-05-29T15:27:08.323964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976710658:0 3 -> 131 2025-05-29T15:27:08.324454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:328: TSplitMerge TTransferData operationId# 281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:27:08.329997Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state Ready tabletId 72075186224037891 2025-05-29T15:27:08.330039Z node 1 :TX_DATASHARD INFO: datashard.cpp:475: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-29T15:27:08.330063Z node 1 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037891 2025-05-29T15:27:08.330071Z node 1 :TX_DATASHARD INFO: datashard.cpp:1293: Change sender activated: at tablet: 72075186224037891 2025-05-29T15:27:08.330161Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2025-05-29T15:27:08.331520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:206: TSplitMerge TTransferData operationId# 281474976710658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710658 TabletId: 72075186224037890 2025-05-29T15:27:08.331614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:206: TSplitMerge TTransferData operationId# 281474976710658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710658 TabletId: 72075186224037889 2025-05-29T15:27:08.331733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976710658:0 131 -> 132 2025-05-29T15:27:08.332313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-29T15:27:08.332397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-29T15:27:08.332416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:431: TSplitMerge TNotifySrc, operationId: 281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:27:08.332812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 
Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-05-29T15:27:08.332832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-05-29T15:27:08.332838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-05-29T15:27:08.334702Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-05-29T15:27:08.334789Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-05-29T15:27:08.335074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 281474976710658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-05-29T15:27:08.335126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 281474976710658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-05-29T15:27:08.335139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710658:0 progress is 1/1 2025-05-29T15:27:08.335144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710658:0 progress is 1/1 2025-05-29T15:27:08.335152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710658:0 2025-05-29T15:27:08.335622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:260: Unable to activate 281474976710658:0 2025-05-29T15:27:08.335687Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3306: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-05-29T15:27:08.335738Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3306: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-05-29T15:27:08.335822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:27:08.335871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-29T15:27:08.346487Z node 1 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-05-29T15:27:08.346484Z node 1 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-29T15:27:08.347151Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-05-29T15:27:08.347166Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-05-29T15:27:08.347249Z node 1 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037890 
2025-05-29T15:27:08.347281Z node 1 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037890 2025-05-29T15:27:08.347558Z node 1 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037889 2025-05-29T15:27:08.347574Z node 1 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186224037889 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532348115 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 3 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] Test command err: 2025-05-29T15:26:56.284790Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:110:2140], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:26:56.301302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:26:56.301324Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:56.301877Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:110:2140], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:26:56.301965Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-05-29T15:26:56.302023Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:26:56.303137Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:110:2140], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:26:56.314052Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:26:56.314140Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:26:56.314302Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-05-29T15:26:56.314312Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 9437184 2025-05-29T15:26:56.314320Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 9437184 2025-05-29T15:26:56.314380Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:26:56.314394Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:26:56.314406Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 9437184 persisting started state actor id [1:197:2153] in generation 2 2025-05-29T15:26:56.342672Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:26:56.349018Z 
node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 9437184 2025-05-29T15:26:56.349111Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:26:56.349139Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 9437184, actorId: [1:215:2212] 2025-05-29T15:26:56.349144Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 9437184 2025-05-29T15:26:56.349149Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-05-29T15:26:56.349154Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-05-29T15:26:56.349228Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:26:56.349243Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:26:56.349321Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-05-29T15:26:56.349345Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-05-29T15:26:56.349353Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-05-29T15:26:56.349359Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:26:56.349366Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-05-29T15:26:56.349371Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-05-29T15:26:56.349375Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-05-29T15:26:56.349379Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-05-29T15:26:56.349384Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-05-29T15:26:56.349394Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:211:2209], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:26:56.349399Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:26:56.349406Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 9437184, clientId# [1:209:2208], serverId# [1:211:2209], sessionId# [0:0:0] 2025-05-29T15:26:56.349836Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:100:2134], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 100 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-05-29T15:26:56.349850Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, 
processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:26:56.349864Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:26:56.349895Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-05-29T15:26:56.349906Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-05-29T15:26:56.349920Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 1 at tablet 9437184 2025-05-29T15:26:56.349928Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-05-29T15:26:56.349932Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-05-29T15:26:56.349937Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-05-29T15:26:56.349941Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-05-29T15:26:56.350007Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-05-29T15:26:56.350011Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-05-29T15:26:56.350015Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1] at 9437184 to execution unit FinishPropose 2025-05-29T15:26:56.350018Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-05-29T15:26:56.350028Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1] at 9437184 is DelayComplete 2025-05-29T15:26:56.350031Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-05-29T15:26:56.350034Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-05-29T15:26:56.350038Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-05-29T15:26:56.350042Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1832: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-05-29T15:26:56.362202Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-05-29T15:26:56.362230Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-05-29T15:26:56.362238Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-05-29T15:26:56.362252Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-05-29T15:26:56.362281Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 9437184 not sending time cast registration request in state WaitScheme 2025-05-29T15:26:56.362412Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:221:2218], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 
2025-05-29T15:26:56.362422Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:26:56.362430Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 9437184, clientId# [1:220:2217], serverId# [1:221:2218], sessionId# [0:0:0] 2025-05-29T15:26:56.362452Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269287424, Sender [1:100:2134], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-05-29T15:26:56.362458Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3147: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-05-29T15:26:56.362512Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1790: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-05-29T15:26:56.362522Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1805: Execution status for [1000001:1] at 9437184 is Executed 2025-05-29T15:26:56.362530Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-05-29T15:26:56.362535Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-05-29T15:26:56.363365Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 100 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-05-29T15:26:56.363389Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-05-29T15:26:56.363459Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:26:56.363467Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:26:56.363476Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-05-29T15:26:56.363483Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:26:56.363488Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-05-29T15:26:56.363497Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-05-29T15:26:56.363503Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [100000 ... 
pp:1862: Execution status for [0:1002] at 9437184 is Executed 2025-05-29T15:27:14.287536Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1002] at 9437184 executing on unit BuildAndWaitDependencies 2025-05-29T15:27:14.287539Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1002] at 9437184 to execution unit ExecuteDataTx 2025-05-29T15:27:14.287543Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1002] at 9437184 on unit ExecuteDataTx 2025-05-29T15:27:14.287551Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2365: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-05-29T15:27:14.287647Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:306: Executed operation [0:1002] at tablet 9437184 with status COMPLETE 2025-05-29T15:27:14.287658Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:312: Datashard execution counters for [0:1002] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-05-29T15:27:14.287670Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1002] at 9437184 is ExecutedNoMoreRestarts 2025-05-29T15:27:14.287674Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1002] at 9437184 executing on unit ExecuteDataTx 2025-05-29T15:27:14.287679Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1002] at 9437184 to execution unit FinishPropose 2025-05-29T15:27:14.287683Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1002] at 9437184 on unit FinishPropose 2025-05-29T15:27:14.287692Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1002] at 9437184 is DelayComplete 2025-05-29T15:27:14.287696Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1002] at 9437184 executing on unit FinishPropose 2025-05-29T15:27:14.287700Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1002] at 9437184 to execution unit CompletedOperations 2025-05-29T15:27:14.287703Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1002] at 9437184 on unit CompletedOperations 2025-05-29T15:27:14.287711Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1002] at 9437184 is Executed 2025-05-29T15:27:14.287715Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1002] at 9437184 executing on unit CompletedOperations 2025-05-29T15:27:14.287718Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [0:1002] at 9437184 has finished 2025-05-29T15:27:14.299466Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-05-29T15:27:14.299496Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:1002] at 9437184 on unit FinishPropose 2025-05-29T15:27:14.299508Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1002 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-05-29T15:27:14.299536Z node 3 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 .2025-05-29T15:27:14.300906Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269551617, Sender [3:100:2134], Recipient [3:234:2226]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 100 RawX2: 12884904022 } 2025-05-29T15:27:14.300923Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateWork, processing event TEvDataShard::TEvGetShardState 2025-05-29T15:27:14.301174Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [3:4549:6464], Recipient [3:234:2226]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:14.301182Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:14.301189Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 9437184, clientId# [3:4548:6463], serverId# [3:4549:6464], sessionId# [0:0:0] 2025-05-29T15:27:14.301240Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [3:100:2134], Recipient [3:234:2226]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 100 RawX2: 12884904022 } TxBody: "\032\265\002\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\235\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1003 ExecLevel: 0 Flags: 0 2025-05-29T15:27:14.301258Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:27:14.301303Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:27:14.301426Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1003] at 9437184 on unit CheckDataTx 2025-05-29T15:27:14.301444Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1003] at 9437184 is Executed 2025-05-29T15:27:14.301449Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1003] at 9437184 executing on unit CheckDataTx 2025-05-29T15:27:14.301454Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1003] at 9437184 to execution unit BuildAndWaitDependencies 2025-05-29T15:27:14.301458Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1003] at 9437184 on unit BuildAndWaitDependencies 2025-05-29T15:27:14.301469Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2365: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-05-29T15:27:14.301483Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:1003] at 9437184 2025-05-29T15:27:14.301488Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1003] at 9437184 is Executed 2025-05-29T15:27:14.301492Z node 3 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1003] at 9437184 executing on unit BuildAndWaitDependencies 2025-05-29T15:27:14.301496Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1003] at 9437184 to execution unit ExecuteDataTx 2025-05-29T15:27:14.301499Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1003] at 9437184 on unit ExecuteDataTx 2025-05-29T15:27:14.301508Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2365: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-05-29T15:27:14.301597Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:306: Executed operation [0:1003] at tablet 9437184 with status COMPLETE 2025-05-29T15:27:14.301608Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:312: Datashard execution counters for [0:1003] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-05-29T15:27:14.301620Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1003] at 9437184 is ExecutedNoMoreRestarts 2025-05-29T15:27:14.301624Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1003] at 9437184 executing on unit ExecuteDataTx 2025-05-29T15:27:14.301627Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1003] at 9437184 to execution unit FinishPropose 2025-05-29T15:27:14.301631Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1003] at 9437184 on unit FinishPropose 2025-05-29T15:27:14.301639Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1003] at 9437184 is DelayComplete 2025-05-29T15:27:14.301643Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1003] at 9437184 executing on unit FinishPropose 2025-05-29T15:27:14.301647Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1003] at 9437184 to execution unit CompletedOperations 2025-05-29T15:27:14.301650Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1003] at 9437184 on unit CompletedOperations 2025-05-29T15:27:14.301657Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1003] at 9437184 is Executed 2025-05-29T15:27:14.301663Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1003] at 9437184 executing on unit CompletedOperations 2025-05-29T15:27:14.301667Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [0:1003] at 9437184 has finished 2025-05-29T15:27:14.304777Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-29T15:27:14.304794Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-05-29T15:27:14.305018Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-05-29T15:27:14.305028Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:1003] at 9437184 on unit FinishPropose 
2025-05-29T15:27:14.305035Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1003 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2025-05-29T15:27:14.305052Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-05-29T15:27:14.305798Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268828683, Sender [3:231:2225], Recipient [3:234:2226]: NKikimr::TEvTablet::TEvFollowerGcApplied .2025-05-29T15:27:14.306564Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [3:4563:6477], Recipient [3:234:2226]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:14.306575Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:14.306583Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 9437184, clientId# [3:4562:6476], serverId# [3:4563:6477], sessionId# [0:0:0] 2025-05-29T15:27:14.306680Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269553160, Sender [3:4561:6475], Recipient [3:234:2226]: NKikimrTxDataShard.TEvGetTableStats TableId: 13 { InMemSize: 0 LastAccessTime: 1719 LastUpdateTime: 1719 } >> KqpScanSpilling::SelfJoinQueryService |68.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut >> AutoConfig::GetASPoolsWith3CPUs [GOOD] >> SubDomainWithReboots::CreateWithStoragePools [GOOD] |68.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD] |68.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest |68.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] Test command err: 2025-05-29T15:27:10.252608Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:27:10.252643Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:27:10.252658Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00122e/r3tmp/tmpTk7NHr/pdisk_1.dat 2025-05-29T15:27:10.363361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:27:10.377055Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:10.381780Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532429841835 != 1748532429841839 2025-05-29T15:27:10.423678Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:27:10.423998Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:27:10.424112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:10.424134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:10.434946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:10.508367Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:59:2106] Handle TEvProposeTransaction 2025-05-29T15:27:10.508396Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:27:10.508435Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-05-29T15:27:10.528309Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-05-29T15:27:10.528350Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:27:10.528526Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:27:10.528536Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:27:10.528581Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:640:2548] txid# 
281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:27:10.528615Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:27:10.528632Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:27:10.528696Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:27:10.529072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:27:10.529291Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:27:10.529301Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-05-29T15:27:10.544040Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:27:10.544339Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:27:10.544435Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:27:10.544526Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:27:10.556138Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:27:10.556334Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:27:10.556367Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:27:10.556549Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:27:10.556559Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:27:10.556567Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:27:10.556623Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:27:10.556641Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:27:10.556653Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:27:10.567028Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:27:10.572162Z node 1 
:TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:27:10.572261Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:27:10.572292Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:27:10.572298Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:27:10.572304Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:27:10.572310Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:27:10.572391Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:10.572400Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:10.572510Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:27:10.572536Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:27:10.572554Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:27:10.572562Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:27:10.572571Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:27:10.572576Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:27:10.572581Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:27:10.572587Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:27:10.572594Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:27:10.572725Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:10.572733Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:10.572740Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:27:10.572754Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:27:10.572758Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:27:10.572782Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-05-29T15:27:10.572832Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:27:10.572844Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:27:10.572861Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:27:10.572870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:27:10.572875Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:27:10.572881Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:27:10.572886Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 7 ... ing event TEvTxProcessing::TEvStreamClearancePending 2025-05-29T15:27:12.443796Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269287940, Sender [2:739:2620], Recipient [2:663:2568]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715659 Cleared: true 2025-05-29T15:27:12.443801Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3150: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-05-29T15:27:12.443841Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [2:663:2568], Recipient [2:663:2568]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:12.443845Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:12.443852Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:27:12.443861Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-05-29T15:27:12.443868Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-05-29T15:27:12.443872Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2025-05-29T15:27:12.443879Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [0:281474976715659] at 72075186224037888 2025-05-29T15:27:12.443883Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-05-29T15:27:12.443886Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit WaitForStreamClearance 2025-05-29T15:27:12.443891Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715659] at 72075186224037888 to execution unit ReadTableScan 2025-05-29T15:27:12.443895Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-05-29T15:27:12.443934Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1862: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2025-05-29T15:27:12.443938Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-05-29T15:27:12.443942Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-05-29T15:27:12.443946Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:27:12.443949Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:27:12.443959Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:27:12.444040Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1232: StateReadTable, received event# 269287428, Sender [2:769:2637], Recipient [2:739:2620]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715659 ShardId: 72075186224037888 2025-05-29T15:27:12.444045Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2069: [ReadTable [2:739:2620] TxId# 281474976715658] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-05-29T15:27:12.444061Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435082, Sender [2:769:2637], Recipient [2:663:2568]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-05-29T15:27:12.444066Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-05-29T15:27:12.444127Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1232: StateReadTable, received event# 269287941, Sender [2:738:2620], Recipient [2:739:2620]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715658 MessageSizeLimit: 1 ReservedMessages: 1 2025-05-29T15:27:12.444135Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2097: [ReadTable [2:739:2620] TxId# 281474976715658] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-05-29T15:27:12.444139Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2160: [ReadTable [2:739:2620] TxId# 281474976715658] Reserving quota 1 messages for ShardId# 72075186224037888 2025-05-29T15:27:12.444147Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-05-29T15:27:12.444186Z node 2 :TX_DATASHARD ERROR: read_table_scan.cpp:681: Got scan fatal error: Invalid DyNumber binary representation 2025-05-29T15:27:12.444193Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-05-29T15:27:12.444220Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4471: FullScan complete at 72075186224037888 2025-05-29T15:27:12.444225Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4477: Found op: cookie: 281474976715659, at: 72075186224037888 2025-05-29T15:27:12.444240Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1232: StateReadTable, received event# 269287429, Sender [2:769:2637], Recipient [2:739:2620]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715659 ShardId: 72075186224037888 2025-05-29T15:27:12.444244Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2120: [ReadTable [2:739:2620] TxId# 281474976715658] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2025-05-29T15:27:12.444249Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2188: [ReadTable 
[2:739:2620] TxId# 281474976715658] Released quota 1 reserved messages from ShardId# 72075186224037888 2025-05-29T15:27:12.444275Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [2:663:2568], Recipient [2:663:2568]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:12.444279Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:12.444286Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:27:12.444290Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-05-29T15:27:12.444295Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2025-05-29T15:27:12.444299Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-05-29T15:27:12.444307Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715659] at 72075186224037888 error: Invalid DyNumber binary representation, IsFatalError: 1 2025-05-29T15:27:12.444316Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-05-29T15:27:12.444320Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit ReadTableScan 2025-05-29T15:27:12.444324Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715659] at 72075186224037888 to execution unit FinishPropose 2025-05-29T15:27:12.444328Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-05-29T15:27:12.444336Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715659] at 72075186224037888 is DelayComplete 2025-05-29T15:27:12.444340Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit FinishPropose 2025-05-29T15:27:12.444343Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715659] at 72075186224037888 to execution unit CompletedOperations 2025-05-29T15:27:12.444346Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715659] at 72075186224037888 on unit CompletedOperations 2025-05-29T15:27:12.444359Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-05-29T15:27:12.444363Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit CompletedOperations 2025-05-29T15:27:12.444366Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [0:281474976715659] at 72075186224037888 has finished 2025-05-29T15:27:12.444370Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:27:12.444374Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 
2025-05-29T15:27:12.444377Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-05-29T15:27:12.444380Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888
2025-05-29T15:27:12.444388Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888
2025-05-29T15:27:12.444392Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:281474976715659] at 72075186224037888 on unit FinishPropose
2025-05-29T15:27:12.444399Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715659 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: EXEC_ERROR
2025-05-29T15:27:12.444408Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976715659 at tablet 72075186224037888 status: EXEC_ERROR errors: PROGRAM_ERROR (Invalid DyNumber binary representation) |
2025-05-29T15:27:12.444422Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-05-29T15:27:12.444483Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1232: StateReadTable, received event# 269550080, Sender [2:663:2568], Recipient [2:739:2620]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037888 Status: EXEC_ERROR Error { Kind: PROGRAM_ERROR Reason: "Invalid DyNumber binary representation" } TxId: 281474976715659 Step: 0 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 87 } } CommitVersion { Step: 0 TxId: 281474976715659 }
2025-05-29T15:27:12.444491Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1921: [ReadTable [2:739:2620] TxId# 281474976715658] Received TEvProposeTransactionResult Status# EXEC_ERROR ShardId# 72075186224037888
2025-05-29T15:27:12.444505Z node 2 :TX_PROXY ERROR: read_table_impl.cpp:2919: [ReadTable [2:739:2620] TxId# 281474976715658] RESPONSE Status# ExecError shard: 72075186224037888 table: /Root/Table
2025-05-29T15:27:12.444579Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269553190, Sender [2:739:2620], Recipient [2:663:2568]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1500 TxId: 281474976715658
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitFinished [FAIL]
Test command err: 2025-05-29T15:27:10.724171Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:27:10.724205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:27:10.724220Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001223/r3tmp/tmpHsBTV1/pdisk_1.dat 2025-05-29T15:27:10.836532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:27:10.850557Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:10.855501Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532430310604 != 1748532430310608 2025-05-29T15:27:10.897684Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:27:10.898005Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:27:10.898093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:10.898114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:10.908774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:10.992245Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:59:2106] Handle TEvProposeTransaction 2025-05-29T15:27:10.992271Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:27:10.992305Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-05-29T15:27:11.036424Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-05-29T15:27:11.036465Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:27:11.036675Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:27:11.036689Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:27:11.036746Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:640:2548] txid# 
281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:27:11.036786Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:27:11.036802Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:27:11.036878Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:27:11.037305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:27:11.037582Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:27:11.037594Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-05-29T15:27:11.052146Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:27:11.052426Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:27:11.052518Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:27:11.052596Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:27:11.063476Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:27:11.063682Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:27:11.063720Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:27:11.063932Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:27:11.063942Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:27:11.063950Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:27:11.064015Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:27:11.064043Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:27:11.064055Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:27:11.074988Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:27:11.083528Z node 1 
:TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:27:11.083625Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:27:11.083660Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:27:11.083667Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:27:11.083672Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:27:11.083678Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:27:11.083760Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:11.083769Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:11.083886Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:27:11.083919Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:27:11.083936Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:27:11.083943Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:27:11.083951Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:27:11.083957Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:27:11.083964Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:27:11.083971Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:27:11.083977Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:27:11.084122Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:11.084131Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:11.084138Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:27:11.084149Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:27:11.084154Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:27:11.084176Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-05-29T15:27:11.084226Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:27:11.084239Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:27:11.084258Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:27:11.084267Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:27:11.084272Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:27:11.084277Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:27:11.084282Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 7 ... 2.630948Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [2:59:2106] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:27:12.630960Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [2:748:2624] 2025-05-29T15:27:12.631795Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [2:748:2624] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:27:12.631822Z node 2 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [2:748:2624] txid# 281474976715658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:27:12.631830Z node 2 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [2:748:2624] txid# 281474976715658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:27:12.632454Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [2:748:2624] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:27:12.632477Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [2:748:2624] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:27:12.632550Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [2:748:2624] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:27:12.632592Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [2:748:2624] HANDLE 
EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:27:12.632611Z node 2 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [2:748:2624] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:27:12.632682Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [2:748:2624] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:27:12.632972Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:27:12.633327Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [2:748:2624] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-05-29T15:27:12.633342Z node 2 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [2:748:2624] txid# 281474976715658 SEND to# [2:747:2623] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-05-29T15:27:12.634149Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [2:683:2579], Recipient [2:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:27:12.634163Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:27:12.655350Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [2:706:2594], Recipient [2:663:2568]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [2:706:2594] ServerId: [2:712:2600] } 2025-05-29T15:27:12.655392Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:27:12.784910Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [2:683:2579], Recipient [2:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:27:12.784942Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:27:12.785442Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:747:2623], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:27:12.818965Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [2:59:2106] Handle TEvProposeTransaction 2025-05-29T15:27:12.818999Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [2:59:2106] TxId# 281474976715659 ProcessProposeTransaction 2025-05-29T15:27:12.819024Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [2:817:2662] 2025-05-29T15:27:12.819863Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [2:817:2662] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:27:12.819896Z node 2 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [2:817:2662] txid# 281474976715659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:27:12.819902Z node 2 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [2:817:2662] txid# 281474976715659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:27:12.820217Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [2:817:2662] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:27:12.820237Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [2:817:2662] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:27:12.820308Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [2:817:2662] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:27:12.820351Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [2:817:2662] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:27:12.820370Z node 2 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [2:817:2662] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-05-29T15:27:12.820685Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [2:817:2662] txid# 281474976715659 HANDLE EvClientConnected 2025-05-29T15:27:12.820850Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [2:817:2662] txid# 281474976715659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', 
error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92}
2025-05-29T15:27:12.820887Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:817:2662] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:27:12.820898Z node 2 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [2:817:2662] txid# 281474976715659 SEND to# [2:747:2623] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48}
2025-05-29T15:27:12.850676Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:827:2671], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:27:12.851310Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=ODhiYmE2NmMtODc3MWVkMGEtMTEzN2U0Y2QtYTdlMTk0ODg=, ActorId: [2:731:2613], ActorState: ExecuteState, TraceId: 01jweaf3qpc39vm1487a8m191w, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13AC660C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C7A1A9)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x2630CE94)
NKikimr::NTestSuiteDataShardReadTableSnapshots::TTestCaseReadTableSplitFinished::Execute_(NUnitTest::TTestContext&)+2446 (0x139A240E)
NKikimr::NTestSuiteDataShardReadTableSnapshots::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139BE577)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7C05E)
NKikimr::NTestSuiteDataShardReadTableSnapshots::TCurrentTest::Execute()+481 (0x139BDDB1)
NUnitTest::TTestFactory::Execute()+803 (0x13C7C7D3)
NUnitTest::RunMain(int, char**)+3021 (0x13C8E37D)
??+0 (0x7F5A7E648D90)
__libc_start_main+128 (0x7F5A7E648E40)
_start+41 (0x12A01029)
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::CreateWithStoragePools [GOOD]
Test command err: ==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:26:55.271246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:26:55.271273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:26:55.271278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config:
StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:55.271283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:55.271289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:55.271294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:55.271303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:55.271317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:55.271417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:55.271486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:55.300089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:55.300115Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:55.300193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:55.302951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:55.302984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:55.303015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:55.305607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:55.305680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:55.305821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:55.305981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:55.306640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:55.306683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:55.306954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:55.306965Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:55.306997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:55.307005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:55.307010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:55.307027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:55.308184Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:55.333068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:55.333140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:55.333200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:55.333253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:55.333263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:55.335455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:55.335492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:55.335564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:55.335576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:55.335583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:55.335589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:55.336129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:55.336144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:55.336151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:55.336534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:55.336546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:55.336553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:55.336561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:55.337223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:55.337612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:55.337651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:55.337847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:55.337873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:55.337880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:55.337939Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
:27:15.980222Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:27:15.980236Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:27:15.980250Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1002 2025-05-29T15:27:15.980726Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:15.980734Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:27:15.980775Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:27:15.980800Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:15.980806Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [72:206:2207], at schemeshard: 72057594046678944, txId: 1002, path id: 2 2025-05-29T15:27:15.980814Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [72:206:2207], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2025-05-29T15:27:15.980854Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:27:15.980860Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1002:0 ProgressState 2025-05-29T15:27:15.980871Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1 2025-05-29T15:27:15.980874Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:27:15.980877Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1 2025-05-29T15:27:15.980879Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:27:15.980883Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2025-05-29T15:27:15.980887Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:27:15.980892Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1002:0 2025-05-29T15:27:15.980895Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1002:0 2025-05-29T15:27:15.980923Z node 72 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:27:15.980928Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1002, publications: 2, subscribers: 1 2025-05-29T15:27:15.980932Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-05-29T15:27:15.980936Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-05-29T15:27:15.981154Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:27:15.981171Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:27:15.981176Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2025-05-29T15:27:15.981182Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-05-29T15:27:15.981189Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:27:15.981536Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:27:15.981573Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:27:15.981581Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2025-05-29T15:27:15.981588Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-05-29T15:27:15.981594Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:27:15.981617Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2025-05-29T15:27:15.981623Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [72:305:2295] 
2025-05-29T15:27:15.982547Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-05-29T15:27:15.982636Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-05-29T15:27:15.982654Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-05-29T15:27:15.982661Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:306:2296] TestWaitNotification: OK eventTxId 1002 2025-05-29T15:27:15.982826Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:15.982897Z node 72 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 81us result status StatusSuccess 2025-05-29T15:27:15.983011Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:15.983084Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:15.983102Z node 72 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 21us result status StatusSuccess 2025-05-29T15:27:15.983162Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPopulatorTest::MakeDir >> TTxDataShardMiniKQL::CrossShard_5_AllToAll [GOOD] >> TPopulatorTest::MakeDir [GOOD] >> TTablesWithReboots::SimpleDropTableWithReboots2 [GOOD] >> TStorageBalanceTest::TestScenario2 [GOOD] |68.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |68.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction >> TTxDataShardMiniKQL::CrossShard_6_Local >> TPopulatorTestWithResets::UpdateAck >> TPopulatorTest::RemoveDir >> TTablesWithReboots::AlterTableFollowersWithReboots [GOOD] >> TStorageBalanceTest::TestScenario3 >> TPopulatorTest::Boot >> TPopulatorTest::RemoveDir [GOOD] >> TPopulatorTestWithResets::UpdateAck [GOOD] >> TPopulatorTest::Boot [GOOD] >> TTablesWithReboots::CreateWithRebootsAtCommit [GOOD] |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |68.8%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |68.9%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest |68.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir [GOOD] Test command err: 2025-05-29T15:27:17.286432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:17.286463Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TestModificationResults wait txId: 100 2025-05-29T15:27:17.326579Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:660: [1:96:2121] 
Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2109], cookie# 100, event size# 330, preserialized size# 51 2025-05-29T15:27:17.326624Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:677: [1:96:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-05-29T15:27:17.326978Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:17.326997Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:17.327224Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:660: [1:96:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:71:2109], cookie# 100, event size# 220, preserialized size# 2 2025-05-29T15:27:17.327231Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:677: [1:96:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-05-29T15:27:17.327252Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
populator.cpp:279: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-05-29T15:27:17.327263Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-05-29T15:27:17.327269Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:17.327278Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:17.327317Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:97:2122], cookie# 100 2025-05-29T15:27:17.327323Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:17.327328Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:17.327352Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:98:2123], cookie# 100 2025-05-29T15:27:17.327358Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:771: [1:96:2121] Ack update: ack to# [1:71:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-05-29T15:27:17.327364Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-05-29T15:27:17.327369Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2025-05-29T15:27:17.327375Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-05-29T15:27:17.327455Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:97:2122], cookie# 100 2025-05-29T15:27:17.327461Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2025-05-29T15:27:17.327486Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:98:2123], cookie# 100 2025-05-29T15:27:17.327490Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:771: [1:96:2121] Ack update: ack to# [1:71:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-05-29T15:27:17.327604Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:99:2124], cookie# 100 2025-05-29T15:27:17.327612Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:753: [1:96:2121] Ack for unknown update (already acked?): sender# [1:99:2124], cookie# 100 2025-05-29T15:27:17.327629Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:99:2124], cookie# 100 2025-05-29T15:27:17.327632Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:753: [1:96:2121] Ack for unknown update (already acked?): sender# [1:99:2124], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-05-29T15:27:17.328104Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:660: [1:96:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2109], cookie# 100, event size# 340, preserialized size# 56 2025-05-29T15:27:17.328115Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:677: [1:96:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-05-29T15:27:17.328135Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:17.328143Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 
Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:17.328148Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:17.328242Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:660: [1:96:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:71:2109], cookie# 100, event size# 225, preserialized size# 2 2025-05-29T15:27:17.328249Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:677: [1:96:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2025-05-29T15:27:17.328262Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 100 2025-05-29T15:27:17.328268Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 100 2025-05-29T15:27:17.328285Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2122], cookie# 100 2025-05-29T15:27:17.328291Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:17.328296Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:17.328301Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 100 2025-05-29T15:27:17.328318Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 1] Version: 4 }: sender# [1:98:2123], cookie# 100
2025-05-29T15:27:17.328323Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:771: [1:96:2121] Ack update: ack to# [1:71:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4
2025-05-29T15:27:17.328329Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 100
2025-05-29T15:27:17.328334Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 100
2025-05-29T15:27:17.328341Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100
2025-05-29T15:27:17.328412Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:99:2124], cookie# 100
2025-05-29T15:27:17.328421Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 100
2025-05-29T15:27:17.328428Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2122], cookie# 100
2025-05-29T15:27:17.328501Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2123], cookie# 100
2025-05-29T15:27:17.328508Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:771: [1:96:2121] Ack update: ack to# [1:71:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3
2025-05-29T15:27:17.328542Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:99:2124], cookie# 100
2025-05-29T15:27:17.328547Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:753: [1:96:2121] Ack for unknown update (already acked?): sender# [1:99:2124], cookie# 100
TestModificationResult got TxId: 100, wait until txId: 100
|68.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableMaxRows [FAIL]
Test command err:
2025-05-29T15:27:10.077862Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:27:10.077895Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:27:10.077906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001236/r3tmp/tmpjdbjmV/pdisk_1.dat 2025-05-29T15:27:10.183721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:27:10.201017Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:10.205261Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532429672531 != 1748532429672535 2025-05-29T15:27:10.253599Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:27:10.253929Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:27:10.254029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:10.254052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:10.264671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:10.337097Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:59:2106] Handle TEvProposeTransaction 2025-05-29T15:27:10.337120Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:27:10.337154Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-05-29T15:27:10.353305Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-05-29T15:27:10.353336Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:27:10.353494Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:27:10.353509Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:27:10.353558Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:640:2548] txid# 
281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:27:10.353592Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:27:10.353604Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:27:10.353646Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:27:10.353950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:27:10.354130Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:27:10.354138Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-05-29T15:27:10.367250Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:27:10.367448Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:27:10.367509Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:27:10.367558Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:27:10.375621Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:27:10.375806Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:27:10.375839Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:27:10.376021Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:27:10.376031Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:27:10.376038Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:27:10.376097Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:27:10.376115Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:27:10.376127Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:27:10.386420Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:27:10.390137Z node 1 
:TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:27:10.390212Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:27:10.390238Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:27:10.390243Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:27:10.390249Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:27:10.390254Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:27:10.390316Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:10.390324Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:10.390412Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:27:10.390435Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:27:10.390449Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:27:10.390456Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:27:10.390463Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:27:10.390468Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:27:10.390473Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:27:10.390478Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:27:10.390483Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:27:10.390587Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:10.390593Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:10.390600Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:27:10.390612Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:27:10.390617Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:27:10.390636Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-05-29T15:27:10.390682Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:27:10.390693Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:27:10.390709Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:27:10.390717Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:27:10.390722Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:27:10.390728Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:27:10.390732Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 7 ... 5:27:12.052121Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [2:59:2106] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:27:12.052129Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [2:748:2624] 2025-05-29T15:27:12.052744Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [2:748:2624] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:27:12.052757Z node 2 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [2:748:2624] txid# 281474976715658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:27:12.052761Z node 2 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [2:748:2624] txid# 281474976715658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:27:12.053159Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [2:748:2624] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:27:12.053172Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [2:748:2624] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:27:12.053220Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [2:748:2624] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:27:12.053244Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [2:748:2624] HANDLE 
EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:27:12.053256Z node 2 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [2:748:2624] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:27:12.053304Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [2:748:2624] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:27:12.053497Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:27:12.053793Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [2:748:2624] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-05-29T15:27:12.053806Z node 2 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [2:748:2624] txid# 281474976715658 SEND to# [2:747:2623] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-05-29T15:27:12.054477Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [2:683:2579], Recipient [2:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:27:12.054491Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:27:12.075406Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [2:706:2594], Recipient [2:663:2568]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [2:706:2594] ServerId: [2:712:2600] } 2025-05-29T15:27:12.075431Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:27:12.209913Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [2:683:2579], Recipient [2:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:27:12.209946Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:27:12.210528Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:747:2623], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:27:12.242307Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [2:59:2106] Handle TEvProposeTransaction 2025-05-29T15:27:12.242331Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [2:59:2106] TxId# 281474976715659 ProcessProposeTransaction 2025-05-29T15:27:12.242352Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [2:817:2662] 2025-05-29T15:27:12.243105Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [2:817:2662] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:27:12.243130Z node 2 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [2:817:2662] txid# 281474976715659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:27:12.243136Z node 2 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [2:817:2662] txid# 281474976715659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:27:12.243399Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [2:817:2662] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:27:12.243414Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [2:817:2662] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:27:12.243479Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [2:817:2662] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:27:12.243513Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [2:817:2662] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:27:12.243529Z node 2 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [2:817:2662] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-05-29T15:27:12.243857Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [2:817:2662] txid# 281474976715659 HANDLE EvClientConnected 2025-05-29T15:27:12.243994Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [2:817:2662] txid# 281474976715659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', 
error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:27:12.244026Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:817:2662] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:12.244036Z node 2 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [2:817:2662] txid# 281474976715659 SEND to# [2:747:2623] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48} 2025-05-29T15:27:12.255119Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:827:2671], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:27:12.255660Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=Y2IxNGNlNGItOTc5NDQ2YjMtYmYwNjBjZjEtNmQ3M2NkMjk=, ActorId: [2:731:2613], ActorState: ExecuteState, TraceId: 01jweaf35kccy7zkk6vh8xxmwn, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS)
Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x13AC660C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C7A1A9)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x2630CE94)
NKikimr::NTestSuiteDataShardReadTableSnapshots::TTestCaseReadTableMaxRows::Execute_(NUnitTest::TTestContext&)+2461 (0x139AE42D)
NKikimr::NTestSuiteDataShardReadTableSnapshots::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139BE577)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7C05E)
NKikimr::NTestSuiteDataShardReadTableSnapshots::TCurrentTest::Execute()+481 (0x139BDDB1)
NUnitTest::TTestFactory::Execute()+803 (0x13C7C7D3)
NUnitTest::RunMain(int, char**)+3021 (0x13C8E37D)
??+0 (0x7F3E08081D90)
__libc_start_main+128 (0x7F3E08081E40)
_start+41 (0x12A01029)
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::SimpleDropTableWithReboots2 [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:26:43.742137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:26:43.742159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:26:43.742164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config:
StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:43.742170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:43.742176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:43.742180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:43.742188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:43.742202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:43.742303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:43.742365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:43.754496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:43.754516Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:43.754599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:43.757159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:43.757194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:43.757223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:43.759543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:43.759633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:43.759752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:43.759933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:43.760595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:43.760637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:43.760866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:43.760873Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:43.760896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:43.760902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:43.760906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:43.760921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:43.762118Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:43.785674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:43.785739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:43.785816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:43.785869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:43.785880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:43.786520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:43.786543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:43.786599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:43.786610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:43.786616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:43.786622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:43.787139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:43.787152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:43.787157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:43.787502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:43.787512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:43.787518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:43.787525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:43.788133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:43.791097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:43.791145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:43.791347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:43.791379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:43.791387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:43.791450Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:27:17.403944Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:27:17.403948Z node 118 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:27:17.403952Z node 118 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:27:17.403957Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:27:17.404155Z node 118 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:27:17.404166Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:27:17.404170Z node 118 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:27:17.404175Z node 118 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:27:17.404181Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:27:17.404192Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-05-29T15:27:17.404645Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:27:17.404654Z node 118 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:27:17.404716Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:27:17.404742Z node 118 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:27:17.404746Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:27:17.404751Z node 118 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:27:17.404754Z node 118 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:27:17.404758Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-05-29T15:27:17.404762Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:27:17.404767Z node 118 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:27:17.404770Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:27:17.404790Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:27:17.405031Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:27:17.405227Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:27:17.406653Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 344 RawX2: 506806143254 } TabletId: 72075186233409546 State: 4 2025-05-29T15:27:17.406674Z node 118 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:27:17.406726Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 346 RawX2: 506806143255 } TabletId: 72075186233409547 State: 4 2025-05-29T15:27:17.406732Z node 118 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:27:17.407199Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:27:17.407313Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:27:17.407358Z node 118 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:27:17.407395Z node 118 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-05-29T15:27:17.407483Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:17.407533Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] 
was 2 Forgetting tablet 72075186233409546 2025-05-29T15:27:17.408385Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:27:17.408429Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2025-05-29T15:27:17.408612Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:27:17.408619Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:27:17.408632Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:27:17.408981Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:27:17.408992Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:27:17.409214Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:27:17.409223Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:27:17.409258Z node 118 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:27:17.409308Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:27:17.409316Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:27:17.409366Z node 118 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:27:17.409385Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:27:17.409390Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [118:557:2516] TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-05-29T15:27:17.409467Z node 118 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:27:17.409480Z node 118 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 2025-05-29T15:27:17.409539Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:17.409571Z node 118 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 39us result status StatusPathDoesNotExist 2025-05-29T15:27:17.409603Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2025-05-29T15:27:18.151308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:18.151342Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TestModificationResults wait txId: 100 2025-05-29T15:27:18.172976Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:660: [1:96:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2109], cookie# 100, event size# 330, preserialized size# 51 2025-05-29T15:27:18.173018Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:677: [1:96:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-05-29T15:27:18.173318Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 
72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:18.173337Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:18.173342Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:18.173504Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:660: [1:96:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:71:2109], cookie# 100, event size# 220, preserialized size# 2 2025-05-29T15:27:18.173510Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:677: [1:96:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 2025-05-29T15:27:18.174060Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:660: [1:96:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2109], 
cookie# 100, event size# 340, preserialized size# 56 2025-05-29T15:27:18.174070Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:677: [1:96:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 FAKE_COORDINATOR: Erasing txId 100 2025-05-29T15:27:18.174160Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:660: [1:96:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:71:2109], cookie# 100, event size# 225, preserialized size# 2 2025-05-29T15:27:18.174165Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:677: [1:96:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-05-29T15:27:18.194691Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:231: [1:97:2122] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-05-29T15:27:18.194726Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:243: [1:97:2122] Successful handshake: replica# [1:12:2059] 2025-05-29T15:27:18.194734Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:252: [1:97:2122] Resume sync: replica# [1:12:2059], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:27:18.194760Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:231: [1:98:2123] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-05-29T15:27:18.194767Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:243: [1:98:2123] Successful handshake: replica# [1:15:2062] 2025-05-29T15:27:18.194771Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:252: [1:98:2123] Resume sync: replica# [1:15:2062], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:27:18.194779Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:231: [1:99:2124] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-05-29T15:27:18.194783Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:243: [1:99:2124] Successful handshake: replica# [1:18:2065] 2025-05-29T15:27:18.194787Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:252: [1:99:2124] Resume sync: replica# [1:18:2065], fromPathId# [OwnerId: 72057594046678944, 
LocalPathId: 2] 2025-05-29T15:27:18.194803Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:528: [1:96:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:24339059:0] }: sender# [1:97:2122] 2025-05-29T15:27:18.194818Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:263: [1:97:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:96:2121] 2025-05-29T15:27:18.194839Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:622: [1:96:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:97:2122] 2025-05-29T15:27:18.194846Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 0 2025-05-29T15:27:18.194856Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:622: [1:96:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:97:2122] 2025-05-29T15:27:18.194862Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 0 2025-05-29T15:27:18.194869Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:528: [1:96:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:1099535966835:0] }: sender# [1:98:2123] 2025-05-29T15:27:18.194873Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 0 2025-05-29T15:27:18.194877Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:263: [1:98:2123] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:96:2121] 2025-05-29T15:27:18.194885Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:622: [1:96:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:98:2123] 2025-05-29T15:27:18.194889Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 0 2025-05-29T15:27:18.194893Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 0 2025-05-29T15:27:18.194901Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:622: [1:96:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:98:2123] 2025-05-29T15:27:18.194905Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 0 2025-05-29T15:27:18.194910Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:528: [1:96:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:2199047594611:0] }: sender# [1:99:2124] 2025-05-29T15:27:18.194914Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 0 2025-05-29T15:27:18.194918Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:263: [1:99:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:96:2121] 2025-05-29T15:27:18.194926Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:622: [1:96:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:99:2124] 2025-05-29T15:27:18.194929Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 0 2025-05-29T15:27:18.194933Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 0 2025-05-29T15:27:18.194939Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:622: [1:96:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:99:2124] 2025-05-29T15:27:18.194943Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 0 2025-05-29T15:27:18.194947Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 0 2025-05-29T15:27:18.194952Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:528: [1:96:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:24339059:0] }: sender# [1:97:2122] 2025-05-29T15:27:18.194957Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:263: [1:97:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2121] 2025-05-29T15:27:18.194972Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2122], cookie# 0 2025-05-29T15:27:18.194975Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:753: [1:96:2121] Ack for unknown update (already acked?): sender# [1:97:2122], cookie# 0 2025-05-29T15:27:18.194980Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:297: [1:97:2122] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-05-29T15:27:18.194985Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: 
[1:99:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 0 2025-05-29T15:27:18.194989Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2122], cookie# 100 2025-05-29T15:27:18.194995Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:528: [1:96:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:1099535966835:0] }: sender# [1:98:2123] 2025-05-29T15:27:18.194999Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:263: [1:98:2123] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2121] 2025-05-29T15:27:18.195004Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2122], cookie# 0 2025-05-29T15:27:18.195008Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:753: [1:96:2121] Ack for unknown update (already acked?): sender# [1:97:2122], cookie# 0 2025-05-29T15:27:18.195013Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:297: [1:98:2123] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-05-29T15:27:18.195019Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2122], cookie# 100 2025-05-29T15:27:18.195025Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2123], cookie# 0 2025-05-29T15:27:18.195029Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:753: [1:96:2121] Ack for unknown update (already acked?): sender# [1:98:2123], cookie# 0 2025-05-29T15:27:18.195034Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2123], cookie# 100 2025-05-29T15:27:18.195039Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:771: [1:96:2121] Ack update: ack to# [1:71:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-05-29T15:27:18.195045Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:771: [1:96:2121] Ack update: ack to# [1:71:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2025-05-29T15:27:18.195052Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:528: [1:96:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:2199047594611:0] }: sender# [1:99:2124] 2025-05-29T15:27:18.195056Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:263: [1:99:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2121] 
2025-05-29T15:27:18.195247Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2123], cookie# 0 2025-05-29T15:27:18.195254Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:753: [1:96:2121] Ack for unknown update (already acked?): sender# [1:98:2123], cookie# 0 2025-05-29T15:27:18.195257Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:297: [1:99:2124] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-05-29T15:27:18.195294Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2123], cookie# 100 2025-05-29T15:27:18.195298Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:771: [1:96:2121] Ack update: ack to# [1:71:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-05-29T15:27:18.195304Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:771: [1:96:2121] Ack update: ack to# [1:71:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-05-29T15:27:18.195338Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:99:2124], cookie# 0 2025-05-29T15:27:18.195341Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:753: [1:96:2121] Ack for unknown update (already acked?): sender# [1:99:2124], cookie# 0 2025-05-29T15:27:18.195371Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:99:2124], cookie# 100 2025-05-29T15:27:18.195376Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:753: [1:96:2121] Ack for unknown update (already acked?): sender# [1:99:2124], cookie# 100 2025-05-29T15:27:18.195383Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:99:2124], cookie# 0 2025-05-29T15:27:18.195387Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:753: [1:96:2121] Ack for unknown update (already acked?): sender# [1:99:2124], cookie# 0 2025-05-29T15:27:18.195432Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:99:2124], cookie# 100 2025-05-29T15:27:18.195435Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:753: [1:96:2121] Ack for unknown update (already acked?): sender# [1:99:2124], cookie# 100 TestWaitNotification: OK eventTxId 100 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] Test command err: 2025-05-29T15:27:18.272240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:18.272269Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 
TestModificationResults wait txId: 100 2025-05-29T15:27:18.295664Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:660: [1:96:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2109], cookie# 100, event size# 330, preserialized size# 51 2025-05-29T15:27:18.295696Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:677: [1:96:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-05-29T15:27:18.295951Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:18.295969Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:18.296164Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:660: [1:96:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:71:2109], cookie# 100, event size# 220, preserialized size# 2 2025-05-29T15:27:18.296177Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:677: [1:96:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, 
LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-05-29T15:27:18.296201Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-05-29T15:27:18.296210Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-05-29T15:27:18.296219Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:18.296229Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:18.296266Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:97:2122], cookie# 100 2025-05-29T15:27:18.296273Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:18.296279Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:18.296302Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:98:2123], cookie# 100 2025-05-29T15:27:18.296308Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:771: [1:96:2121] Ack update: ack to# [1:71:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-05-29T15:27:18.296314Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-05-29T15:27:18.296320Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2025-05-29T15:27:18.296326Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-05-29T15:27:18.296411Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:97:2122], cookie# 100 2025-05-29T15:27:18.296418Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 
2] Version: 2 }: sender# [1:18:2065], cookie# 100 2025-05-29T15:27:18.296445Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:98:2123], cookie# 100 2025-05-29T15:27:18.296449Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:771: [1:96:2121] Ack update: ack to# [1:71:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-05-29T15:27:18.296571Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:99:2124], cookie# 100 2025-05-29T15:27:18.296579Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:753: [1:96:2121] Ack for unknown update (already acked?): sender# [1:99:2124], cookie# 100 2025-05-29T15:27:18.296600Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:99:2124], cookie# 100 2025-05-29T15:27:18.296604Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:753: [1:96:2121] Ack for unknown update (already acked?): sender# [1:99:2124], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-05-29T15:27:18.297106Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:660: [1:96:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2109], cookie# 100, event size# 340, preserialized size# 56 2025-05-29T15:27:18.297119Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:677: [1:96:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-05-29T15:27:18.297142Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:18.297150Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:18.297155Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 100 2025-05-29T15:27:18.297266Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:660: [1:96:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanRes ... Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:97:2122], cookie# 101 2025-05-29T15:27:18.298698Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 101 2025-05-29T15:27:18.298705Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 101 2025-05-29T15:27:18.298711Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 101 2025-05-29T15:27:18.298729Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:98:2123], cookie# 101 2025-05-29T15:27:18.298764Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:771: [1:96:2121] Ack update: ack to# [1:71:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 5 2025-05-29T15:27:18.298837Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:99:2124], cookie# 101 2025-05-29T15:27:18.298845Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 101 2025-05-29T15:27:18.298860Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 101 2025-05-29T15:27:18.298866Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 101 2025-05-29T15:27:18.298914Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: 
[1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2122], cookie# 101 2025-05-29T15:27:18.298929Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2123], cookie# 101 2025-05-29T15:27:18.298934Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:771: [1:96:2121] Ack update: ack to# [1:71:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-05-29T15:27:18.298977Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:99:2124], cookie# 101 2025-05-29T15:27:18.298983Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:753: [1:96:2121] Ack for unknown update (already acked?): sender# [1:99:2124], cookie# 101 2025-05-29T15:27:18.299263Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:660: [1:96:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2109], cookie# 101, event size# 232, preserialized size# 2 2025-05-29T15:27:18.299277Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:677: [1:96:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 101, is deletion# false, version: 6 2025-05-29T15:27:18.299296Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 101 2025-05-29T15:27:18.299304Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 101 2025-05-29T15:27:18.299310Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 
}: sender# [1:96:2121], cookie# 101 FAKE_COORDINATOR: Erasing txId 101 2025-05-29T15:27:18.299353Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:660: [1:96:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/Root/DirB\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000002, drop txId: 101, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/Root/DirB" PathId: 2 LastExistedPrefixPath: "/Root" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944 }: sender# [1:71:2109], cookie# 101, event size# 384, preserialized size# 0 2025-05-29T15:27:18.299358Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:677: [1:96:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 101, is deletion# true, version: 0 2025-05-29T15:27:18.299369Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:12:2059], cookie# 101 2025-05-29T15:27:18.299376Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:15:2062], cookie# 101 2025-05-29T15:27:18.299385Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:97:2122], cookie# 101 2025-05-29T15:27:18.299392Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 101 2025-05-29T15:27:18.299398Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 101 2025-05-29T15:27:18.299404Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:18:2065], cookie# 101 2025-05-29T15:27:18.299435Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:98:2123], cookie# 101 2025-05-29T15:27:18.299440Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:771: [1:96:2121] Ack update: ack to# [1:71:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 6 2025-05-29T15:27:18.299446Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:97:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 
18446744073709551615 }: sender# [1:12:2059], cookie# 101 2025-05-29T15:27:18.299453Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:98:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:15:2062], cookie# 101 2025-05-29T15:27:18.299459Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:271: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2121], cookie# 101 2025-05-29T15:27:18.299528Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:99:2124], cookie# 101 2025-05-29T15:27:18.299536Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:279: [1:99:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:18:2065], cookie# 101 2025-05-29T15:27:18.299552Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:97:2122], cookie# 101 2025-05-29T15:27:18.299565Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:98:2123], cookie# 101 2025-05-29T15:27:18.299570Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:771: [1:96:2121] Ack update: ack to# [1:71:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 18446744073709551615 2025-05-29T15:27:18.299645Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:747: [1:96:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:99:2124], cookie# 101 2025-05-29T15:27:18.299652Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:753: [1:96:2121] Ack for unknown update (already acked?): sender# [1:99:2124], cookie# 101 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] Test command err: 2025-05-29T15:27:18.401272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:18.401302Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::AlterTableFollowersWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: 
[1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:26:38.395116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:38.395140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:38.395146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:38.395151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:38.395157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:38.395161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:38.395171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:38.395184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:38.395281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:38.395347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:38.412439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:38.412463Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:38.412573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:38.415692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:38.415727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:38.415764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:38.418692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:38.418803Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:38.418930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:38.419111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:38.419897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:38.419944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:38.420241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:38.420256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:38.420297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:38.420309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:38.420317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:38.420342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:38.421997Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:38.441892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:38.441933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:38.441972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:38.442014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:38.442024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:38.442552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:38.442569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:38.442607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:38.442616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:38.442623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:38.442628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:38.443118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:38.443135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:38.443141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:38.443615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:38.443625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:38.443631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:38.443637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:38.444285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:38.444652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:38.444681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:38.444863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:38.444885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:38.444891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:38.444941Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 04 MinStep: 5000006 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:27:18.170941Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2025-05-29T15:27:18.170972Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 1004 at step: 5000006 2025-05-29T15:27:18.171068Z node 133 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:18.171091Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 571230652523 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:18.171100Z node 133 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_table.cpp:359: TAlterTable TPropose operationId# 1004:0 HandleReply TEvOperationPlan, operationId: 1004:0, stepId: 5000006, at schemeshard: 72057594046678944 2025-05-29T15:27:18.171168Z node 133 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1004:0 128 -> 129 2025-05-29T15:27:18.171190Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000006 2025-05-29T15:27:18.172988Z node 133 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:18.173002Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:27:18.173061Z node 133 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:18.173068Z node 133 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [133:205:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2025-05-29T15:27:18.173187Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:18.173194Z node 133 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 1004:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1004 2025-05-29T15:27:18.173343Z node 133 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:27:18.173353Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:27:18.173360Z node 133 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:27:18.173364Z node 133 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 6 2025-05-29T15:27:18.173367Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:27:18.173382Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2025-05-29T15:27:18.173433Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6285: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000006 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 250 } } CommitVersion { Step: 5000006 TxId: 1004 } 2025-05-29T15:27:18.173437Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2025-05-29T15:27:18.173449Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000006 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 250 } } CommitVersion { Step: 5000006 TxId: 1004 } 2025-05-29T15:27:18.173458Z node 133 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000006 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 250 } } CommitVersion { 
Step: 5000006 TxId: 1004 } 2025-05-29T15:27:18.173510Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 331 RawX2: 571230652685 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:27:18.173513Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2025-05-29T15:27:18.173522Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 331 RawX2: 571230652685 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:27:18.173525Z node 133 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:27:18.173530Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 331 RawX2: 571230652685 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:27:18.173539Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:18.173542Z node 133 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:18.173544Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:27:18.173548Z node 133 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1004:0 129 -> 240 2025-05-29T15:27:18.174336Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:27:18.174364Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:18.174385Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:18.174475Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:18.174484Z node 133 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:27:18.174497Z node 133 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:27:18.174501Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:27:18.174506Z node 133 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:27:18.174509Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:27:18.174514Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2025-05-29T15:27:18.174520Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:27:18.174525Z node 133 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:27:18.174530Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:27:18.174556Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:27:18.175118Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:27:18.175128Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:27:18.175188Z node 133 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:27:18.175209Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:27:18.175214Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [133:551:2523] TestWaitNotification: OK eventTxId 1004 >> TTablesWithReboots::AlterTableSchemaWithReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::HandleErrorsCorrectly Test command err: cwd: /home/runner/.ya/build/build_root/ciyv/000af9/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk4 Trying to start YDB, gRPC: 24820, MsgBus: 23834 2025-05-29T15:27:14.312363Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889621529924985:2134];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:14.313525Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000af9/r3tmp/tmpm0Yqo4/pdisk_1.dat 2025-05-29T15:27:14.376275Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889621529924890:2079] 1748532434311766 != 1748532434311769 2025-05-29T15:27:14.378242Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24820, node 1 2025-05-29T15:27:14.388061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:27:14.388077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try 
to initialize from file: (empty maybe) 2025-05-29T15:27:14.388079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:27:14.388124Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23834 2025-05-29T15:27:14.414130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:14.414165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:14.419227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:27:14.446544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:14.453388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:14.539596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:14.583852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:14.603288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:14.759497Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889621529926521:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:14.759543Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:14.810948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:27:14.819680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:27:14.833287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:27:14.847326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:27:14.861980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:27:14.874409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:27:14.930220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:27:14.949056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889621529927176:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:14.949091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:14.949094Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889621529927181:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:14.950007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:27:14.958689Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889621529927183:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:27:15.039159Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889625824894530:3394] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:15.178589Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889625824894539:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:15.178855Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OGNmZjk3MjAtNzdkZDcxNy0yYWM0ODA5ZS02OWNlMzU1NQ==, ActorId: [1:7509889621529926494:2400], ActorState: ExecuteState, TraceId: 01jweaf604bkrvf47s03dwbcmn, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:27:15.180959Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
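[Note on the failure above: the trace shows the compile error surfacing as a non-success NYdb::TStatus, which the test helper AssertSuccessResult (kqp_ut_common.h:375) converts into a VERIFY-style abort of the whole test binary — hence the repeated issue text and the stack trace that follow, and a chunk-level failure rather than a single failed case. A minimal sketch of that assert-on-status pattern; TStatus here is a stand-in, not YDB's real NYdb::TStatus:

#include <cstdlib>
#include <iostream>
#include <string>

struct TStatus {                 // hypothetical stand-in for NYdb::TStatus
    bool Success = false;
    std::string Issues;          // flattened issue messages
    bool IsSuccess() const { return Success; }
};

void AssertSuccessResult(const TStatus& result) {
    if (!result.IsSuccess()) {
        // Matches the order seen in the log: the issues
        // (": Fatal: Execution, code: 1060", ...) are printed first,
        // then the process is taken down VERIFY-style.
        std::cerr << result.Issues << '\n';
        std::abort();
    }
}

int main() {
    AssertSuccessResult(TStatus{true, ""});   // passes silently
    // AssertSuccessResult(TStatus{false, ": Fatal: Execution, code: 1060"}); // would abort
}

Aborting instead of throwing is consistent with the "assertion failed in non-unittest thread" line above: a panic from a worker thread cannot be reported as an ordinary test failure, so the harness kills the process.]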
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A7AA65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A71A66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C137F6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B19E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B12E2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260D2F1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260D2F1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260D2F1C 8. /-S/util/thread/pool.h:71: Process @ 0x260D2F1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A823E9 10. /-S/util/thread/factory.h:15: Execute @ 0x13A80DD9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A80DD9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A7C24C 13. ??:0: ?? @ 0x7FA926942AC2 14. ??:0: ?? @ 0x7FA9269D484F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CreateWithRebootsAtCommit [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:26:46.586024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:46.586048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:46.586054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:46.586060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:46.586066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:46.586070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:46.586079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:46.586094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:46.586207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:46.586280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:46.601189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:46.601209Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:46.601314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:46.613582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:46.613613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:46.613645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:46.621926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:46.622014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:46.622149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:46.622356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:46.623261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:46.623302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:46.623559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:46.623569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:46.623604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:46.623611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:46.623618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 
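[Note: the bootstrap records above list schemeshard background queues configured as Timeout#/Rate#/InflightLimit# triples (e.g. BorrowedCompactionQueue: Timeout# 15s, InflightLimit# 10). Purely as an illustration — the names and structure below are ours, not the schemeshard's internals — an in-flight-limited work queue with that knob could look like:

#include <cstddef>
#include <functional>
#include <iostream>
#include <queue>

class TBoundedQueue {
public:
    explicit TBoundedQueue(std::size_t inflightLimit) : InflightLimit_(inflightLimit) {}

    // Enqueue an operation; it is started immediately if a slot is free.
    void Enqueue(std::function<void()> start) {
        Pending_.push(std::move(start));
        Pump();
    }

    // The owner calls this when a previously started operation finishes.
    void OnDone() {
        --Inflight_;
        Pump();
    }

private:
    void Pump() {
        while (Inflight_ < InflightLimit_ && !Pending_.empty()) {
            ++Inflight_;
            auto start = std::move(Pending_.front());
            Pending_.pop();
            start();  // kicks off (assumed asynchronous) work
        }
    }

    std::size_t InflightLimit_;
    std::size_t Inflight_ = 0;
    std::queue<std::function<void()>> Pending_;
};

int main() {
    TBoundedQueue q(/*inflightLimit=*/10);
    for (int i = 0; i < 3; ++i)
        q.Enqueue([i] { std::cout << "start op " << i << '\n'; });
    q.OnDone(); q.OnDone(); q.OnDone();  // pretend all three completed
}

Operations are assumed to start asynchronous work and report completion via OnDone(), which frees a slot and pumps the next pending operation; the Timeout#/Rate# knobs from the log would bound an operation's lifetime and admission rate on top of this.]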
2025-05-29T15:26:46.623638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:46.625118Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:46.648202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:46.648263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:46.648320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:46.648368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:46.648379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:46.648974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:46.648996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:46.649041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:46.649050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:46.649056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:46.649062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:46.649472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:46.649484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:46.649489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 
-> 128 2025-05-29T15:26:46.649856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:46.649868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:46.649883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:46.649890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:46.650569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:46.650976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:46.651010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:46.651192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:46.651217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:46.651224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:46.651280Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
SchemaChanged CollectSchemaChanged: false 2025-05-29T15:27:18.513632Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 354 RawX2: 463856470301 } Origin: 72075186233409547 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-05-29T15:27:18.513640Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409547, partId: 0 2025-05-29T15:27:18.513652Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 354 RawX2: 463856470301 } Origin: 72075186233409547 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-05-29T15:27:18.513657Z node 108 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:27:18.513673Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 354 RawX2: 463856470301 } Origin: 72075186233409547 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-05-29T15:27:18.513681Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:18.513685Z node 108 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:27:18.513690Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:27:18.513698Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1002:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-29T15:27:18.513704Z node 108 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1002:0 129 -> 240 2025-05-29T15:27:18.513903Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-05-29T15:27:18.513920Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-05-29T15:27:18.514489Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:27:18.514520Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:27:18.514533Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:27:18.514546Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: 
TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:27:18.514563Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-05-29T15:27:18.514570Z node 108 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1002:0 ProgressState 2025-05-29T15:27:18.514583Z node 108 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1 2025-05-29T15:27:18.514587Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:27:18.514591Z node 108 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1 2025-05-29T15:27:18.514594Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:27:18.514599Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2025-05-29T15:27:18.514604Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:27:18.514609Z node 108 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1002:0 2025-05-29T15:27:18.514613Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1002:0 2025-05-29T15:27:18.514683Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestWaitNotification wait txId: 1002 2025-05-29T15:27:18.515415Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-05-29T15:27:18.515425Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-05-29T15:27:18.515477Z node 108 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-05-29T15:27:18.515491Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-05-29T15:27:18.515496Z node 108 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [108:466:2426] TestWaitNotification: OK eventTxId 1002 2025-05-29T15:27:18.515554Z node 108 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:18.515596Z node 108 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 52us result status StatusSuccess 2025-05-29T15:27:18.515784Z node 108 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 
PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\003\000\004\000\000\000\377\377\377\177\000\000\000\200\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SelfJoin Test command err: cwd: /home/runner/.ya/build/build_root/ciyv/000aef/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk5 Trying to start YDB, gRPC: 16959, MsgBus: 16744 2025-05-29T15:27:14.328969Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889620773954109:2138];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:14.331473Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000aef/r3tmp/tmp3rQ9i9/pdisk_1.dat 2025-05-29T15:27:14.386580Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889620773954006:2079] 1748532434327849 != 1748532434327852 2025-05-29T15:27:14.388912Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16959, node 1 2025-05-29T15:27:14.400746Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:27:14.400782Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:27:14.400784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:27:14.400834Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16744 TClient is connected to server localhost:16744 WaitRootIsUp 'Root'... 
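[Note: the "WaitRootIsUp 'Root'..." line here (and the identical sequence in the earlier KqpScanSpilling test) is the harness polling the scheme root with Ls/describe calls until it answers SUCCESS, as the response below confirms. A sketch of that polling loop under assumed types — TTestClient and TLsResult are stand-ins, not the real test client API:

#include <chrono>
#include <iostream>
#include <stdexcept>
#include <string>
#include <thread>

struct TLsResult { bool Success = false; };           // hypothetical

// Stub client: pretends the scheme root becomes visible on the 3rd poll.
struct TTestClient {                                  // hypothetical
    int Calls = 0;
    TLsResult Ls(const std::string& /*path*/) { return {++Calls >= 3}; }
};

void WaitRootIsUp(TTestClient& client, const std::string& root, int attempts = 100) {
    for (int i = 0; i < attempts; ++i) {
        if (client.Ls(root).Success) {
            std::cout << "WaitRootIsUp '" << root << "' success.\n";
            return;
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }
    throw std::runtime_error("root '" + root + "' did not come up");
}

int main() {
    TTestClient client;
    WaitRootIsUp(client, "Root");
}

Bounding the retries keeps a broken bootstrap from hanging the whole chunk; a timeout here surfaces as a test-setup failure instead.]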
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:27:14.463878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:14.463910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:14.464885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:14.465079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:14.473389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:27:14.504771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:27:14.568005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:14.588006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:14.751776Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889620773955664:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:14.751804Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:14.822871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:27:14.831800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:27:14.840636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:27:14.854305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:27:14.910834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:27:14.924151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:27:14.938170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:27:14.955330Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889620773956319:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:14.955364Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:14.955437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889620773956324:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:14.956475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:27:14.960327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:27:14.960385Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889620773956326:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:27:15.029071Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889625068923673:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:15.149243Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889625068923689:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:15.149511Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzViMmYxNmQtYzJiODY5YTUtZjBlMWI2OGMtZTg0NTVkZDI=, ActorId: [1:7509889620773955638:2401], ActorState: ExecuteState, TraceId: 01jweaf60acrhgsh9txvdztk45, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:27:15.154596Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A7AA65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A71A66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C137F6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B19E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B12E2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260D2F1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260D2F1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260D2F1C 8. /-S/util/thread/pool.h:71: Process @ 0x260D2F1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A823E9 10. /-S/util/thread/factory.h:15: Execute @ 0x13A80DD9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A80DD9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A7C24C 13. ??:0: ?? @ 0x7F735CF28AC2 14. ??:0: ?? @ 0x7F735CFBA84F |68.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |68.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |68.9%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/runtime/unittest >> KqpScanLogs::WideCombine+EnabledLogs Test command err: cwd: /home/runner/.ya/build/build_root/ciyv/000ae2/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk2 Trying to start YDB, gRPC: 4628, MsgBus: 32437 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000ae2/r3tmp/tmpEes5Fo/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4628, node 1 TClient is connected to server localhost:32437 TClient is connected to server localhost:32437 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... VERIFY failed (2025-05-29T15:27:15.779306Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A7AA65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A71A66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C137F6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B19E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B12E2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260D2F1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260D2F1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260D2F1C 8. /-S/util/thread/pool.h:71: Process @ 0x260D2F1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A823E9 10. /-S/util/thread/factory.h:15: Execute @ 0x13A80DD9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A80DD9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A7C24C 13. ??:0: ?? @ 0x7FF9DC5BCAC2 14. ??:0: ?? @ 0x7FF9DC64E84F >> AutoConfig::GetASPoolsith1CPU [GOOD] >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SpillingPragmaParseError Test command err: cwd: /home/runner/.ya/build/build_root/ciyv/000ae4/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk9 Trying to start YDB, gRPC: 10318, MsgBus: 61643 2025-05-29T15:27:14.567065Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889620960734201:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:14.567311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000ae4/r3tmp/tmpiV2oVA/pdisk_1.dat 2025-05-29T15:27:14.623390Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10318, node 1 2025-05-29T15:27:14.647459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:27:14.647471Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:27:14.647473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:27:14.647516Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:61643 2025-05-29T15:27:14.700654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:14.700690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:14.701915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61643 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:27:14.724903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:27:14.732049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:27:14.743571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:14.825586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:14.845905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:14.862010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:14.993131Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889620960735797:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:14.993167Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:15.072416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:27:15.093789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:27:15.111507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:27:15.124910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:27:15.140096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:27:15.155798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:27:15.170277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:27:15.199094Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889625255703751:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:15.199131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:15.199333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889625255703756:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:15.200374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:27:15.206441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:27:15.206565Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889625255703758:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:27:15.285351Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889625255703809:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:15.416797Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889625255703818:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:15.417399Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODM1MWIzYzItOTMwZjQ3ZC1hNTVkNWY0Mi1jMGJhMTgyNw==, ActorId: [1:7509889620960735779:2401], ActorState: ExecuteState, TraceId: 01jweaf67wb5b6dhrqpcdc30nx, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:27:15.425885Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A7AA65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A71A66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C137F6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B19E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B12E2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260D2F1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260D2F1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260D2F1C 8. /-S/util/thread/pool.h:71: Process @ 0x260D2F1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A823E9 10. /-S/util/thread/factory.h:15: Execute @ 0x13A80DD9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A80DD9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A7C24C 13. ??:0: ?? @ 0x7FAF8A860AC2 14. ??:0: ?? @ 0x7FAF8A8F284F |69.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest |69.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::AlterTableSchemaWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:26:41.769149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:41.769175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:41.769180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:41.769186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:41.769191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:41.769194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:41.769203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:41.769218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:41.769330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:41.769418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:41.781786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:41.781811Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:41.781906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:41.785730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:41.785760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:41.785803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:41.788444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:41.788512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:41.788622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:41.788787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:41.794477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:41.794528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:41.794815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:41.794829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:41.794862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:41.794871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:41.794877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:41.794894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:41.798059Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:41.831618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:41.831692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:41.831757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:41.831809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:41.831821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:41.832603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:41.832625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:41.832667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:41.832676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:41.832680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:41.832683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:41.833054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:41.833061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:41.833065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:41.833298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:41.833305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:41.833309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:41.833314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:41.833793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:41.834154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:41.834182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:41.834327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:41.834344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:41.834351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:41.834394Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
actionResult> execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 166 } } CommitVersion { Step: 5000005 TxId: 1004 } 2025-05-29T15:27:19.140465Z node 124 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 166 } } CommitVersion { Step: 5000005 TxId: 1004 } 2025-05-29T15:27:19.140584Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 349 RawX2: 532575947036 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:27:19.140591Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2025-05-29T15:27:19.140607Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 349 RawX2: 532575947036 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:27:19.140614Z node 124 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:27:19.140623Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 349 RawX2: 532575947036 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:27:19.140636Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:19.140641Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1014: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-05-29T15:27:19.140691Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 352 RawX2: 532575947037 } Origin: 72075186233409547 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:27:19.140697Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409547, partId: 0 2025-05-29T15:27:19.140709Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 352 RawX2: 532575947037 } Origin: 72075186233409547 State: 2 TxId: 1004 Step: 0 
Generation: 2 2025-05-29T15:27:19.140714Z node 124 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:27:19.140722Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 352 RawX2: 532575947037 } Origin: 72075186233409547 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:27:19.140731Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:19.140736Z node 124 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:19.140741Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:27:19.140747Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1004:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-29T15:27:19.140752Z node 124 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1004:0 129 -> 240 2025-05-29T15:27:19.141491Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:27:19.141691Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:19.141835Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:19.141873Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:19.141888Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:19.141987Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:19.141996Z node 124 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:27:19.142011Z node 124 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:27:19.142016Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:27:19.142022Z node 124 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:27:19.142026Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 
ready parts: 1/1 2025-05-29T15:27:19.142031Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2025-05-29T15:27:19.142037Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:27:19.142044Z node 124 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:27:19.142049Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:27:19.142082Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:27:19.142866Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:27:19.142876Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:27:19.142943Z node 124 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:27:19.142960Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:27:19.142966Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [124:585:2545] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:27:19.143037Z node 124 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:19.143085Z node 124 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 56us result status StatusSuccess 2025-05-29T15:27:19.143205Z node 124 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key2" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key1" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "add_2" Type: "Uint64" TypeId: 4 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnIds: 2 KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |69.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] |69.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsith1CPU [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SelfJoinQueryService Test command err: cwd: /home/runner/.ya/build/build_root/ciyv/000ad6/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk6 Trying to start YDB, gRPC: 63900, MsgBus: 14895 2025-05-29T15:27:15.322715Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889625032443619:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:15.338876Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000ad6/r3tmp/tmpFjU43l/pdisk_1.dat 2025-05-29T15:27:15.405009Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:15.405108Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889625032443454:2079] 1748532435319153 != 1748532435319156 TServer::EnableGrpc on GrpcPort 63900, node 1 2025-05-29T15:27:15.432038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:27:15.432053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:27:15.432055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:27:15.432105Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14895 2025-05-29T15:27:15.471055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:15.471085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:15.472054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TClient is connected to server localhost:14895 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:27:15.490050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:15.494211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:27:15.499723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:15.573845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:15.635933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:15.650132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:15.766630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889625032445093:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:15.766659Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:15.805277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:27:15.814158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:27:15.826928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:27:15.841023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:27:15.854801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:27:15.869368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:27:15.883854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:27:15.899954Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889625032445744:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:15.899981Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:15.899988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889625032445749:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:15.900947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:27:15.910181Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889625032445751:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:27:16.009847Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889629327413098:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:16.130810Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889629327413114:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:16.132995Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Mjk4NjA1ODUtMzBiNmRhYWEtYTRkMDI4Mi05ZDc5NDVk, ActorId: [1:7509889625032445066:2400], ActorState: ExecuteState, TraceId: 01jweaf6xvcbm6tm54ddxmf9yz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:27:16.135475Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A7AA65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A71A66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C137F6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B19E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B12E2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260D2F1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260D2F1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260D2F1C 8. /-S/util/thread/pool.h:71: Process @ 0x260D2F1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A823E9 10. /-S/util/thread/factory.h:15: Execute @ 0x13A80DD9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A80DD9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A7C24C 13. ??:0: ?? @ 0x7FB19C746AC2 14. ??:0: ?? @ 0x7FB19C7D884F >> TTxDataShardMiniKQL::CrossShard_6_Local [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx >> TSchemeshardCompactionQueueTest::EnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] |69.0%| [TA] $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart |69.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] >> TSchemeshardCompactionQueueTest::UpdateBelowThreshold [GOOD] >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit >> TUserAccountServiceTest::Get >> FolderServiceTest::TFolderService >> FolderServiceTest::TFolderServiceTransitional >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups >> TSchemeshardCompactionQueueTest::EnqueueBelowSearchHeightThreshold [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueBelowRowDeletesThreshold [GOOD] >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] |69.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest |69.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] >> TAccessServiceTest::PassRequestId >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueSinglePartedShardWithMemData [GOOD] >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] >> TAccessServiceTest::Authenticate >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:115:2057] recipient: 
[1:110:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:115:2057] recipient: [1:110:2140] Leader for TabletID 9437184 is [1:130:2153] sender: [1:133:2057] recipient: [1:110:2140] 2025-05-29T15:27:04.297626Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:110:2140], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:27:04.311103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:04.311127Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:04.312894Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:110:2140], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:27:04.313019Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-05-29T15:27:04.313075Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:27:04.321878Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:110:2140], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:27:04.325586Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:27:04.325620Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:27:04.325768Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-05-29T15:27:04.325790Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 9437184 2025-05-29T15:27:04.325797Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 9437184 2025-05-29T15:27:04.325865Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:27:04.325877Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:27:04.325890Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 9437184 persisting started state actor id [1:195:2153] in generation 2 Leader for TabletID 9437184 is [1:130:2153] sender: [1:210:2057] recipient: [1:14:2061] 2025-05-29T15:27:04.359973Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:27:04.370413Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 9437184 2025-05-29T15:27:04.370498Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:27:04.370524Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 9437184, actorId: [1:215:2212] 2025-05-29T15:27:04.370529Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 9437184 2025-05-29T15:27:04.370534Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-05-29T15:27:04.370538Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-05-29T15:27:04.370592Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:04.370609Z node 1 
:TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:04.370669Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-05-29T15:27:04.370694Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-05-29T15:27:04.370721Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-05-29T15:27:04.370727Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:27:04.370733Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-05-29T15:27:04.371609Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-05-29T15:27:04.371630Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-05-29T15:27:04.371636Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-05-29T15:27:04.371652Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-05-29T15:27:04.371687Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:211:2209], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:04.371694Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:04.371701Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 9437184, clientId# [1:209:2208], serverId# [1:211:2209], sessionId# [0:0:0] 2025-05-29T15:27:04.372222Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:100:2134], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 100 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-05-29T15:27:04.372238Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:27:04.372255Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:27:04.372284Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-05-29T15:27:04.372295Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-05-29T15:27:04.372308Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 1 at tablet 9437184 2025-05-29T15:27:04.372316Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-05-29T15:27:04.372320Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-05-29T15:27:04.372325Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1] at 9437184 to execution unit StoreSchemeTx 
2025-05-29T15:27:04.372329Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-05-29T15:27:04.372401Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-05-29T15:27:04.372406Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-05-29T15:27:04.372410Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1] at 9437184 to execution unit FinishPropose 2025-05-29T15:27:04.372414Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-05-29T15:27:04.372426Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1] at 9437184 is DelayComplete 2025-05-29T15:27:04.372429Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-05-29T15:27:04.372433Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-05-29T15:27:04.372436Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-05-29T15:27:04.372441Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1832: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-05-29T15:27:04.389011Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-05-29T15:27:04.389036Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-05-29T15:27:04.389043Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-05-29T15:27:04.389056Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-05-29T15:27:04.389082Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 9437184 not sending time cast registration request in state WaitScheme 2025-05-29T15:27:04.389203Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:221:2218], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:04.389212Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:04.389219Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 9437184, clientId# [1:220:2217], serverId# [1:221:2218], sessionId# [0:0:0] 2025-05-29T15:27:04.389240Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269287424, Sender [1:100:2134], Recipient [1:130:2153]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-05-29T15:27:04.389245Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3147: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-05-29T15:27:04.389289Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1790: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-05-29T15:27:04.389297Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1805: Execution status for [2:1] at 9437184 is Executed 2025-05-29T15:27:04.389304Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [2:1] at 9437184 
executing on unit WaitForPlan 2025-05-29T15:27:04.389309Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [2:1] at 9437184 to execution unit PlanQueue 2025-05-29T15:27:04.390104Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 100 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-05-29T15:27:04.390123Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-05-29T15:27:04.390190Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:04.390197Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:04.390205Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-05-29T15:27:04.390212Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:27:04.390217Z node 1 :TX_DATASHARD TRACE: datashard_pipelin ... shard_impl.h:3129: StateWork, received event# 268830214, Sender [21:288:2270], Recipient [21:234:2226]: NKikimrTabletBase.TEvGetCounters 2025-05-29T15:27:20.508492Z node 21 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269551617, Sender [21:100:2134], Recipient [21:234:2226]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 100 RawX2: 90194315350 } 2025-05-29T15:27:20.508506Z node 21 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateWork, processing event TEvDataShard::TEvGetShardState 2025-05-29T15:27:20.508561Z node 21 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [21:293:2274], Recipient [21:234:2226]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:20.508566Z node 21 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:20.508570Z node 21 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 9437184, clientId# [21:292:2273], serverId# [21:293:2274], sessionId# [0:0:0] 2025-05-29T15:27:20.508612Z node 21 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [21:100:2134], Recipient [21:234:2226]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 100 RawX2: 90194315350 } TxBody: "\032\324\002\037\002\006Arg\005\205\n\205\000\205\004?\000\205\002\202\0047\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\004\01057$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020T\001\005?\026)\211\n?\024\206\203\004?\024? ?\024\203\004\020Fold\000)\211\002?\"\206? \034Collect\000)\211\006?(? \203\004\203\0024ListFromRange\000\003? \000\003?,\003\022z\003?.\004\007\010\000\n\003?\024\000)\251\000? \002\000\004)\251\000?\024\002\000\002)\211\006?$\203\005@? 
?\024\030Invoke\000\003?F\006Add?@?D\001\006\002\014\000\007\016\000\003\005?\010?\014\006\002?\006?R\000\003?\014?\014\037/ \0018\000" TxId: 2 ExecLevel: 0 Flags: 0 2025-05-29T15:27:20.508618Z node 21 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:27:20.508639Z node 21 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:27:20.508855Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2025-05-29T15:27:20.508875Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:2] at 9437184 is Executed 2025-05-29T15:27:20.508880Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2025-05-29T15:27:20.508885Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2025-05-29T15:27:20.508889Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2025-05-29T15:27:20.508898Z node 21 :TX_DATASHARD TRACE: datashard.cpp:2365: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-05-29T15:27:20.508911Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 9437184 2025-05-29T15:27:20.508918Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:2] at 9437184 is Executed 2025-05-29T15:27:20.508920Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-05-29T15:27:20.508923Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-05-29T15:27:20.508925Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-05-29T15:27:20.508931Z node 21 :TX_DATASHARD TRACE: datashard.cpp:2365: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-05-29T15:27:20.508937Z node 21 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:124: Operation [0:2] at 9437184 requested 132374 more memory 2025-05-29T15:27:20.508940Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:2] at 9437184 is Restart 2025-05-29T15:27:20.508993Z node 21 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:27:20.508996Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-05-29T15:27:20.508998Z node 21 :TX_DATASHARD TRACE: datashard.cpp:2365: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-05-29T15:27:20.509421Z node 21 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:175: Operation [0:2] at 9437184 exceeded memory limit 132502 and requests 1060016 more for the next try 2025-05-29T15:27:20.509446Z node 21 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 2 released its data 2025-05-29T15:27:20.509451Z node 21 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1862: Execution status for [0:2] at 9437184 is Restart 2025-05-29T15:27:20.509482Z node 21 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:27:20.509486Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-05-29T15:27:20.509560Z node 21 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 2 at 9437184 restored its data 2025-05-29T15:27:20.509565Z node 21 :TX_DATASHARD TRACE: datashard.cpp:2365: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-05-29T15:27:20.509641Z node 21 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:175: Operation [0:2] at 9437184 exceeded memory limit 1192518 and requests 9540144 more for the next try 2025-05-29T15:27:20.509651Z node 21 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 2 released its data 2025-05-29T15:27:20.509655Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:2] at 9437184 is Restart 2025-05-29T15:27:20.509677Z node 21 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:27:20.509681Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-05-29T15:27:20.509726Z node 21 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 2 at 9437184 restored its data 2025-05-29T15:27:20.509730Z node 21 :TX_DATASHARD TRACE: datashard.cpp:2365: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-05-29T15:27:20.509800Z node 21 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:175: Operation [0:2] at 9437184 exceeded memory limit 10732662 and requests 85861296 more for the next try 2025-05-29T15:27:20.509809Z node 21 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 2 released its data 2025-05-29T15:27:20.509812Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:2] at 9437184 is Restart 2025-05-29T15:27:20.509826Z node 21 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:27:20.509828Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-05-29T15:27:20.509872Z node 21 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 2 at 9437184 restored its data 2025-05-29T15:27:20.509876Z node 21 :TX_DATASHARD TRACE: datashard.cpp:2365: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-05-29T15:27:20.578085Z node 21 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:306: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-05-29T15:27:20.578128Z node 21 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:312: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-05-29T15:27:20.578152Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:2] at 9437184 is 
ExecutedNoMoreRestarts 2025-05-29T15:27:20.578160Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-05-29T15:27:20.578167Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:2] at 9437184 to execution unit FinishPropose 2025-05-29T15:27:20.578172Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-05-29T15:27:20.578211Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2025-05-29T15:27:20.578216Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-05-29T15:27:20.578219Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-05-29T15:27:20.578223Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-05-29T15:27:20.578237Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:2] at 9437184 is Executed 2025-05-29T15:27:20.578241Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-05-29T15:27:20.578245Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [0:2] at 9437184 has finished 2025-05-29T15:27:20.589150Z node 21 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-05-29T15:27:20.589175Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-05-29T15:27:20.589186Z node 21 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-05-29T15:27:20.589211Z node 21 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-05-29T15:27:20.589443Z node 21 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [21:298:2279], Recipient [21:234:2226]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:20.589457Z node 21 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:20.589464Z node 21 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 9437184, clientId# [21:297:2278], serverId# [21:298:2279], sessionId# [0:0:0] 2025-05-29T15:27:20.589493Z node 21 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268830214, Sender [21:296:2277], Recipient [21:234:2226]: NKikimrTabletBase.TEvGetCounters >> TTablesWithReboots::AlterCopyWithReboots [GOOD] >> TUserAccountServiceTest::Get [GOOD] |69.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] |69.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] |69.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] >> TAccessServiceTest::PassRequestId [GOOD] >> 
TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TAccessServiceTest::Authenticate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: 2025-05-29T15:27:20.958726Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889648797657775:2196];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:20.960371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000db4/r3tmp/tmpcSQGge/pdisk_1.dat 2025-05-29T15:27:21.019295Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889648797657618:2079] 1748532440958185 != 1748532440958188 2025-05-29T15:27:21.019441Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:18514 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:27:21.061056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:21.061091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:21.062258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-29T15:27:21.095575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 >> TBoardSubscriberTest::SimpleSubscriber ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command err: 2025-05-29T15:27:21.114967Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889650782437283:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:21.114992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d61/r3tmp/tmp4P5mCR/pdisk_1.dat 2025-05-29T15:27:21.175817Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889650782437263:2079] 1748532441114830 != 1748532441114833 2025-05-29T15:27:21.179895Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:2697 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:27:21.217307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:21.217328Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:21.218412Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:21.243717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:27:21.248579Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:81: [12367c51a330]{trololo} Connect to grpc://localhost:25830 2025-05-29T15:27:21.248911Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:120: [12367c51a330]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-05-29T15:27:21.250800Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:107: [12367c51a330]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } >> TServiceAccountServiceTest::IssueToken [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2025-05-29T15:27:21.204125Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889652241069376:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:21.204588Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d53/r3tmp/tmpezlevC/pdisk_1.dat 2025-05-29T15:27:21.269885Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889652241069356:2079] 1748532441203945 != 1748532441203948 2025-05-29T15:27:21.271953Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:3612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:27:21.346082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:21.346135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:21.347187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:21.347889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:27:21.353576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:27:21.357049Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:81: [72493ce36b70] Connect to grpc://localhost:24087 2025-05-29T15:27:21.357522Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:120: [72493ce36b70] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2025-05-29T15:27:21.363332Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [72493ce36b70] Status 7 Permission Denied 2025-05-29T15:27:21.363535Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:120: [72493ce36b70] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2025-05-29T15:27:21.364109Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:107: [72493ce36b70] Response AuthenticateResponse { subject { user_account { id: "1234" } } } |69.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] >> FolderServiceTest::TFolderService [GOOD] |69.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] |69.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2025-05-29T15:27:20.955432Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889650082369022:2214];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:20.998006Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000de8/r3tmp/tmpjQJKXY/pdisk_1.dat 2025-05-29T15:27:21.031195Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:21.031380Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889650082368833:2079] 1748532440951264 != 1748532440951267 TClient is connected to server localhost:25907 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:27:21.067643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:21.072225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:27:21.072798Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:81: [117dfc35e070] Connect to grpc://localhost:25733 2025-05-29T15:27:21.075163Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:120: [117dfc35e070] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-05-29T15:27:21.077032Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [117dfc35e070] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25733: Failed to connect to remote host: Connection refused 2025-05-29T15:27:21.077505Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:120: [117dfc35e070] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-05-29T15:27:21.077665Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [117dfc35e070] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25733: Failed to connect to remote host: Connection refused 2025-05-29T15:27:21.101637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:21.101665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:21.102843Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:22.077905Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:120: [117dfc35e070] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-05-29T15:27:22.078612Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [117dfc35e070] Status 5 Not Found 2025-05-29T15:27:22.078722Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:120: [117dfc35e070] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-05-29T15:27:22.079527Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:107: [117dfc35e070] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } >> TNetClassifierTest::TestInitFromBadlyFormattedFile ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::AlterCopyWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 
is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:26:39.050851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:39.050875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:39.050881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:39.050887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:39.050893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:39.050898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:39.050907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:39.050922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:39.051027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:39.051098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:39.065634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:39.065656Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:39.065775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:39.068707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:39.068738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:39.068775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:39.071700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:39.071778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:39.071910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:39.072119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:39.072889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:39.072931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:39.073155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:39.073165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:39.073196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:39.073202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:39.073206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:39.073219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:39.074504Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:39.094027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:39.094102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.094174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:39.094227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:39.094238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.096857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-05-29T15:26:39.096899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:39.096968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.096981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:39.096987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:39.096994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:39.097565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.097577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:39.097582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:39.097937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.097947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.097954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:39.097964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:39.098637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:39.099100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:39.099146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:39.099352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:39.099378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep 
Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:39.099384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:39.099441Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... LAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:27:21.242040Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:27:21.242399Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1006:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.242451Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1006:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.242666Z node 132 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:21.242674Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:21.242712Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:27:21.242755Z node 132 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:21.242761Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [132:206:2207], at schemeshard: 72057594046678944, txId: 1006, path id: 1 2025-05-29T15:27:21.242768Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [132:206:2207], at schemeshard: 72057594046678944, txId: 1006, path id: 4 2025-05-29T15:27:21.242872Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.242879Z node 132 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 1006:0 ProgressState at tablet: 72057594046678944 2025-05-29T15:27:21.242896Z node 132 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 1006:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.242901Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1006:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-29T15:27:21.242907Z node 132 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1006:0 129 -> 240 2025-05-29T15:27:21.243028Z node 132 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:27:21.243040Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:27:21.243045Z node 132 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2025-05-29T15:27:21.243050Z node 132 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-29T15:27:21.243056Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:27:21.243243Z node 132 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:27:21.243258Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:27:21.243263Z node 132 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2025-05-29T15:27:21.243268Z node 132 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:27:21.243272Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:27:21.243283Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is published: true 2025-05-29T15:27:21.243910Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.243928Z node 132 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 1006:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:27:21.244023Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:27:21.244060Z node 132 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1006:0 progress is 1/1 2025-05-29T15:27:21.244066Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-05-29T15:27:21.244073Z node 132 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1006:0 progress is 1/1 
2025-05-29T15:27:21.244076Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-05-29T15:27:21.244082Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: true 2025-05-29T15:27:21.244086Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-05-29T15:27:21.244092Z node 132 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1006:0 2025-05-29T15:27:21.244097Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1006:0 2025-05-29T15:27:21.244119Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:27:21.244414Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-05-29T15:27:21.244765Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-05-29T15:27:21.246102Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 481 RawX2: 566935685522 } TabletId: 72075186233409547 State: 4 2025-05-29T15:27:21.246126Z node 132 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:27:21.246545Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:27:21.246644Z node 132 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-05-29T15:27:21.247313Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:27:21.247380Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2025-05-29T15:27:21.247488Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:27:21.247495Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:27:21.247509Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:27:21.248189Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:27:21.248206Z node 132 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:27:21.248309Z node 132 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2025-05-29T15:27:21.248367Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2025-05-29T15:27:21.248375Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2025-05-29T15:27:21.248461Z node 132 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2025-05-29T15:27:21.248480Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2025-05-29T15:27:21.248485Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [132:726:2686] TestWaitNotification: OK eventTxId 1006 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-05-29T15:27:21.248550Z node 132 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:27:21.248560Z node 132 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2025-05-29T15:27:20.955064Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889648944869713:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:20.955105Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d9f/r3tmp/tmpj9EXxa/pdisk_1.dat 2025-05-29T15:27:21.020642Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:4477 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:27:21.052661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:21.056918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:21.056946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:21.058061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:21.062921Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:81: [13837c756b70] Connect to grpc://localhost:30280 2025-05-29T15:27:21.064952Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:120: [13837c756b70] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-05-29T15:27:21.066795Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [13837c756b70] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30280: Failed to connect to remote host: Connection refused 2025-05-29T15:27:21.071755Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:120: [13837c756b70] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-05-29T15:27:21.072012Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [13837c756b70] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30280: Failed to connect to remote host: Connection refused 2025-05-29T15:27:22.072240Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:120: [13837c756b70] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-05-29T15:27:22.073073Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [13837c756b70] Status 5 Not Found 2025-05-29T15:27:22.073167Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:120: [13837c756b70] Request ListFoldersRequest { id: "i_am_exists" } 2025-05-29T15:27:22.073645Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:107: [13837c756b70] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } |69.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest |69.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2025-05-29T15:27:21.345723Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889653753898474:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:21.345804Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d42/r3tmp/tmpVkXR4Z/pdisk_1.dat 2025-05-29T15:27:21.405065Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:21.405281Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889653753898455:2079] 1748532441345613 != 1748532441345616 TClient is connected to server localhost:23342 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:27:21.437065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:21.448782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:21.448810Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:21.449932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:21.843352Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889651929251644:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:21.843375Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d42/r3tmp/tmpYIDyAQ/pdisk_1.dat 2025-05-29T15:27:21.857839Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509889651929251616:2079] 1748532441843271 != 1748532441843274 2025-05-29T15:27:21.859064Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:6612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:27:21.946551Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:21.946582Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:21.947669Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:21.948312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... |69.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] |69.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest |69.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] >> TNetClassifierTest::TestInitFromFile >> TNetClassifierTest::TestInitFromRemoteSource |69.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher >> TBoardSubscriberTest::ReconnectReplica ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] Test command err: 2025-05-29T15:27:22.570050Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889658296720249:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:22.570080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cf5/r3tmp/tmpjMG27p/pdisk_1.dat 2025-05-29T15:27:22.635407Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:22.635647Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889658296720227:2079] 1748532442569847 != 1748532442569850 2025-05-29T15:27:22.641442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001cf5/r3tmp/yandexmOKtd3.tmp 2025-05-29T15:27:22.641455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001cf5/r3tmp/yandexmOKtd3.tmp 2025-05-29T15:27:22.641499Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:344: invalid NetData format 2025-05-29T15:27:22.641505Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: /home/runner/.ya/build/build_root/ciyv/001cf5/r3tmp/yandexmOKtd3.tmp 2025-05-29T15:27:22.641542Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:27:22.673095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:22.673130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-05-29T15:27:22.674255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected |69.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest |69.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect >> TBoardSubscriberTest::NotAvailableByShutdown ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:25:50.452079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:25:50.452103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:50.452121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:25:50.452127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:25:50.452137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:25:50.452141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:25:50.452151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:50.452167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:25:50.452255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:25:50.452325Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:25:50.477059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:25:50.477086Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:50.477184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:25:50.479866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:25:50.479897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:25:50.479926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:25:50.484779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:25:50.484889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:25:50.485009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:50.485186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:25:50.485821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:50.485878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:25:50.486093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:25:50.486105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:50.486137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:25:50.486144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:25:50.486150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:25:50.486167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:25:50.495956Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: 
[1:239:2058] recipient: [1:15:2062] 2025-05-29T15:25:50.531362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:25:50.531436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.531495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:25:50.531544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:25:50.531554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.534453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:50.534504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:25:50.534549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.534559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:25:50.534565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:25:50.534570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:25:50.535071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.535085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:25:50.535090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:25:50.535434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.535445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.535451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:50.535459Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:25:50.536124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:25:50.536747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:25:50.536783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:25:50.536956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:50.536983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:25:50.537000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:50.537071Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1003 2025-05-29T15:27:21.355098Z node 224 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:21.355120Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:27:21.355163Z node 224 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:21.355169Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [224:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-05-29T15:27:21.355264Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.355272Z node 224 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:787: [72057594046678944] TSyncHive, operationId 1003:0, ProgressState, NeedSyncHive: 0 2025-05-29T15:27:21.355277Z node 224 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:0 240 -> 240 2025-05-29T15:27:21.355375Z node 224 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:27:21.355388Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:27:21.355393Z node 224 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:27:21.355398Z node 224 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-05-29T15:27:21.355404Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2025-05-29T15:27:21.355420Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-05-29T15:27:21.356238Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.356255Z node 224 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-05-29T15:27:21.356280Z node 224 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:27:21.356289Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:27:21.356296Z node 224 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:27:21.356299Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:27:21.356305Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-05-29T15:27:21.356319Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [224:325:2315] message: TxId: 1003 2025-05-29T15:27:21.356328Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:27:21.356335Z node 224 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:27:21.356339Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:27:21.356382Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-05-29T15:27:21.356618Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:27:21.356998Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:27:21.357010Z node 224 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [224:605:2516] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:27:21.357109Z node 224 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:21.357144Z node 224 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 43us result status StatusSuccess 2025-05-29T15:27:21.357204Z node 224 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:21.357286Z node 224 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-05-29T15:27:21.357310Z node 224 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 38us result status StatusSuccess 2025-05-29T15:27:21.357339Z node 224 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2025-05-29T15:27:21.357377Z node 224 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:21.357388Z node 224 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 12us result status StatusSuccess 2025-05-29T15:27:21.357425Z node 224 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 
SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |69.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest |69.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] |69.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] |69.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] |69.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> AutoConfig::GetServicePoolsWith1CPU [GOOD] >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] |69.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] >> TNetClassifierTest::TestInitFromFile [GOOD] |69.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] |69.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] >> AutoConfig::GetASPoolsWith2CPUs [GOOD] >> FolderServiceTest::TFolderServiceAdapter >> KqpScanLogs::GraceJoin+EnabledLogs |69.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] |69.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD] |69.2%| [TA] $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TServiceAccountServiceTest::Get [GOOD] >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-false [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] Test command err: 2025-05-29T15:27:23.205945Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889660366039393:2059];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:23.205973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cbc/r3tmp/tmpmWCScG/pdisk_1.dat 2025-05-29T15:27:23.269923Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889660366039375:2079] 1748532443205745 != 1748532443205748 2025-05-29T15:27:23.270194Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:23.277616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001cbc/r3tmp/yandexO66d4T.tmp 2025-05-29T15:27:23.277632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001cbc/r3tmp/yandexO66d4T.tmp 2025-05-29T15:27:23.277709Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001cbc/r3tmp/yandexO66d4T.tmp 2025-05-29T15:27:23.277756Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:27:23.345464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:23.345499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:23.346696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected |69.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith2CPUs [GOOD] >> FolderServiceTest::TFolderServiceAdapter [GOOD] >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling >> KqpScanLogs::GraceJoin-EnabledLogs >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling |69.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest |69.2%| [TA] $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2025-05-29T15:27:24.983168Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889666433466066:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:24.983535Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d35/r3tmp/tmpWfOcGT/pdisk_1.dat 2025-05-29T15:27:25.052516Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889666433466029:2079] 1748532444981552 != 1748532444981555 2025-05-29T15:27:25.054029Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:8353 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:27:25.126247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:25.126275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:25.127337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:27:25.127351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-29T15:27:25.132430Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:81: [538e7d0205f0] Connect to grpc://localhost:16639 2025-05-29T15:27:25.132624Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:120: [538e7d0205f0] Request ListFoldersRequest { id: "i_am_exists" } 2025-05-29T15:27:25.134823Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:107: [538e7d0205f0] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2025-05-29T15:27:25.135162Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:81: [538e7d027930] Connect to grpc://localhost:4193 2025-05-29T15:27:25.135311Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:120: [538e7d027930] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-05-29T15:27:25.137094Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:107: [538e7d027930] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2025-05-29T15:27:25.137255Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:120: [538e7d027930] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-05-29T15:27:25.137838Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [538e7d027930] Status 5 Not Found 2025-05-29T15:27:25.137995Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:120: [538e7d0205f0] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-05-29T15:27:25.138528Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [538e7d0205f0] Status 5 Not Found >> KqpScanLogs::WideCombine-EnabledLogs >> TSchemeShardSysViewTest::EmptyName >> TSchemeShardSysViewTest::CreateExistingSysView >> TSchemeShardSysViewTest::AsyncCreateDifferentSysViews >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] >> TSchemeShardSysViewTest::AsyncCreateSameSysView >> TTablesWithReboots::SimultaneousDropForceDrop [GOOD] >> TSchemeShardSysViewTest::EmptyName [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] Test command err: 2025-05-29T15:27:24.982832Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889666298476218:2266];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:24.982857Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d41/r3tmp/tmp0FtT6B/pdisk_1.dat 2025-05-29T15:27:25.039139Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:25.039332Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889666298475990:2079] 1748532444980945 != 1748532444980948 TClient is connected to server localhost:4592 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:27:25.084012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:25.084042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:25.085138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:25.118342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:25.439050Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889667346677350:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:25.439086Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d41/r3tmp/tmpTrNWKr/pdisk_1.dat 2025-05-29T15:27:25.453468Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:25.453519Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509889667346677329:2079] 1748532445438921 != 1748532445438924 TClient is connected to server localhost:24407 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-05-29T15:27:25.543838Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:27:25.543869Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:27:25.544147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-05-29T15:27:25.544778Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
>> TSchemeShardSysViewTest::CreateExistingSysView [GOOD]
>> TKesusTest::TestUnregisterProxy
>> TSchemeShardSysViewTest::ReadOnlyMode
>> TSchemeShardSysViewTest::AsyncCreateDifferentSysViews [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD]
Test command err:
2025-05-29T15:27:23.407775Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889661059495395:2067];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:27:23.408246Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001caa/r3tmp/tmpwvTTIX/pdisk_1.dat
2025-05-29T15:27:23.476866Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:27:23.479769Z node 1 :HTTP ERROR: http_proxy_outgoing.cpp:113: (#26,[::1]:19664) connection closed with error: Connection refused
2025-05-29T15:27:23.479911Z node 1 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused
2025-05-29T15:27:23.490272Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:27:23.490283Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:27:23.490285Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:27:23.490331Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
2025-05-29T15:27:23.549486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:27:23.549514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:27:23.550556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
>> TSchemeShardSysViewTest::AsyncCreateSameSysView [GOOD]
>> TKesusTest::TestKesusConfig
|69.3%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::EmptyName [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:27:26.050363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:27:26.050389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:27:26.050394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:27:26.050400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:27:26.050417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:27:26.050422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:27:26.050432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:27:26.050445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:27:26.050567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:27:26.050638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:27:26.063908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:27:26.063930Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:27:26.066834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:27:26.066973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:27:26.067023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:27:26.068921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:27:26.069119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:27:26.069254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:27:26.069306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:27:26.069800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:27:26.069852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:27:26.070149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:27:26.070160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:27:26.070183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:27:26.070191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:27:26.070198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:27:26.070236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.071785Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:27:26.092613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:27:26.092696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.092766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:27:26.092812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:27:26.092825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.093542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:27:26.093572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:27:26.093642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.093654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:27:26.093660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:27:26.093665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:27:26.094175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.094191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:27:26.094196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:27:26.094607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.094619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.094627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:27:26.094635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:27:26.095336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:27:26.095734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:27:26.095767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:27:26.095909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:27:26.095950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:27:26.095958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:27:26.096015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:27:26.096023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:27:26.096057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:27:26.096070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:27:26.096470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:27:26.096478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:27:26.096514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... eshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:27:26.102649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:27:26.102657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002, at schemeshard: 72057594046678944
2025-05-29T15:27:26.102686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 101:0 128 -> 240
2025-05-29T15:27:26.102716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:27:26.102725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-05-29T15:27:26.103077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-05-29T15:27:26.103097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
FAKE_COORDINATOR: Erasing txId 101
2025-05-29T15:27:26.103355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:27:26.103363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:27:26.103394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-05-29T15:27:26.103425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:27:26.103430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1
2025-05-29T15:27:26.103436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2
2025-05-29T15:27:26.103480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.103487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState
2025-05-29T15:27:26.103500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1
2025-05-29T15:27:26.103505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-05-29T15:27:26.103510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1
2025-05-29T15:27:26.103514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-05-29T15:27:26.103518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false
2025-05-29T15:27:26.103523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-05-29T15:27:26.103528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0
2025-05-29T15:27:26.103532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0
2025-05-29T15:27:26.103545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-05-29T15:27:26.103551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 2, subscribers: 0
2025-05-29T15:27:26.103558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5
2025-05-29T15:27:26.103561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3
2025-05-29T15:27:26.103689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101
2025-05-29T15:27:26.103701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101
2025-05-29T15:27:26.103706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101
2025-05-29T15:27:26.103710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5
2025-05-29T15:27:26.103714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-05-29T15:27:26.103826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101
2025-05-29T15:27:26.103836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101
2025-05-29T15:27:26.103840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101
2025-05-29T15:27:26.103845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3
2025-05-29T15:27:26.103848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-05-29T15:27:26.103858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0
2025-05-29T15:27:26.104672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-05-29T15:27:26.104777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
TestModificationResult got TxId: 101, wait until txId: 101
TestWaitNotification wait txId: 101
2025-05-29T15:27:26.104826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion
2025-05-29T15:27:26.104832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101
2025-05-29T15:27:26.104893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-05-29T15:27:26.104913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-05-29T15:27:26.104921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:306:2296]
TestWaitNotification: OK eventTxId 101
TestModificationResults wait txId: 102
2025-05-29T15:27:26.105495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView CreateSysView { Name: "" Type: EPartitionStats } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:27:26.105523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_sysview.cpp:116: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/, opId: 102:0
2025-05-29T15:27:26.105529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_sysview.cpp:122: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/, opId: 102:0, sysViewDescription: Name: "" Type: EPartitionStats
2025-05-29T15:27:26.105541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/.sys/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp:148, at schemeshard: 72057594046678944
2025-05-29T15:27:26.106068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/.sys/\', error: path part shouldn\'t be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp:148" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:27:26.106101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/.sys/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp:148, operation: CREATE SYSTEM VIEW, path: /MyRoot/.sys/
TestModificationResult got TxId: 102, wait until txId: 102
TestWaitNotification wait txId: 102
2025-05-29T15:27:26.106148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion
2025-05-29T15:27:26.106154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102
2025-05-29T15:27:26.106206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944
2025-05-29T15:27:26.106222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-05-29T15:27:26.106227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:313:2303]
TestWaitNotification: OK eventTxId 102
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::CreateExistingSysView [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:27:26.092380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:27:26.092403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:27:26.092407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:27:26.092411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:27:26.092424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:27:26.092427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:27:26.092433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:27:26.092443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:27:26.092567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:27:26.092619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:27:26.104356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:27:26.104376Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:27:26.106729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:27:26.106863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:27:26.106913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:27:26.109628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:27:26.110042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:27:26.110177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:27:26.110229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:27:26.110708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:27:26.110772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:27:26.111064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:27:26.111079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:27:26.111107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:27:26.111116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:27:26.111123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:27:26.111164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.112658Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:27:26.127713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:27:26.127804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.127874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:27:26.127921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:27:26.127933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.128668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:27:26.128690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:27:26.128747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.128757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:27:26.128763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:27:26.128769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:27:26.129133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.129143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:27:26.129148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:27:26.129524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.129540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.129546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:27:26.129554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:27:26.130086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:27:26.130465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:27:26.130496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:27:26.130631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:27:26.130649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:27:26.130654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:27:26.130694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:27:26.130699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:27:26.130725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:27:26.130733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:27:26.131147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:27:26.131154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:27:26.131190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... on and all the parts is done, operation id: 102:0
2025-05-29T15:27:26.140752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0
2025-05-29T15:27:26.140764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-05-29T15:27:26.140769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 102, publications: 2, subscribers: 0
2025-05-29T15:27:26.140774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 4
2025-05-29T15:27:26.140778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2
2025-05-29T15:27:26.140880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102
2025-05-29T15:27:26.140892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102
2025-05-29T15:27:26.140896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102
2025-05-29T15:27:26.140901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4
2025-05-29T15:27:26.140907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-05-29T15:27:26.140993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102
2025-05-29T15:27:26.141002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102
2025-05-29T15:27:26.141007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102
2025-05-29T15:27:26.141011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2
2025-05-29T15:27:26.141015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-05-29T15:27:26.141023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0
2025-05-29T15:27:26.141505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-05-29T15:27:26.141759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
TestModificationResult got TxId: 102, wait until txId: 102
TestWaitNotification wait txId: 102
2025-05-29T15:27:26.141797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion
2025-05-29T15:27:26.141802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102
2025-05-29T15:27:26.141859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944
2025-05-29T15:27:26.141887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-05-29T15:27:26.141892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:331:2321]
TestWaitNotification: OK eventTxId 102
2025-05-29T15:27:26.141961Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:27:26.141987Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 35us result status StatusSuccess
2025-05-29T15:27:26.142048Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
TestModificationResults wait txId: 103
2025-05-29T15:27:26.142708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView CreateSysView { Name: "new_sys_view" Type: ENodes } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:27:26.142756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_sysview.cpp:116: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/new_sys_view, opId: 103:0
2025-05-29T15:27:26.142765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_sysview.cpp:122: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/new_sys_view, opId: 103:0, sysViewDescription: Name: "new_sys_view" Type: ENodes
2025-05-29T15:27:26.142789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/.sys/new_sys_view', error: path exist, request doesn't accept it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeSysView, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp:148, at schemeshard: 72057594046678944
2025-05-29T15:27:26.143228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path exist, request doesn\'t accept it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeSysView, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp:148" TxId: 103 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 102, at schemeshard: 72057594046678944
2025-05-29T15:27:26.143258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/.sys/new_sys_view', error: path exist, request doesn't accept it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeSysView, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp:148, operation: CREATE SYSTEM VIEW, path: /MyRoot/.sys/new_sys_view
TestModificationResult got TxId: 103, wait until txId: 103
TestWaitNotification wait txId: 103
2025-05-29T15:27:26.143306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion
2025-05-29T15:27:26.143312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103
2025-05-29T15:27:26.143371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944
2025-05-29T15:27:26.143387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult
2025-05-29T15:27:26.143392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:339:2329]
TestWaitNotification: OK eventTxId 103
2025-05-29T15:27:26.143451Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:27:26.143472Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 26us result status StatusSuccess
2025-05-29T15:27:26.143517Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncCreateDifferentSysViews [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:27:26.129521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:27:26.129547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:27:26.129553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:27:26.129558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:27:26.129571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:27:26.129575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:27:26.129585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:27:26.129599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:27:26.129710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:27:26.129791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:27:26.142689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:27:26.142713Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:27:26.145664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:27:26.145793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:27:26.145851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:27:26.149109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:27:26.149392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:27:26.149571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:27:26.149635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:27:26.150297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:27:26.150355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:27:26.150659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:27:26.150674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:27:26.150701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:27:26.150710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:27:26.150716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:27:26.150781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.152523Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:27:26.167702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:27:26.167774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.167842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:27:26.167880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:27:26.167887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.168733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:27:26.168765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:27:26.168836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.168848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:27:26.168854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:27:26.168860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:27:26.169408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.169424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:27:26.169431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:27:26.169920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.169934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.169941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:27:26.169949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:27:26.170677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:27:26.171225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:27:26.171278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:27:26.171484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:27:26.171514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:27:26.171523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:27:26.171589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:27:26.171599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:27:26.171634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:27:26.171649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:27:26.172140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:27:26.172151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:27:26.172203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... 94046678944, LocalPathId: 4]
2025-05-29T15:27:26.189433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-05-29T15:27:26.189460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944
2025-05-29T15:27:26.189467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 103:0 ProgressState
2025-05-29T15:27:26.189477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1
2025-05-29T15:27:26.189481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-05-29T15:27:26.189485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1
2025-05-29T15:27:26.189488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-05-29T15:27:26.189493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false
2025-05-29T15:27:26.189499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-05-29T15:27:26.189503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0
2025-05-29T15:27:26.189507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0
2025-05-29T15:27:26.189518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2
2025-05-29T15:27:26.189523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 103, publications: 2, subscribers: 0
2025-05-29T15:27:26.189527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 5
2025-05-29T15:27:26.189530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 4], 2
2025-05-29T15:27:26.189647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-05-29T15:27:26.189656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:27:26.189660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 103, path id: 2
2025-05-29T15:27:26.189669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 103,
path id: 4 2025-05-29T15:27:26.189790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:27:26.189799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:27:26.189802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:27:26.189805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-05-29T15:27:26.189808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:27:26.190117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:27:26.190142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:27:26.190148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:27:26.190154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2025-05-29T15:27:26.190159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:27:26.190176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-05-29T15:27:26.190781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:27:26.191143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 102 2025-05-29T15:27:26.191237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:27:26.191247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-05-29T15:27:26.191264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:27:26.191268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:27:26.191372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:27:26.191392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:27:26.191397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:348:2338] 2025-05-29T15:27:26.191405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:27:26.191424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:27:26.191427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:348:2338] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 102 2025-05-29T15:27:26.191487Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/sys_view_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:26.191514Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/sys_view_1" took 37us result status StatusSuccess 2025-05-29T15:27:26.191598Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/sys_view_1" PathDescription { Self { Name: "sys_view_1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SysViewDescription { Name: "sys_view_1" Type: EPartitionStats } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:26.191686Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/sys_view_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2025-05-29T15:27:26.191703Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/sys_view_2" took 19us result status StatusSuccess 2025-05-29T15:27:26.191739Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/sys_view_2" PathDescription { Self { Name: "sys_view_2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SysViewDescription { Name: "sys_view_2" Type: ENodes } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TKesusTest::TestUnregisterProxy [GOOD]
>> TKesusTest::TestUnregisterProxyBadGeneration
|69.3%| [TA] $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncCreateSameSysView [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:27:26.188887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:27:26.188914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:26.188919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:27:26.188925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:27:26.188942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:27:26.188946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:27:26.188957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548:
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:26.188970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:27:26.189085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:27:26.189152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:27:26.199257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:26.199277Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:26.201464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:27:26.201625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:27:26.201671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:27:26.203490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:27:26.203642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:27:26.203775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:26.203826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:27:26.204293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:26.204342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:27:26.204618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:26.204628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:26.204652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:27:26.204660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:26.204666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:27:26.204704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:27:26.206071Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for 
TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:27:26.228311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:27:26.228403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:26.228476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:27:26.228522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:27:26.228534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:26.229449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:26.229476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:27:26.229563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:26.229575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:27:26.229581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:27:26.229587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:27:26.230036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:26.230047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:27:26.230052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:27:26.230420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:26.230432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:26.230438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-05-29T15:27:26.230446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:27:26.231200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:27:26.231631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:27:26.231672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:27:26.231867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:26.231892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:26.231900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:26.231961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:27:26.231969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:26.232003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:27:26.232015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:27:26.232423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:26.232432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:26.232476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
NATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-05-29T15:27:26.244727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:26.244744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:26.244750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:45: [72057594046678944] TCreateSysView::TPropose, opId: 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003 2025-05-29T15:27:26.244771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:0 128 -> 240 2025-05-29T15:27:26.244792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:27:26.244798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:27:26.245175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-05-29T15:27:26.245556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:26.245565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:27:26.245592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:27:26.245610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:26.245616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-29T15:27:26.245640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-05-29T15:27:26.245694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:27:26.245702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-29T15:27:26.245715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:27:26.245720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 
ready parts: 1/1 2025-05-29T15:27:26.245727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:27:26.245731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:27:26.245762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-05-29T15:27:26.245769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:27:26.245775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:27:26.245780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:27:26.245795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:27:26.245801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-05-29T15:27:26.245807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-05-29T15:27:26.245811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:27:26.245945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:27:26.245959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:27:26.245965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:27:26.245970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-05-29T15:27:26.245975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:27:26.246120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:27:26.246133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:27:26.246138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at 
schemeshard: 72057594046678944, txId: 102 2025-05-29T15:27:26.246143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:27:26.246148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:27:26.246158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-05-29T15:27:26.246836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:27:26.247133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 102 2025-05-29T15:27:26.247199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:27:26.247207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-05-29T15:27:26.247224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:27:26.247228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:27:26.247311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:27:26.247332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:27:26.247345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:27:26.247351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:335:2325] 2025-05-29T15:27:26.247360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:27:26.247364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:335:2325] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-05-29T15:27:26.247451Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:26.247482Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 41us result status StatusSuccess 2025-05-29T15:27:26.247561Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSysViewTest::ReadOnlyMode [GOOD]
>> TKesusTest::TestSessionTimeoutAfterDetach
>> TKesusTest::TestUnregisterProxyBadGeneration [GOOD]
>> TKesusTest::TestSessionTimeoutAfterUnregister
>> TKesusTest::TestKesusConfig [GOOD]
>> TKesusTest::TestLockNotFound
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::SimultaneousDropForceDrop [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:26:45.958650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:45.958671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:45.958677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:45.958683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418:
OperationsProcessing config: using default configuration 2025-05-29T15:26:45.958689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:45.958693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:45.958703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:45.958718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:45.958845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:45.958921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:45.971773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:45.971789Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:45.971856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:45.974044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:45.974066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:45.974093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:45.976237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:45.976307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:45.976430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:45.976570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:45.977098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:45.977127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:45.977293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:45.977299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:45.977318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:45.977323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:45.977328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:45.977346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:45.978382Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:45.996837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:45.996911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:45.996966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:45.997015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:45.997026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:45.997809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:45.997837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:45.997886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:45.997896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:45.997901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:45.997907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 
1:0 2 -> 3 2025-05-29T15:26:45.998487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:45.998506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:45.998514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:45.999051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:45.999063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:45.999067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:45.999072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:45.999866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:46.000499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:46.000548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:46.000759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:46.000792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:46.000802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:46.000864Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-05-29T15:27:25.946816Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:27:25.946821Z node 138 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:27:25.946824Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:27:25.946844Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:27:25.947008Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:27:25.947235Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:27:25.948490Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 333 RawX2: 592705489167 } TabletId: 72075186233409546 State: 4 2025-05-29T15:27:25.948512Z node 138 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:27:25.948866Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:27:25.948958Z node 138 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:27:25.949013Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:25.949069Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409546 2025-05-29T15:27:25.949607Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:27:25.949615Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:27:25.949631Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:27:25.950451Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:27:25.950466Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:27:25.950527Z node 
138 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:27:25.950589Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:27:25.950595Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 TestWaitNotification wait txId: 1004 2025-05-29T15:27:25.950609Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:27:25.950613Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:27:25.951458Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpForceDropUnsafe Drop { Id: 3 } } TxId: 1004 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:27:25.951485Z node 138 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_unsafe.cpp:150: TDropForceUnsafe Propose, path: /, pathId: 3, opId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:25.951491Z node 138 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_drop_unsafe.cpp:165: UNSAFE DELETION IS CALLED. TDropForceUnsafe is UNSAFE operation. Usually it is called for deleting user's DB (tenant). But it could be triggered by administrator for special emergency cases. And there is that case. I hope you are aware of the problems with it. 1: Shared transactions among the tables could be broken if one of the tables is force dropped. Dependent transactions on other tables could be blocked forever. 2: Loans are going to be lost. Force dropped tablets are never return loans. Some tablets would be waiting for borrowed blocks forever. 
Details: path: /, pathId: 3, opId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:25.951508Z node 138 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1004:1, propose status:StatusNameConflict, reason: Check failed: path: '', error: path is empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp:175, at schemeshard: 72057594046678944 2025-05-29T15:27:25.951659Z node 138 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:27:25.951736Z node 138 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:27:25.952036Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1004, response: Status: StatusNameConflict Reason: "Check failed: path: \'\', error: path is empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp:175" TxId: 1004 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:25.952054Z node 138 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1004, subject: , status: StatusNameConflict, reason: Check failed: path: '', error: path is empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp:175, operation: DROP PATH UNSAFE, path: 2025-05-29T15:27:25.952080Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:27:25.952086Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [138:488:2460] 2025-05-29T15:27:25.952106Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:27:25.952110Z node 138 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [138:488:2460] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-05-29T15:27:25.952190Z node 138 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:27:25.952203Z node 138 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 2025-05-29T15:27:25.952267Z node 138 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:25.952293Z node 138 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 31us result status StatusPathDoesNotExist 2025-05-29T15:27:25.952327Z node 138 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), 
source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:27:25.952374Z node 138 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:25.952394Z node 138 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 21us result status StatusSuccess 2025-05-29T15:27:25.952468Z node 138 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:27:26.377428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 
600.000000s, MaxRate# 1 2025-05-29T15:27:26.377457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:26.377463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:27:26.377468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:27:26.377485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:27:26.377490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:27:26.377499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:26.377514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:27:26.377633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:27:26.377695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:27:26.392100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:26.392128Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:26.395360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:27:26.395504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:27:26.395573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:27:26.397623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:27:26.397772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:27:26.397925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:26.397992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:27:26.398520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:26.398579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:27:26.398885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:26.398896Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:26.398913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:27:26.398920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:26.398924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:27:26.398956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:27:26.400279Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:27:26.420372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:27:26.420483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:26.420557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:27:26.420600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:27:26.420609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:26.421357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:26.421379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:27:26.421433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:26.421441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:27:26.421445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:27:26.421450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:27:26.421758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:26.421767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:27:26.421771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:27:26.422059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:26.422067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:26.422071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:26.422077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:27:26.422559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:27:26.422978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:27:26.423012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:27:26.423175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:26.423198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:26.423205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:26.423256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:27:26.423264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:26.423300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:27:26.423313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:27:26.423798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:26.423809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:26.423862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 78944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-05-29T15:27:26.555473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:27:26.555494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-05-29T15:27:26.556457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-05-29T15:27:26.556509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:27:26.556602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000003 2025-05-29T15:27:26.556734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:26.556761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:26.556770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:45: [72057594046678944] TCreateSysView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003 2025-05-29T15:27:26.556808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 103:0 128 -> 240 2025-05-29T15:27:26.556843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:27:26.556857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 FAKE_COORDINATOR: Erasing txId 103 2025-05-29T15:27:26.557333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-05-29T15:27:26.557343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:27:26.557380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:27:26.557398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:26.557403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:440:2397], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-05-29T15:27:26.557410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:440:2397], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-05-29T15:27:26.557420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:27:26.557427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 103:0 ProgressState 2025-05-29T15:27:26.557440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:27:26.557445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:27:26.557450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:27:26.557454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:27:26.557458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-05-29T15:27:26.557464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:27:26.557469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:27:26.557473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:27:26.557487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:27:26.557493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-05-29T15:27:26.557498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-05-29T15:27:26.557501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:27:26.557717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:27:26.557734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:27:26.557740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:27:26.557746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-05-29T15:27:26.557750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:27:26.557850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:27:26.557859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-05-29T15:27:26.557864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-05-29T15:27:26.557868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:27:26.557871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:27:26.557896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-05-29T15:27:26.558553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:27:26.558809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-05-29T15:27:26.558883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:27:26.558892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:27:26.558981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:27:26.559006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 
2025-05-29T15:27:26.559011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:474:2428] TestWaitNotification: OK eventTxId 103 2025-05-29T15:27:26.559100Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:26.559144Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 57us result status StatusSuccess 2025-05-29T15:27:26.559232Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TKesusTest::TestLockNotFound [GOOD] >> TKesusTest::TestDeleteSemaphore >> TKesusTest::TestAttachNewSessions >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> TKesusTest::TestAcquireUpgrade ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 
2025-05-29T15:25:50.753290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:25:50.753313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:50.753319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:25:50.753324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:25:50.753334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:25:50.753338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:25:50.753346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:50.753359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:25:50.753457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:25:50.753527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:25:50.788550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:25:50.788570Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:50.788665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:25:50.819639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:25:50.819671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:25:50.819713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:25:50.835373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:25:50.835464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:25:50.835602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:50.835766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:25:50.836497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:50.836542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:25:50.836798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:25:50.836807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:50.836841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:25:50.836849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:25:50.836855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:25:50.836871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:25:50.838141Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:25:50.859436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:25:50.859511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.859561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:25:50.859610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:25:50.859621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.860340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:50.860367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:25:50.860400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.860409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:25:50.860414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:25:50.860420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:25:50.860771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.860781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:25:50.860786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:25:50.861110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.861118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.861124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:50.861131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:25:50.861813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:25:50.862194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:25:50.862226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:25:50.862391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:50.862413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { 
RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:25:50.862420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:50.862481Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1003 2025-05-29T15:27:25.084923Z node 225 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:25.084931Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:27:25.084975Z node 225 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:25.084982Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [225:207:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-05-29T15:27:25.085071Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:27:25.085079Z node 225 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:787: [72057594046678944] TSyncHive, operationId 1003:0, ProgressState, NeedSyncHive: 0 2025-05-29T15:27:25.085082Z node 225 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:0 240 -> 240 2025-05-29T15:27:25.085207Z node 225 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:27:25.085222Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:27:25.085227Z node 225 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:27:25.085233Z node 225 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-05-29T15:27:25.085239Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2025-05-29T15:27:25.085255Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-05-29T15:27:25.086061Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:27:25.086078Z node 225 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-05-29T15:27:25.086095Z node 225 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:27:25.086100Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:27:25.086106Z node 225 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:27:25.086109Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:27:25.086115Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-05-29T15:27:25.086130Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [225:324:2314] message: TxId: 1003 2025-05-29T15:27:25.086139Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:27:25.086145Z node 225 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:27:25.086154Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:27:25.086200Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-05-29T15:27:25.086361Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:27:25.086803Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:27:25.086817Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [225:614:2523] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:27:25.086955Z node 225 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:25.087005Z node 225 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 60us result status StatusSuccess 2025-05-29T15:27:25.087105Z node 225 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:25.087204Z node 225 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-05-29T15:27:25.087232Z node 225 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 29us result status StatusSuccess 2025-05-29T15:27:25.087280Z node 225 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2025-05-29T15:27:25.087357Z node 225 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:25.087380Z node 225 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 25us result status StatusSuccess 2025-05-29T15:27:25.087450Z node 225 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: 
Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TKesusTest::TestAttachNewSessions [GOOD] >> TKesusTest::TestAttachMissingSession >> TKesusTest::TestAcquireUpgrade [GOOD] >> TKesusTest::TestAcquireTimeout >> TKesusTest::TestSessionDetach >> TKesusTest::TestAcquireWaiterDowngrade >> TKesusTest::TestQuoterHDRRParametersValidation >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-false [GOOD] >> TKesusTest::TestAttachMissingSession [GOOD] >> TKesusTest::TestAttachOldGeneration >> TKesusTest::TestAcquireSemaphoreTimeout >> TKesusTest::TestSessionDetach [GOOD] >> TKesusTest::TestSessionDetachFutureId >> TKesusTest::TestAcquireWaiterDowngrade [GOOD] >> TKesusTest::TestAcquireWaiterUpgrade >> TKesusTest::TestQuoterHDRRParametersValidation [GOOD] >> TKesusTest::TestQuoterAccountResourcesOnDemand >> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TKesusTest::TestAttachOldGeneration [GOOD] >> TKesusTest::TestAttachFastPath |69.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKesusTest::TestQuoterAccountResourcesBurst >> TKesusTest::TestSessionDetachFutureId [GOOD] >> TKesusTest::TestSessionDestroy >> TKesusTest::TestAcquireWaiterUpgrade [GOOD] >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi >> TKesusTest::TestAttachFastPath [GOOD] >> TKesusTest::TestAttachFastPathBlocked >> TKesusTest::TestSessionDestroy [GOOD] >> TKesusTest::TestSessionStealing >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD] >> TKesusTest::TestAcquireWaiterRelease |69.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TKesusTest::TestReleaseLockFailure >> TKesusTest::TestAttachFastPathBlocked [GOOD] >> TKesusTest::TestSessionStealing [GOOD] >> TKesusTest::TestSessionStealingAnyKey >> TKesusTest::TestReleaseLockFailure [GOOD] >> TKesusTest::TestReleaseSemaphore >> TKesusTest::TestAcquireWaiterRelease [GOOD] >> TKesusTest::TestAllocatesResources >> TKesusTest::TestSessionStealingAnyKey [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] Test command err: 2025-05-29T15:27:27.266560Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:27.266593Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:27.269948Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:27.269978Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:27.281438Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:27.281587Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2156], cookie=10265853861344666125, session=0, seqNo=0) 2025-05-29T15:27:27.281627Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:27.302778Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2156], cookie=10265853861344666125, session=1) 2025-05-29T15:27:27.302897Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2156], cookie=7038787446290808235, session=0, seqNo=0) 2025-05-29T15:27:27.302935Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:27:27.313682Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2156], cookie=7038787446290808235, session=2) 2025-05-29T15:27:27.521000Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:27.521032Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:27.524582Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:27.524618Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:27.545990Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 
2025-05-29T15:27:27.546141Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2158], cookie=8287022332330585018, session=1, seqNo=0) 2025-05-29T15:27:27.556985Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2158], cookie=8287022332330585018, session=1) 2025-05-29T15:27:27.758280Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:27.758305Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:27.760958Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:27.761058Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:27.782243Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:27.782423Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:133:2158], cookie=17541071375701578792, session=0, seqNo=0) 2025-05-29T15:27:27.782460Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:27.793151Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:133:2158], cookie=17541071375701578792, session=1) 2025-05-29T15:27:27.993941Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:27.993969Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:27.996734Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:27.996787Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:28.018006Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:28.018096Z node 4 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[4:133:2158], cookie=11853997420561846477, path="") 2025-05-29T15:27:28.028954Z node 4 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[4:133:2158], cookie=11853997420561846477, status=SUCCESS) 2025-05-29T15:27:28.029119Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:142:2165], cookie=920135057287382318, session=0, seqNo=0) 2025-05-29T15:27:28.029147Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:28.039906Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:142:2165], cookie=920135057287382318, session=1) 2025-05-29T15:27:28.040063Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:143:2166], cookie=111, session=0, seqNo=0) 2025-05-29T15:27:28.040089Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:27:28.040119Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:262: [72057594037927937] Fast-path attach session=1 to sender=[4:143:2166], cookie=222, seqNo=0 2025-05-29T15:27:28.050796Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete 
(sender=[4:143:2166], cookie=111, session=2) 2025-05-29T15:27:28.232026Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:28.232059Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:28.235635Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:28.235671Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:28.257236Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:28.257359Z node 5 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[5:131:2156], cookie=10532156055780527314, path="") 2025-05-29T15:27:28.268109Z node 5 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[5:131:2156], cookie=10532156055780527314, status=SUCCESS) 2025-05-29T15:27:28.268322Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:140:2163], cookie=6189551285643870036, session=0, seqNo=0) 2025-05-29T15:27:28.268367Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:28.279320Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:140:2163], cookie=6189551285643870036, session=1) 2025-05-29T15:27:28.279524Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:140:2163], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2025-05-29T15:27:28.279584Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-05-29T15:27:28.279601Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-05-29T15:27:28.279667Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:141:2164], cookie=111, session=0, seqNo=0) 2025-05-29T15:27:28.279681Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:27:28.279697Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:141:2164], cookie=222, session=1, seqNo=0) 2025-05-29T15:27:28.290485Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:140:2163], cookie=123) 2025-05-29T15:27:28.290516Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:141:2164], cookie=111, session=2) 2025-05-29T15:27:28.290527Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:141:2164], cookie=222, session=1) >> TKesusTest::TestReleaseSemaphore [GOOD] >> TKesusTest::TestSemaphoreData >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters >> TKesusTest::TestAllocatesResources [GOOD] >> TKesusTest::TestSemaphoreData [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD] Test command err: 2025-05-29T15:27:27.615587Z node 1 :KESUS_TABLET INFO: 
tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:27.615613Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:27.618474Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:27.618500Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:27.629816Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:27.630017Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2156], cookie=4239447458025152376, session=0, seqNo=0) 2025-05-29T15:27:27.630074Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:27.650865Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2156], cookie=4239447458025152376, session=1) 2025-05-29T15:27:27.651168Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[1:131:2156], cookie=11157189232934563834, session=2) 2025-05-29T15:27:27.651185Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[1:131:2156], cookie=11157189232934563834) 2025-05-29T15:27:27.651248Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[1:131:2156], cookie=13737951488635251176 2025-05-29T15:27:27.651324Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2156], cookie=10004483441722625284, session=1, seqNo=0) 2025-05-29T15:27:27.661951Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2156], cookie=10004483441722625284, session=1) 2025-05-29T15:27:27.662028Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-05-29T15:27:27.662060Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-05-29T15:27:27.662072Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-05-29T15:27:27.662099Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[1:131:2156], cookie=6841751257012132269, session=1) 2025-05-29T15:27:27.672242Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-05-29T15:27:27.672266Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-05-29T15:27:27.672274Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-05-29T15:27:27.682919Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2156], cookie=111) 2025-05-29T15:27:27.682943Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[1:131:2156], cookie=6841751257012132269) 2025-05-29T15:27:27.682951Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] 
TTxSessionTimeout::Complete (session=1) 2025-05-29T15:27:27.853962Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:27.854005Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:27.856769Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:27.856803Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:27.877707Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:27.877787Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[2:133:2158], cookie=6972940516248711907, path="") 2025-05-29T15:27:27.888510Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[2:133:2158], cookie=6972940516248711907, status=SUCCESS) 2025-05-29T15:27:27.888654Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:142:2165], cookie=111, session=0, seqNo=0) 2025-05-29T15:27:27.888685Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:27.888711Z node 2 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[2:142:2165], cookie=1243914390010712976, session=1) 2025-05-29T15:27:27.898814Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-05-29T15:27:27.898830Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-05-29T15:27:27.909454Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:142:2165], cookie=111, session=1) 2025-05-29T15:27:27.909480Z node 2 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[2:142:2165], cookie=1243914390010712976) 2025-05-29T15:27:27.909486Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-05-29T15:27:28.088823Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:28.088851Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:28.091649Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:28.091729Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:28.112665Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:28.112764Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:133:2158], cookie=7395718717081950909, session=0, seqNo=0) 2025-05-29T15:27:28.112790Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:28.123485Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:133:2158], cookie=7395718717081950909, session=1) 2025-05-29T15:27:28.123652Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:133:2158], cookie=8714450436090518899, session=1) 2025-05-29T15:27:28.123682Z node 3 
:KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-05-29T15:27:28.134270Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:133:2158], cookie=8714450436090518899) 2025-05-29T15:27:28.134420Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:150:2173], cookie=2025857466281485054) 2025-05-29T15:27:28.134433Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:150:2173], cookie=2025857466281485054) 2025-05-29T15:27:28.134495Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:153:2176], cookie=1451280717176771921, session=0, seqNo=0) 2025-05-29T15:27:28.134519Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:27:28.145051Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:153:2176], cookie=1451280717176771921, session=2) 2025-05-29T15:27:28.145212Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:133:2158], cookie=6277678825710095558, session=2) 2025-05-29T15:27:28.145231Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 2 2025-05-29T15:27:28.155819Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:133:2158], cookie=6277678825710095558) 2025-05-29T15:27:28.326442Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:28.326469Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:28.329743Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:28.329799Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:28.350921Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:28.351084Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:133:2158], cookie=12345, session=0, seqNo=0) 2025-05-29T15:27:28.351111Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:28.361825Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:133:2158], cookie=12345, session=1) 2025-05-29T15:27:28.362002Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:140:2163], cookie=23456, session=1, seqNo=0) 2025-05-29T15:27:28.372765Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:140:2163], cookie=23456, session=1) 2025-05-29T15:27:28.562656Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:28.562683Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:28.565528Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:28.565557Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:28.586769Z node 5 
:KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:28.586920Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2156], cookie=12345, session=0, seqNo=0) 2025-05-29T15:27:28.586948Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:28.597618Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2156], cookie=12345, session=1) 2025-05-29T15:27:28.597756Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:138:2161], cookie=23456, session=1, seqNo=0) 2025-05-29T15:27:28.608391Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:138:2161], cookie=23456, session=1) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD] Test command err: 2025-05-29T15:27:27.621764Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:27.621792Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:27.624609Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:27.624647Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:27.635780Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:27.635918Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2156], cookie=12429485970142781364, session=0, seqNo=0) 2025-05-29T15:27:27.635950Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:27.656618Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2156], cookie=12429485970142781364, session=1) 2025-05-29T15:27:27.656693Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2156], cookie=11132672269470581841, session=0, seqNo=0) 2025-05-29T15:27:27.656715Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:27:27.667351Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2156], cookie=11132672269470581841, session=2) 2025-05-29T15:27:27.667455Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2156], cookie=111, session=1, semaphore="Lock1" count=1) 2025-05-29T15:27:27.667514Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-05-29T15:27:27.667529Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-05-29T15:27:27.678207Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2156], cookie=111) 2025-05-29T15:27:27.678280Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2156], cookie=222, session=2, 
semaphore="Lock1" count=18446744073709551615) 2025-05-29T15:27:27.678349Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2156], cookie=333, session=2, semaphore="Lock1" count=1) 2025-05-29T15:27:27.678367Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #2 session 2 2025-05-29T15:27:27.688934Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2156], cookie=222) 2025-05-29T15:27:27.688957Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2156], cookie=333) 2025-05-29T15:27:27.689050Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:147:2170], cookie=1940746255275244589, name="Lock1") 2025-05-29T15:27:27.689065Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:147:2170], cookie=1940746255275244589) 2025-05-29T15:27:27.877581Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:27.877609Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:27.880526Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:27.880555Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:27.901557Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:27.901660Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2158], cookie=10176563844082973196, session=0, seqNo=0) 2025-05-29T15:27:27.901681Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:27.912208Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2158], cookie=10176563844082973196, session=1) 2025-05-29T15:27:27.912269Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2158], cookie=5941184488154922121, session=0, seqNo=0) 2025-05-29T15:27:27.912288Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:27:27.922810Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2158], cookie=5941184488154922121, session=2) 2025-05-29T15:27:27.922873Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-05-29T15:27:27.922897Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-05-29T15:27:27.922908Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-05-29T15:27:27.933469Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2158], cookie=111) 2025-05-29T15:27:27.933536Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] 
TTxSemaphoreAcquire::Execute (sender=[2:133:2158], cookie=222, session=2, semaphore="Lock1" count=1) 2025-05-29T15:27:27.933609Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2158], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615) 2025-05-29T15:27:27.944339Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2158], cookie=222) 2025-05-29T15:27:27.944365Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2158], cookie=333) 2025-05-29T15:27:27.944450Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:149:2172], cookie=16406826531300645821, name="Lock1") 2025-05-29T15:27:27.944468Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:149:2172], cookie=16406826531300645821) 2025-05-29T15:27:27.944524Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:152:2175], cookie=5881990041603214692, name="Lock1") 2025-05-29T15:27:27.944532Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:152:2175], cookie=5881990041603214692) 2025-05-29T15:27:28.107067Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:28.107089Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:28.109691Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:28.109789Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:28.130894Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:28.131045Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:133:2158], cookie=3754897954118856424, session=0, seqNo=0) 2025-05-29T15:27:28.131081Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:28.141738Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:133:2158], cookie=3754897954118856424, session=1) 2025-05-29T15:27:28.141806Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:133:2158], cookie=15918837521219163427, session=0, seqNo=0) 2025-05-29T15:27:28.141848Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:27:28.152417Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:133:2158], cookie=15918837521219163427, session=2) 2025-05-29T15:27:28.152536Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:133:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-05-29T15:27:28.152564Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-05-29T15:27:28.152576Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" 
queue: next order #1 session 1 2025-05-29T15:27:28.163255Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:133:2158], cookie=111) 2025-05-29T15:27:28.163349Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:133:2158], cookie=222, session=2, semaphore="Lock1" count=1) 2025-05-29T15:27:28.163414Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:133:2158], cookie=333, session=2, semaphore="Lock1" count=1) 2025-05-29T15:27:28.163423Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-05-29T15:27:28.174151Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:133:2158], cookie=222) 2025-05-29T15:27:28.174185Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:133:2158], cookie=333) 2025-05-29T15:27:28.174279Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:152:2175], cookie=12742459482408679095, name="Lock1") 2025-05-29T15:27:28.174295Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:152:2175], cookie=12742459482408679095) 2025-05-29T15:27:28.174332Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:155:2178], cookie=8880851593774977836, name="Lock1") 2025-05-29T15:27:28.174336Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:155:2178], cookie=8880851593774977836) 2025-05-29T15:27:28.176677Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:28.176702Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:28.176751Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:28.176861Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:28.218898Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:28.218947Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-05-29T15:27:28.219053Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:195:2208], cookie=13858890082063530561, name="Lock1") 2025-05-29T15:27:28.219076Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:195:2208], cookie=13858890082063530561) 2025-05-29T15:27:28.219185Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:203:2215], cookie=13494030120163328844, name="Lock1") 2025-05-29T15:27:28.219195Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:203:2215], cookie=13494030120163328844) 2025-05-29T15:27:28.337569Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:28.337590Z node 4 
:KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:28.340828Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:28.340894Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:28.361986Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:28.362088Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:133:2158], cookie=15555571170537934525, session=0, seqNo=0) 2025-05-29T15:27:28.362113Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:28.372754Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:133:2158], cookie=15555571170537934525, session=1) 2025-05-29T15:27:28.372844Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:133:2158], cookie=2508854339189750095, session=0, seqNo=0) 2025-05-29T15:27:28.372880Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:27:28.383634Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:133:2158], cookie=2508854339189750095, session=2) 2025-05-29T15:27:28.383720Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-05-29T15:27:28.383750Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-05-29T15:27:28.383761Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-05-29T15:27:28.394503Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2158], cookie=111) 2025-05-29T15:27:28.394574Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2158], cookie=222, session=2, semaphore="Lock1" count=1) 2025-05-29T15:27:28.394690Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:133:2158], cookie=333, name="Lock1") 2025-05-29T15:27:28.394705Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-05-29T15:27:28.405323Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2158], cookie=222) 2025-05-29T15:27:28.405346Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:133:2158], cookie=333) 2025-05-29T15:27:28.583371Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:28.583410Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:28.587110Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:28.587145Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:28.609479Z node 5 :KESUS_TABLET 
DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:28.610864Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:131:2156], cookie=8695424230309351439, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-05-29T15:27:28.610938Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-05-29T15:27:28.621917Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:131:2156], cookie=8695424230309351439) 2025-05-29T15:27:28.622095Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:140:2163], cookie=18001617145979574114, path="/Root/Res", config={ }) 2025-05-29T15:27:28.622161Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-05-29T15:27:28.633011Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:140:2163], cookie=18001617145979574114) 2025-05-29T15:27:28.633445Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:145:2168]. Cookie: 12055565908707679818. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:28.633463Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:145:2168], cookie=12055565908707679818) 2025-05-29T15:27:28.633558Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:193: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:145:2168]. Cookie: 15514448028815425904. Data: { } 2025-05-29T15:27:28.633566Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:198: [72057594037927937] Update quoter resources consumption state (sender=[5:145:2168], cookie=15514448028815425904) 2025-05-29T15:27:28.674530Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:145:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-05-29T15:27:28.725670Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:145:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-05-29T15:27:28.756277Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:145:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-05-29T15:27:28.797167Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:145:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-05-29T15:27:28.838182Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:145:2168]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/runtime/unittest >> KqpScanLogs::GraceJoin+EnabledLogs Test command err: cwd: /home/runner/.ya/build/build_root/ciyv/000a62/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk0 Trying to start YDB, gRPC: 17542, MsgBus: 6117 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000a62/r3tmp/tmpubAVqS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17542, node 1 TClient is connected to server localhost:6117 TClient is connected to server localhost:6117 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... VERIFY failed (2025-05-29T15:27:25.706085Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A7AA65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A71A66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C137F6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B19E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B12E2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260D2F1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260D2F1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260D2F1C 8. /-S/util/thread/pool.h:71: Process @ 0x260D2F1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A823E9 10. /-S/util/thread/factory.h:15: Execute @ 0x13A80DD9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A80DD9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A7C24C 13. ??:0: ?? @ 0x7FBB2D205AC2 14. ??:0: ?? @ 0x7FBB2D29784F >> TKesusTest::TestAttachOutOfSequence >> TKesusTest::TestGetQuoterResourceCounters [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire [GOOD] >> TKesusTest::TestSemaphoreSessionFailures >> TKesusTest::TestAttachOutOfSequence [GOOD] >> TKesusTest::TestAttachOutOfSequenceInTx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling Test command err: cwd: /home/runner/.ya/build/build_root/ciyv/000a50/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk8 Trying to start YDB, gRPC: 4722, MsgBus: 8152 2025-05-29T15:27:25.487493Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889667920337413:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:25.487516Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000a50/r3tmp/tmp63isXQ/pdisk_1.dat 2025-05-29T15:27:25.549189Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:25.550841Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889667920337393:2079] 1748532445487355 != 1748532445487358 TServer::EnableGrpc on GrpcPort 4722, node 1 2025-05-29T15:27:25.562767Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:27:25.562781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:27:25.562783Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:27:25.562823Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8152 TClient is connected to server localhost:8152 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:27:25.621115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:25.621146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:25.621718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-29T15:27:25.622782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:25.630097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:25.693401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:25.716638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:25.730764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:25.859715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889667920339026:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:25.859750Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:25.907678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:27:25.915802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:27:25.927562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:27:25.941636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:27:25.955983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:27:25.969646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:27:25.984011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:27:26.000070Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889667920339678:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:26.000095Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:26.000157Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889667920339683:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:26.000895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:27:26.003614Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889667920339685:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:27:26.059300Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889672215307032:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:26.145091Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889672215307048:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:26.146842Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGUxMmI1ZGMtYTUxZmJjYzctMTRjZDkzZWEtMWM3MjU3MzU=, ActorId: [1:7509889667920339008:2401], ActorState: ExecuteState, TraceId: 01jweafgsfaxfg7bmymwqc6h60, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:27:26.147534Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A7AA65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A71A66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C137F6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B19E2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B12E2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260D2F1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260D2F1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260D2F1C 8. /-S/util/thread/pool.h:71: Process @ 0x260D2F1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A823E9 10. /-S/util/thread/factory.h:15: Execute @ 0x13A80DD9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A80DD9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A7C24C 13. ??:0: ?? @ 0x7F4A1BEE5AC2 14. ??:0: ?? @ 0x7F4A1BF7784F >> TKesusTest::TestAcquireLocks >> TKesusTest::TestQuoterAccountResourcesOnDemand [GOOD] >> TKesusTest::TestQuoterAccountResourcesPaced ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2025-05-29T15:27:26.602976Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:26.603014Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:26.607100Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:26.607133Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:26.618501Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:26.618678Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:131:2156], cookie=7837782839814061009, path="/foo/bar/baz") 2025-05-29T15:27:26.639948Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:131:2156], cookie=7837782839814061009, status=SUCCESS) 2025-05-29T15:27:26.640115Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:140:2163], cookie=13786906067247227983) 2025-05-29T15:27:26.650866Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:140:2163], cookie=13786906067247227983) 2025-05-29T15:27:26.651009Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:145:2168], cookie=17021346727039963719, path="/foo/bar/baz") 2025-05-29T15:27:26.661841Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:145:2168], cookie=17021346727039963719, status=SUCCESS) 2025-05-29T15:27:26.661973Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:150:2173], cookie=3048600391816594481) 2025-05-29T15:27:26.672662Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:150:2173], cookie=3048600391816594481) 2025-05-29T15:27:26.674723Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 
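Both stack traces above bottom out in AssertSuccessResult at ydb/core/kqp/ut/common/kqp_ut_common.h:375, tripping on (result.IsSuccess()) while CreateSampleTables runs on a thread-pool thread, which is why the registar reports "assertion failed in non-unittest thread". A hypothetical reconstruction of what such a helper conceptually does is sketched below; the real helper lives in kqp_ut_common.h and uses the harness's VERIFY machinery rather than exceptions.

// Hypothetical reconstruction of an AssertSuccessResult-style helper,
// assumed types; not the actual ydb/core/kqp test-common code.
#include <stdexcept>
#include <string>

struct TStatus {
    bool Success;
    std::string Issues;                 // e.g. ": Fatal: Execution, code: 1060"
    bool IsSuccess() const { return Success; }
};

void AssertSuccessResult(const TStatus& result) {
    if (!result.IsSuccess()) {
        // In the real harness this is a VERIFY-style panic; firing on a
        // pool thread (not the unittest thread) produces the
        // "assertion failed in non-unittest thread" message seen above.
        throw std::runtime_error(
            "assertion failed: (result.IsSuccess())\n" + result.Issues);
    }
}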
2025-05-29T15:27:26.674770Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:26.674815Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:26.674924Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:26.717511Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:26.717598Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:192:2205], cookie=4050643175584147092) 2025-05-29T15:27:26.728288Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:192:2205], cookie=4050643175584147092) 2025-05-29T15:27:26.728407Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:200:2212], cookie=993669231377634879, path="/foo/bar/baz") 2025-05-29T15:27:26.739482Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:200:2212], cookie=993669231377634879, status=SUCCESS) 2025-05-29T15:27:26.739670Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:205:2217], cookie=15440680911091271479, path="/foo/bar/baz") 2025-05-29T15:27:26.739692Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:205:2217], cookie=15440680911091271479, status=PRECONDITION_FAILED) 2025-05-29T15:27:26.836024Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:26.836053Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:26.839293Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:26.839333Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:26.860724Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:26.860828Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:133:2158], cookie=12280477097183460730, name="Lock1") 2025-05-29T15:27:26.860850Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:133:2158], cookie=12280477097183460730) 2025-05-29T15:27:27.072612Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:27.072640Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:27.076102Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:27.076214Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:27.097513Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:27.097646Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:133:2158], cookie=13554626291142613943, session=0, seqNo=0) 2025-05-29T15:27:27.097680Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:27.108408Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] 
TTxSessionAttach::Complete (sender=[3:133:2158], cookie=13554626291142613943, session=1) 2025-05-29T15:27:27.108496Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:133:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-05-29T15:27:27.108549Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-05-29T15:27:27.108565Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-05-29T15:27:27.119277Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:133:2158], cookie=111) 2025-05-29T15:27:27.119406Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:144:2167], cookie=7374135655382077897, name="Lock1", force=0) 2025-05-29T15:27:27.130187Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:144:2167], cookie=7374135655382077897) 2025-05-29T15:27:27.130341Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:149:2172], cookie=1676464050221878929, name="Sem1", force=0) 2025-05-29T15:27:27.141261Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:149:2172], cookie=1676464050221878929) 2025-05-29T15:27:27.141428Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:154:2177], cookie=10528574141780219368, name="Sem1", limit=42) 2025-05-29T15:27:27.141477Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2025-05-29T15:27:27.152183Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:154:2177], cookie=10528574141780219368) 2025-05-29T15:27:27.152293Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:159:2182], cookie=14880347078836794470, name="Sem1", force=0) 2025-05-29T15:27:27.152314Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 2 "Sem1" 2025-05-29T15:27:27.162991Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:159:2182], cookie=14880347078836794470) 2025-05-29T15:27:27.163098Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:164:2187], cookie=12155621575823347487, name="Sem1", force=0) 2025-05-29T15:27:27.173938Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:164:2187], cookie=12155621575823347487) 2025-05-29T15:27:27.313618Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:27.313645Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:27.316734Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:27.316818Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:27.338094Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] 
TTxInit::Complete 2025-05-29T15:27:27.338226Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:133:2158], cookie=1719575054148773301, session=0, seqNo=0) 2025-05-29T15:27:27.338266Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:27.348902Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:133:2158], cookie=1719575054148773301, session=1) 2025-05-29T15:27:27.348991Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:133:2158], cookie=3873948393336394797, session=0, seqNo=0) 2025-05-29T15:27:27.349026Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:27:27.359546Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:133:2158], cookie=3873948393336394797, session=2) 2025-05-29T15:27:27.359617Z node 4 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=2 from sender=[4:133:2158], cookie=7437093547543429149 2025-05-29T15:27:27.359696Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:145:2168], cookie=274663321447317163, name="Sem1", limit=3) 2025-05-29T15:27:27.359725Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-05-29T15:27:27.370277Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:145:2168], cookie=274663321447317163) 2025-05-29T15:27:27.370342Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:133:2158], cookie=112, name="Sem1") 2025-05-29T15:27:27.370358Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:133:2158], cookie=112) 2025-05-29T15:27:27.370378Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:133:2158], cookie=113, name="Sem1") 2025-05-29T15:27:27.370384Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:133:2158], cookie=113) 2025-05-29T15:27:27.370402Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:133:2158], cookie=6458570841009406070, session=2, seqNo=0) 2025-05-29T15:27:27.381094Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSess ... 
node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:28.722568Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:28.732808Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2158], cookie=129, session=1, semaphore="Sem2" count=2) 2025-05-29T15:27:28.743616Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2158], cookie=129) 2025-05-29T15:27:28.743719Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:133:2158], cookie=130, name="Sem2") 2025-05-29T15:27:28.743737Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:133:2158], cookie=130) 2025-05-29T15:27:28.743767Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2158], cookie=131, session=1, semaphore="Sem2" count=1) 2025-05-29T15:27:28.754502Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2158], cookie=131) 2025-05-29T15:27:28.754597Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:133:2158], cookie=132, name="Sem2") 2025-05-29T15:27:28.754613Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:133:2158], cookie=132) 2025-05-29T15:27:28.754635Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:133:2158], cookie=133, name="Sem2") 2025-05-29T15:27:28.754639Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:133:2158], cookie=133) 2025-05-29T15:27:28.956350Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:28.956378Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:28.959718Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:28.959746Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:28.981325Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:28.982598Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:131:2156], cookie=3401694766231975480, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2025-05-29T15:27:28.982665Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root1" 2025-05-29T15:27:28.993449Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:131:2156], cookie=3401694766231975480) 2025-05-29T15:27:28.993597Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:140:2163], cookie=15035176475498939902, path="/Root1/Res", config={ }) 2025-05-29T15:27:28.993651Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root1/Res" 
2025-05-29T15:27:29.004468Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:140:2163], cookie=15035176475498939902) 2025-05-29T15:27:29.004656Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:145:2168], cookie=576919846374930680, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2025-05-29T15:27:29.004713Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root2" 2025-05-29T15:27:29.015480Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:145:2168], cookie=576919846374930680) 2025-05-29T15:27:29.015636Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:150:2173], cookie=8401296746342997624, path="/Root2/Res", config={ }) 2025-05-29T15:27:29.015690Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2025-05-29T15:27:29.026387Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:150:2173], cookie=8401296746342997624) 2025-05-29T15:27:29.026533Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:155:2178], cookie=8405167855526629072, path="/Root2/Res/Subres", config={ }) 2025-05-29T15:27:29.026583Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2025-05-29T15:27:29.037409Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:155:2178], cookie=8405167855526629072) 2025-05-29T15:27:29.037754Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:160:2183]. Cookie: 5636180486669494381. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:29.037765Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:160:2183], cookie=5636180486669494381) 2025-05-29T15:27:29.078691Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:160:2183]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-05-29T15:27:29.129791Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:160:2183]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-05-29T15:27:29.160385Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:160:2183]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-05-29T15:27:29.160597Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:168:2187]. Cookie: 536326242802370490. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-05-29T15:27:29.160797Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:171:2190]. Cookie: 12409753148473765788. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:29.160808Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:171:2190], cookie=12409753148473765788) 2025-05-29T15:27:29.201576Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:171:2190]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-05-29T15:27:29.242352Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:171:2190]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-05-29T15:27:29.242527Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:177:2194]. Cookie: 18039192844747334188. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-05-29T15:27:29.242668Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:160:2183]. Cookie: 10659737123606665433. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:29.242681Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:160:2183], cookie=10659737123606665433) 2025-05-29T15:27:29.242814Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:171:2190]. Cookie: 9906703136597694336. 
Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:29.242819Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:171:2190], cookie=9906703136597694336) 2025-05-29T15:27:29.273394Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:160:2183]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2025-05-29T15:27:29.273431Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:171:2190]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2025-05-29T15:27:29.273599Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:184:2201]. Cookie: 16105869711933574810. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/runtime/unittest >> KqpScanLogs::GraceJoin-EnabledLogs Test command err: cwd: /home/runner/.ya/build/build_root/ciyv/000a57/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk1 Trying to start YDB, gRPC: 10258, MsgBus: 4129 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000a57/r3tmp/tmpoNpaJc/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10258, node 1 TClient is connected to server localhost:4129 TClient is connected to server localhost:4129 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... VERIFY failed (2025-05-29T15:27:26.311034Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A7AA65
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A71A66
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C137F6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B19E2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B12E2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260D2F1C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260D2F1C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260D2F1C
8. /-S/util/thread/pool.h:71: Process @ 0x260D2F1C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A823E9
10. /-S/util/thread/factory.h:15: Execute @ 0x13A80DD9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A80DD9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A7C24C
13. ??:0: ?? @ 0x7FDBC1574AC2
14. ??:0: ?? @ 0x7FDBC160684F
>> TKesusTest::TestAttachOutOfSequenceInTx [GOOD]
>> TKesusTest::TestAttachThenReRegister
>> TSequenceReboots::CopyTableWithSequence [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling
Test command err: cwd: /home/runner/.ya/build/build_root/ciyv/0008bd/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk7
Trying to start YDB, gRPC: 25038, MsgBus: 26565
2025-05-29T15:27:25.594358Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889667680213830:2061];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:27:25.594383Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0008bd/r3tmp/tmpb1ggaL/pdisk_1.dat
2025-05-29T15:27:25.656550Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:27:25.656973Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889667680213811:2079] 1748532445594230 != 1748532445594233
TServer::EnableGrpc on GrpcPort 25038, node 1
2025-05-29T15:27:25.672350Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:27:25.672374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:27:25.672376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:27:25.672426Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:26565
2025-05-29T15:27:25.696784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:27:25.696812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:27:25.697914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:26565
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:27:25.743355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:27:25.749690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:27:25.754489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-05-29T15:27:25.824295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-05-29T15:27:25.848351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:27:25.859436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-05-29T15:27:25.971780Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889667680215446:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:27:25.971816Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:27:26.014515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-05-29T15:27:26.070109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-05-29T15:27:26.081775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-05-29T15:27:26.095828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-05-29T15:27:26.109514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-05-29T15:27:26.123651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-05-29T15:27:26.137792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-05-29T15:27:26.155982Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889671975183397:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:27:26.156008Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:27:26.156056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889671975183402:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:27:26.157105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-05-29T15:27:26.164811Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889671975183404:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-05-29T15:27:26.239034Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889671975183455:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:27:26.349944Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889671975183471:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:27:26.350041Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTAyNmE0Y2ItODMxNjE5ZTEtYTJmZjlkNDgtNDFlODA5Mjc=, ActorId: [1:7509889667680215428:2401], ActorState: ExecuteState, TraceId: 01jweafgybcqq7vvttwembs0xw, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:27:26.350729Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A7AA65
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A71A66
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C137F6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B19E2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B12E2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260D2F1C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260D2F1C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260D2F1C
8. /-S/util/thread/pool.h:71: Process @ 0x260D2F1C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A823E9
10. /-S/util/thread/factory.h:15: Execute @ 0x13A80DD9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A80DD9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A7C24C
13. ??:0: ?? @ 0x7F9CAF1A6AC2
14. ??:0: ?? @ 0x7F9CAF23884F
>> TKesusTest::TestQuoterAccountResourcesBurst [GOOD]
>> TKesusTest::TestQuoterAccountResourcesAggregateClients
>> TKesusTest::TestSemaphoreSessionFailures [GOOD]
>> TKesusTest::TestAttachThenReRegister [GOOD]
>> TKesusTest::TestAttachTimeoutTooBig
|69.4%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log}
|69.4%| [TA] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log}
|69.4%| [TA] {RESULT} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|69.4%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ...
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] Test command err: 2025-05-29T15:27:28.397703Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:28.397732Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:28.400699Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:28.400726Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:28.411849Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:28.411966Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2156], cookie=13958004562601237378, session=0, seqNo=0) 2025-05-29T15:27:28.411998Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:28.432818Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2156], cookie=13958004562601237378, session=1) 2025-05-29T15:27:28.432911Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2156], cookie=5529003210925318362, session=0, seqNo=0) 2025-05-29T15:27:28.432943Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:27:28.445486Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2156], cookie=5529003210925318362, session=2) 2025-05-29T15:27:28.445593Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:131:2156], cookie=111, name="Lock1") 2025-05-29T15:27:28.456377Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:131:2156], cookie=111) 2025-05-29T15:27:28.456467Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2156], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-05-29T15:27:28.456513Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-05-29T15:27:28.456527Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-05-29T15:27:28.467320Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2156], cookie=222) 2025-05-29T15:27:28.467418Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:131:2156], cookie=333, name="Lock1") 2025-05-29T15:27:28.478187Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:131:2156], cookie=333) 2025-05-29T15:27:28.637724Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:28.637751Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:28.641169Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 
2025-05-29T15:27:28.641209Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:28.662373Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:28.662512Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2158], cookie=1420706312417656601, session=0, seqNo=0) 2025-05-29T15:27:28.662546Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:28.673238Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2158], cookie=1420706312417656601, session=1) 2025-05-29T15:27:28.673319Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2158], cookie=16409578138731131492, session=0, seqNo=0) 2025-05-29T15:27:28.673351Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:27:28.684129Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2158], cookie=16409578138731131492, session=2) 2025-05-29T15:27:28.684278Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:144:2167], cookie=16862338900048480919, name="Sem1", limit=1) 2025-05-29T15:27:28.684309Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-05-29T15:27:28.695096Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:144:2167], cookie=16862338900048480919) 2025-05-29T15:27:28.695189Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2158], cookie=111, session=1, semaphore="Sem1" count=1) 2025-05-29T15:27:28.695243Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-05-29T15:27:28.695286Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2158], cookie=222, session=2, semaphore="Sem1" count=1) 2025-05-29T15:27:28.706027Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2158], cookie=111) 2025-05-29T15:27:28.706058Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2158], cookie=222) 2025-05-29T15:27:28.706171Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:152:2175], cookie=2492195986798651696, name="Sem1") 2025-05-29T15:27:28.706191Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:152:2175], cookie=2492195986798651696) 2025-05-29T15:27:28.706237Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:155:2178], cookie=15352911521886810418, name="Sem1") 2025-05-29T15:27:28.706245Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:155:2178], cookie=15352911521886810418) 2025-05-29T15:27:28.706272Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: 
[72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:133:2158], cookie=333, name="Sem1") 2025-05-29T15:27:28.706299Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link 2025-05-29T15:27:28.716972Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:133:2158], cookie=333) 2025-05-29T15:27:28.717109Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:160:2183], cookie=12871454325012763670, name="Sem1") 2025-05-29T15:27:28.717129Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:160:2183], cookie=12871454325012763670) 2025-05-29T15:27:28.717172Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:163:2186], cookie=4311272768804022066, name="Sem1") 2025-05-29T15:27:28.717177Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:163:2186], cookie=4311272768804022066) 2025-05-29T15:27:28.717202Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:133:2158], cookie=444, name="Sem1") 2025-05-29T15:27:28.717226Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-05-29T15:27:28.727920Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:133:2158], cookie=444) 2025-05-29T15:27:28.728029Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:168:2191], cookie=11765635235372052317, name="Sem1") 2025-05-29T15:27:28.728045Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:168:2191], cookie=11765635235372052317) 2025-05-29T15:27:28.728087Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:171:2194], cookie=15117097624279486835, name="Sem1") 2025-05-29T15:27:28.728091Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:171:2194], cookie=15117097624279486835) 2025-05-29T15:27:28.871328Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:28.871367Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:28.875386Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:28.875566Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:28.897207Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:28.897345Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:133:2158], cookie=2783075191485700395, name="Sem1", limit=1) 2025-05-29T15:27:28.897393Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-05-29T15:27:28.908298Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:133:2158], 
cookie=2783075191485700395) 2025-05-29T15:27:28.908460Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:142:2165], cookie=5300601785651832697, name="Sem2", limit=1) 2025-05-29T15:27:28.908506Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem2" 2025-05-29T15:27:28.919425Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:142:2165], cookie=5300601785651832697) 2025-05-29T15:27:28.919609Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:147:2170], cookie=6452443239683060453, name="Sem1") 2025-05-29T15:27:28.919635Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:147:2170], cookie=6452443239683060453) 2025-05-29T15:27:28.919698Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:150:2173], cookie=12908575485789731973, name="Sem2") 2025-05-29T15:27:28.919705Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:150:2173], cookie=12908575485789731973) 2025-05-29T15:27:28.922416Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:28.922442Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema: ... TTxSemaphoreCreate::Complete (sender=[4:245:2267], cookie=7970187518771939335) 2025-05-29T15:27:29.403736Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2158], cookie=111, session=1, semaphore="Sem1" count=1) 2025-05-29T15:27:29.403791Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1 2025-05-29T15:27:29.414874Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2158], cookie=111) 2025-05-29T15:27:29.414991Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2158], cookie=222, session=2, semaphore="Sem1" count=1) 2025-05-29T15:27:29.436095Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2158], cookie=222) 2025-05-29T15:27:29.436252Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:133:2158], cookie=333, name="Sem1") 2025-05-29T15:27:29.436287Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 11 "Sem1" waiter link 2025-05-29T15:27:29.447027Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:133:2158], cookie=333) 2025-05-29T15:27:29.447145Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2158], cookie=444, session=2, semaphore="Sem1" count=1) 2025-05-29T15:27:29.457953Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2158], cookie=444) 2025-05-29T15:27:29.458048Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute 
(sender=[4:133:2158], cookie=555, name="Sem1") 2025-05-29T15:27:29.458071Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link 2025-05-29T15:27:29.458079Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2 2025-05-29T15:27:29.468753Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:133:2158], cookie=555) 2025-05-29T15:27:29.560513Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:29.560550Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:29.564398Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:29.564440Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:29.586196Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:29.586378Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2156], cookie=15444785421818420053, session=0, seqNo=0) 2025-05-29T15:27:29.586428Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:29.597429Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2156], cookie=15444785421818420053, session=1) 2025-05-29T15:27:29.597545Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:131:2156], cookie=112, name="Sem1", limit=5) 2025-05-29T15:27:29.597593Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-05-29T15:27:29.608489Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:131:2156], cookie=112) 2025-05-29T15:27:29.608605Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:131:2156], cookie=113, name="Sem1") 2025-05-29T15:27:29.619589Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:131:2156], cookie=113) 2025-05-29T15:27:29.619691Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:131:2156], cookie=114, name="Sem1", force=0) 2025-05-29T15:27:29.619725Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-05-29T15:27:29.630588Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:131:2156], cookie=114) 2025-05-29T15:27:29.630689Z node 5 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[5:131:2156], cookie=6625137815235915911 2025-05-29T15:27:29.630761Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:131:2156], cookie=115, name="Sem1", limit=5) 2025-05-29T15:27:29.641764Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:131:2156], cookie=115) 2025-05-29T15:27:29.641873Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: 
[72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:131:2156], cookie=116, name="Sem1") 2025-05-29T15:27:29.652837Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:131:2156], cookie=116) 2025-05-29T15:27:29.652941Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:131:2156], cookie=117, name="Sem1", force=0) 2025-05-29T15:27:29.663848Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:131:2156], cookie=117) 2025-05-29T15:27:29.663960Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2156], cookie=118, session=1, semaphore="Sem1" count=1) 2025-05-29T15:27:29.674942Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2156], cookie=118) 2025-05-29T15:27:29.675062Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:131:2156], cookie=119, name="Sem1") 2025-05-29T15:27:29.686864Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:131:2156], cookie=119) 2025-05-29T15:27:29.686976Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:131:2156], cookie=120, name="Sem1") 2025-05-29T15:27:29.686998Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:131:2156], cookie=120) 2025-05-29T15:27:29.687047Z node 5 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[5:131:2156], cookie=8016962182394023409, session=1) 2025-05-29T15:27:29.687075Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-05-29T15:27:29.697829Z node 5 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[5:131:2156], cookie=8016962182394023409) 2025-05-29T15:27:29.697918Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:131:2156], cookie=121, name="Sem1", limit=5) 2025-05-29T15:27:29.708744Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:131:2156], cookie=121) 2025-05-29T15:27:29.708833Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:131:2156], cookie=122, name="Sem1") 2025-05-29T15:27:29.719632Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:131:2156], cookie=122) 2025-05-29T15:27:29.719721Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:131:2156], cookie=123, name="Sem1", force=0) 2025-05-29T15:27:29.730407Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:131:2156], cookie=123) 2025-05-29T15:27:29.730482Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2156], cookie=124, session=1, semaphore="Sem1" count=1) 2025-05-29T15:27:29.741231Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] 
TTxSemaphoreAcquire::Complete (sender=[5:131:2156], cookie=124) 2025-05-29T15:27:29.741320Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:131:2156], cookie=125, name="Sem1") 2025-05-29T15:27:29.762244Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:131:2156], cookie=125) 2025-05-29T15:27:29.762320Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:131:2156], cookie=126, name="Sem1") 2025-05-29T15:27:29.762340Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:131:2156], cookie=126) 2025-05-29T15:27:29.762437Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:131:2156], cookie=127, name="Sem1", limit=5) 2025-05-29T15:27:29.762448Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:131:2156], cookie=127) 2025-05-29T15:27:29.762475Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:131:2156], cookie=128, name="Sem1") 2025-05-29T15:27:29.762483Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:131:2156], cookie=128) 2025-05-29T15:27:29.762506Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:131:2156], cookie=129, name="Sem1", force=0) 2025-05-29T15:27:29.762513Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:131:2156], cookie=129) 2025-05-29T15:27:29.762539Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2156], cookie=130, session=1, semaphore="Sem1" count=1) 2025-05-29T15:27:29.762547Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2156], cookie=130) 2025-05-29T15:27:29.762570Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:131:2156], cookie=131, name="Sem1") 2025-05-29T15:27:29.762577Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:131:2156], cookie=131) 2025-05-29T15:27:29.762598Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:131:2156], cookie=132, name="Sem1") 2025-05-29T15:27:29.762604Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:131:2156], cookie=132) >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder >> TKesusTest::TestAttachTimeoutTooBig [GOOD] >> TKesusTest::TestCreateSemaphore |69.4%| [TA] {RESULT} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... 
results_accumulator.log}
|69.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/runtime/unittest >> KqpScanLogs::WideCombine-EnabledLogs
Test command err: cwd: /home/runner/.ya/build/build_root/ciyv/0008b4/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk3
Trying to start YDB, gRPC: 31697, MsgBus: 17275
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0008b4/r3tmp/tmpdIUD9d/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 31697, node 1
TClient is connected to server localhost:17275
TClient is connected to server localhost:17275
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
waiting...
waiting...
waiting...
waiting...
VERIFY failed (2025-05-29T15:27:26.602000Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A7AA65
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A71A66
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C137F6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260B19E2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260B12E2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260D2F1C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260D2F1C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260D2F1C
8. /-S/util/thread/pool.h:71: Process @ 0x260D2F1C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A823E9
10. /-S/util/thread/factory.h:15: Execute @ 0x13A80DD9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A80DD9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A7C24C
13. ??:0: ?? @ 0x7FC579CCFAC2
14. ??:0: ?? @ 0x7FC579D6184F
>> DataShardReadTableSnapshots::ReadTableSnapshot
>> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD]
|69.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-false [GOOD]
Test command err: ==== RunWithTabletReboots =========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:25:50.131302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:25:50.131325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:25:50.131330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:25:50.131334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:25:50.131344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424:
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:25:50.131348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:25:50.131357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:50.131368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:25:50.131460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:25:50.131534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:25:50.145867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:25:50.145888Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:50.145979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:25:50.148556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:25:50.148587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:25:50.148616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:25:50.151167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:25:50.151240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:25:50.151350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:50.151502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:25:50.152095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:50.152143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:25:50.152360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:25:50.152370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:50.152400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:25:50.152408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:25:50.152413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:25:50.152430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:25:50.153661Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:25:50.178473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:25:50.178547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.178600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:25:50.178649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:25:50.178659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.182924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:50.182954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:25:50.182992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.183001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:25:50.183006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:25:50.183010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:25:50.183443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.183455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:25:50.183460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:25:50.184026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.184040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:50.184045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:50.184051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:25:50.184717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:25:50.185120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:25:50.185161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:25:50.185348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:50.185373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:25:50.185380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:50.185447Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
5186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1003 2025-05-29T15:27:27.635355Z node 231 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:27.635365Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:27:27.635417Z node 231 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:27.635422Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [231:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-05-29T15:27:27.635509Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:27:27.635518Z node 231 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:787: [72057594046678944] TSyncHive, operationId 1003:0, ProgressState, NeedSyncHive: 0 2025-05-29T15:27:27.635523Z node 231 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:0 240 -> 240 2025-05-29T15:27:27.635628Z node 231 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:27:27.635640Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:27:27.635644Z node 231 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:27:27.635648Z node 231 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-05-29T15:27:27.635651Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 10 2025-05-29T15:27:27.635666Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-05-29T15:27:27.636199Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:27:27.636210Z node 231 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-05-29T15:27:27.636224Z node 231 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:27:27.636229Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:27:27.636235Z node 231 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:27:27.636238Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:27:27.636242Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-05-29T15:27:27.636255Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [231:325:2315] message: TxId: 1003 2025-05-29T15:27:27.636262Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:27:27.636268Z node 231 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:27:27.636272Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:27:27.636313Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2025-05-29T15:27:27.636389Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:27:27.636878Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:27:27.636889Z node 231 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [231:637:2537] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:27:27.637022Z node 231 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:27.637064Z node 231 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 54us result status StatusSuccess 2025-05-29T15:27:27.637158Z node 231 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409548 Coordinators: 72075186233409549 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409547 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 
200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:27.637244Z node 231 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409547 2025-05-29T15:27:27.637264Z node 231 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409547 describe path "/MyRoot/USER_0" took 19us result status StatusSuccess 2025-05-29T15:27:27.637292Z node 231 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409547 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409548 Coordinators: 72075186233409549 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409547 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } } PathId: 1 PathOwnerId: 72075186233409547, at schemeshard: 72075186233409547 2025-05-29T15:27:27.637328Z node 231 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:27.637338Z node 231 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 11us result status StatusSuccess 2025-05-29T15:27:27.637386Z node 231 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 
ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TKeyValueTest::TestInlineCopyRangeWorks >> TKesusTest::TestCreateSemaphore [GOOD] >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! 
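The repeating "!Reboot ... on event <EventType> !" / "rebooted!" / "tablet resolver refreshed!" triples in this trace come from a reboot-injection harness: the same key-value scenario is replayed once per interesting event type, the tablet leader is killed the first time that event is delivered, and the run still has to finish [GOOD] against the re-elected leader (the "new actor is[...]" lines). A minimal sketch of that pattern follows, assuming hypothetical TTestRuntime, RebootTablet and WaitLeader helpers rather than the real YDB test-harness API:

#include <cstdint>
#include <cstdio>
#include <functional>
#include <vector>

// Sketch only: stand-ins for the real harness, not the YDB test API.
struct TEvent { uint32_t Type; };

struct TTestRuntime {
    std::function<void(const TEvent&)> Observer;  // called for every delivered event
    void RebootTablet(uint64_t tabletId) {
        std::printf("!Reboot %llu\n", (unsigned long long)tabletId);   // leader dies here
    }
    void WaitLeader(uint64_t tabletId) {
        std::printf("tablet resolver refreshed! (%llu)\n", (unsigned long long)tabletId);
    }
};

// Replay `scenario` once per event type; kill the leader on the first
// delivery of that event and require the scenario to still succeed.
inline void RunWithReboots(TTestRuntime& rt, uint64_t tabletId,
                           const std::vector<uint32_t>& eventTypes,
                           const std::function<void()>& scenario) {
    for (uint32_t type : eventTypes) {
        bool fired = false;
        rt.Observer = [&, type](const TEvent& ev) {
            if (!fired && ev.Type == type) {
                fired = true;               // reboot only on the first hit
                rt.RebootTablet(tabletId);  // old actor gone, "new actor is[...]"
                rt.WaitLeader(tabletId);    // wait for re-election before resuming
            }
        };
        scenario();                         // must end in [GOOD] despite the reboot
    }
}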
new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:168:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:78:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:82:2057] recipient: [5:80:2110] Leader for TabletID 72057594037927937 is [5:83:2111] sender: [5:84:2057] recipient: [5:80:2110] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! new actor is[5:83:2111] Leader for TabletID 72057594037927937 is [5:83:2111] sender: [5:169:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! 
new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:172:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:86:2057] recipient: [8:84:2113] Leader for TabletID 72057594037927937 is [8:87:2114] sender: [8:88:2057] recipient: [8:84:2113] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! new actor is[8:87:2114] Leader for TabletID 72057594037927937 is [8:87:2114] sender: [8:105:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:88:2057] recipient: [9:86:2115] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:90:2057] recipient: [9:86:2115] !Reboot 72057594037927937 (actor [9:57:2097]) rebooted! !Reboot 72057594037927937 (actor [9:57:2097]) tablet resolver refreshed! 
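Each reboot pass in this trace targets one stage of the key-value tablet's request pipeline. Read purely from the event names that appear in the "!Reboot ... on event ... !" lines (so this is an observation from the log, not a specification of the pipeline), the covered stages are:

// Reboot points observed in the trace above; the comments paraphrase the
// event names only, ordering and semantics are inferred, not authoritative.
enum class EKeyValueRebootPoint {
    ServerConnected,     // NKikimr::TEvTabletPipe::TEvServerConnected: client pipe attaches
    ExecuteTransaction,  // NKikimr::TEvKeyValue::TEvExecuteTransaction: write batch accepted
    Intermediate,        // NKikimr::TEvKeyValue::TEvIntermediate: intermediate commit step
    Read,                // NKikimr::TEvKeyValue::TEvRead: read request served
    Notify,              // NKikimr::TEvKeyValue::TEvNotify: completion notification
    Collect,             // NKikimr::TEvKeyValue::TEvCollect: garbage collection kickoff
    CompleteGC,          // NKikimr::TEvKeyValue::TEvCompleteGC: garbage collection finished
};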
new actor is[9:89:2116] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:175:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:88:2057] recipient: [10:87:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:87:2115] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:175:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:89:2057] recipient: [11:87:2115] Leader for TabletID 72057594037927937 is [11:90:2116] sender: [11:91:2057] recipient: [11:87:2115] !Reboot 72057594037927937 (actor [11:57:2097]) rebooted! !Reboot 72057594037927937 (actor [11:57:2097]) tablet resolver refreshed! new actor is[11:90:2116] Leader for TabletID 72057594037927937 is [11:90:2116] sender: [11:176:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:57:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:86:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:89:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:90:2057] recipient: [12:88:2116] Leader for TabletID 72057594037927937 is [12:91:2117] sender: [12:92:2057] recipient: [12:88:2116] !Reboot 72057594037927937 (actor [12:57:2097]) rebooted! !Reboot 72057594037927937 (actor [12:57:2097]) tablet resolver refreshed! 
new actor is[12:91:2117] Leader for TabletID 72057594037927937 is [12:91:2117] sender: [12:111:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:55:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:55:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:58:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:75:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:57:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:87:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:89:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:91:2057] recipient: [13:90:2117] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:93:2057] recipient: [13:90:2117] !Reboot 72057594037927937 (actor [13:57:2097]) rebooted! !Reboot 72057594037927937 (actor [13:57:2097]) tablet resolver refreshed! new actor is[13:92:2118] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:112:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:55:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:55:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:58:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:75:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:90:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:93:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:94:2057] recipient: [14:92:2120] Leader for TabletID 72057594037927937 is [14:95:2121] sender: [14:96:2057] recipient: [14:92:2120] !Reboot 72057594037927937 (actor [14:57:2097]) rebooted! !Reboot 72057594037927937 (actor [14:57:2097]) tablet resolver refreshed! new actor is[14:95:2121] Leader for TabletID 72057594037927937 is [14:95:2121] sender: [14:181:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:55:2057] recipient: [15:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:55:2057] recipient: [15:52:2095] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:58:2057] recipient: [15:52:2095] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:75:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:57:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:90:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:93:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:94:2057] recipient: [15:92:2120] Leader for TabletID 72057594037927937 is [15:95:2121] sender: [15:96:2057] recipient: [15:92:2120] !Reboot 72057594037927937 (actor [15:57:2097]) rebooted! !Reboot 72057594037927937 (actor [15:57:2097]) tablet resolver refreshed! 
new actor is[15:95:2121] Leader for TabletID 72057594037927937 is [15:95:2121] sender: [15:181:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:55:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:55:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:58:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:75:2057] recipient: [16:14:2061] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD] Test command err: 2025-05-29T15:27:29.334069Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:29.334100Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:29.337834Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:29.337866Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:29.348998Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:29.349167Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2156], cookie=13773563767490093414, session=0, seqNo=222) 2025-05-29T15:27:29.349221Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:29.370094Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2156], cookie=13773563767490093414, session=1) 2025-05-29T15:27:29.370225Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2157], cookie=5155604303810069663, session=1, seqNo=111) 2025-05-29T15:27:29.380963Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2157], cookie=5155604303810069663, session=1) 2025-05-29T15:27:29.580799Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:29.580829Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:29.584406Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:29.584452Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:29.605759Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:29.605893Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2158], cookie=111, session=0, seqNo=42) 2025-05-29T15:27:29.605924Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:29.605955Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2158], cookie=222, session=1, seqNo=41) 2025-05-29T15:27:29.616733Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2158], cookie=111, session=1) 2025-05-29T15:27:29.616768Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2158], 
cookie=222, session=1) 2025-05-29T15:27:29.818167Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:29.818208Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:29.822697Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:29.823454Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:29.845192Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:29.845341Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:133:2158], cookie=16005682083703560927, session=0, seqNo=0) 2025-05-29T15:27:29.845380Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:29.856046Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:133:2158], cookie=16005682083703560927, session=1) 2025-05-29T15:27:29.856246Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:150:2173], cookie=12407807584147782406) 2025-05-29T15:27:29.856264Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:150:2173], cookie=12407807584147782406) 2025-05-29T15:27:30.055915Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:30.055968Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:30.058907Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:30.059000Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:30.080923Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:30.296913Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:30.296950Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:30.300556Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:30.300597Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:30.322158Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:30.322269Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2156], cookie=9942726813345385263, session=0, seqNo=0) 2025-05-29T15:27:30.322300Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:30.333058Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2156], cookie=9942726813345385263, session=1) 2025-05-29T15:27:30.333130Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-05-29T15:27:30.333169Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 
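The acquire above uses count=18446744073709551615, i.e. 2^64-1: on a semaphore whose capacity is also 2^64-1, a single acquire for the full range consumes all capacity, which is the usual way an exclusive lock is modeled on Kesus-style semaphores (shared holders would acquire count=1 each). A toy model of that accounting, independent of the real Kesus tablet code:

#include <cassert>
#include <cstdint>

// Toy semaphore accounting to show why count = 2^64-1 acts as an exclusive
// lock; this mirrors the arithmetic only, not the actual Kesus implementation.
struct TSemaphoreModel {
    uint64_t Limit;        // total capacity ("Lock1" uses 18446744073709551615)
    uint64_t Acquired = 0;

    bool TryAcquire(uint64_t count) {
        if (count > Limit - Acquired)
            return false;  // would exceed capacity: the session is queued
        Acquired += count;
        return true;
    }
};

int main() {
    constexpr uint64_t Max = 18446744073709551615ull;  // 2^64 - 1
    TSemaphoreModel lock{Max};
    assert(lock.TryAcquire(Max));   // exclusive holder takes the whole capacity
    assert(!lock.TryAcquire(1));    // any later acquire, even count=1, must wait
    TSemaphoreModel shared{Max};
    assert(shared.TryAcquire(1));   // shared holders take count=1 each...
    assert(shared.TryAcquire(1));   // ...so many of them fit concurrently
}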
2025-05-29T15:27:30.333181Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-05-29T15:27:30.343938Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2156], cookie=111) 2025-05-29T15:27:30.344099Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2168], cookie=14788886778492267677, name="Sem1", limit=42) 2025-05-29T15:27:30.344128Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2025-05-29T15:27:30.354826Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2168], cookie=14788886778492267677) 2025-05-29T15:27:30.354941Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:150:2173], cookie=15166872691501595437, name="Sem1", limit=42) 2025-05-29T15:27:30.365724Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:150:2173], cookie=15166872691501595437) 2025-05-29T15:27:30.365848Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:155:2178], cookie=15699478929600707543, name="Sem1", limit=51) 2025-05-29T15:27:30.376741Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:155:2178], cookie=15699478929600707543) 2025-05-29T15:27:30.376894Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:160:2183], cookie=1922792237259715075, name="Lock1", limit=42) 2025-05-29T15:27:30.387686Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:160:2183], cookie=1922792237259715075) 2025-05-29T15:27:30.387833Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:165:2188], cookie=2210066909292395080, name="Lock1", limit=18446744073709551615) 2025-05-29T15:27:30.398755Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:165:2188], cookie=2210066909292395080) 2025-05-29T15:27:30.398905Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:170:2193], cookie=9951493485176403203, name="Sem1") 2025-05-29T15:27:30.398925Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:170:2193], cookie=9951493485176403203) 2025-05-29T15:27:30.398989Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:173:2196], cookie=2632644134077769127, name="Sem2") 2025-05-29T15:27:30.398997Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:173:2196], cookie=2632644134077769127) 2025-05-29T15:27:30.401232Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:30.401255Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:30.401293Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] 
TTxInitSchema::Complete 2025-05-29T15:27:30.401392Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:30.443846Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:30.443918Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-05-29T15:27:30.444040Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:213:2226], cookie=11338937293554210848, name="Sem1") 2025-05-29T15:27:30.444062Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:213:2226], cookie=11338937293554210848) 2025-05-29T15:27:30.444187Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:220:2232], cookie=14328171675138353201, name="Sem2") 2025-05-29T15:27:30.444197Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:220:2232], cookie=14328171675138353201) >> TKeyValueTest::TestIncorrectRequestThenResponseError >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks |69.5%| [TA] $(B)/ydb/core/kqp/ut/runtime/test-results/unittest/{meta.json ... results_accumulator.log} >> TKeyValueTest::TestIncorrectRequestThenResponseError [GOOD] >> TKeyValueTest::TestIncrementalKeySet >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless >> TKesusTest::TestQuoterAccountResourcesPaced [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi |69.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |69.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |69.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/runtime/test-results/unittest/{meta.json ... 
results_accumulator.log} |69.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots >> TKeyValueTest::TestIncrementalKeySet [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi >> TKeyValueTest::TestCleanUpDataOnEmptyTablet >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents [GOOD] >> TKeyValueTest::TestWriteLongKey >> DataShardReadTableSnapshots::ReadTableSnapshot [FAIL] >> DataShardReadTableSnapshots::ReadTableSplitAfter >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [FAIL] >> DataShardReadTableSnapshots::ReadTableUUID >> TKeyValueTest::TestBasicWriteRead >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources >> DataShardReadTableSnapshots::ReadTableSplitAfter [FAIL] >> DataShardReadTableSnapshots::ReadTableUUID [FAIL] |69.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |69.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |69.5%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient >> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> TKesusTest::TestQuoterAccountLabels ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [FAIL] Test command err: 2025-05-29T15:25:40.245618Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:25:40.245650Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:25:40.245664Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0017ed/r3tmp/tmpGv5Mgn/pdisk_1.dat 2025-05-29T15:25:40.483787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:25:40.497585Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:40.505421Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532338647579 != 1748532338647583 2025-05-29T15:25:40.555354Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:25:40.555641Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:25:40.555706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:40.555728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:40.566312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:40.768153Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:59:2106] Handle TEvProposeTransaction 2025-05-29T15:25:40.768178Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:25:40.768213Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-05-29T15:25:40.789858Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-05-29T15:25:40.789904Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:25:40.790120Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:25:40.790135Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:25:40.790197Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:25:40.790230Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:25:40.790250Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:25:40.790728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:25:40.791194Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:25:40.791353Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:25:40.791363Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-05-29T15:25:40.806142Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:40.806411Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:25:40.806492Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:25:40.806559Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:25:40.807828Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:25:40.819698Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:25:40.819748Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:25:40.819914Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:25:40.819924Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:25:40.819931Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:25:40.819995Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:25:40.820026Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:25:40.820040Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:25:40.830338Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: 
TDataShard::TTxInitRestored::Complete 2025-05-29T15:25:40.839664Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:25:40.839745Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:25:40.839771Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:25:40.839777Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:25:40.839782Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:25:40.839788Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:25:40.839859Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:25:40.839866Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:25:40.839961Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:25:40.839983Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:25:40.839999Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:25:40.840006Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:25:40.840014Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:25:40.840019Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:25:40.840023Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:25:40.840028Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:25:40.840033Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:25:40.840139Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:672:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:25:40.840148Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:25:40.840155Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:672:2573], sessionId# [0:0:0] 2025-05-29T15:25:40.840169Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:672:2573] 2025-05-29T15:25:40.840174Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:25:40.840193Z node 1 :TX_DATASHARD DEBUG: 
datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-29T15:25:40.840237Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:25:40.840247Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:25:40.840264Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:25:40.840278Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:25:40.840283Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:25:40.840288Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:25:40.840293Z node 1 :TX_DATASHARD TRACE ... OfSpace} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:26:36.764170Z node 28 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [28:743:2610], Recipient [28:708:2590]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:26:36.764174Z node 28 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-05-29T15:26:36.764181Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:9} Tx{15, NKikimr::NDataShard::TDataShard::TTxPersistSubDomainOutOfSpace} queued, type NKikimr::NDataShard::TDataShard::TTxPersistSubDomainOutOfSpace 2025-05-29T15:26:36.764187Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:9} Tx{15, NKikimr::NDataShard::TDataShard::TTxPersistSubDomainOutOfSpace} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:26:36.764195Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:9} Tx{15, NKikimr::NDataShard::TDataShard::TTxPersistSubDomainOutOfSpace} hope 1 -> done Change{9, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-05-29T15:26:36.764203Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:9} Tx{15, NKikimr::NDataShard::TDataShard::TTxPersistSubDomainOutOfSpace} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:26:36.764247Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:50} commited cookie 1 for step 49 2025-05-29T15:26:36.764310Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:50} Tx{62, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-05-29T15:26:36.764316Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:50} Tx{62, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:26:36.764342Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:50} Tx{62, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{49, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-05-29T15:26:36.764348Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:50} Tx{62, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 
4194304b of static, Memory{0 dyn 0} 2025-05-29T15:26:36.764497Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:51} Tx{63, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-05-29T15:26:36.764505Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:51} Tx{63, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:26:36.764524Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:51} Tx{63, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{50, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-05-29T15:26:36.764529Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:51} Tx{63, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:26:36.764624Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:52} commited cookie 1 for step 50 2025-05-29T15:26:36.764667Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:52} Tx{64, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-05-29T15:26:36.764673Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:52} Tx{64, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:26:36.764691Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:52} Tx{64, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{51, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-05-29T15:26:36.764696Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:52} Tx{64, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:26:36.764766Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:53} commited cookie 1 for step 51 2025-05-29T15:26:36.764785Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:53} Tx{65, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-05-29T15:26:36.764790Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:53} Tx{65, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:26:36.764804Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:53} Tx{65, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{52, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-05-29T15:26:36.764812Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:53} Tx{65, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:26:36.764859Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:54} Tx{66, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-05-29T15:26:36.764864Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:54} Tx{66, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:26:36.764880Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:54} Tx{66, 
NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{53, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-05-29T15:26:36.764885Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:54} Tx{66, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:26:36.764909Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:55} commited cookie 1 for step 52 2025-05-29T15:26:36.764935Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:55} commited cookie 1 for step 53 2025-05-29T15:26:36.764952Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:55} commited cookie 1 for step 54 2025-05-29T15:26:36.765020Z node 28 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [28:838:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:26:36.765032Z node 28 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:397: TClient[72057594046644480] received poison pill [28:866:2699] 2025-05-29T15:26:36.765041Z node 28 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:505: TClient[72057594046644480] notify reset [28:866:2699] 2025-05-29T15:26:36.796624Z node 28 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594046644480] ::Bootstrap [28:910:2728] 2025-05-29T15:26:36.796650Z node 28 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594046644480] lookup [28:910:2728] 2025-05-29T15:26:36.796671Z node 28 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594046644480] queue send [28:910:2728] 2025-05-29T15:26:36.796678Z node 28 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:190: TClient[72057594046644480] forward result local node, try to connect [28:910:2728] 2025-05-29T15:26:36.796687Z node 28 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72057594046644480]::SendEvent [28:910:2728] 2025-05-29T15:26:36.796728Z node 28 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72057594046644480] connected with status OK role: Leader [28:910:2728] 2025-05-29T15:26:36.796734Z node 28 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72057594046644480] send queued [28:910:2728] 2025-05-29T15:26:36.796738Z node 28 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:629: TClient[72057594046644480] push event to server [28:910:2728] 2025-05-29T15:26:36.796767Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:55} Tx{67, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose 2025-05-29T15:26:36.796775Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:55} Tx{67, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:26:36.796970Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:55} Tx{67, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} hope 1 -> done Change{54, redo 120b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-05-29T15:26:36.796985Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:55} Tx{67, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:26:36.797129Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:56} commited cookie 1 for step 55 2025-05-29T15:26:36.797237Z node 28 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [28:907:2728] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:26:36.797274Z node 28 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:397: TClient[72057594046644480] received poison pill [28:910:2728] 2025-05-29T15:26:36.797281Z node 28 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:505: TClient[72057594046644480] notify reset [28:910:2728] 2025-05-29T15:26:36.806762Z node 28 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [28:917:2737], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:26:36.807230Z node 28 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=28&id=YWQyNTc3Yi01MGU0MGY2LWIzNTY0ODQ3LTc4NjNmOWU3, ActorId: [28:828:2680], ActorState: ExecuteState, TraceId: 01jweae0j275expa3mqh3m7b25, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/datashard_ut_volatile.cpp:3860, virtual void NKikimr::NTestSuiteDataShardVolatile::TTestCaseGracefulShardRestartNoEarlyReadSetAck::Execute_(NUnitTest::TTestContext &): (KqpSimpleExec(runtime, R"( UPSERT INTO `/Root/table` (key, value) VALUES (1, 1), (11, 11), (21, 21); )") == "") failed: ("ERROR: INTERNAL_ERROR" != ) , with diff: ("ERROR: INTERNAL_ERROR"|) TBackTrace::Capture()+28 (0x13C60C3C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13E14569) NKikimr::NTestSuiteDataShardVolatile::TTestCaseGracefulShardRestartNoEarlyReadSetAck::Execute_(NUnitTest::TTestContext&)+4410 (0x13ABDCEA) NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13AC80B7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13E1641E) NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute()+421 (0x13AC7915) NUnitTest::TTestFactory::Execute()+803 (0x13E16B93) NUnitTest::RunMain(int, char**)+3021 (0x13E2873D) ??+0 (0x7FEE8DA01D90) __libc_start_main+128 (0x7FEE8DA01E40) _start+41 (0x12AB8029)
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence_reboots/unittest
>> TSequenceReboots::CopyTableWithSequence [GOOD]
Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:25:49.113332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:25:49.113354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:49.113360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:25:49.113366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
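
Stepping back to the DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck failure above, before the TSequenceReboots trace continues: the test expects KqpSimpleExec to return an empty string on success, but the UPSERT never reached execution because compilation died with "yql_expr.h:1874: index out of range". A minimal sketch of the failing check, reconstructed from the assertion message and backtrace; the helper's exact signature and return type are taken on trust from the log, not verified against the source:

    // Reconstructed from the assertion at datashard_ut_volatile.cpp:3860.
    // KqpSimpleExec is assumed to return "" on success; here it returned
    // "ERROR: INTERNAL_ERROR" because the query failed to compile.
    TString result = KqpSimpleExec(runtime, R"(
        UPSERT INTO `/Root/table` (key, value) VALUES (1, 1), (11, 11), (21, 21);
    )");
    UNIT_ASSERT_VALUES_EQUAL(result, "");  // fails: "ERROR: INTERNAL_ERROR" != ""
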
2025-05-29T15:25:49.113377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:25:49.113381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:25:49.113391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:49.113404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:25:49.113512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:25:49.113580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:25:49.127457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:25:49.127477Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:49.127576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:25:49.130051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:25:49.130080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:25:49.130109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:25:49.132339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:25:49.132392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:25:49.132465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:49.132588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:25:49.133074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:49.133107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:25:49.133262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:25:49.133268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:49.133287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2025-05-29T15:25:49.133292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:25:49.133296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:25:49.133309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:25:49.134204Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:25:49.154155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:25:49.154234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.154297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:25:49.154339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:25:49.154349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.155093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:49.155122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:25:49.155174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.155183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:25:49.155189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:25:49.155194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:25:49.155572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.155582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:25:49.155587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:25:49.155871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.155879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:49.155884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:49.155891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:25:49.156567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:25:49.156914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:25:49.156949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:25:49.157131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:49.157153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:25:49.157159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:49.157217Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
75807 StartValue: 1 NextValue: 2 Cache: 1 Increment: 1 2025-05-29T15:27:29.771289Z node 191 :SEQUENCESHARD NOTICE: tx_restore_sequence.cpp:98: [sequenceshard 72075186233409546] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 72057594046678944, LocalPathId: 10] Record# PathId { OwnerId: 72057594046678944 LocalId: 10 } TxId: 1003 TxPartId: 3 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 2 Cache: 1 Increment: 1 2025-05-29T15:27:29.802418Z node 191 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:103: [sequenceshard 72075186233409546] TTxRestoreSequence.Complete 2025-05-29T15:27:29.802477Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 276299788, Sender [191:354:2334], Recipient [191:125:2150]: NKikimrTxSequenceShard.TEvRestoreSequenceResult Status: SUCCESS Origin: 72075186233409546 TxId: 1003 TxPartId: 3 2025-05-29T15:27:29.802484Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4927: StateWork, processing event NSequenceShard::TEvSequenceShard::TEvRestoreSequenceResult 2025-05-29T15:27:29.802490Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6219: Handle TEvRestoreSequenceResult, at schemeshard: 72057594046678944, message: Status: SUCCESS Origin: 72075186233409546 TxId: 1003 TxPartId: 3 2025-05-29T15:27:29.802509Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1003:3, at schemeshard: 72057594046678944, message: Status: SUCCESS Origin: 72075186233409546 TxId: 1003 TxPartId: 3 2025-05-29T15:27:29.802516Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_sequence.cpp:310: TCopySequence TProposedCopySequence HandleReply TEvRestoreSequenceResult shardId# 72075186233409546 status# SUCCESS operationId# 1003:3 at tablet 72057594046678944 2025-05-29T15:27:29.802545Z node 191 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:3 140 -> 240 2025-05-29T15:27:29.802565Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:27:29.802570Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:693: Ack tablet strongly msg opId: 1003:3 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:10 2025-05-29T15:27:29.802934Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2025-05-29T15:27:29.802944Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:27:29.802948Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 1003:3 2025-05-29T15:27:29.802970Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [191:125:2150], Recipient [191:125:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:27:29.802974Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:27:29.802981Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944 2025-05-29T15:27:29.802987Z node 191 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:3 
ProgressState 2025-05-29T15:27:29.802997Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:27:29.803001Z node 191 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4 2025-05-29T15:27:29.803004Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:27:29.803007Z node 191 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4 2025-05-29T15:27:29.803010Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:27:29.803013Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: true 2025-05-29T15:27:29.803027Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [191:469:2420] message: TxId: 1003 2025-05-29T15:27:29.803032Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:27:29.803039Z node 191 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:27:29.803043Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:27:29.803069Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-05-29T15:27:29.803072Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:27:29.803075Z node 191 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:27:29.803078Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:27:29.803081Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2025-05-29T15:27:29.803083Z node 191 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:27:29.803085Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:27:29.803092Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-05-29T15:27:29.803094Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:27:29.803097Z node 191 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:3 2025-05-29T15:27:29.803099Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:3 2025-05-29T15:27:29.803104Z node 
191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2025-05-29T15:27:29.803106Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-29T15:27:29.803404Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:27:29.803422Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [191:469:2420] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1003 at schemeshard: 72057594046678944 2025-05-29T15:27:29.803453Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:27:29.803457Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [191:662:2584] 2025-05-29T15:27:29.803483Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [191:664:2586], Recipient [191:125:2150]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:29.803489Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:29.803492Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 2025-05-29T15:27:29.803546Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [191:769:2687], Recipient [191:125:2150]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/copy/myseq" Options { ShowPrivateTable: true } 2025-05-29T15:27:29.803550Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:27:29.803558Z node 191 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/copy/myseq" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:27:29.803592Z node 191 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/copy/myseq" took 28us result status StatusSuccess 2025-05-29T15:27:29.803644Z node 191 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/copy/myseq" PathDescription { Self { Name: "myseq" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 
ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 10 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:29.804003Z node 191 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72075186233409546] TTxAllocateSequence.Execute PathId# [OwnerId: 72057594046678944, LocalPathId: 10] Cache# 1 2025-05-29T15:27:29.804019Z node 191 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72075186233409546] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 72057594046678944, LocalPathId: 10] AllocationStart# 2 AllocationCount# 1 AllocationIncrement# 1 2025-05-29T15:27:29.814664Z node 191 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72075186233409546] TTxAllocateSequence.Complete
>> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk
>> TKesusTest::TestQuoterAccountLabels [GOOD]
>> TKesusTest::TestPassesUpdatedPropsToSession
>> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi
>> TKesusTest::TestPassesUpdatedPropsToSession [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_table/unittest
>> DataShardReadTableSnapshots::ReadTableSplitAfter [FAIL]
Test command err: 2025-05-29T15:27:30.785927Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:27:30.785959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:27:30.785970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0011ee/r3tmp/tmpBMTu0v/pdisk_1.dat 2025-05-29T15:27:30.894277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:27:30.907305Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:30.910003Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532450408548 != 1748532450408552 2025-05-29T15:27:30.951338Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:27:30.951687Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:27:30.951797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:30.951821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:30.962446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:31.034776Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:59:2106] Handle TEvProposeTransaction 2025-05-29T15:27:31.034799Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:27:31.034838Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-05-29T15:27:31.046588Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-05-29T15:27:31.046622Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:27:31.046793Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:27:31.046804Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:27:31.046852Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:640:2548] txid# 
281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:27:31.046887Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:27:31.046899Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:27:31.046949Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:27:31.047222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:27:31.047390Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:27:31.047398Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-05-29T15:27:31.060846Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:27:31.061036Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:27:31.061100Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:27:31.061152Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:27:31.067471Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:27:31.067612Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:27:31.067640Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:27:31.067767Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:27:31.067774Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:27:31.067778Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:27:31.067818Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:27:31.067831Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:27:31.067840Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:27:31.078103Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:27:31.083183Z node 1 
:TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:27:31.083273Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:27:31.083307Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:27:31.083313Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:27:31.083319Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:27:31.083325Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:27:31.083397Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:31.083407Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:31.083507Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:27:31.083534Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:27:31.083551Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:27:31.083558Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:27:31.083566Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:27:31.083571Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:27:31.083576Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:27:31.083582Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:27:31.083587Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:27:31.083711Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:31.083720Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:31.083727Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:27:31.083738Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:27:31.083743Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:27:31.083764Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 
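
For readers following the schema flow: the ESchemeOpCreateTable record proposed a few lines above (txid 281474976715657, "table-1" with two Uint32 columns and a single uniform partition) corresponds roughly to the YQL below. This is an illustrative equivalent only; the test drives the operation through TEvProposeTransaction directly, and the KqpSchemeExec helper name is an assumption modeled on the KqpSimpleExec helper seen in the earlier backtrace:

    // Hypothetical YQL equivalent of the proposed ESchemeOpCreateTable record.
    auto status = KqpSchemeExec(runtime, R"(
        CREATE TABLE `/Root/table-1` (
            key Uint32,
            value Uint32,
            PRIMARY KEY (key)
        ) WITH (UNIFORM_PARTITIONS = 1);
    )");
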
2025-05-29T15:27:31.083815Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:27:31.083827Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:27:31.083847Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:27:31.083859Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:27:31.083864Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:27:31.083870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:27:31.083875Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 7 ... 7:32.372433Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [2:59:2106] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:27:32.372440Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [2:748:2624] 2025-05-29T15:27:32.373009Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [2:748:2624] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:27:32.373022Z node 2 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [2:748:2624] txid# 281474976715658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:27:32.373025Z node 2 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [2:748:2624] txid# 281474976715658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:27:32.373378Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [2:748:2624] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:27:32.373387Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [2:748:2624] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:27:32.373435Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [2:748:2624] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:27:32.373456Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [2:748:2624] HANDLE 
EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:27:32.373464Z node 2 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [2:748:2624] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:27:32.373500Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [2:748:2624] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:27:32.373670Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:27:32.373857Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [2:748:2624] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-05-29T15:27:32.373864Z node 2 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [2:748:2624] txid# 281474976715658 SEND to# [2:747:2623] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-05-29T15:27:32.374392Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [2:683:2579], Recipient [2:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:27:32.374403Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:27:32.395230Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [2:706:2594], Recipient [2:663:2568]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [2:706:2594] ServerId: [2:712:2600] } 2025-05-29T15:27:32.395252Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:27:32.518759Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [2:683:2579], Recipient [2:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:27:32.518783Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:27:32.519188Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:747:2623], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:27:32.549732Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [2:59:2106] Handle TEvProposeTransaction 2025-05-29T15:27:32.549758Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [2:59:2106] TxId# 281474976715659 ProcessProposeTransaction 2025-05-29T15:27:32.549778Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [2:817:2662] 2025-05-29T15:27:32.550376Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [2:817:2662] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:27:32.550392Z node 2 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [2:817:2662] txid# 281474976715659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:27:32.550396Z node 2 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [2:817:2662] txid# 281474976715659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:27:32.550589Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [2:817:2662] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:27:32.550600Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [2:817:2662] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:27:32.550649Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [2:817:2662] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:27:32.550673Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [2:817:2662] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:27:32.550687Z node 2 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [2:817:2662] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-05-29T15:27:32.550973Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [2:817:2662] txid# 281474976715659 HANDLE EvClientConnected 2025-05-29T15:27:32.551093Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [2:817:2662] txid# 281474976715659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', 
error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:27:32.551119Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:817:2662] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:32.551125Z node 2 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [2:817:2662] txid# 281474976715659 SEND to# [2:747:2623] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48} 2025-05-29T15:27:32.557810Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:827:2671], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:32.558174Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=MWVmYzA1OTItOTQ2NDcwZS1jMDE4ZGI0OS05YzM5ZWRlMA==, ActorId: [2:731:2613], ActorState: ExecuteState, TraceId: 01jweafq0md2cgz1epjxymqff3, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13AC660C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C7A1A9) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x2630CE94) NKikimr::NTestSuiteDataShardReadTableSnapshots::TTestCaseReadTableSplitAfter::Execute_(NUnitTest::TTestContext&)+1312 (0x1399D290) NKikimr::NTestSuiteDataShardReadTableSnapshots::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139BE577) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7C05E) NKikimr::NTestSuiteDataShardReadTableSnapshots::TCurrentTest::Execute()+481 (0x139BDDB1) NUnitTest::TTestFactory::Execute()+803 (0x13C7C7D3) NUnitTest::RunMain(int, char**)+3021 (0x13C8E37D) ??+0 (0x7FA3FD8F6D90) __libc_start_main+128 (0x7FA3FD8F6E40) _start+41 (0x12A01029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] Test command err: 2025-05-29T15:27:28.150364Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:28.150396Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:28.154380Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:28.154408Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:28.165638Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:28.166442Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:131:2156], cookie=10079499739074041206, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-05-29T15:27:28.166507Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-05-29T15:27:28.187466Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:131:2156], cookie=10079499739074041206) 2025-05-29T15:27:28.187649Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:141:2164], cookie=11761836485234323980, path="/Root/Res", config={ }) 2025-05-29T15:27:28.187705Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-05-29T15:27:28.198410Z node 1 :KESUS_TABLET DEBUG: 
tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:141:2164], cookie=11761836485234323980) 2025-05-29T15:27:28.198814Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:146:2169]. Cookie: 17902167504157784298. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:28.198827Z node 1 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[1:146:2169], cookie=17902167504157784298) 2025-05-29T15:27:28.198899Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [1:146:2169]. Cookie: 17911038854582505908. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 28000 } } 2025-05-29T15:27:28.198904Z node 1 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[1:146:2169], cookie=17911038854582505908) 2025-05-29T15:27:29.993421Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:29.993452Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:29.996495Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:29.996525Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:30.017623Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:30.017740Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:133:2158], cookie=8145505205791584383, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-05-29T15:27:30.017818Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-05-29T15:27:30.028571Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:133:2158], cookie=8145505205791584383) 2025-05-29T15:27:30.028789Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:143:2166]. Cookie: 14002347773060434633. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:30.028798Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:143:2166], cookie=14002347773060434633) 2025-05-29T15:27:30.028863Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:143:2166]. Cookie: 6525611922718553159. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:30.028868Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:143:2166], cookie=6525611922718553159) 2025-05-29T15:27:30.028925Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:143:2166]. Cookie: 13932717193510210795. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-05-29T15:27:30.028930Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:143:2166], cookie=13932717193510210795) 2025-05-29T15:27:30.028973Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:143:2166]. Cookie: 11228746768131868885. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-05-29T15:27:30.028978Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:143:2166], cookie=11228746768131868885) 2025-05-29T15:27:31.838806Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:31.838834Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:31.841770Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:31.841913Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:31.863298Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:31.863416Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:133:2158], cookie=4184468553835784871, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-05-29T15:27:31.863489Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-05-29T15:27:31.874297Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:133:2158], cookie=4184468553835784871) 2025-05-29T15:27:31.874462Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:143:2166], cookie=13756036275998956050, path="/Root/Res1", config={ }) 2025-05-29T15:27:31.874523Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2025-05-29T15:27:31.885258Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:143:2166], cookie=13756036275998956050) 2025-05-29T15:27:31.885440Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:148:2171], cookie=11335162347395049467, path="/Root/Res2", config={ }) 2025-05-29T15:27:31.885496Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2025-05-29T15:27:31.896294Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:148:2171], cookie=11335162347395049467) 2025-05-29T15:27:31.896496Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:153:2176]. Cookie: 3162017672504743219. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:31.896505Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:153:2176], cookie=3162017672504743219) 2025-05-29T15:27:31.896585Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:153:2176]. 
Cookie: 6992305450216159513. Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:31.896590Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:153:2176], cookie=6992305450216159513) 2025-05-29T15:27:31.896640Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [3:153:2176]. Cookie: 17319791001730793948. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1020500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1020500 } } 2025-05-29T15:27:31.896644Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[3:153:2176], cookie=17319791001730793948) 2025-05-29T15:27:33.690950Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:33.690997Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:33.695558Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:33.695639Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:33.717335Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:33.717540Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:133:2158], cookie=17731402448973908258, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-05-29T15:27:33.717630Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-05-29T15:27:33.731153Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:133:2158], cookie=17731402448973908258) 2025-05-29T15:27:33.731571Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:143:2166]. Cookie: 10976770633389281986. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { Enabled: true BillingPeriodSec: 2 Labels { key: "k1" value: "v1" } Labels { key: "k2" value: "v2" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:33.731587Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:143:2166], cookie=10976770633389281986) 2025-05-29T15:27:33.731683Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:143:2166]. Cookie: 5031458009669474682. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 27500 } } 2025-05-29T15:27:33.731692Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:143:2166], cookie=5031458009669474682) 2025-05-29T15:27:35.532629Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:35.532657Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:35.535234Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:35.535255Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:35.556837Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:35.556992Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:131:2156], cookie=13148662284578751758, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-05-29T15:27:35.557045Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-05-29T15:27:35.567745Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:131:2156], cookie=13148662284578751758) 2025-05-29T15:27:35.567869Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:140:2163], cookie=16395491856139823408, path="/Root/Res", config={ }) 2025-05-29T15:27:35.567913Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-05-29T15:27:35.578568Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:140:2163], cookie=16395491856139823408) 2025-05-29T15:27:35.578752Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:145:2168]. Cookie: 16604504388017861646. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:35.578763Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:145:2168], cookie=16604504388017861646) 2025-05-29T15:27:35.578824Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:34: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:149:2172], cookie=6720290162940349004, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2025-05-29T15:27:35.578865Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:61: [72057594037927937] Updated quoter resource 1 "Root" 2025-05-29T15:27:35.578884Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:145:2168]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2025-05-29T15:27:35.589409Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:75: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:149:2172], cookie=6720290162940349004) 2025-05-29T15:27:35.589534Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:193: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:145:2168]. Cookie: 2358801507426037989. Data: { } 2025-05-29T15:27:35.589540Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:198: [72057594037927937] Update quoter resources consumption state (sender=[5:145:2168], cookie=2358801507426037989) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableUUID [FAIL] Test command err: 2025-05-29T15:27:30.628724Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:27:30.628760Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:27:30.628774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001200/r3tmp/tmpV5jaHL/pdisk_1.dat 2025-05-29T15:27:30.740806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:27:30.754935Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:30.759046Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532450222440 != 1748532450222444 2025-05-29T15:27:30.800553Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:27:30.800936Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:27:30.801025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:30.801040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:30.811687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:30.884503Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:59:2106] Handle TEvProposeTransaction 2025-05-29T15:27:30.884525Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:27:30.884555Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-05-29T15:27:30.902635Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-05-29T15:27:30.902669Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:27:30.902890Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:27:30.902909Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:27:30.902978Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:640:2548] txid# 
281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:27:30.903022Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:27:30.903040Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:27:30.903119Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:27:30.903524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:27:30.903805Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:27:30.903819Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-05-29T15:27:30.918099Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:27:30.918366Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:27:30.918453Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:27:30.918520Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:27:30.927422Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:27:30.927580Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:27:30.927613Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:27:30.927811Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:27:30.927820Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:27:30.927826Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:27:30.927880Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:27:30.927899Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:27:30.927911Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:27:30.938204Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:27:30.942386Z node 1 
:TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:27:30.942464Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:27:30.942490Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:27:30.942496Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:27:30.942502Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:27:30.942508Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:27:30.942565Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:30.942573Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:30.942669Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:27:30.942695Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:27:30.942709Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:27:30.942715Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:27:30.942723Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:27:30.942729Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:27:30.942734Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:27:30.942754Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:27:30.942759Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:27:30.942885Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:674:2574], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:30.942894Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:30.942902Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:674:2574], sessionId# [0:0:0] 2025-05-29T15:27:30.942916Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:674:2574] 2025-05-29T15:27:30.942920Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:27:30.942942Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-05-29T15:27:30.942992Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:27:30.943003Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:27:30.943021Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:27:30.943029Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:27:30.943034Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:27:30.943041Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:27:30.943045Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 7 ... 9T15:27:32.433585Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [2:59:2106] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:27:32.433596Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [2:748:2624] 2025-05-29T15:27:32.434498Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [2:748:2624] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:27:32.434521Z node 2 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [2:748:2624] txid# 281474976715658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:27:32.434527Z node 2 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [2:748:2624] txid# 281474976715658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:27:32.434951Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [2:748:2624] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:27:32.434962Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [2:748:2624] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:27:32.435010Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [2:748:2624] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:27:32.435036Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [2:748:2624] HANDLE 
EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:27:32.435050Z node 2 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [2:748:2624] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:27:32.435105Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [2:748:2624] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:27:32.435293Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:27:32.435522Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [2:748:2624] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-05-29T15:27:32.435531Z node 2 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [2:748:2624] txid# 281474976715658 SEND to# [2:747:2623] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-05-29T15:27:32.436136Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [2:683:2579], Recipient [2:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:27:32.436149Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:27:32.457090Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [2:706:2594], Recipient [2:663:2568]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [2:706:2594] ServerId: [2:712:2600] } 2025-05-29T15:27:32.457123Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:27:32.581245Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269746185, Sender [2:683:2579], Recipient [2:663:2568]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-05-29T15:27:32.581274Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-29T15:27:32.581828Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:747:2623], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:27:32.612536Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [2:59:2106] Handle TEvProposeTransaction 2025-05-29T15:27:32.612560Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [2:59:2106] TxId# 281474976715659 ProcessProposeTransaction 2025-05-29T15:27:32.612580Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [2:817:2662] 2025-05-29T15:27:32.613569Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [2:817:2662] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:27:32.613594Z node 2 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [2:817:2662] txid# 281474976715659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:27:32.613600Z node 2 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [2:817:2662] txid# 281474976715659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:27:32.613841Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [2:817:2662] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:27:32.613857Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [2:817:2662] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:27:32.613940Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [2:817:2662] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:27:32.613974Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [2:817:2662] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:27:32.613992Z node 2 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [2:817:2662] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-05-29T15:27:32.614280Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [2:817:2662] txid# 281474976715659 HANDLE EvClientConnected 2025-05-29T15:27:32.614400Z node 2 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [2:817:2662] txid# 281474976715659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', 
error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:27:32.614428Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:817:2662] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:32.614433Z node 2 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [2:817:2662] txid# 281474976715659 SEND to# [2:747:2623] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48} 2025-05-29T15:27:32.624528Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:827:2671], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:32.625001Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=ZWExNDQxNjQtOTE5YzhkNzgtZDI5NmEwNzItYzk3ODczYTg=, ActorId: [2:731:2613], ActorState: ExecuteState, TraceId: 01jweafq2h71apn81pyese3q17, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13AC660C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C7A1A9) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x2630CE94) NKikimr::NTestSuiteDataShardReadTableSnapshots::TTestCaseReadTableUUID::Execute_(NUnitTest::TTestContext&)+2984 (0x139B9B88) NKikimr::NTestSuiteDataShardReadTableSnapshots::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139BE577) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C7C05E) NKikimr::NTestSuiteDataShardReadTableSnapshots::TCurrentTest::Execute()+481 (0x139BDDB1) NUnitTest::TTestFactory::Execute()+803 (0x13C7C7D3) NUnitTest::RunMain(int, char**)+3021 (0x13C8E37D) ??+0 (0x7FCDF3B7CD90) __libc_start_main+128 (0x7FCDF3B7CE40) _start+41 (0x12A01029) |69.5%| [TA] $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KeyValueReadStorage::ReadRangeOk1Key [GOOD] >> KeyValueReadStorage::ReadRangeOk [GOOD] >> KeyValueReadStorage::ReadRangeNoData [GOOD] >> KeyValueReadStorage::ReadOk [GOOD] >> KeyValueReadStorage::ReadNotWholeBlobOk [GOOD] >> KeyValueReadStorage::ReadOneItemError [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingle >> TKeyValueCollectorTest::TestKeyValueCollectorSingle [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadRangeNoData [GOOD] Test command err: 2025-05-29T15:27:36.950819Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-05-29T15:27:36.951236Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-05-29T15:27:36.952712Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2025-05-29T15:27:36.952731Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-05-29T15:27:36.953881Z 1 00h00m00.000000s :KEYVALUE INFO: {KV320@keyvalue_storage_read_request.cpp:122} Inline read request KeyValue# 1 Status# OK 2025-05-29T15:27:36.953894Z 1 00h00m00.000000s :KEYVALUE DEBUG: {KV322@keyvalue_storage_read_request.cpp:134} Expected OK or UNKNOWN and given OK readCount# 0 2025-05-29T15:27:36.953916Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadOneItemError [GOOD] Test command err: 2025-05-29T15:27:36.945186Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-05-29T15:27:36.945561Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-05-29T15:27:36.946816Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-05-29T15:27:36.946834Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-05-29T15:27:36.947776Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-05-29T15:27:36.947797Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV317@keyvalue_storage_read_request.cpp:310} Unexpected EvGetResult. 
KeyValue# 1 Status# OK Id# [1:2:3:2:0:1:0] ResponseStatus# ERROR Deadline# 586524-01-19T08:01:49.551615Z Now# 1970-01-01T00:00:00.000000Z SentAt# 1970-01-01T00:00:00.000000Z GotAt# 2025-05-29T15:27:36.947750Z ErrorReason# >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:168:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! 
Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:77:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:80:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:79:2110] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:83:2057] recipient: [5:79:2110] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! new actor is[5:82:2111] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:168:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:78:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:82:2057] recipient: [6:80:2110] Leader for TabletID 72057594037927937 is [6:83:2111] sender: [6:84:2057] recipient: [6:80:2110] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:83:2111] Leader for TabletID 72057594037927937 is [6:83:2111] sender: [6:169:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] |69.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |69.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |69.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> TKeyValueTest::TestCopyRangeWorks >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk |69.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... 
results_accumulator.log} |69.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks >> TKeyValueTest::TestConcatWorks |69.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |69.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |69.6%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |69.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |69.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |69.6%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] Test command err: 2025-05-29T15:27:27.654556Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:27.654578Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:27.657359Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:27.657380Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:27.668422Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:27.669273Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:131:2156], cookie=10211466200748430793, path="/Res", config={ MaxUnitsPerSecond: -100 }) 2025-05-29T15:27:27.669306Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:131:2156], cookie=10211466200748430793) 2025-05-29T15:27:27.669387Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:137:2161], cookie=17137546370134779411, path="/ResWithoutMaxUnitsPerSecond", config={ }) 2025-05-29T15:27:27.669395Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:137:2161], cookie=17137546370134779411) 2025-05-29T15:27:27.669426Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:140:2164], cookie=13420637782046295896, path="/ResWithMaxUnitsPerSecond", config={ MaxUnitsPerSecond: 1 }) 2025-05-29T15:27:27.669451Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "ResWithMaxUnitsPerSecond" 2025-05-29T15:27:27.690187Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:140:2164], cookie=13420637782046295896) 2025-05-29T15:27:27.690306Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:146:2169], cookie=12204901881247024120, path="/ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond", config={ }) 2025-05-29T15:27:27.690360Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond" 2025-05-29T15:27:27.701059Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] 
TTxQuoterResourceAdd::Complete (sender=[1:146:2169], cookie=12204901881247024120) 2025-05-29T15:27:27.886832Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:27.886863Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:27.889748Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:27.889775Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:27.910805Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:27.910907Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:133:2158], cookie=7410845094030874865, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-05-29T15:27:27.910985Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-05-29T15:27:27.921652Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:133:2158], cookie=7410845094030874865) 2025-05-29T15:27:27.921772Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:143:2166], cookie=6962813563933943361, path="/Root/Res", config={ }) 2025-05-29T15:27:27.921815Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-05-29T15:27:27.932495Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:143:2166], cookie=6962813563933943361) 2025-05-29T15:27:27.932976Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:148:2171]. Cookie: 10543554786013440651. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 2 Version: "version" Schema: "schema" CloudId: "cloud" FolderId: "folder" ResourceId: "resource" SourceId: "source" Tags { key: "key" value: "value" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:27.932991Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:148:2171], cookie=10543554786013440651) 2025-05-29T15:27:27.933083Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:148:2171]. Cookie: 16400808949597321596. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2025-05-29T15:27:27.933091Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:148:2171], cookie=16400808949597321596) 2025-05-29T15:27:29.731255Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:29.731282Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:29.735212Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:29.735372Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:29.756645Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:29.756761Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:133:2158], cookie=460468917605171796, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-05-29T15:27:29.756827Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-05-29T15:27:29.767562Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:133:2158], cookie=460468917605171796) 2025-05-29T15:27:29.767704Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:143:2166], cookie=13106925890867181202, path="/Root/Res", config={ }) 2025-05-29T15:27:29.767775Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-05-29T15:27:29.778493Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:143:2166], cookie=13106925890867181202) 2025-05-29T15:27:29.778706Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:148:2171]. Cookie: 1879733354763551245. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:29.778715Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:148:2171], cookie=1879733354763551245) 2025-05-29T15:27:29.778818Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [3:148:2171]. Cookie: 1718193380741089509. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019000 } } 2025-05-29T15:27:29.778826Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[3:148:2171], cookie=1718193380741089509) 2025-05-29T15:27:31.576461Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:31.576500Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:31.580614Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:31.580726Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:31.602434Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:31.602604Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:133:2158], cookie=7645992250681291209, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-05-29T15:27:31.602697Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-05-29T15:27:31.613660Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:133:2158], cookie=7645992250681291209) 2025-05-29T15:27:31.613958Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:143:2166]. Cookie: 14280273205902952686. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:31.613972Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:143:2166], cookie=14280273205902952686) 2025-05-29T15:27:31.614063Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:143:2166]. Cookie: 807211636771003527. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-05-29T15:27:31.614072Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:143:2166], cookie=807211636771003527) 2025-05-29T15:27:31.614125Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:143:2166]. Cookie: 10034933505881710154. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-05-29T15:27:31.614130Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:143:2166], cookie=10034933505881710154) 2025-05-29T15:27:33.418302Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:33.418337Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:33.421853Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:33.421886Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:33.443302Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:33.443437Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:131:2156], cookie=1857736359457634376, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-05-29T15:27:33.443514Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-05-29T15:27:33.454292Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:131:2156], cookie=1857736359457634376) 2025-05-29T15:27:33.454510Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:141:2164]. Cookie: 11465456958039577651. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:33.454521Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:141:2164], cookie=11465456958039577651) 2025-05-29T15:27:33.454591Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:141:2164]. Cookie: 12975545525377656642. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 3000000 } } 2025-05-29T15:27:33.454598Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:141:2164], cookie=12975545525377656642) 2025-05-29T15:27:35.876969Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:180:2188]. Cookie: 15925984625588974215. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:35.876993Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:180:2188], cookie=15925984625588974215) 2025-05-29T15:27:35.877073Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:180:2188]. Cookie: 10747746567238836314. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 9000000 } } 2025-05-29T15:27:35.877080Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:180:2188], cookie=10747746567238836314) 2025-05-29T15:27:37.929590Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:213:2214]. Cookie: 15280722447336903677. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:37.929615Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:213:2214], cookie=15280722447336903677) 2025-05-29T15:27:37.929711Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:213:2214]. Cookie: 15907987479887825133. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 15000000 } } 2025-05-29T15:27:37.929719Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:213:2214], cookie=15907987479887825133) |69.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |69.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |69.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore >> TTablesWithReboots::CreateDroppedTableWithReboots [GOOD] >> TTablesWithReboots::DropTableWithReboots [GOOD] |69.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydbd/ydbd |69.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |69.6%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd >> TKeyValueTest::TestRewriteThenLastValue ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CreateDroppedTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:26:53.133952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:53.133973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:53.133978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:53.133983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:53.133987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:53.133991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:53.133999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:53.134011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: 
[RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:53.134101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:53.134164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:53.148210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:53.148230Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:53.148326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:53.151163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:53.151192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:53.151230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:53.160591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:53.160674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:53.160791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:53.160978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:53.162220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:53.162259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:53.162495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:53.162507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:53.162538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:53.162546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:53.162552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:53.162569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is 
[0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:53.164493Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:53.185249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:53.185306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.185359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:53.185408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:53.185419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.186101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:53.186128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:53.186172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.186181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:53.186187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:53.186192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:53.186725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.186778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:53.186784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:53.187386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:26:53.187399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:53.187405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:53.187412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:53.188073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:53.188543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:53.188583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:53.188779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:53.188810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:53.188818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:53.188876Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
05 2025-05-29T15:27:40.246552Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:27:40.246556Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:27:40.246562Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:27:40.246569Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 6 2025-05-29T15:27:40.246583Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2025-05-29T15:27:40.247246Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-05-29T15:27:40.247258Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:27:40.247327Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 2025-05-29T15:27:40.247354Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:27:40.247359Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:27:40.247364Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:27:40.247367Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:27:40.247371Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2025-05-29T15:27:40.247376Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:27:40.247381Z node 140 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:0 2025-05-29T15:27:40.247385Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:0 2025-05-29T15:27:40.247411Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:27:40.247783Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:27:40.247820Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 
2025-05-29T15:27:40.249163Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 604 RawX2: 601295423993 } TabletId: 72075186233409548 State: 4 2025-05-29T15:27:40.249179Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409548, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:27:40.249208Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 610 RawX2: 601295423996 } TabletId: 72075186233409549 State: 4 2025-05-29T15:27:40.249212Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409549, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:27:40.249227Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 611 RawX2: 601295423997 } TabletId: 72075186233409550 State: 4 2025-05-29T15:27:40.249235Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409550, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:27:40.249486Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:27:40.249692Z node 140 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2025-05-29T15:27:40.249747Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:27:40.249791Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:27:40.250352Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:27:40.250379Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:27:40.250393Z node 140 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-05-29T15:27:40.250426Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:27:40.250456Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 Forgetting tablet 72075186233409549 2025-05-29T15:27:40.250826Z node 140 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: 
ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 2025-05-29T15:27:40.250887Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-05-29T15:27:40.250914Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409550 2025-05-29T15:27:40.251298Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:27:40.251305Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:27:40.251316Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:27:40.251710Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:27:40.251718Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-29T15:27:40.251850Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:27:40.251857Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-29T15:27:40.251915Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:5 2025-05-29T15:27:40.251919Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-05-29T15:27:40.251941Z node 140 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2025-05-29T15:27:40.251980Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-05-29T15:27:40.251984Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-05-29T15:27:40.252021Z node 140 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-05-29T15:27:40.252031Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-05-29T15:27:40.252035Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [140:864:2787] TestWaitNotification: OK eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 
72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted 2025-05-29T15:27:40.252082Z node 140 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:27:40.252090Z node 140 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-05-29T15:27:40.252094Z node 140 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2025-05-29T15:27:40.252102Z node 140 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2025-05-29T15:27:40.252106Z node 140 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2025-05-29T15:27:40.252111Z node 140 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2025-05-29T15:27:40.252116Z node 140 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::DropTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:26:42.719998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:42.720019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:42.720025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:42.720030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:42.720035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:42.720039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:42.720047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:42.720060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:42.720155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:42.720220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:42.734540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:42.734558Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:42.734648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:42.741195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:42.741221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:42.741254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:42.743968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:42.744042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:42.744143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:42.744304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:42.744902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:42.744931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:42.745140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:42.745149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:42.745179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:42.745186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:42.745192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:42.745208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:42.746261Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:42.765668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:42.765725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:42.765788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:42.765835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:42.765845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:42.766382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:42.766405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:42.766443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:42.766455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:42.766460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:42.766465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:42.766797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:42.766807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:42.766812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:42.767080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:42.767088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:42.767094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:42.767101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:42.767753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:42.768087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:42.768119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:42.768297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:42.768318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:42.768324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:42.768376Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
4:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:27:40.814467Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 2025-05-29T15:27:40.814505Z node 118 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:27:40.814510Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:27:40.814516Z node 118 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:27:40.814519Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:27:40.814524Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2025-05-29T15:27:40.814529Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:27:40.814535Z node 118 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:27:40.814540Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:27:40.814569Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:27:40.814715Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:27:40.814835Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:27:40.816633Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 607 RawX2: 506806143484 } TabletId: 72075186233409548 State: 4 2025-05-29T15:27:40.816651Z node 118 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409548, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:27:40.816714Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 612 RawX2: 506806143486 } TabletId: 72075186233409549 State: 4 2025-05-29T15:27:40.816722Z node 118 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409549, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:27:40.816784Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 613 RawX2: 506806143487 } TabletId: 72075186233409550 State: 4 2025-05-29T15:27:40.816790Z node 118 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409550, state: Offline, at 
schemeshard: 72057594046678944 2025-05-29T15:27:40.817203Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:27:40.817448Z node 118 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-05-29T15:27:40.817491Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:27:40.817541Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 Forgetting tablet 72075186233409548 2025-05-29T15:27:40.818229Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:27:40.818285Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:27:40.818303Z node 118 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409549 2025-05-29T15:27:40.818734Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:27:40.818802Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:27:40.818847Z node 118 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2025-05-29T15:27:40.819103Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-05-29T15:27:40.819140Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:27:40.819713Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:27:40.819724Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:27:40.819742Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:27:40.819823Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:27:40.819832Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: 
Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-29T15:27:40.820232Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:27:40.820240Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-29T15:27:40.820423Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:5 2025-05-29T15:27:40.820430Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-05-29T15:27:40.820513Z node 118 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:27:40.820565Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:27:40.820570Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:27:40.820627Z node 118 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:27:40.820647Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:27:40.820652Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [118:862:2785] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:27:40.820709Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:40.820741Z node 118 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 41us result status StatusPathDoesNotExist 2025-05-29T15:27:40.820766Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 
72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted 2025-05-29T15:27:40.820829Z node 118 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:27:40.820841Z node 118 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-05-29T15:27:40.820850Z node 118 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2025-05-29T15:27:40.820859Z node 118 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2025-05-29T15:27:40.820865Z node 118 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2025-05-29T15:27:40.820874Z node 118 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2025-05-29T15:27:40.820885Z node 118 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 >> TKeyValueTest::TestBasicWriteRead [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] >> KeyValueReadStorage::ReadError [GOOD] >> KeyValueReadStorage::ReadErrorWithWrongGroupId [GOOD] >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:57:2097]) rebooted! !Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed! 
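
The "wait until <tabletId> is deleted" / TEvSubscribeToTabletDeletion / "Deleted tabletId" exchange near the top of this block is a plain subscribe-then-block handshake: the test registers interest in each tablet with Hive and parks until every deletion is confirmed. A minimal sketch of the same handshake, assuming an invented DeletionTracker with a condition variable in place of the actual Hive actor events:

    #include <condition_variable>
    #include <cstdint>
    #include <iostream>
    #include <mutex>
    #include <set>
    #include <thread>
    #include <vector>

    // DeletionTracker is invented for illustration; in the real test
    // environment this handshake runs through Hive events, not a CV.
    class DeletionTracker {
        std::mutex M;
        std::condition_variable CV;
        std::set<uint64_t> Alive;
    public:
        void Register(uint64_t id) { std::lock_guard l(M); Alive.insert(id); }
        void OnDeleted(uint64_t id) {            // plays the role of "Deleted tabletId ..."
            { std::lock_guard l(M); Alive.erase(id); }
            CV.notify_all();
        }
        void WaitAllDeleted() {                  // plays the role of "wait until ... is deleted"
            std::unique_lock l(M);
            CV.wait(l, [this] { return Alive.empty(); });
        }
    };

    int main() {
        DeletionTracker tracker;
        std::vector<uint64_t> ids{72075186233409546ULL, 72075186233409547ULL};
        for (uint64_t id : ids) tracker.Register(id);
        std::thread hive([&] {                   // stands in for Hive reporting deletions
            for (uint64_t id : ids) {
                std::cout << "Deleted tabletId " << id << '\n';
                tracker.OnDeleted(id);
            }
        });
        tracker.WaitAllDeleted();
        hive.join();
    }
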
new actor is[2:82:2111] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! 
new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:86:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:88:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! new actor is[7:87:2114] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:173:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:88:2057] recipient: [8:86:2115] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:90:2057] recipient: [8:86:2115] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! 
new actor is[8:89:2116] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:175:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:88:2057] recipient: [9:86:2115] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:90:2057] recipient: [9:86:2115] !Reboot 72057594037927937 (actor [9:57:2097]) rebooted! !Reboot 72057594037927937 (actor [9:57:2097]) tablet resolver refreshed! new actor is[9:89:2116] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:175:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:89:2057] recipient: [10:87:2115] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:91:2057] recipient: [10:87:2115] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! 
new actor is[10:90:2116] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:176:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:55:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:55:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:58:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:75:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:77:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:80:2057] recipient: [13:79:2110] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:81:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:82:2111] sender: [13:83:2057] recipient: [13:79:2110] !Reboot 72057594037927937 (actor [13:57:2097]) rebooted! !Reboot 72057594037927937 (actor [13:57:2097]) tablet resolver refreshed! new actor is[13:82:2111] Leader for TabletID 72057594037927937 is [13:82:2111] sender: [13:168:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:55:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:55:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:58:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:75:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:77:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:80:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:81:2057] recipient: [14:79:2110] Leader for TabletID 72057594037927937 is [14:82:2111] sender: [14:83:2057] recipient: [14:79:2110] !Reboot 72057594037927937 (actor [14:57:2097]) rebooted! !Reboot 72057594037927937 (actor [14:57:2097]) tablet resolver refreshed! 
new actor is[14:82:2111] Leader for TabletID 72057594037927937 is [14:82:2111] sender: [14:168:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:55:2057] recipient: [15:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:55:2057] recipient: [15:52:2095] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:58:2057] recipient: [15:52:2095] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:75:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:78:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:80:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:82:2057] recipient: [15:81:2110] Leader for TabletID 72057594037927937 is [15:83:2111] sender: [15:84:2057] recipient: [15:81:2110] !Reboot 72057594037927937 (actor [15:57:2097]) rebooted! !Reboot 72057594037927937 (actor [15:57:2097]) tablet resolver refreshed! new actor is[15:83:2111] Leader for TabletID 72057594037927937 is [15:83:2111] sender: [15:169:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:55:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:55:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:58:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:75:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:81:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:84:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:85:2057] recipient: [16:83:2113] Leader for TabletID 72057594037927937 is [16:86:2114] sender: [16:87:2057] recipient: [16:83:2113] !Reboot 72057594037927937 (actor [16:57:2097]) rebooted! !Reboot 72057594037927937 (actor [16:57:2097]) tablet resolver refreshed! new actor is[16:86:2114] Leader for TabletID 72057594037927937 is [16:86:2114] sender: [16:172:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:55:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:55:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:58:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:75:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:57:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:81:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:84:2057] recipient: [17:83:2113] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:85:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:86:2114] sender: [17:87:2057] recipient: [17:83:2113] !Reboot 72057594037927937 (actor [17:57:2097]) rebooted! !Reboot 72057594037927937 (actor [17:57:2097]) tablet resolver refreshed! 
new actor is[17:86:2114] Leader for TabletID 72057594037927937 is [17:86:2114] sender: [17:172:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:55:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:55:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:58:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:75:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:82:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:85:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:86:2057] recipient: [18:84:2113] Leader for TabletID 72057594037927937 is [18:87:2114] sender: [18:88:2057] recipient: [18:84:2113] !Reboot 72057594037927937 (actor [18:57:2097]) rebooted! !Reboot 72057594037927937 (actor [18:57:2097]) tablet resolver refreshed! new actor is[18:87:2114] Leader for TabletID 72057594037927937 is [18:87:2114] sender: [18:105:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:55:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:55:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:58:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:75:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:84:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:87:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:88:2057] recipient: [19:86:2115] Leader for TabletID 72057594037927937 is [19:89:2116] sender: [19:90:2057] recipient: [19:86:2115] !Reboot 72057594037927937 (actor [19:57:2097]) rebooted! !Reboot 72057594037927937 (actor [19:57:2097]) tablet resolver refreshed! new actor is[19:89:2116] Leader for TabletID 72057594037927937 is [19:89:2116] sender: [19:175:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:55:2057] recipient: [20:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:55:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:58:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:75:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:57:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:84:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:87:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:88:2057] recipient: [20:86:2115] Leader for TabletID 72057594037927937 is [20:89:2116] sender: [20:90:2057] recipient: [20:86:2115] !Reboot 72057594037927937 (actor [20:57:2097]) rebooted! !Reboot 72057594037927937 (actor [20:57:2097]) tablet resolver refreshed! 
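
The repeating "!Reboot ... on event <E> !" / "rebooted!" / "tablet resolver refreshed!" cycles running through these traces come from a sweep that re-executes one scenario many times, restarting the tablet at a different intercepted event type on each pass and re-checking the result afterwards. A toy version of that sweep — all names hypothetical, no actor machinery, tablet state assumed durable across the restart:

    #include <iostream>
    #include <string>
    #include <vector>

    // The real harness intercepts actor events (TEvKeyValue::TEvRequest,
    // TEvIntermediate, ...) and restarts the tablet actor; this toy only
    // mirrors the sweep structure.
    struct Tablet {
        int Generation = 0;
        int Applied = 0;          // committed writes survive a restart in this toy
        void Restart() { ++Generation; }
    };

    bool RunScenario(const std::vector<std::string>& script, const std::string& rebootOn) {
        Tablet t;
        for (const std::string& ev : script) {
            if (ev == rebootOn)
                t.Restart();      // the "!Reboot ... on event <ev> !" injection point
            if (ev == "Write")
                ++t.Applied;
        }
        return t.Applied == 1;    // the invariant re-checked after each run
    }

    int main() {
        const std::vector<std::string> script{"ServerConnected", "Write", "Notify"};
        for (const std::string& ev : script)     // one full run per interception point
            std::cout << "reboot on " << ev << ": "
                      << (RunScenario(script, ev) ? "GOOD" : "FAIL") << '\n';
    }
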
new actor is[20:89:2116] Leader for TabletID 72057594037927937 is [20:89:2116] sender: [20:175:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:55:2057] recipient: [21:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:55:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:58:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:75:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:85:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:88:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:89:2057] recipient: [21:87:2115] Leader for TabletID 72057594037927937 is [21:90:2116] sender: [21:91:2057] recipient: [21:87:2115] !Reboot 72057594037927937 (actor [21:57:2097]) rebooted! !Reboot 72057594037927937 (actor [21:57:2097]) tablet resolver refreshed! new actor is[21:90:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:55:2057] recipient: [22:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:55:2057] recipient: [22:52:2095] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:58:2057] recipient: [22:52:2095] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:75:2057] recipient: [22:14:2061] >> TFlatMetrics::TimeSeriesAvg16x60 [GOOD] >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> TKeyValueTest::TestObtainLockNewApi [GOOD] >> TKeyValueTest::TestRenameToLongKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] Test command err: 2025-05-29T15:27:42.342205Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# ERROR ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-05-29T15:27:42.342227Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV316@keyvalue_storage_read_request.cpp:270} Unexpected EvGetResult. KeyValue# 1 Status# ERROR Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1748532462342 ErrorReason# 2025-05-29T15:27:42.343634Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 2 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-05-29T15:27:42.343651Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV318@keyvalue_storage_read_request.cpp:240} Received EvGetResult from an unexpected storage group. KeyValue# 1 GroupId# 2 ExpecetedGroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1748532462343 ErrorReason# 2025-05-29T15:27:42.344507Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-05-29T15:27:42.344520Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV319@keyvalue_storage_read_request.cpp:222} Received EvGetResult with an unexpected cookie. 
KeyValue# 1 Cookie# 1000 SentGets# 1 GroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 GotAt# 1748532462344 ErrorReason# |69.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> TKeyValueTest::TestWriteLongKey [GOOD] |69.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |69.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |69.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! 
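
The three KEYVALUE ERROR lines earlier in this block (KV316, KV318, KV319) correspond to three validations applied in order to every storage read reply: the status must be OK, the reply must come from the storage group the request was sent to, and the cookie must be one the request actually issued. A compact sketch of that checking order, with invented types whose field names follow the values printed in the log (Status, GroupId, Cookie, SentGets):

    #include <cstdint>
    #include <iostream>
    #include <optional>
    #include <string>

    // TGetResult and Validate are illustrative stand-ins, not the real API.
    struct TGetResult {
        bool StatusOk;
        uint32_t GroupId;
        uint64_t Cookie;
    };

    std::optional<std::string> Validate(const TGetResult& r,
                                        uint32_t expectedGroup,
                                        uint64_t sentGets) {
        if (!r.StatusOk)
            return "Unexpected EvGetResult";                          // cf. KV316
        if (r.GroupId != expectedGroup)
            return "EvGetResult from an unexpected storage group";    // cf. KV318
        if (r.Cookie >= sentGets)
            return "EvGetResult with an unexpected cookie";           // cf. KV319
        return std::nullopt;                                          // reply is usable
    }

    int main() {
        // Mirrors the last failing case in the log: Cookie# 1000 while SentGets# 1.
        TGetResult reply{true, 3, 1000};
        if (auto err = Validate(reply, 3, 1))
            std::cout << *err << '\n';
    }
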
new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:168:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:78:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:82:2057] recipient: [5:80:2110] Leader for TabletID 72057594037927937 is [5:83:2111] sender: [5:84:2057] recipient: [5:80:2110] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! new actor is[5:83:2111] Leader for TabletID 72057594037927937 is [5:83:2111] sender: [5:169:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! 
new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:172:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:86:2057] recipient: [8:84:2113] Leader for TabletID 72057594037927937 is [8:87:2114] sender: [8:88:2057] recipient: [8:84:2113] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! new actor is[8:87:2114] Leader for TabletID 72057594037927937 is [8:87:2114] sender: [8:173:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] |69.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots >> TResourceBroker::TestOverusage >> TTablesWithReboots::CopyWithRebootsAtCommit [GOOD] |69.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |69.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots >> TTabletPipeTest::TestTwoNodes |69.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |69.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |69.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |69.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |69.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |69.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan >> TResourceBroker::TestOverusage [GOOD] >> TResourceBroker::TestNotifyActorDied >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer >> TResourceBroker::TestNotifyActorDied [GOOD] >> TTabletPipeTest::TestTwoNodes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestNotifyActorDied [GOOD] Test command err: 2025-05-29T15:27:43.608976Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-05-29T15:27:43.609079Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task 
task-1 (1 by [1:100:2134]) priority=5 resources={50, 50} 2025-05-29T15:27:43.609085Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:100:2134]) to queue queue_compaction0 2025-05-29T15:27:43.609090Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-1 (1 by [1:100:2134]) from queue queue_compaction0 2025-05-29T15:27:43.609095Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:100:2134]) to queue queue_compaction0 2025-05-29T15:27:43.609102Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 100.000000 (insert task task-1 (1 by [1:100:2134])) 2025-05-29T15:27:43.609108Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [1:100:2134]) priority=5 resources={410, 410} 2025-05-29T15:27:43.609111Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:100:2134]) to queue queue_compaction0 2025-05-29T15:27:43.609114Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:630: Skip queue queue_compaction0 due to exceeded limits 2025-05-29T15:27:43.609118Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-3 (3 by [1:100:2134]) priority=5 resources={550, 550} 2025-05-29T15:27:43.609120Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:100:2134]) to queue queue_compaction1 2025-05-29T15:27:43.609124Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [1:100:2134]) 2025-05-29T15:27:43.609126Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-05-29T15:27:43.609136Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [1:100:2134]) (release resources {50, 50}) 2025-05-29T15:27:43.609140Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 100.000000 2025-05-29T15:27:43.609143Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {550, 550} for task task-3 (3 by [1:100:2134]) from queue queue_compaction1 2025-05-29T15:27:43.609146Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:100:2134]) to queue queue_compaction1 2025-05-29T15:27:43.609149Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 1100.000000 (insert task task-3 (3 by [1:100:2134])) 2025-05-29T15:27:43.609152Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:100:2134]) 2025-05-29T15:27:43.609156Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-3 (3 by [1:100:2134]) (release resources {550, 550}) 2025-05-29T15:27:43.609160Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 1100.000000 to 550.000000 (remove task task-3 (3 by [1:100:2134])) 2025-05-29T15:27:43.609163Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 0.000000 to 550.000000 2025-05-29T15:27:43.609166Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {410, 410} for task task-2 (2 by 
[1:100:2134]) from queue queue_compaction0 2025-05-29T15:27:43.609169Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:100:2134]) to queue queue_compaction0 2025-05-29T15:27:43.609172Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 100.000000 to 920.000000 (insert task task-2 (2 by [1:100:2134])) 2025-05-29T15:27:43.860265Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-05-29T15:27:43.860338Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [2:100:2134]) priority=5 resources={500, 500} 2025-05-29T15:27:43.860346Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:100:2134]) to queue queue_compaction0 2025-05-29T15:27:43.860352Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {500, 500} for task task-1 (1 by [2:100:2134]) from queue queue_compaction0 2025-05-29T15:27:43.860355Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:100:2134]) to queue queue_compaction0 2025-05-29T15:27:43.860364Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 1000.000000 (insert task task-1 (1 by [2:100:2134])) 2025-05-29T15:27:43.860370Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-2 (2 by [2:100:2134]) priority=5 resources={200, 200} 2025-05-29T15:27:43.860373Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [2:100:2134]) to queue queue_compaction1 2025-05-29T15:27:43.860377Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:100:2134]) 2025-05-29T15:27:43.860381Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [2:101:2135]) priority=5 resources={200, 200} 2025-05-29T15:27:43.860384Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:101:2135]) to queue queue_compaction0 2025-05-29T15:27:43.860387Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:100:2134]) 2025-05-29T15:27:43.860389Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-05-29T15:27:43.860393Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-4 (4 by [2:101:2135]) priority=5 resources={200, 200} 2025-05-29T15:27:43.860395Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [2:101:2135]) to queue queue_compaction1 2025-05-29T15:27:43.860398Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:100:2134]) 2025-05-29T15:27:43.860400Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-05-29T15:27:43.860410Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:499: Removing task task-2 (2 by [2:100:2134]) 2025-05-29T15:27:43.860416Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:100:2134]) (release resources {500, 500}) 2025-05-29T15:27:43.860424Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue 
queue_compaction0 from 1000.000000 to 100.000000 (remove task task-1 (1 by [2:100:2134])) 2025-05-29T15:27:43.860429Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 100.000000 2025-05-29T15:27:43.860434Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-4 (4 by [2:101:2135]) from queue queue_compaction1 2025-05-29T15:27:43.860439Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-4 (4 by [2:101:2135]) to queue queue_compaction1 2025-05-29T15:27:43.860444Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 400.000000 (insert task task-4 (4 by [2:101:2135])) 2025-05-29T15:27:43.860449Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-3 (3 by [2:101:2135]) from queue queue_compaction0 2025-05-29T15:27:43.860457Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [2:101:2135]) to queue queue_compaction0 2025-05-29T15:27:43.860462Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 100.000000 to 500.000000 (insert task task-3 (3 by [2:101:2135])) >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed >> TTabletPipeTest::TestSendAfterOpen >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] |69.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CopyWithRebootsAtCommit [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:26:42.632635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:42.632656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:42.632661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, 
StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:42.632666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:42.632672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:42.632676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:42.632684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:42.632697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:42.632799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:42.632857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:42.647936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:42.647954Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:42.648045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:42.650631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:42.650657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:42.650690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:42.653836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:42.653913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:42.654036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:42.654228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:42.655048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:42.655087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:42.655342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:42.655354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-05-29T15:26:42.655390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:42.655398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:42.655405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:42.655425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:42.656951Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:42.675588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:42.675642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:42.675693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:42.675735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:42.675743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:42.676297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:42.676318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:42.676361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:42.676368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:42.676372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
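
Looking back at the TResourceBroker trace earlier in this section: tasks arrive with a priority and a resource vector, each is assigned to a per-kind queue, queues are served in order of their accumulated planned usage, and a head task that does not fit leaves the queues behind it "blocked by an earlier queue". A toy of that admission loop, with one scalar resource standing in for the broker's resource pair and every structure name invented for illustration:

    #include <deque>
    #include <iostream>
    #include <map>
    #include <string>
    #include <utility>

    struct Task { std::string Name; double Cost; };

    // Toy admission loop: always serve the non-empty queue with the lowest
    // accumulated planned usage; if its head task does not fit into the free
    // capacity, stop — everything behind it stays blocked.
    struct Broker {
        double Free = 0;
        std::map<std::string, std::pair<double, std::deque<Task>>> Queues; // planned usage, waiting tasks

        void Schedule() {
            for (;;) {
                std::pair<double, std::deque<Task>>* best = nullptr;
                for (auto& [name, q] : Queues)
                    if (!q.second.empty() && (!best || q.first < best->first))
                        best = &q;
                if (!best) return;                       // nothing is waiting
                Task& t = best->second.front();
                if (t.Cost > Free) return;               // earlier queue blocks later ones
                std::cout << "Allocate resources {" << t.Cost << "} for " << t.Name << '\n';
                Free -= t.Cost;
                best->first += t.Cost;                   // "Updated planned resource usage"
                best->second.pop_front();
            }
        }
    };

    int main() {
        Broker b;
        b.Free = 1000.0;
        b.Queues["queue_compaction0"] = {0.0, {{"task-1", 500.0}}};
        b.Queues["queue_compaction1"] = {0.0, {{"task-2", 600.0}}};
        b.Schedule();  // task-1 is admitted; task-2 then waits on the remaining 500
    }

In the trace the same effect is visible when task-1 finishes: its resources are released, planned usage is recomputed, and the previously blocked queue gets its allocation.
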
2025-05-29T15:26:42.676376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:42.676678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:42.676687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:42.676691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:42.677014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:42.677023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:42.677027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:42.677033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:42.677649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:42.678057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:42.678093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:42.678262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:42.678287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:42.678294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:42.678345Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
bleBarrier operationId: 1003:0ProgressState, operation type TxCopyTable 2025-05-29T15:27:43.562262Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1059: Set barrier, OperationId: 1003:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-05-29T15:27:43.562265Z node 158 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1103: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2025-05-29T15:27:43.562273Z node 158 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_copy_table.cpp:289: TCopyTable TCopyTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-05-29T15:27:43.562276Z node 158 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:0 240 -> 240 2025-05-29T15:27:43.563038Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:27:43.563054Z node 158 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-05-29T15:27:43.563068Z node 158 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:27:43.563072Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:27:43.563077Z node 158 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:27:43.563080Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:27:43.563085Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-05-29T15:27:43.563094Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:27:43.563101Z node 158 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:27:43.563105Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:27:43.563145Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:27:43.563151Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestWaitNotification wait txId: 1003 2025-05-29T15:27:43.563767Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:27:43.563779Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:27:43.563852Z node 158 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:27:43.563870Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 
1003: got EvNotifyTxCompletionResult 2025-05-29T15:27:43.563875Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [158:659:2594] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:27:43.563944Z node 158 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:43.563988Z node 158 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 57us result status StatusSuccess 2025-05-29T15:27:43.564105Z node 158 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "NewTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:43.564161Z node 158 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NewTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:43.564201Z node 158 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NewTable" took 41us result status StatusSuccess 2025-05-29T15:27:43.564382Z node 158 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NewTable" PathDescription { Self { Name: "NewTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "NewTable" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\003\000\004\000\000\000\377\377\377\177\000\000\000\200\000\000\000\200" IsPoint: false 
IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTabletPipeTest::TestSendAfterOpen [GOOD] |69.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] |69.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpen [GOOD] >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots >> BootstrapperTest::LoneBootstrapper >> TResourceBroker::TestResubmitTask >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket >> BootstrapperTest::LoneBootstrapper [GOOD] >> BootstrapperTest::MultipleBootstrappers >> TResourceBroker::TestResubmitTask [GOOD] >> TResourceBroker::TestUpdateCookie >> TTxLocatorTest::TestAllocateAllByPieces >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable >> TResourceBroker::TestUpdateCookie [GOOD] >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi [GOOD] >> TKeyValueTest::TestLargeWriteAndDelete |69.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] Test command err: 2025-05-29T15:27:45.727405Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1925: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-05-29T15:27:45.727485Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:911: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-05-29T15:27:45.727578Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:225: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-05-29T15:27:45.727900Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.727971Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-05-29T15:27:45.729431Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.729453Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.729467Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1396: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-05-29T15:27:45.729485Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.729495Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.729510Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-05-29T15:27:45.729528Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1009: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-05-29T15:27:45.729604Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:71:2105] requested range size#8796093022207 2025-05-29T15:27:45.729660Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.729665Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.729673Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 8796093022207 2025-05-29T15:27:45.729677Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:71:2105] TEvAllocateResult from# 0 to# 8796093022207 expected SUCCESS 2025-05-29T15:27:45.730150Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2109] requested range size#8796093022207 2025-05-29T15:27:45.730192Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730198Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730206Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8796093022207 Reserved to# 17592186044414 2025-05-29T15:27:45.730209Z node 1 :TX_ALLOCATOR DEBUG: 
txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:76:2109] TEvAllocateResult from# 8796093022207 to# 17592186044414 expected SUCCESS 2025-05-29T15:27:45.730239Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2113] requested range size#8796093022207 2025-05-29T15:27:45.730260Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730265Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730271Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 17592186044414 Reserved to# 26388279066621 2025-05-29T15:27:45.730274Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:80:2113] TEvAllocateResult from# 17592186044414 to# 26388279066621 expected SUCCESS 2025-05-29T15:27:45.730299Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2117] requested range size#8796093022207 2025-05-29T15:27:45.730316Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730322Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730328Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 26388279066621 Reserved to# 35184372088828 2025-05-29T15:27:45.730331Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:84:2117] TEvAllocateResult from# 26388279066621 to# 35184372088828 expected SUCCESS 2025-05-29T15:27:45.730357Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2121] requested range size#8796093022207 2025-05-29T15:27:45.730374Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730380Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730386Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 35184372088828 Reserved to# 43980465111035 2025-05-29T15:27:45.730388Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:88:2121] TEvAllocateResult from# 35184372088828 to# 43980465111035 expected SUCCESS 2025-05-29T15:27:45.730414Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:92:2125] requested range size#8796093022207 2025-05-29T15:27:45.730431Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK 
StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730437Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730442Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 43980465111035 Reserved to# 52776558133242 2025-05-29T15:27:45.730445Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:92:2125] TEvAllocateResult from# 43980465111035 to# 52776558133242 expected SUCCESS 2025-05-29T15:27:45.730476Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:96:2129] requested range size#8796093022207 2025-05-29T15:27:45.730494Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730499Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730505Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 52776558133242 Reserved to# 61572651155449 2025-05-29T15:27:45.730508Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:96:2129] TEvAllocateResult from# 52776558133242 to# 61572651155449 expected SUCCESS 2025-05-29T15:27:45.730536Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:100:2133] requested range size#8796093022207 2025-05-29T15:27:45.730554Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730560Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730566Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 61572651155449 Reserved to# 70368744177656 2025-05-29T15:27:45.730568Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:100:2133] TEvAllocateResult from# 61572651155449 to# 70368744177656 expected SUCCESS 2025-05-29T15:27:45.730596Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:104:2137] requested range size#8796093022207 2025-05-29T15:27:45.730623Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730628Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730634Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 70368744177656 Reserved to# 79164837199863 2025-05-29T15:27:45.730637Z 
node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:104:2137] TEvAllocateResult from# 70368744177656 to# 79164837199863 expected SUCCESS 2025-05-29T15:27:45.730671Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:108:2141] requested range size#8796093022207 2025-05-29T15:27:45.730693Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730698Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.730704Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Suc ... node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:156:2189] TEvAllocateResult from# 184717953466347 to# 193514046488554 expected SUCCESS 2025-05-29T15:27:45.731575Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:160:2193] requested range size#8796093022207 2025-05-29T15:27:45.731595Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:25:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.731602Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:25:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.731607Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 193514046488554 Reserved to# 202310139510761 2025-05-29T15:27:45.731609Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:160:2193] TEvAllocateResult from# 193514046488554 to# 202310139510761 expected SUCCESS 2025-05-29T15:27:45.731646Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:164:2197] requested range size#8796093022207 2025-05-29T15:27:45.731669Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:26:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.731675Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.731680Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 202310139510761 Reserved to# 211106232532968 2025-05-29T15:27:45.731682Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:164:2197] TEvAllocateResult from# 202310139510761 to# 211106232532968 expected SUCCESS 2025-05-29T15:27:45.731721Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:168:2201] requested range size#8796093022207 2025-05-29T15:27:45.731738Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:27:1:24576:76:0] Status# OK StatusFlags# { Valid } 
ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.731743Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.731750Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 211106232532968 Reserved to# 219902325555175 2025-05-29T15:27:45.731753Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:168:2201] TEvAllocateResult from# 211106232532968 to# 219902325555175 expected SUCCESS 2025-05-29T15:27:45.731791Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:172:2205] requested range size#8796093022207 2025-05-29T15:27:45.731808Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:28:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.731815Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.731821Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 219902325555175 Reserved to# 228698418577382 2025-05-29T15:27:45.731824Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:172:2205] TEvAllocateResult from# 219902325555175 to# 228698418577382 expected SUCCESS 2025-05-29T15:27:45.731864Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:176:2209] requested range size#8796093022207 2025-05-29T15:27:45.731884Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:29:1:24576:73:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.731889Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.731896Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 228698418577382 Reserved to# 237494511599589 2025-05-29T15:27:45.731898Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:176:2209] TEvAllocateResult from# 228698418577382 to# 237494511599589 expected SUCCESS 2025-05-29T15:27:45.731944Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:180:2213] requested range size#8796093022207 2025-05-29T15:27:45.731962Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:30:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.731966Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.731971Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 237494511599589 Reserved to# 246290604621796 2025-05-29T15:27:45.731974Z node 1 
:TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:180:2213] TEvAllocateResult from# 237494511599589 to# 246290604621796 expected SUCCESS 2025-05-29T15:27:45.732015Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:184:2217] requested range size#8796093022207 2025-05-29T15:27:45.732033Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:31:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.732038Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.732044Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 246290604621796 Reserved to# 255086697644003 2025-05-29T15:27:45.732046Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:184:2217] TEvAllocateResult from# 246290604621796 to# 255086697644003 expected SUCCESS 2025-05-29T15:27:45.732090Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:188:2221] requested range size#8796093022207 2025-05-29T15:27:45.732108Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:32:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.732113Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:32:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.732119Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 255086697644003 Reserved to# 263882790666210 2025-05-29T15:27:45.732122Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:188:2221] TEvAllocateResult from# 255086697644003 to# 263882790666210 expected SUCCESS 2025-05-29T15:27:45.732171Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:192:2225] requested range size#8796093022207 2025-05-29T15:27:45.732191Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:33:1:24576:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.732196Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:33:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.732204Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 263882790666210 Reserved to# 272678883688417 2025-05-29T15:27:45.732207Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:192:2225] TEvAllocateResult from# 263882790666210 to# 272678883688417 expected SUCCESS 2025-05-29T15:27:45.732251Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:196:2229] requested range size#8796093022207 2025-05-29T15:27:45.732268Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# 
[72057594046447617:2:34:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.732274Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:34:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.732280Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 272678883688417 Reserved to# 281474976710624 2025-05-29T15:27:45.732282Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:196:2229] TEvAllocateResult from# 272678883688417 to# 281474976710624 expected SUCCESS 2025-05-29T15:27:45.732345Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:200:2233] requested range size#31 2025-05-29T15:27:45.732365Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:35:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.732370Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:35:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-29T15:27:45.732376Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 281474976710624 Reserved to# 281474976710655 2025-05-29T15:27:45.732378Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:200:2233] TEvAllocateResult from# 281474976710624 to# 281474976710655 expected SUCCESS 2025-05-29T15:27:45.732423Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:204:2237] requested range size#1 2025-05-29T15:27:45.732430Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2025-05-29T15:27:45.732432Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:204:2237] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestUpdateCookie [GOOD] Test command err: 2025-05-29T15:27:45.415583Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-05-29T15:27:45.415682Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [1:100:2134]) priority=5 resources={200, 200} 2025-05-29T15:27:45.415690Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.415697Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-1 (1 by [1:100:2134]) from queue queue_compaction0 2025-05-29T15:27:45.415702Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.415713Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 400.000000 (insert task task-1 (1 by [1:100:2134])) 2025-05-29T15:27:45.415720Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task 
task-2 (2 by [1:100:2134]) priority=5 resources={200, 200} 2025-05-29T15:27:45.415722Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.415725Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-2 (2 by [1:100:2134]) from queue queue_compaction0 2025-05-29T15:27:45.415728Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.415731Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 400.000000 to 800.000000 (insert task task-2 (2 by [1:100:2134])) 2025-05-29T15:27:45.415734Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [1:100:2134]) priority=5 resources={200, 200} 2025-05-29T15:27:45.415737Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.415746Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [1:100:2134]) 2025-05-29T15:27:45.415756Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-2 (2 by [1:100:2134]) (priority=5 type=compaction0 resources={400, 400} resubmit=1) 2025-05-29T15:27:45.415759Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.415761Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:100:2134]) 2025-05-29T15:27:45.415766Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [1:100:2134]) (release resources {200, 200}) 2025-05-29T15:27:45.415770Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 400.000000 to 40.000000 (remove task task-1 (1 by [1:100:2134])) 2025-05-29T15:27:45.415773Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 40.000000 2025-05-29T15:27:45.415776Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-2 (2 by [1:100:2134]) from queue queue_compaction0 2025-05-29T15:27:45.415778Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.415781Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 40.000000 to 804.000000 (insert task task-2 (2 by [1:100:2134])) 2025-05-29T15:27:45.415784Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [1:100:2134]) 2025-05-29T15:27:45.415789Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-2 (2 by [1:100:2134]) (priority=5 type=compaction0 resources={200, 200} resubmit=1) 2025-05-29T15:27:45.415791Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.415794Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-2 (2 by [1:100:2134]) from queue queue_compaction0 2025-05-29T15:27:45.415796Z node 1 :RESOURCE_BROKER 
DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.415799Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 40.000000 to 422.000000 (insert task task-2 (2 by [1:100:2134])) 2025-05-29T15:27:45.415801Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-3 (3 by [1:100:2134]) from queue queue_compaction0 2025-05-29T15:27:45.415803Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.415806Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 422.000000 to 804.000000 (insert task task-3 (3 by [1:100:2134])) 2025-05-29T15:27:45.657855Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-05-29T15:27:45.657976Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [2:100:2134]) priority=5 resources={400, 400} 2025-05-29T15:27:45.657987Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.657995Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-1 (1 by [2:100:2134]) from queue queue_compaction0 2025-05-29T15:27:45.658000Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.658012Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 800.000000 (insert task task-1 (1 by [2:100:2134])) 2025-05-29T15:27:45.658027Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [2:100:2134]) priority=5 resources={200, 200} 2025-05-29T15:27:45.658031Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [2:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.658036Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:100:2134]) 2025-05-29T15:27:45.658043Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:472: Update cookie for task task-2 (2 by [2:100:2134]) 2025-05-29T15:27:45.658050Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:100:2134]) (release resources {400, 400}) 2025-05-29T15:27:45.658056Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 800.000000 to 0.000000 (remove task task-1 (1 by [2:100:2134])) 2025-05-29T15:27:45.658061Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-2 (2 by [2:100:2134]) from queue queue_compaction0 2025-05-29T15:27:45.658065Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [2:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.658070Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 380.000000 (insert task task-2 (2 by [2:100:2134])) 2025-05-29T15:27:45.658079Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [2:100:2134]) 
priority=5 resources={200, 200} 2025-05-29T15:27:45.658083Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.658088Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-3 (3 by [2:100:2134]) from queue queue_compaction0 2025-05-29T15:27:45.658091Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [2:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.658096Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 380.000000 to 760.000000 (insert task task-3 (3 by [2:100:2134])) 2025-05-29T15:27:45.658102Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:472: Update cookie for task task-2 (2 by [2:100:2134]) 2025-05-29T15:27:45.658107Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-3 (3 by [2:100:2134]) (release resources {200, 200}) 2025-05-29T15:27:45.658112Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 760.000000 to 380.000000 (remove task task-3 (3 by [2:100:2134])) 2025-05-29T15:27:45.658119Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-2 (2 by [2:100:2134]) (priority=5 type=compaction0 resources={400, 400} resubmit=1) 2025-05-29T15:27:45.658127Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [2:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.658131Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-2 (2 by [2:100:2134]) from queue queue_compaction0 2025-05-29T15:27:45.658135Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [2:100:2134]) to queue queue_compaction0 2025-05-29T15:27:45.658140Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 720.000000 (insert task task-2 (2 by [2:100:2134])) |69.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools >> IcbAsActorTests::TestHttpGetResponse >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> TTablesWithReboots::CopyTableAndDropWithReboots [GOOD] |69.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWithStoragePools >> THiveTest::TestCreateAndReassignTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWhileStarting |69.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |69.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |69.8%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |69.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |69.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |69.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CopyTableAndDropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:26:39.408074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:39.408097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:39.408102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:39.408108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:39.408114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:39.408118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:39.408127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:39.408140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:39.408249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:39.408323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:39.423439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:39.423461Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:39.423563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:39.426485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:39.426515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:39.426552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:39.429982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:39.430069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:39.430198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:39.430412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:39.431724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:39.431776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:39.432033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:39.432046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:39.432083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:39.432091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:39.432098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:39.432121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:39.434008Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:39.454499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-05-29T15:26:39.454553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.454605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:39.454652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:39.454663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.455240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:39.455270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:39.455313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.455335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:39.455341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:39.455346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:39.455821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.455834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:39.455840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:39.456154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.456163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.456169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:39.456175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:39.456784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: 
true } CoordinatorID: 72057594046316545 2025-05-29T15:26:39.457164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:39.457201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:39.457391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:39.457416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:39.457423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:39.457474Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... peration.cpp:647: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2025-05-29T15:27:47.060142Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2025-05-29T15:27:47.060309Z node 227 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:47.060315Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:47.060339Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:27:47.060355Z node 227 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:47.060358Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [227:208:2209], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2025-05-29T15:27:47.060362Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [227:208:2209], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2025-05-29T15:27:47.060420Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-05-29T15:27:47.060425Z node 227 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 1005:0 ProgressState at tablet: 
72057594046678944 2025-05-29T15:27:47.060434Z node 227 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 1005:0, at schemeshard: 72057594046678944 2025-05-29T15:27:47.060437Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1005:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-29T15:27:47.060440Z node 227 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1005:0 129 -> 240 2025-05-29T15:27:47.060529Z node 227 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:27:47.060540Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:27:47.060545Z node 227 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:27:47.060549Z node 227 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-29T15:27:47.060553Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:27:47.060692Z node 227 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:27:47.060701Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:27:47.060703Z node 227 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:27:47.060706Z node 227 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:27:47.060709Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:27:47.060716Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2025-05-29T15:27:47.061391Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-05-29T15:27:47.061414Z node 227 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 
2025-05-29T15:27:47.061502Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:27:47.061536Z node 227 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:27:47.061542Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:27:47.061547Z node 227 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:27:47.061551Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:27:47.061555Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2025-05-29T15:27:47.061560Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:27:47.061569Z node 227 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:0 2025-05-29T15:27:47.061574Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:0 2025-05-29T15:27:47.061600Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:27:47.062071Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:27:47.062118Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:27:47.062986Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 439 RawX2: 974957578600 } TabletId: 72075186233409547 State: 4 2025-05-29T15:27:47.063005Z node 227 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:27:47.063301Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:27:47.063359Z node 227 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-05-29T15:27:47.063791Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:27:47.063850Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2025-05-29T15:27:47.064005Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in 
candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:27:47.064010Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:27:47.064019Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:27:47.064369Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:27:47.064380Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:27:47.064604Z node 227 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2025-05-29T15:27:47.064646Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-05-29T15:27:47.064651Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-05-29T15:27:47.064691Z node 227 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-05-29T15:27:47.064708Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-05-29T15:27:47.064711Z node 227 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [227:678:2638] TestWaitNotification: OK eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2025-05-29T15:27:47.064774Z node 227 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:27:47.064786Z node 227 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-05-29T15:27:47.064791Z node 227 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2025-05-29T15:27:47.064798Z node 227 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 >> THiveTest::TestCreateAndReassignTabletWhileStarting [GOOD] >> THiveTest::TestCreateTabletBeforeLocal >> TTablesWithReboots::ParallelCreateDrop [GOOD] |69.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |69.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |69.9%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut >> THiveTest::TestCreateTabletBeforeLocal [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups >> TKesusTest::TestSessionTimeoutAfterDetach [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] Test command err: 2025-05-29T15:25:48.554932Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889251023917801:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:48.554981Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025cb/r3tmp/tmprADjoZ/pdisk_1.dat 2025-05-29T15:25:48.685505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:48.685531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:48.689925Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:48.693948Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8153, node 1 2025-05-29T15:25:48.733673Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:48.733692Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:48.733694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:48.733746Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23584 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:48.797605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:25:48.811083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:23584 2025-05-29T15:25:49.178923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889255318885905:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.178975Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.241223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:25:49.323709Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889255318886066:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.323733Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.323878Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889255318886071:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.324709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:25:49.330231Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889255318886073:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:25:49.396148Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889255318886144:2768] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:49.431546Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889255318886155:2355], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:25:49.432356Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDU0MmNhMjEtN2RlNzMwY2QtMmRjYmU3MjQtYTQzMWMwNDM=, ActorId: [1:7509889255318886061:2345], ActorState: ExecuteState, TraceId: 01jweacjcbf8f2f6s6735q5wmc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: INTERNAL_ERROR:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 BAD_SESSION:
: Error: Session not found: ydb://session/3?node_id=1&id=NDU0MmNhMjEtN2RlNzMwY2QtMmRjYmU3MjQtYTQzMWMwNDM=
[identical BAD_SESSION / Session not found error repeated 38 more times]
BAD_SESSION:
: Error: Session not found: ydb://session/3?node_id=1&id=NDU0MmNhMjEtN2RlNzMwY2QtMmRjYmU3MjQtYTQzMWMwNDM= BAD_SESSION: ... 9: TxId: 281474976901556. Ctx: { TraceId: 01jweafzcf2b7qkhka9rtqyq2s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzFjZjM3N2MtNzRlMWJlOTYtYzg4N2ZhNjUtNDg0OGMwZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:27:40.944696Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976901557. Ctx: { TraceId: 01jweafzcgadnb2gse4wpxehnv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGVkYWQ0NTYtZjAzMjYzY2ItYTNhMzA1YTEtOWJhOWRiZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:27:40.945155Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976901558. Ctx: { TraceId: 01jweafzcg2ywqamfy9102ec2x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZlZGJhNGItMzBjNTA4MTQtZDE2MDMxMjgtMTQ4YjhmZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:27:40.945364Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976901559. Ctx: { TraceId: 01jweafzch3pmmq5a8vytvshn1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRiMjQ5Y2UtODUzNzVjZTYtNDZiZGUxYmYtZmE3ZjU3OGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:27:40.945880Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976901560. Ctx: { TraceId: 01jweafzched1hxc34a3y7m1hq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzFjZjM3N2MtNzRlMWJlOTYtYzg4N2ZhNjUtNDg0OGMwZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:27:40.946237Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976901561. Ctx: { TraceId: 01jweafzchbg6b5sz86bze1wdb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGVkYWQ0NTYtZjAzMjYzY2ItYTNhMzA1YTEtOWJhOWRiZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:27:40.946426Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976901562. Ctx: { TraceId: 01jweafzcjdrsk4dc57hgj7n87, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZlZGJhNGItMzBjNTA4MTQtZDE2MDMxMjgtMTQ4YjhmZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:27:40.947166Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976901563. Ctx: { TraceId: 01jweafzcj04v6gswqamc854t0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRiMjQ5Y2UtODUzNzVjZTYtNDZiZGUxYmYtZmE3ZjU3OGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:27:40.947323Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976901564. Ctx: { TraceId: 01jweafzcjb8wx3cr3gnyfaet1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzFjZjM3N2MtNzRlMWJlOTYtYzg4N2ZhNjUtNDg0OGMwZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:27:40.947874Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976901565. 
Ctx: { TraceId: 01jweafzck0jj6kc09xq1191py, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGVkYWQ0NTYtZjAzMjYzY2ItYTNhMzA1YTEtOWJhOWRiZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:27:40.947947Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976901566. Ctx: { TraceId: 01jweafzck4e4dvfk0459ymm4g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZlZGJhNGItMzBjNTA4MTQtZDE2MDMxMjgtMTQ4YjhmZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:27:40.948609Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976901567. Ctx: { TraceId: 01jweafzcm8502c78kz7epctth, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRiMjQ5Y2UtODUzNzVjZTYtNDZiZGUxYmYtZmE3ZjU3OGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:27:40.948637Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976901568. Ctx: { TraceId: 01jweafzcmfgg7pzf4y3z25g5g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzFjZjM3N2MtNzRlMWJlOTYtYzg4N2ZhNjUtNDg0OGMwZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-05-29T15:27:40.951033Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976901570. Ctx: { TraceId: 01jweafzcm226xhzj2zz11968p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZlZGJhNGItMzBjNTA4MTQtZDE2MDMxMjgtMTQ4YjhmZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:27:40.951040Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976901569. Ctx: { TraceId: 01jweafzcmc94mpp4b2q9handn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGVkYWQ0NTYtZjAzMjYzY2ItYTNhMzA1YTEtOWJhOWRiZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:27:40.951176Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976901572. Ctx: { TraceId: 01jweafzcn0sgga9789p7add5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzFjZjM3N2MtNzRlMWJlOTYtYzg4N2ZhNjUtNDg0OGMwZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:27:40.951190Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976901571. Ctx: { TraceId: 01jweafzcnc1q3zpnx2w5baptr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRiMjQ5Y2UtODUzNzVjZTYtNDZiZGUxYmYtZmE3ZjU3OGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532349333 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-05-29T15:27:41.085432Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897488, task: 1, CA Id [1:7509889727767477162:11293]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:41.116647Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897492, task: 1, CA Id [1:7509889727767477216:10286]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:41.270224Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897493, task: 1, CA Id [1:7509889727767477229:10154]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:41.270226Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897490, task: 1, CA Id [1:7509889727767477228:8566]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:41.337847Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897489, task: 1, CA Id [1:7509889727767477194:6675]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:41.494537Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897491, task: 1, CA Id [1:7509889727767477218:11641]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:41.845191Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897492, task: 1, CA Id [1:7509889727767477216:10286]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:41.892321Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897493, task: 1, CA Id [1:7509889727767477229:10154]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:42.007058Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897488, task: 1, CA Id [1:7509889727767477162:11293]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:42.079239Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897489, task: 1, CA Id [1:7509889727767477194:6675]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:42.256380Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897490, task: 1, CA Id [1:7509889727767477228:8566]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:42.301411Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897491, task: 1, CA Id [1:7509889727767477218:11641]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:42.525703Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897493, task: 1, CA Id [1:7509889727767477229:10154]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:42.638879Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897488, task: 1, CA Id [1:7509889727767477162:11293]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:42.735645Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897492, task: 1, CA Id [1:7509889727767477216:10286]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:42.942029Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897489, task: 1, CA Id [1:7509889727767477194:6675]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:42.942088Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897491, task: 1, CA Id [1:7509889727767477218:11641]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-05-29T15:27:43.180046Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976897490, task: 1, CA Id [1:7509889727767477228:8566]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532349333 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) Table has 2 shards >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed >> KqpSysColV0::SelectRowAsterisk >> THiveTest::TestCreateTabletAndReassignGroups [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups3 >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi >> BootstrapperTest::MultipleBootstrappers [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::ParallelCreateDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:26:50.422513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:50.422533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:50.422537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:50.422541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:50.422545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:50.422548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:50.422555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:50.422564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:50.422643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:50.422695Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:50.435274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:50.435289Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:50.435360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:50.437235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:50.437255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:50.437281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:50.439293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:50.439346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:50.439444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:50.439600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:50.440152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:50.440183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:50.440386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:50.440393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:50.440413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:50.440418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:50.440422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:50.440435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:50.441462Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 
is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:50.459028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:50.459080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:50.459123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:50.459173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:50.459182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:50.459784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:50.459810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:50.459858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:50.459865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:50.459869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:50.459873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:50.460344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:50.460361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:50.460367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:50.460801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:50.460816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:50.460823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-05-29T15:26:50.460831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:50.461585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:50.490457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:50.490520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:50.490728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:50.490795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:50.490805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:50.490878Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
1004 2025-05-29T15:27:48.729517Z node 208 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:27:48.729522Z node 208 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:27:48.729527Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:27:48.729758Z node 208 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:27:48.729772Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:27:48.729776Z node 208 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:27:48.729781Z node 208 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:27:48.729786Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:27:48.729797Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2025-05-29T15:27:48.730429Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:27:48.730444Z node 208 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 1004:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:27:48.730521Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:27:48.730550Z node 208 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:27:48.730553Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:27:48.730557Z node 208 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:27:48.730559Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:27:48.730562Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2025-05-29T15:27:48.730565Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 
2025-05-29T15:27:48.730571Z node 208 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:27:48.730575Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:27:48.730595Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:27:48.730878Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:27:48.731245Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:27:48.732879Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 356 RawX2: 893353199905 } TabletId: 72075186233409546 State: 4 2025-05-29T15:27:48.732901Z node 208 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:27:48.732973Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 357 RawX2: 893353199906 } TabletId: 72075186233409547 State: 4 2025-05-29T15:27:48.732980Z node 208 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:27:48.733727Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:27:48.733900Z node 208 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-05-29T15:27:48.733963Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:27:48.734489Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:48.734557Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:27:48.734658Z node 208 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-05-29T15:27:48.734729Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:27:48.734775Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted 
for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2025-05-29T15:27:48.735312Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:27:48.735321Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:27:48.735334Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:27:48.735790Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:27:48.735803Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:27:48.736095Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:27:48.736105Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:27:48.736127Z node 208 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:27:48.736188Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:27:48.736195Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:27:48.736240Z node 208 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:27:48.736257Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:27:48.736263Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [208:556:2515] TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2025-05-29T15:27:48.736321Z node 208 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:27:48.736332Z node 208 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-05-29T15:27:48.736342Z node 208 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2025-05-29T15:27:48.736349Z node 208 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2025-05-29T15:27:48.736426Z node 208 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/DropMe" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:27:48.736474Z node 208 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DropMe" took 60us result status StatusPathDoesNotExist 2025-05-29T15:27:48.736515Z node 208 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DropMe\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/DropMe" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] Test command err: ... waiting for pipe to connect ... stopping current instance ... waiting for pipe to disconnect ... waiting for pipe to connect ... sleeping for 2 seconds 2025-05-29T15:27:45.638096Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:193: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-05-29T15:27:45.638126Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:193: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-05-29T15:27:45.638143Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:193: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-05-29T15:27:45.638280Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:215: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-05-29T15:27:45.638286Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:330: tablet:9437184, type: Dummy, begin new round, seed: 8427358873417017059 2025-05-29T15:27:45.638314Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:215: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-05-29T15:27:45.638316Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:330: tablet:9437184, type: Dummy, begin new round, seed: 314095936534775797 2025-05-29T15:27:45.638320Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:215: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-05-29T15:27:45.638322Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:330: tablet:9437184, type: Dummy, begin new round, seed: 4772764162469967008 2025-05-29T15:27:45.638549Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:428: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-05-29T15:27:45.638580Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:428: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-05-29T15:27:45.638586Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:428: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-05-29T15:27:45.638590Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:499: tablet: 9437184, type: Dummy, lost round, wait for 0.106295s 2025-05-29T15:27:45.638607Z node 3 :BOOTSTRAPPER 
DEBUG: bootstrapper.cpp:428: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-05-29T15:27:45.638613Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:428: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-05-29T15:27:45.638617Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:680: tablet: 9437184, type: Dummy, boot 2025-05-29T15:27:45.638659Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:428: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-05-29T15:27:45.638664Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:499: tablet: 9437184, type: Dummy, lost round, wait for 0.101463s 2025-05-29T15:27:45.818481Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:193: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-05-29T15:27:45.818700Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:215: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:278:2096] 2025-05-29T15:27:45.818832Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:266: tablet: 9437184, type: Dummy, connect: OK 2025-05-29T15:27:45.818843Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:277: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-05-29T15:27:45.829005Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:193: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-05-29T15:27:45.829167Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:215: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:278:2096] 2025-05-29T15:27:45.829247Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:266: tablet: 9437184, type: Dummy, connect: OK 2025-05-29T15:27:45.829251Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:277: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... tablet initially started on node 4 (idx 2) in gen 2 ... disconnecting other nodes ... sleeping for 2 seconds (tablet expected to survive) 2025-05-29T15:27:46.730866Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 3 2025-05-29T15:27:46.730893Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 5 2025-05-29T15:27:46.731102Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:303: tablet: 9437184, type: Dummy, disconnected 2025-05-29T15:27:46.731112Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:193: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-05-29T15:27:46.731143Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:303: tablet: 9437184, type: Dummy, disconnected 2025-05-29T15:27:46.731148Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:193: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-05-29T15:27:46.731621Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:215: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:278:2096] 2025-05-29T15:27:46.731709Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:215: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:278:2096] 2025-05-29T15:27:46.732085Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:266: tablet: 9437184, type: Dummy, connect: OK 2025-05-29T15:27:46.732093Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:277: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-05-29T15:27:46.732131Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:266: tablet: 9437184, type: Dummy, connect: OK 2025-05-29T15:27:46.732135Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:277: tablet: 9437184, type: Dummy, connected to leader, waiting ... disconnecting other nodes (new tablet connections fail) ... 
sleeping for 2 seconds (tablet expected to survive) 2025-05-29T15:27:47.490497Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 3 2025-05-29T15:27:47.490520Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 5 2025-05-29T15:27:47.490558Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:303: tablet: 9437184, type: Dummy, disconnected 2025-05-29T15:27:47.490565Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:193: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-05-29T15:27:47.490576Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:303: tablet: 9437184, type: Dummy, disconnected 2025-05-29T15:27:47.490581Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:193: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-05-29T15:27:47.490729Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:215: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:278:2096] 2025-05-29T15:27:47.490781Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:215: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:278:2096] ... disconnecting nodes 2 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... disconnecting nodes 2 <-> 1 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 2025-05-29T15:27:47.490869Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:266: tablet: 9437184, type: Dummy, connect: ERROR 2025-05-29T15:27:47.490875Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:330: tablet:9437184, type: Dummy, begin new round, seed: 10053858333920509680 2025-05-29T15:27:47.490916Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:266: tablet: 9437184, type: Dummy, connect: ERROR 2025-05-29T15:27:47.490919Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:330: tablet:9437184, type: Dummy, begin new round, seed: 11851482555838222794 2025-05-29T15:27:47.491006Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:428: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-05-29T15:27:47.491018Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:428: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-05-29T15:27:47.491024Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:428: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2025-05-29T15:27:47.491028Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:571: tablet: 9437184, type: Dummy, become watch on node 4 (owner) 2025-05-29T15:27:47.491041Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:428: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2025-05-29T15:27:47.491044Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:571: tablet: 9437184, type: Dummy, become watch on node 4 (owner) ... disconnect other nodes (new owner expected) ... 
sleeping for 2 seconds (new tablet expected to start once) 2025-05-29T15:27:48.223274Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:643: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335029 2025-05-29T15:27:48.223297Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:193: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-05-29T15:27:48.223313Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:643: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335029 2025-05-29T15:27:48.223320Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:193: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-05-29T15:27:48.223481Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:215: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:278:2096] 2025-05-29T15:27:48.223515Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:215: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:278:2096] ... disconnecting nodes 2 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... disconnecting nodes 2 <-> 1 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 2025-05-29T15:27:48.223634Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:266: tablet: 9437184, type: Dummy, connect: ERROR 2025-05-29T15:27:48.223640Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:330: tablet:9437184, type: Dummy, begin new round, seed: 8470239763125230813 2025-05-29T15:27:48.223660Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:266: tablet: 9437184, type: Dummy, connect: ERROR 2025-05-29T15:27:48.223664Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:330: tablet:9437184, type: Dummy, begin new round, seed: 6622044195218853944 ... disconnecting nodes 2 <-> 3 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER cookie 16045690984833335031 ... disconnecting nodes 2 <-> 1 (bootstrap watch attempt) ... 
blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER cookie 16045690984833335031 2025-05-29T15:27:48.223748Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:403: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335031 2025-05-29T15:27:48.223754Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:428: tablet: 9437184, type: Dummy, apply alien 4 state: DISCONNECTED 2025-05-29T15:27:48.223792Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:403: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335031 2025-05-29T15:27:48.223796Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:428: tablet: 9437184, type: Dummy, apply alien 4 state: DISCONNECTED 2025-05-29T15:27:48.223807Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:428: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-05-29T15:27:48.223811Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:499: tablet: 9437184, type: Dummy, lost round, wait for 0.127990s 2025-05-29T15:27:48.223818Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:428: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-05-29T15:27:48.223822Z node 3 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:680: tablet: 9437184, type: Dummy, boot 2025-05-29T15:27:48.224143Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:715: tablet: 9437184, type: Dummy, tablet dead 2025-05-29T15:27:48.224154Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:193: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-05-29T15:27:48.230576Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:215: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:429:2096] 2025-05-29T15:27:48.237249Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:266: tablet: 9437184, type: Dummy, connect: OK 2025-05-29T15:27:48.237271Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:277: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-05-29T15:27:48.314899Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:193: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-05-29T15:27:48.315094Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:215: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:429:2096] 2025-05-29T15:27:48.315209Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:266: tablet: 9437184, type: Dummy, connect: OK 2025-05-29T15:27:48.315215Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:277: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... disconnecting nodes 2 <-> 0 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to cookie 1
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD]
Test command err: 2025-05-29T15:27:26.391044Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:26.391080Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:26.397138Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:26.397187Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:26.410250Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:26.630462Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:26.630493Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:26.633462Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:26.633491Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:26.654750Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:26.869855Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:26.869899Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:26.873719Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:26.873896Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:26.895502Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:26.895656Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:133:2158], cookie=14681999296219378827, session=0, seqNo=0) 2025-05-29T15:27:26.895690Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:26.906599Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:133:2158], cookie=14681999296219378827, session=1) 2025-05-29T15:27:26.906779Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:142:2165], cookie=4311550358399049815) 2025-05-29T15:27:26.906803Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:142:2165], cookie=4311550358399049815) 2025-05-29T15:27:27.324052Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:27.334950Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:27.680076Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:27.690698Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:28.035638Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:28.046426Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31:
[72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:28.391512Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:28.402225Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:28.767474Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:28.778320Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:29.113023Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:29.123765Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:29.468705Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:29.479749Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:29.825234Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:29.835963Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:30.181068Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:30.191918Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:30.567998Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:30.578791Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:30.945559Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:30.956472Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:31.311731Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:31.322630Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:31.678234Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:31.689114Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:32.044861Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:32.055671Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:32.441464Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:32.452310Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:32.817212Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:32.827978Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:33.182970Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:33.193833Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:33.548959Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 
2025-05-29T15:27:33.559814Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:33.915361Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:33.926193Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:34.302385Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:34.313163Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:34.668941Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:34.679603Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:35.038963Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:35.050294Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:35.407499Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:35.418306Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:35.790583Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:35.801482Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:36.162548Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:36.173330Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:36.538688Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:36.549602Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:36.897409Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:36.910008Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:37.270278Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:37.281072Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:37.636179Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:37.647214Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:38.034045Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:38.044939Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:38.404246Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:38.415228Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:38.780783Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:38.791513Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:39.136382Z node 3 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:39.147152Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:39.520888Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:39.531648Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:39.897391Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:39.908316Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:40.270303Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:40.281187Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:40.646944Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:40.657736Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:41.002819Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:41.013698Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:41.369076Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:41.379911Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:41.775360Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:41.786266Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:42.143955Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:42.154782Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:42.522777Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:42.533784Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:42.893251Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:42.904179Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:43.260439Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:43.271342Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:43.667172Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:43.677860Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:44.033152Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:44.043973Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:44.410053Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:44.420874Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] 
TTxSelfCheck::Complete 2025-05-29T15:27:44.776337Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:44.787024Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:45.126439Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:45.147197Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:45.532970Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:45.543859Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:45.889734Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:45.900690Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:46.256845Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:46.267829Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:46.614071Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:46.624893Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:46.961027Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:46.972011Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:47.317630Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:47.328632Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:47.664199Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:47.675071Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:48.020893Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:48.031705Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:48.392376Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:48.406235Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:48.763166Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:48.774245Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:49.181228Z node 3 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-05-29T15:27:49.181270Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-05-29T15:27:49.192136Z node 3 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-05-29T15:27:49.202540Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:531:2479], cookie=3523083979786197728) 2025-05-29T15:27:49.202585Z node 3 :KESUS_TABLET DEBUG: 
tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:531:2479], cookie=3523083979786197728) 2025-05-29T15:27:49.411630Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:49.411667Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:49.416843Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:49.416935Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:49.438907Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:49.440098Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:133:2158], cookie=13240608574137101419, path="Root", config={ MaxUnitsPerSecond: 100 }) 2025-05-29T15:27:49.440182Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-05-29T15:27:49.450946Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:133:2158], cookie=13240608574137101419) 2025-05-29T15:27:49.451313Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:144:2167]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:49.451323Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:144:2167], cookie=0) 2025-05-29T15:27:49.451355Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:142:2165]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-05-29T15:27:49.451358Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:142:2165], cookie=0) 2025-05-29T15:27:49.492060Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:144:2167]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-05-29T15:27:49.492093Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:142:2165]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-05-29T15:27:49.492148Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([4:146:2169]) 2025-05-29T15:27:49.492176Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:37: [72057594037927937] Send TEvResourcesAllocated to [4:144:2167]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconected." } } } } 2025-05-29T15:27:49.543152Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:142:2165]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } }
>> THiveTest::TestCreateTabletAndReassignGroups3 [GOOD]
>> THiveTest::TestCreateTabletAndMixedReassignGroups3
>> TKesusTest::TestAcquireTimeout [GOOD]
>> TKesusTest::TestAcquireSharedBlocked
>> TKesusTest::TestAcquireSemaphoreTimeout [GOOD]
>> TKesusTest::TestAcquireSemaphoreTimeoutTooBig
>> TKesusTest::TestAcquireSharedBlocked [GOOD]
>> TKesusTest::TestAcquireTimeoutAfterReboot
>> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD]
>> ReadOnlyVDisk::TestGarbageCollect
>> TKesusTest::TestAcquireSemaphoreTimeoutTooBig [GOOD]
>> TKesusTest::TestAcquireSemaphoreTimeoutInfinite
>> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled [GOOD]
>> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot
>> THiveTest::TestCreateTabletAndMixedReassignGroups3 [GOOD]
>> THiveTest::TestCreateTabletAndReassignGroupsWithReboots
>> ReadOnlyVDisk::TestReads
|69.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut
|69.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut
|69.9%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut
>> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD]
>> TKesusTest::TestAcquireSemaphoreRebootTimeout
>> ReadOnlyVDisk::TestSync
|70.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest
>> TSequenceReboots::CreateSequencesWithIndexedTable [GOOD]
>> ReadOnlyVDisk::TestGarbageCollect [GOOD]
>> ReadOnlyVDisk::TestStorageLoad
>> ReadOnlyVDisk::TestReads [GOOD]
>> TKeyValueTest::TestBasicWriteReadOverrun [GOOD]
>> TKeyValueTest::TestBlockedEvGetRequest
>> THiveTest::TestCreateTabletAndReassignGroupsWithReboots [GOOD]
>> THiveTest::TestCreateExternalTablet
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGarbageCollect [GOOD]
Test command err: RandomSeed# 10340194711481348764 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 2 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:1:0:0:1:0] TEvGetResult: TEvGetResult
{Status# OK ResponseSz# 1 {[1:1:1:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-05-29T15:27:50.665485Z 1 00h01m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:50.665581Z 2 00h01m40.100000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:6] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2025-05-29T15:27:50.665617Z 8 00h01m40.100000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:4] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2025-05-29T15:27:50.665640Z 7 00h01m40.100000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:3] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2025-05-29T15:27:50.665690Z 6 00h01m40.100000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:2] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2025-05-29T15:27:50.665773Z 5 00h01m40.100000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:1] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2025-05-29T15:27:50.665964Z 3 00h01m40.100000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:5] barrier# {Soft# {Gen# 1 Step# 2} Hard# } TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2025-05-29T15:27:50.668207Z 1 00h01m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] SEND TEvGet with key [1:1:2:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-05-29T15:27:50.818837Z 1 00h03m20.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:50.818957Z 8 00h03m20.160512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:5] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2025-05-29T15:27:50.818986Z 7 00h03m20.160512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:4] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2025-05-29T15:27:50.819010Z 6 00h03m20.160512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:3] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2025-05-29T15:27:50.819028Z 5 00h03m20.160512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:2] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2025-05-29T15:27:50.819046Z 4 00h03m20.160512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# 
[1:1:3:0:0:32768:1] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2025-05-29T15:27:50.819097Z 2 00h03m20.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] 2025-05-29T15:27:50.819334Z 3 00h03m20.160512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:6] barrier# {Soft# {Gen# 1 Step# 3} Hard# } TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2025-05-29T15:27:50.900701Z 1 00h04m20.161024s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:50.900755Z 2 00h04m20.161024s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-05-29T15:27:50.961381Z 1 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:50.961505Z 8 00h05m00.200000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:5] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-05-29T15:27:50.961541Z 7 00h05m00.200000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:4] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-05-29T15:27:50.961601Z 6 00h05m00.200000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:3] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-05-29T15:27:50.961624Z 5 00h05m00.200000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:2] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-05-29T15:27:50.961649Z 4 00h05m00.200000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:1] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-05-29T15:27:50.961713Z 2 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] 2025-05-29T15:27:50.961990Z 3 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] 2025-05-29T15:27:50.962050Z 1 00h05m00.200000s :BS_PROXY_PUT ERROR: [f0d369c2e40b8c7b] Result# TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 
5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} 2025-05-29T15:27:51.040713Z 1 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:51.040763Z 2 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] 2025-05-29T15:27:51.040775Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2025-05-29T15:27:51.165500Z 1 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:51.165551Z 2 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] 2025-05-29T15:27:51.165563Z 3 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] 2025-05-29T15:27:51.165573Z 4 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5328:719] === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2025-05-29T15:27:51.211889Z 1 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:51.211941Z 2 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] 2025-05-29T15:27:51.211952Z 3 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] 2025-05-29T15:27:51.211962Z 4 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5328:719] 2025-05-29T15:27:51.211971Z 5 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5335:726] === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking 
SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2025-05-29T15:27:51.260281Z 1 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:51.260334Z 2 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] 2025-05-29T15:27:51.260346Z 3 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] 2025-05-29T15:27:51.260357Z 4 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5328:719] 2025-05-29T15:27:51.260367Z 5 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5335:726] 2025-05-29T15:27:51.260378Z 6 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5342:733] === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2025-05-29T15:27:51.292901Z 1 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:51.292936Z 2 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] 2025-05-29T15:27:51.292943Z 3 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] 2025-05-29T15:27:51.292950Z 4 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5328:719] 2025-05-29T15:27:51.292956Z 5 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5335:726] 2025-05-29T15:27:51.292964Z 6 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5342:733] 2025-05-29T15:27:51.292971Z 7 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5349:740] === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2025-05-29T15:27:51.323097Z 2 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] 2025-05-29T15:27:51.323120Z 3 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] 2025-05-29T15:27:51.323129Z 4 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5328:719] 2025-05-29T15:27:51.323137Z 5 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5335:726] 2025-05-29T15:27:51.323146Z 6 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5342:733] 2025-05-29T15:27:51.323154Z 7 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5349:740] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking 
SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 2025-05-29T15:27:51.361246Z 3 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] 2025-05-29T15:27:51.361267Z 4 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5328:719] 2025-05-29T15:27:51.361277Z 5 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5335:726] 2025-05-29T15:27:51.361286Z 6 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5342:733] 2025-05-29T15:27:51.361296Z 7 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5349:740] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] 2025-05-29T15:27:51.403727Z 4 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5328:719] 2025-05-29T15:27:51.403751Z 5 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5335:726] 2025-05-29T15:27:51.403761Z 6 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5342:733] 2025-05-29T15:27:51.403771Z 7 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5349:740] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2025-05-29T15:27:51.447376Z 5 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5335:726] 2025-05-29T15:27:51.447399Z 6 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5342:733] 2025-05-29T15:27:51.447409Z 7 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5349:740] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2025-05-29T15:27:51.569951Z 6 00h14m00.461536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5342:733] 2025-05-29T15:27:51.569974Z 7 00h14m00.461536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5349:740] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2025-05-29T15:27:51.653643Z 7 00h14m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5349:740] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-05-29T15:27:51.855334Z 1 00h16m30.512048s 
:BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:6] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-05-29T15:27:51.855452Z 8 00h16m30.512048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:5] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-05-29T15:27:51.855485Z 7 00h16m30.512048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:4] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-05-29T15:27:51.855522Z 6 00h16m30.512048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:3] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-05-29T15:27:51.855552Z 5 00h16m30.512048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:2] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-05-29T15:27:51.855583Z 4 00h16m30.512048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:1] barrier# {Soft# {Gen# 1 Step# 4} Hard# } TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} SEND TEvGet with key [1:1:4:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:1:0] NODATA Size# 0}} >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestReads [GOOD] Test command err: RandomSeed# 5081736689892655150 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #1 to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #2 to read-only === Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #4 to read-only === Setting VDisk 
read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #1 === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #2 === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #3 === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #4 === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #5 === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #6 === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} >> THiveTest::TestCreateExternalTablet [GOOD] |70.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |70.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |70.0%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:57:2097]) rebooted! !Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed! new actor is[2:82:2111] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:84:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:86:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:88:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! new actor is[7:87:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:77:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:80:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:81:2057] recipient: [10:79:2110] Leader for TabletID 72057594037927937 is [10:82:2111] sender: [10:83:2057] recipient: [10:79:2110] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! new actor is[10:82:2111] Leader for TabletID 72057594037927937 is [10:82:2111] sender: [10:168:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:77:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:80:2057] recipient: [11:79:2110] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:81:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:82:2111] sender: [11:83:2057] recipient: [11:79:2110] !Reboot 72057594037927937 (actor [11:57:2097]) rebooted! !Reboot 72057594037927937 (actor [11:57:2097]) tablet resolver refreshed! 
new actor is[11:82:2111] Leader for TabletID 72057594037927937 is [11:82:2111] sender: [11:168:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:78:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:81:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:82:2057] recipient: [12:80:2110] Leader for TabletID 72057594037927937 is [12:83:2111] sender: [12:84:2057] recipient: [12:80:2110] !Reboot 72057594037927937 (actor [12:57:2097]) rebooted! !Reboot 72057594037927937 (actor [12:57:2097]) tablet resolver refreshed! new actor is[12:83:2111] Leader for TabletID 72057594037927937 is [12:83:2111] sender: [12:169:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:55:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:55:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:58:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:75:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:81:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:84:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:85:2057] recipient: [13:83:2113] Leader for TabletID 72057594037927937 is [13:86:2114] sender: [13:87:2057] recipient: [13:83:2113] !Reboot 72057594037927937 (actor [13:57:2097]) rebooted! !Reboot 72057594037927937 (actor [13:57:2097]) tablet resolver refreshed! new actor is[13:86:2114] Leader for TabletID 72057594037927937 is [13:86:2114] sender: [13:172:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:55:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:55:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:58:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:75:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:57:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:81:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:84:2057] recipient: [14:83:2113] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:85:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:86:2114] sender: [14:87:2057] recipient: [14:83:2113] !Reboot 72057594037927937 (actor [14:57:2097]) rebooted! !Reboot 72057594037927937 (actor [14:57:2097]) tablet resolver refreshed! 
new actor is[14:86:2114] Leader for TabletID 72057594037927937 is [14:86:2114] sender: [14:172:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:55:2057] recipient: [15:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:55:2057] recipient: [15:52:2095] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:58:2057] recipient: [15:52:2095] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:75:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:82:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:85:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:86:2057] recipient: [15:84:2113] Leader for TabletID 72057594037927937 is [15:87:2114] sender: [15:88:2057] recipient: [15:84:2113] !Reboot 72057594037927937 (actor [15:57:2097]) rebooted! !Reboot 72057594037927937 (actor [15:57:2097]) tablet resolver refreshed! new actor is[15:87:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:55:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:55:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:58:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:75:2057] recipient: [16:14:2061] 2025-05-29T15:27:52.296550Z node 17 :KEYVALUE ERROR: keyvalue_storage_read_request.cpp:254: {KV323@keyvalue_storage_read_request.cpp:254} Received BLOCKED EvGetResult. KeyValue# 72057594037927937 Status# BLOCKED Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 0 ErrorReason# block race detected 2025-05-29T15:27:52.297250Z node 17 :TABLET_MAIN ERROR: tablet_sys.cpp:934: Tablet: 72057594037927937 HandleBlockBlobStorageResult, msg->Status: ALREADY, not discovered Marker# TSYS21 2025-05-29T15:27:52.297266Z node 17 :TABLET_MAIN ERROR: tablet_sys.cpp:1849: Tablet: 72057594037927937 Type: KeyValue, EReason: ReasonBootBSError, SuggestedGeneration: 0, KnownGeneration: 3 Marker# TSYS31 |70.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |70.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |70.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |70.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadWhileWriteWorks >> ReadOnlyVDisk::TestSync [GOOD] >> YdbSdkSessionsPool1Session::GetSession/0 |70.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> YdbSdkSessionsPool::StressTestAsync/0 >> YdbSdkSessions::TestActiveSessionCountAfterBadSession >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] >> ReadOnlyVDisk::TestGetWithMustRestoreFirst >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> 
ReadOnlyVDisk::TestSync [GOOD] Test command err: RandomSeed# 2474636423847716166 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:0:0:0:131072:0] 2025-05-29T15:27:51.157524Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:8808:940] 2025-05-29T15:27:51.157614Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8815:947] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-05-29T15:27:51.437815Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:8822:954] 2025-05-29T15:27:51.437846Z 2 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8815:947] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-05-29T15:27:52.147784Z 5 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8836:968] 2025-05-29T15:27:52.147817Z 4 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:8829:961] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 
2025-05-29T15:27:52.563554Z 6 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8843:975] 2025-05-29T15:27:52.563585Z 5 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8836:968] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2025-05-29T15:27:52.950079Z 7 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8850:982] 2025-05-29T15:27:52.950107Z 6 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8843:975] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:6:0:0:131072:0] 2025-05-29T15:27:53.283664Z 7 00h26m00.561536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8850:982] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 7 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowAsterisk Test command err: Trying to start YDB, gRPC: 13550, MsgBus: 3264 
2025-05-29T15:27:49.498309Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889773348331170:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:27:49.498336Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b40/r3tmp/tmpxVaKHJ/pdisk_1.dat 2025-05-29T15:27:49.552351Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889773348331151:2079] 1748532469498169 != 1748532469498172 TServer::EnableGrpc on GrpcPort 13550, node 1 2025-05-29T15:27:49.558877Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:49.564922Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:27:49.564940Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:27:49.564943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:27:49.564993Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3264 2025-05-29T15:27:49.601508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:49.601541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:49.602631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3264 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:27:49.628932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:49.637887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:27:49.656640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:49.676721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:49.733344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:27:49.835861Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889773348332789:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:49.835897Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:49.888819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:27:49.944533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:27:49.958546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:27:49.973060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:27:50.030243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:27:50.047473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:27:50.057044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:27:50.072929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889777643300741:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:50.072959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889777643300746:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:50.072961Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:50.073727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:27:50.076435Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889777643300748:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:27:50.160117Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889777643300799:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:50.247919Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889777643300815:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:50.248018Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDRmZDM4ZDctYWY4MTdjMDktN2UyMGRjMWMtMmZiMTQxYjM=, ActorId: [1:7509889773348332786:2401], ActorState: ExecuteState, TraceId: 01jweag89r88k7asnz09584g4r, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:27:50.248643Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7FB460D92AC2 14. ??:0: ?? @ 0x7FB460E2484F >> YdbSdkSessions::TestActiveSessionCountAfterBadSession [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] >> ReadOnlyVDisk::TestDiscover >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi >> ReadOnlyVDisk::TestWrites ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] Test command err: RandomSeed# 4929353826759642626 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-05-29T15:27:54.146329Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-05-29T15:27:54.148787Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-05-29T15:27:54.151257Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-05-29T15:27:54.151915Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] TEvPutResult: TEvPutResult {Id#
[1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2025-05-29T15:27:54.153682Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2025-05-29T15:27:54.154227Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2025-05-29T15:27:54.154916Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2025-05-29T15:27:54.155446Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for 
vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2025-05-29T15:27:54.456157Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:54.456195Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] 2025-05-29T15:27:54.456230Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] 2025-05-29T15:27:54.456413Z 1 00h05m30.160512s :BS_PROXY_PUT ERROR: [2abc0bc695d22027] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2025-05-29T15:27:54.456760Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:54.457038Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] 2025-05-29T15:27:54.457326Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] 
NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2025-05-29T15:27:54.457664Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:54.457835Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] 2025-05-29T15:27:54.458026Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:14:0:0:131072:0] 2025-05-29T15:27:54.458283Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] 2025-05-29T15:27:54.458587Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:54.458695Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:14:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:15:0:0:32768:0] 2025-05-29T15:27:54.458912Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 
VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] 2025-05-29T15:27:54.458927Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] 2025-05-29T15:27:54.459103Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:15:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:16:0:0:131072:0] 2025-05-29T15:27:54.459452Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] 2025-05-29T15:27:54.459467Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] 2025-05-29T15:27:54.459731Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:16:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:17:0:0:32768:0] 2025-05-29T15:27:54.460038Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:54.460075Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] 2025-05-29T15:27:54.460085Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:17:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 
VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUEUUU } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUSU } { OrderNumber# 5 Situations# UUUUUS } { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:18:0:0:131072:0] 2025-05-29T15:27:54.460492Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:54.460531Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] 2025-05-29T15:27:54.460549Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:18:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 7 Situations# SUUUUU } { OrderNumber# 0 Situations# UEUUUU } { OrderNumber# 1 Situations# UUEUUU } { OrderNumber# 2 Situations# UUUEUU } { OrderNumber# 3 Situations# UUUUSU } { OrderNumber# 4 Situations# UUUUUS } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:19:0:0:32768:0] 2025-05-29T15:27:54.461035Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:54.461070Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] 2025-05-29T15:27:54.461085Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:19:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# UUEUUU } { OrderNumber# 1 Situations# UUUEUU } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } { OrderNumber# 4 Situations# UUSUUU } { OrderNumber# 5 Situations# UUUUSU } ] " ApproximateFreeSpaceShare# 
0.999988} SEND TEvPut with key [1:1:20:0:0:131072:0] 2025-05-29T15:27:54.461503Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5307:698] 2025-05-29T15:27:54.461522Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5321:712] 2025-05-29T15:27:54.461541Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5314:705] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:20:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvGet with key [1:1:11:0:0:32768:0] 2025-05-29T15:27:54.462800Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5308:699] 2025-05-29T15:27:54.462841Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:706] 2025-05-29T15:27:54.462851Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:713] 2025-05-29T15:27:54.462948Z 1 00h05m30.160512s :BS_PROXY_GET ERROR: [1c1376b508ecec50] Response# TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} Marker# BPG29 2025-05-29T15:27:54.462972Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5315:706] 2025-05-29T15:27:54.462982Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5322:713] TEvGetResult: TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] 
Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} >> StatisticsSaveLoad::Delete |70.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> ReadOnlyVDisk::TestDiscover [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] Test command err: 2025-05-29T15:25:48.935319Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889253211066642:2207];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:48.935366Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025bd/r3tmp/tmpHFKcEc/pdisk_1.dat 2025-05-29T15:25:49.092536Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1028, node 1 2025-05-29T15:25:49.166937Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:49.166949Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:49.166951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:49.166995Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18479 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:49.232220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
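The YdbTableSplit trace that starts above exercises automatic partitioning by load: sustained reads push datashard CPU over the configured threshold (see the "CPU usage 4.1107 is higher than threshold of 1" entry later in this trace) until the Foo table ends up with three shards. For reference, a table with this behaviour can be declared roughly as below. This is an illustrative sketch, not the test's code: the endpoint, database, include paths, and the primary-key choice are assumptions, and the truncated third column of Foo is omitted.

// Hypothetical sketch, not taken from ydb/services/ydb/table_split_ut.
// Include paths follow the in-repo C++ SDK layout and may differ by version;
// endpoint/database are placeholders.
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

int main() {
    NYdb::TDriver driver(NYdb::TDriverConfig()
        .SetEndpoint("localhost:2136")   // placeholder
        .SetDatabase("/Root"));          // matches the trace's database
    NYdb::NTable::TTableClient client(driver);
    // Create a table that datashards may split when read load is high.
    auto status = client.RetryOperationSync([](NYdb::NTable::TSession session) {
        return session.ExecuteSchemeQuery(R"(
            CREATE TABLE Foo (
                NameHash Uint32,
                Name Utf8,
                PRIMARY KEY (NameHash, Name)
            ) WITH (AUTO_PARTITIONING_BY_LOAD = ENABLED);
        )").GetValueSync();
    });
    driver.Stop(true);
    return status.IsSuccess() ? 0 : 1;
}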
2025-05-29T15:25:49.253249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:49.253281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:49.259496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18479 2025-05-29T15:25:49.499467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889257506034773:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.499491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.534863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:25:49.632262Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889257506034951:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.632303Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.632438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889257506034960:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.632460Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889257506034961:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.633370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:25:49.634008Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889257506035000:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.634019Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889257506035002:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.634047Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.634577Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889257506034966:2720] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:49.635631Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889257506035013:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.635643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889257506035015:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.635752Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:49.636159Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889257506035016:2741] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:49.637227Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889257506035053:2768] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:25:49.645462Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889257506034965:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:25:49.645517Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889257506034964:2369], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:25:49.650948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-05-29T15:25:49.652725Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889257506035011:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:25:49.656331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-05-29T15:25:49.656426Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889257506035044:2383], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking }
2025-05-29T15:25:49.665077Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889257506035105:2403], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:25:49.665742Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWQzYjJlZDQtZWQ1YTkyOWQtNWU1YWNkZTgtNjA0M2ZjYTM=, ActorId: [1:7509889257506034948:2360], ActorState: ExecuteState, TraceId: 01jweacjp395h4ksht46b4jerc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: INTERNAL_ERROR:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
BAD_SESSION:
: Error: Session not found: ydb://session/3?node_id=1&id=YWQzYjJlZDQtZWQ1YTkyOWQtNWU1YWNkZTgtNjA0M2ZjYTM= BAD_SESSION:
: Error: Session not found: ydb://session/3?node_id=1&id=YWQzYjJlZDQtZWQ1YTkyOWQtNWU1YWNkZTgtNjA0M2ZjYTM= BAD_SESSION:
: Error: Session not found: ydb://session/3?node_id=1&id=YWQzYjJlZDQtZWQ1YTkyOWQtNWU1YWNkZTgtNjA0M2ZjYTM= BAD_SESSION:
: Error: Session not found: ydb://session/3?node_id=1&id=YWQzYjJlZDQtZWQ1YTkyOWQtNWU1YWNkZTgtNjA0M2ZjYTM= BAD_SESSION:
: Error: Session not found: ydb://session/3?node_id=1&id=YWQzYjJlZDQtZWQ1YTkyOWQtNWU1YWNkZTgtNjA0M2ZjYTM= BAD_SESSION:
: Error: Session not found: ydb://session/3?node_id=1&id=YWQzYjJlZDQtZWQ1YTkyOWQtNWU1YWNkZTgtNjA0M2ZjYTM= BAD_SESSION: < ... t: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-05-29T15:27:45.319280Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976952654. Ctx: { TraceId: 01jweag3n6fezrx9hpe6x86jzz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGM0M2M1MTItODhmZTQ0ZTQtYTQxNzdkYTctOTkwZDNlNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:27:45.356337Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952362, task: 1, CA Id [1:7509889751429647117:3462]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:45.356343Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952364, task: 1, CA Id [1:7509889751429647184:3463]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:45.356356Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952369, task: 1, CA Id [1:7509889751429647212:3465]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:45.356360Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952368, task: 1, CA Id [1:7509889751429647197:3468]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:45.436748Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952370, task: 1, CA Id [1:7509889751429647235:3473]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:45.436756Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952358, task: 1, CA Id [1:7509889751429647096:3470]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:45.480806Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952359, task: 1, CA Id [1:7509889751429647098:3464]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:45.568848Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952367, task: 1, CA Id [1:7509889751429647185:3466]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:45.715830Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952364, task: 1, CA Id [1:7509889751429647184:3463]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:45.812565Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952358, task: 1, CA Id [1:7509889751429647096:3470]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:45.812589Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952361, task: 1, CA Id [1:7509889751429647079:3467]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:45.898585Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952369, task: 1, CA Id [1:7509889751429647212:3465]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:45.955395Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952362, task: 1, CA Id [1:7509889751429647117:3462]. 
Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:45.955399Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952368, task: 1, CA Id [1:7509889751429647197:3468]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:46.082936Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952370, task: 1, CA Id [1:7509889751429647235:3473]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:46.082936Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952359, task: 1, CA Id [1:7509889751429647098:3464]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:46.126444Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952367, task: 1, CA Id [1:7509889751429647185:3466]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:46.406983Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952361, task: 1, CA Id [1:7509889751429647079:3467]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:46.464378Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952364, task: 1, CA Id [1:7509889751429647184:3463]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:46.591347Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952369, task: 1, CA Id [1:7509889751429647212:3465]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:46.692725Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952367, task: 1, CA Id [1:7509889751429647185:3466]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:46.726007Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952368, task: 1, CA Id [1:7509889751429647197:3468]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:46.798192Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952358, task: 1, CA Id [1:7509889751429647096:3470]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:46.907766Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952362, task: 1, CA Id [1:7509889751429647117:3462]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:47.028581Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952370, task: 1, CA Id [1:7509889751429647235:3473]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:47.086439Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952359, task: 1, CA Id [1:7509889751429647098:3464]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:47.137760Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952364, task: 1, CA Id [1:7509889751429647184:3463]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:47.237471Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952367, task: 1, CA Id [1:7509889751429647185:3466]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:47.288685Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952361, task: 1, CA Id [1:7509889751429647079:3467]. 
Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:47.477101Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952368, task: 1, CA Id [1:7509889751429647197:3468]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:47.587658Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952362, task: 1, CA Id [1:7509889751429647117:3462]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:47.587678Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952369, task: 1, CA Id [1:7509889751429647212:3465]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:47.673203Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952358, task: 1, CA Id [1:7509889751429647096:3470]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:47.792006Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952364, task: 1, CA Id [1:7509889751429647184:3463]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:47.823177Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952370, task: 1, CA Id [1:7509889751429647235:3473]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:47.979905Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952367, task: 1, CA Id [1:7509889751429647185:3466]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:48.014785Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952359, task: 1, CA Id [1:7509889751429647098:3464]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:48.159367Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952361, task: 1, CA Id [1:7509889751429647079:3467]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:48.191069Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952368, task: 1, CA Id [1:7509889751429647197:3468]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:48.377006Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952370, task: 1, CA Id [1:7509889751429647235:3473]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:48.482780Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952362, task: 1, CA Id [1:7509889751429647117:3462]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:48.519005Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952358, task: 1, CA Id [1:7509889751429647096:3470]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:48.519041Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952369, task: 1, CA Id [1:7509889751429647212:3465]. Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 2025-05-29T15:27:48.673287Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1077: TxId: 281474976952359, task: 1, CA Id [1:7509889751429647098:3464]. 
Got EvDeliveryProblem, TabletId: 72075186224037889, NotDelivered: 1 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532349648 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 3 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-05-29T15:27:49.119989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6632: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2025-05-29T15:27:49.120024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:56: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2025-05-29T15:27:49.120041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 Table has 3 shards 2025-05-29T15:27:49.744201Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:649: CPU usage 4.1107 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037890 table: [/Root/Foo] |70.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD] Test command err: RandomSeed# 11724745278495651838 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 3 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-05-29T15:27:54.731480Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key 
[1:1:4:0:0:131072:0] 2025-05-29T15:27:54.772969Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-05-29T15:27:54.773267Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2025-05-29T15:27:54.812550Z 3 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-05-29T15:27:54.812764Z 1 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-05-29T15:27:54.812867Z 2 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-05-29T15:27:54.812921Z 1 00h02m30.110512s :BS_PROXY_PUT ERROR: [7d343ee1587055dc] Result# TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] 
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND 
TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Putting VDisk #4 to normal === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Putting VDisk #5 to normal === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Putting VDisk #6 to normal === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:195: Test is failing right now >> ReadOnlyVDisk::TestWrites [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:583: Enable after accepting a pull request with merging configs |70.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites [GOOD] Test command err: RandomSeed# 14282702723564249609 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-05-29T15:27:55.173374Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-05-29T15:27:55.175384Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-05-29T15:27:55.177488Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-05-29T15:27:55.177905Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2025-05-29T15:27:55.179129Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2025-05-29T15:27:55.179471Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2025-05-29T15:27:55.179913Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2025-05-29T15:27:55.180249Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 
RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2025-05-29T15:27:55.339397Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-05-29T15:27:55.339431Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-05-29T15:27:55.339466Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-05-29T15:27:55.339666Z 1 00h03m30.110512s :BS_PROXY_PUT ERROR: [b5661c35d178d670] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2025-05-29T15:27:55.340038Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-05-29T15:27:55.340079Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 
VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-05-29T15:27:55.340284Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2025-05-29T15:27:55.340640Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-05-29T15:27:55.340788Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-05-29T15:27:55.340951Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:14:0:0:131072:0] 2025-05-29T15:27:55.341161Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-05-29T15:27:55.341355Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5309:698] 2025-05-29T15:27:55.341461Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:14:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ 
"VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only m ... ey [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but the writes still go through === SEND TEvPut with key [1:1:21:0:0:32768:0] 2025-05-29T15:27:55.719091Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-05-29T15:27:55.719138Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:22:0:0:131072:0] 2025-05-29T15:27:55.720105Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 
2025-05-29T15:27:55.720449Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:23:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:24:0:0:131072:0] 2025-05-29T15:27:55.721389Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:25:0:0:32768:0] 2025-05-29T15:27:55.722089Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-05-29T15:27:55.722105Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:26:0:0:131072:0] 2025-05-29T15:27:55.722693Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-05-29T15:27:55.722708Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:27:0:0:32768:0] 2025-05-29T15:27:55.723389Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-05-29T15:27:55.723403Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:28:0:0:131072:0] 2025-05-29T15:27:55.723970Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] 2025-05-29T15:27:55.724030Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:29:0:0:32768:0] 2025-05-29T15:27:55.724642Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-05-29T15:27:55.724663Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:30:0:0:131072:0] 2025-05-29T15:27:55.725198Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5323:712] 2025-05-29T15:27:55.725226Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) 
Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} === Read all 31 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:21:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK 
ResponseSz# 1 {[1:1:21:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:22:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:22:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:23:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:23:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:24:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:24:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:25:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:25:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:26:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:26:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:27:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:27:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:28:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:28:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:29:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:29:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:30:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:30:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} >> StatisticsSaveLoad::Simple >> TKesusTest::TestAcquireLocks [GOOD] >> TKesusTest::TestAcquireRepeat >> ReadOnlyVDisk::TestStorageLoad [GOOD] |70.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest >> TestProgram::JsonValue [GOOD] >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireDowngrade |70.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest |70.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TContinuousBackupTests::TakeIncrementalBackup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] Test command err: RandomSeed# 17857555409365568053 2025-05-29T15:27:52.863562Z 1 00h01m08.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3794490:2] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2025-05-29T15:27:52.863752Z 5 00h01m08.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3794490:6] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2025-05-29T15:27:52.863768Z 4 00h01m08.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3794490:5] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2025-05-29T15:27:52.863782Z 2 00h01m08.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3794490:3] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2025-05-29T15:27:52.863799Z 8 00h01m08.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3794490:1] barrier# {Soft# {Gen# 2 Step# 7} Hard# 
{Gen# 2 Step# 4294967295}} 2025-05-29T15:27:52.863813Z 3 00h01m08.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3794490:4] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2025-05-29T15:27:53.058831Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.059568Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.060657Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.062075Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.062100Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.066366Z 1 00h02m38.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.069266Z 1 00h02m38.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.078634Z 1 00h02m38.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.081558Z 1 00h02m38.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.088370Z 1 00h02m38.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.090938Z 1 00h02m38.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.101142Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.101194Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.115582Z 1 00h02m39.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.125504Z 1 00h02m39.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.129346Z 1 00h02m39.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.142485Z 1 00h02m39.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.144889Z 1 00h02m39.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.157689Z 1 00h02m40.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.168900Z 1 
00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.168955Z 1 00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.175181Z 1 00h02m40.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.177839Z 1 00h02m40.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.180457Z 1 00h02m40.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.183517Z 1 00h02m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.186190Z 1 00h02m40.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.189334Z 1 00h02m40.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.194850Z 1 00h02m40.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.197630Z 1 00h02m40.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.200890Z 1 00h02m41.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.211743Z 1 00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.211794Z 1 00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.217415Z 1 00h02m41.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.219810Z 1 00h02m41.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.228720Z 1 00h02m41.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.231292Z 1 00h02m41.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.241616Z 1 00h02m41.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.246249Z 1 00h02m42.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.252813Z 1 00h02m42.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.252854Z 1 00h02m42.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.258356Z 1 00h02m42.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) 
Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.260831Z 1 00h02m42.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.268702Z 1 00h02m42.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.271516Z 1 00h02m42.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.275611Z 1 00h02m42.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.277633Z 1 00h02m43.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.283025Z 1 00h02m43.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.283069Z 1 00h02m43.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.288145Z 1 00h02m43.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.292254Z 1 00h02m43.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.294879Z 1 00h02m43.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.297511Z 1 00h02m43.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.300037Z 1 00h02m43.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.302343Z 1 00h02m43.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.304764Z 1 00h02m43.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.320886Z 1 00h02m43.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.325226Z 1 00h02m44.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.329632Z 1 00h02m44.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.336029Z 1 00h02m44.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.339768Z 1 00h02m44.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.359434Z 1 00h02m44.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.362053Z 1 00h02m44.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5310:700] 2025-05-29T15:27:53.373454Z 1 
00h02m44.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDI ... 60s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.143120Z 8 00h20m55.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.143173Z 8 00h20m55.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.146144Z 8 00h20m55.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.156904Z 8 00h20m55.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.160189Z 8 00h20m55.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.167254Z 8 00h20m56.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.170153Z 8 00h20m56.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.175800Z 8 00h20m56.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.181487Z 8 00h20m56.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.181519Z 8 00h20m56.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.185097Z 8 00h20m56.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.202289Z 8 00h20m56.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.205010Z 8 00h20m56.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.209804Z 8 00h20m56.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.214980Z 8 00h20m57.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.219497Z 8 00h20m57.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.223694Z 8 00h20m57.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.228306Z 8 00h20m57.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.240819Z 8 00h20m57.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.240858Z 8 00h20m57.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.255923Z 8 00h20m57.612560s :BS_SKELETON ERROR: 
PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.266013Z 8 00h20m57.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.282425Z 8 00h20m57.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.287617Z 8 00h20m58.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.303131Z 8 00h20m58.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.315237Z 8 00h20m58.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.321284Z 8 00h20m58.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.321550Z 8 00h20m58.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.329508Z 8 00h20m58.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.333579Z 8 00h20m58.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.339983Z 8 00h20m58.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.356116Z 8 00h20m59.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.366543Z 8 00h20m59.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.371077Z 8 00h20m59.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.381155Z 8 00h20m59.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.381198Z 8 00h20m59.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.389548Z 8 00h20m59.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.392619Z 8 00h20m59.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.399636Z 8 00h20m59.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.405108Z 8 00h21m00.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.411774Z 8 00h21m00.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.415364Z 8 00h21m00.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# 
[1:5359:749] 2025-05-29T15:27:55.424422Z 8 00h21m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.424458Z 8 00h21m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.428859Z 8 00h21m00.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.432126Z 8 00h21m00.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.441791Z 8 00h21m00.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.446518Z 8 00h21m00.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.450559Z 8 00h21m00.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.459061Z 8 00h21m01.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.468462Z 8 00h21m01.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.480939Z 8 00h21m01.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.485962Z 8 00h21m01.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.489684Z 8 00h21m01.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.497480Z 8 00h21m01.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.509037Z 8 00h21m01.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.523586Z 8 00h21m02.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.535839Z 8 00h21m02.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.555112Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.555187Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.555235Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5359:749] 2025-05-29T15:27:55.557814Z 1 00h21m02.412560s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:2345352:3] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2025-05-29T15:27:55.558031Z 4 00h21m02.412560s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# LogoBlobs; putting blob 
beyond the barrier id# [1:5:11:0:11:2345352:6] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2025-05-29T15:27:55.558071Z 3 00h21m02.412560s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:2345352:5] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2025-05-29T15:27:55.558084Z 7 00h21m02.412560s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:2345352:1] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2025-05-29T15:27:55.558098Z 2 00h21m02.412560s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:2345352:4] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2025-05-29T15:27:55.558212Z 5 00h21m02.412560s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:2345352:2] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2025-05-29T15:27:55.567840Z 5 00h21m02.412560s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:2345352:2] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} >> TKesusTest::TestAcquireDowngrade [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout >> TDSProxyGetTest::TestBlock42GetIntervalsWipedAllOk |70.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |70.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |70.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D 
VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, 
label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\" ... 
} FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE;
json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ]
Check output for Double
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10DoubleTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10DoubleTypeE;
>> THeavyPerfTest::TTestLoadEverything [GOOD]
>> DSProxyCounters::MultiPutGeneratedSubrequestBytes [GOOD]
>> TDSProxyGetTest::TestBlock42GetSpecific [GOOD]
>> TDSProxyPatchTest::NaiveErrorOnGetItem_ErasureNone
>> THiveImplTest::BootQueueSpeed
|70.1%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log}
|70.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build
|70.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build
|70.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log}
|70.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build
>> TContinuousBackupTests::TakeIncrementalBackup [GOOD]
>> TDSProxyPatchTest::NaiveErrorOnGetItem_ErasureNone [GOOD]
>> TDSProxyPatchTest::SecuredOk_ErasureMirror3dc
>> TBlobStorageProxySequenceTest::TestBlock42CheckLwtrack
>> TDSProxyPatchTest::SecuredOk_ErasureMirror3dc [GOOD]
>> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_3_1_0_VdiskErrors
>> TBlobStorageProxySequenceTest::TestGivenBlock42MultiPut2ItemsStatuses [GOOD]
>> TDSProxyGetTest::TestMirror32GetIntervalsAllOk
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence_reboots/unittest >> TSequenceReboots::CreateSequencesWithIndexedTable [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:25:48.911698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:25:48.911721Z node 1 :FLAT_TX_SCHEMESHARD
NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:48.911728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:25:48.911734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:25:48.911746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:25:48.911750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:25:48.911760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:25:48.911773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:25:48.911885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:25:48.911973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:25:48.925911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:25:48.925933Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:48.926028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:25:48.928932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:25:48.928961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:25:48.928989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:25:48.931591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:25:48.931661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:25:48.931761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:48.931907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:25:48.932495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:48.932537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 
2025-05-29T15:25:48.932737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:25:48.932745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:25:48.932775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:25:48.932782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:25:48.932787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:25:48.932804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:25:48.933989Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:25:48.956100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:25:48.956179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:48.956235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:25:48.956278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:25:48.956290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:48.957014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:48.957038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:25:48.957082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:48.957092Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:25:48.957098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:25:48.957105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:25:48.957507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:48.957519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:25:48.957525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:25:48.957870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:48.957881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:25:48.957887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:48.957895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:25:48.958599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:25:48.958995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:25:48.959038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:25:48.959244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:25:48.959269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:25:48.959276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:25:48.959339Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... t schemeshard: 72057594046678944 2025-05-29T15:27:51.118507Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:27:51.118586Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [288:651:2601], Recipient [288:126:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:51.118593Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:51.118598Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 2025-05-29T15:27:51.118623Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:27:51.118634Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [288:368:2345] msg type: 269552133 msg: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72057594046678944 State: 4 at schemeshard: 72057594046678944 2025-05-29T15:27:51.118643Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:27:51.118676Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877760, Sender [288:659:2609], Recipient [288:126:2151]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037968897 Status: OK ServerId: [288:660:2610] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-05-29T15:27:51.118681Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4972: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-05-29T15:27:51.118686Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5703: Handle TEvClientConnected, tabletId: 72057594037968897, status: OK, at schemeshard: 72057594046678944 2025-05-29T15:27:51.118705Z node 288 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-05-29T15:27:51.118793Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268698118, Sender [288:220:2218], Recipient [288:126:2151]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 2025-05-29T15:27:51.118801Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4903: StateWork, processing event TEvHive::TEvDeleteTabletReply 2025-05-29T15:27:51.118809Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:27:51.118865Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409547 2025-05-29T15:27:51.119446Z node 288 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409548 2025-05-29T15:27:51.119572Z node 288 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [288:652:2602], Recipient [288:126:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:51.119588Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:27:51.119592Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 2025-05-29T15:27:51.119606Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435084, Sender [288:126:2151], Recipient [288:126:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-05-29T15:27:51.119611Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5049: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-05-29T15:27:51.119621Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:27:51.119627Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:27:51.119641Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:27:51.119648Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:27:51.119653Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409548 2025-05-29T15:27:51.120243Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 268698118, Sender [288:220:2218], Recipient [288:126:2151]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 2025-05-29T15:27:51.120255Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4903: StateWork, processing event TEvHive::TEvDeleteTabletReply 2025-05-29T15:27:51.120263Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:51.120300Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:27:51.120338Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877763, Sender [288:397:2369], Recipient [288:126:2151]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409547 ClientId: [288:397:2369] ServerId: [288:402:2372] } 2025-05-29T15:27:51.120343Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4973: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:27:51.120348Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5763: Client pipe, to tablet: 72075186233409547, from:72057594046678944 is reset 2025-05-29T15:27:51.120490Z node 288 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:4877: StateWork, received event# 269877763, Sender [288:398:2370], Recipient [288:126:2151]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409548 ClientId: [288:398:2370] ServerId: [288:408:2377] } 2025-05-29T15:27:51.120498Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4973: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:27:51.120502Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5763: Client pipe, to tablet: 72075186233409548, from:72057594046678944 is reset 2025-05-29T15:27:51.120903Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:27:51.120916Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:27:51.121256Z node 288 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-05-29T15:27:51.121282Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:27:51.121289Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:27:51.121302Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:27:51.121346Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:27:51.121357Z node 288 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409548 2025-05-29T15:27:51.121387Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877763, Sender [288:659:2609], Recipient [288:126:2151]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [288:659:2609] ServerId: [288:660:2610] } 2025-05-29T15:27:51.121392Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4973: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:27:51.121397Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5763: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset 2025-05-29T15:27:51.121822Z node 288 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 2025-05-29T15:27:51.121911Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [288:671:2621], Recipient [288:126:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-05-29T15:27:51.121916Z node 288 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:27:51.121925Z node 288 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:27:51.121971Z node 288 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 41us result status StatusPathDoesNotExist
2025-05-29T15:27:51.122011Z node 288 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:27:57.036869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:27:57.036910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:27:57.036915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:27:57.036920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:27:57.036931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:27:57.036935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:27:57.036943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:27:57.036956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval#
604800.000000s, IsManualStartup# false 2025-05-29T15:27:57.037059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:27:57.037131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:27:57.049210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:57.049235Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:57.051850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:27:57.051967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:27:57.052006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:27:57.053715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:27:57.053918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:27:57.054069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:57.054124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:27:57.054622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:57.054668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:27:57.054933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:57.054943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:57.054964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:27:57.054972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:57.054978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:27:57.055010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:27:57.056262Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:27:57.072306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: 
"pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:27:57.072397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:57.072452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:27:57.072497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:27:57.072506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:57.073340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:57.073369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:27:57.073433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:57.073444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:27:57.073450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:27:57.073455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:27:57.073990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:57.074007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:27:57.074013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:27:57.074510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:57.074522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:57.074528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:57.074535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:27:57.075152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 
2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:27:57.075656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:27:57.075714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:27:57.075926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:57.075959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:57.075969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:57.076044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:27:57.076053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:57.076089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:27:57.076102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:27:57.076617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:57.076630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:57.076677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
shard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-05-29T15:27:57.344897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:1 progress is 4/4 2025-05-29T15:27:57.344900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-05-29T15:27:57.344905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/4, is published: true 2025-05-29T15:27:57.344922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2316] message: TxId: 103 2025-05-29T15:27:57.344929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-05-29T15:27:57.344937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:27:57.344942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:27:57.344954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:27:57.344959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:1 2025-05-29T15:27:57.344962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:1 2025-05-29T15:27:57.344977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:27:57.344980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:2 2025-05-29T15:27:57.344984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:2 2025-05-29T15:27:57.344991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:27:57.344995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:3 2025-05-29T15:27:57.344999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:3 2025-05-29T15:27:57.345008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:27:57.345582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:27:57.345598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:723:2626] TestWaitNotification: OK eventTxId 103 2025-05-29T15:27:57.345734Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 
2025-05-29T15:27:57.345797Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 73us result status StatusSuccess 2025-05-29T15:27:57.345937Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:57.346057Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:27:57.346092Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 37us result status StatusSuccess 2025-05-29T15:27:57.346204Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathDescription { Self { Name: 
"streamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 4 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "continuousBackupImpl" TopicPath: "/MyRoot/Table/continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS OffloadConfig { IncrementalBackup { DstPath: "/MyRoot/IncrBackupImpl" DstPathId { OwnerId: 72057594046678944 LocalId: 5 } } } } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:57.346413Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:27:57.346448Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 38us result status StatusSuccess 2025-05-29T15:27:57.346527Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: 
CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TBlobStorageProxySequenceTest::TestBlock42CheckLwtrack [GOOD]
>> TDSProxyGetTest::TestBlock42GetSpecific3
>> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_3_1_0_VdiskErrors [GOOD]
>> TBlobStorageProxySequenceTest::TestGivenBlock42PutWhenPartialGetThenSingleDiskRequestOk
>> TBlobStorageProxySequenceTest::TestGivenBlock42IntersectingPutWhenNodataOkThenOk
>> TDSProxyGetTest::TestBlock42GetSpecific3 [GOOD]
>> TDSProxyPatchTest::NaiveErrorOnGet_ErasureMirror3dc
>> TDSProxyGetTest::TestMirror32GetIntervalsAllOk [GOOD]
>> TDSProxyPatchTest::NaiveOk_Erasure4Plus2Block
>> TBlobStorageProxySequenceTest::TestGivenStripe42GetThenVGetResponsePartsNodata263451ThenGetOk
>> TBlobStorageProxySequenceTest::TestGivenBlock42IntersectingPutWhenNodataOkThenOk [GOOD]
>> TDSProxyGetTest::TestMirror32GetBlobCrcCheck
>> TBlobStorageProxySequenceTest::TestGivenBlock42PutWhenPartialGetThenSingleDiskRequestOk [GOOD]
>> TDSProxyLooksLikeLostTheBlob::TDSProxyErrorRegressionBlock42 [GOOD]
>> TDSProxyPatchTest::NaiveOk_ErasureNone
>> TDSProxyPatchTest::NaiveErrorOnGet_ErasureMirror3dc [GOOD]
>> TDSProxyPutTest::TestBlock42MaxPartCountOnHandoff [GOOD]
>> TDSProxyRequestReportningTest::CheckDefaultBehaviour
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_3_1_0_VdiskErrors [GOOD]
Test command err:
2025-05-29T15:27:57.663508Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [7e4afa7ea38a37be] bootstrap ActorId# [3:82:2128] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13
2025-05-29T15:27:57.663579Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33
2025-05-29T15:27:57.663585Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32
2025-05-29T15:27:57.663590Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement
record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:27:57.663592Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:27:57.663595Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:27:57.663598Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:27:57.667093Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2025-05-29T15:27:57.667141Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:27:57.667147Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:27:57.667205Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2025-05-29T15:27:57.667211Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:27:57.667214Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:27:57.667237Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2025-05-29T15:27:57.667291Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2025-05-29T15:27:57.667298Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:27:57.667302Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:27:57.667324Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:2:0] Marker# BPP01 2025-05-29T15:27:57.667348Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] 
Marker# BPP01 2025-05-29T15:27:57.667356Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:27:57.667361Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:27:57.667366Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 8 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:27:57.667369Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 8 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:27:57.667402Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:0:0] Marker# BPP01 2025-05-29T15:27:57.667409Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:2:0] Marker# BPP01 2025-05-29T15:27:57.667426Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-05-29T15:27:57.667435Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:27:57.667489Z node 3 :BS_PROXY_PUT NOTICE: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.319 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.32 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:2:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.32 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 3.819 VDiskId# [0:1:0:1:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 3.859 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 3.902 VDiskId# [0:1:1:1:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 3.912 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 3.936 VDiskId# [0:1:2:1:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 3.989 VDiskId# [0:1:0:2:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 4.003 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.021 VDiskId# [0:1:1:2:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.047 VDiskId# [0:1:0:0:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 4.069 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:0:0] NodeId# 3 } TEvVPut{ TimestampMs# 4.07 sample PartId# 
[72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:2:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.098 VDiskId# [0:1:1:0:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.109 VDiskId# [0:1:2:2:0] NodeId# 3 Status# OK } ] } >> TDSProxyPatchTest::NaiveOk_Erasure4Plus2Block [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusErrorWith_3_0_VdiskErrors >> TBlobStorageProxySequenceTest::TestGivenStripe42GetThenVGetResponsePartsNodata263451ThenGetOk [GOOD] >> TDSProxyLooksLikeLostTheBlob::TDSProxyNoDataRegressionBlock42 [GOOD] >> TDSProxyPatchTest::SecuredErrorOnGetItem_ErasureMirror3dc >> TDSProxyPatchTest::NaiveOk_ErasureNone [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusOkWith_1_0_VdiskErrors >> TDSProxyRequestReportningTest::CheckDefaultBehaviour [GOOD] >> TDSProxyGetTest::TestMirror32GetBlobCrcCheck [GOOD] >> TDSProxyPatchTest::NaiveErrorOnPut_ErasureNone >> TDSProxyPutTest::TestBlock42PutStatusErrorWith_3_0_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Stripe >> TDSProxyPatchTest::SecuredErrorOnGetItem_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusOkWith_2_0_VdiskErrors >> TDSProxyPatchTest::NaiveErrorOnPut_ErasureNone [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusErrorWith_2_1_VdiskErrors >> TDSProxyPutTest::TestBlock42PutStatusOkWith_1_0_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasure4Plus2Stripe >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop >> TDSProxyPutTest::TestBlock42PutStatusOkWith_2_0_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3Plus2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyRequestReportningTest::CheckDefaultBehaviour [GOOD] Test command err: 2025-05-29T15:27:57.649251Z node 1 :BS_PROXY_GET INFO: dsproxy_get.cpp:479: [a33ccc40f398d531] bootstrap ActorId# [1:76:2122] Group# 0 Query# {MustRestoreFirst# 0 [1:0:0:0:0:99:0]@0:0} Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# BPG01 2025-05-29T15:27:57.649464Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.cpp:238: [a33ccc40f398d531] query.Id# [1:0:0:0:0:99:0] shift# 0 size# 0 Marker# BPG56 2025-05-29T15:27:57.649497Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:135: [a33ccc40f398d531] Id# [1:0:0:0:0:99:0] considerSlowAsError# 0 Parts# {? ? ? ? ? ? ?????? ??????} pessimisticReplicas# 0 p.State# EBS_DISINTEGRATED optimisticReplicas# 6 o.State# EBS_FULL altruisticReplicas# 6 a.State# EBS_FULL Marker# BPG44 2025-05-29T15:27:57.649506Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:135: [a33ccc40f398d531] Id# [1:0:0:0:0:99:0] considerSlowAsError# 1 Parts# {? ? ? ? ? ? ?????? 
??????} pessimisticReplicas# 0 p.State# EBS_DISINTEGRATED optimisticReplicas# 6 o.State# EBS_FULL altruisticReplicas# 6 a.State# EBS_FULL Marker# BPG44 2025-05-29T15:27:57.649519Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:222: [a33ccc40f398d531] AddGet disk# 6 Id# [1:0:0:0:0:99:1] Intervals# {[0, 32)} Marker# BPG46 2025-05-29T15:27:57.649525Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:222: [a33ccc40f398d531] AddGet disk# 4 Id# [1:0:0:0:0:99:1] Intervals# {[0, 32)} Marker# BPG46 2025-05-29T15:27:57.649530Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:222: [a33ccc40f398d531] AddGet disk# 5 Id# [1:0:0:0:0:99:1] Intervals# {[0, 32)} Marker# BPG46 2025-05-29T15:27:57.649534Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:222: [a33ccc40f398d531] AddGet disk# 7 Id# [1:0:0:0:0:99:2] Intervals# {[0, 32)} Marker# BPG46 2025-05-29T15:27:57.649539Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:222: [a33ccc40f398d531] AddGet disk# 4 Id# [1:0:0:0:0:99:2] Intervals# {[0, 32)} Marker# BPG46 2025-05-29T15:27:57.649543Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:222: [a33ccc40f398d531] AddGet disk# 5 Id# [1:0:0:0:0:99:2] Intervals# {[0, 32)} Marker# BPG46 2025-05-29T15:27:57.649548Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:222: [a33ccc40f398d531] AddGet disk# 0 Id# [1:0:0:0:0:99:3] Intervals# {[0, 32)} Marker# BPG46 2025-05-29T15:27:57.649552Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:222: [a33ccc40f398d531] AddGet disk# 4 Id# [1:0:0:0:0:99:3] Intervals# {[0, 32)} Marker# BPG46 2025-05-29T15:27:57.649556Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:222: [a33ccc40f398d531] AddGet disk# 5 Id# [1:0:0:0:0:99:3] Intervals# {[0, 32)} Marker# BPG46 2025-05-29T15:27:57.649561Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:222: [a33ccc40f398d531] AddGet disk# 1 Id# [1:0:0:0:0:99:4] Intervals# {[0, 32)} Marker# BPG46 2025-05-29T15:27:57.649565Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:222: [a33ccc40f398d531] AddGet disk# 4 Id# [1:0:0:0:0:99:4] Intervals# {[0, 32)} Marker# BPG46 2025-05-29T15:27:57.649570Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:222: [a33ccc40f398d531] AddGet disk# 5 Id# [1:0:0:0:0:99:4] Intervals# {[0, 32)} Marker# BPG46 2025-05-29T15:27:57.649592Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.cpp:278: [a33ccc40f398d531] Send get to orderNumber# 0 vget# {ExtrQuery# [1:0:0:0:0:99:3] sh# 0 sz# 32} {MsgQoS ExtQueueId# GetFastRead} Notify# 0 Internals# 0 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BPG14 2025-05-29T15:27:57.649599Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.cpp:278: [a33ccc40f398d531] Send get to orderNumber# 1 vget# {ExtrQuery# [1:0:0:0:0:99:4] sh# 0 sz# 32} {MsgQoS ExtQueueId# GetFastRead} Notify# 0 Internals# 0 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BPG14 2025-05-29T15:27:57.649612Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.cpp:278: [a33ccc40f398d531] Send get to orderNumber# 4 vget# {ExtrQuery# [1:0:0:0:0:99:1] sh# 0 sz# 32}{ExtrQuery# [1:0:0:0:0:99:2] sh# 0 sz# 32}{ExtrQuery# [1:0:0:0:0:99:3] sh# 0 sz# 32}{ExtrQuery# [1:0:0:0:0:99:4] sh# 0 sz# 32} {MsgQoS ExtQueueId# GetFastRead} Notify# 0 Internals# 0 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BPG14 2025-05-29T15:27:57.649620Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.cpp:278: [a33ccc40f398d531] Send get to orderNumber# 5 vget# {ExtrQuery# [1:0:0:0:0:99:1] sh# 0 sz# 32}{ExtrQuery# [1:0:0:0:0:99:2] sh# 0 
sz# 32}{ExtrQuery# [1:0:0:0:0:99:3] sh# 0 sz# 32}{ExtrQuery# [1:0:0:0:0:99:4] sh# 0 sz# 32} {MsgQoS ExtQueueId# GetFastRead} Notify# 0 Internals# 0 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BPG14 2025-05-29T15:27:57.649625Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.cpp:278: [a33ccc40f398d531] Send get to orderNumber# 6 vget# {ExtrQuery# [1:0:0:0:0:99:1] sh# 0 sz# 32} {MsgQoS ExtQueueId# GetFastRead} Notify# 0 Internals# 0 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BPG14 2025-05-29T15:27:57.649630Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.cpp:278: [a33ccc40f398d531] Send get to orderNumber# 7 vget# {ExtrQuery# [1:0:0:0:0:99:2] sh# 0 sz# 32} {MsgQoS ExtQueueId# GetFastRead} Notify# 0 Internals# 0 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BPG14 2025-05-29T15:27:57.649940Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:174: [a33ccc40f398d531] handle result# {EvVGetResult QueryResult Status# OK {[1:0:0:0:0:99:2] OK Size# 32 FullDataSize# 99 BufferData# 32b Cookie# 0} BlockedGeneration# 0} Marker# BPG57 2025-05-29T15:27:57.649968Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:186: [a33ccc40f398d531] Handle TEvVGetResult status# OK From# [0:1:0:4:0] orderNumber# 4 ev {EvVGetResult QueryResult Status# OK {[1:0:0:0:0:99:2] OK Size# 32 FullDataSize# 99 BufferData# 32b Cookie# 0} BlockedGeneration# 0} Marker# BPG12 2025-05-29T15:27:57.649975Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:225: [a33ccc40f398d531] Got# OK orderNumber# 4 vDiskId# [0:1:0:4:0] Marker# BPG58 2025-05-29T15:27:57.650000Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:135: [a33ccc40f398d531] Id# [1:0:0:0:0:99:0] considerSlowAsError# 0 Parts# {? ? ? ? ? ? ?+???? ??????} pessimisticReplicas# 1 p.State# EBS_DISINTEGRATED optimisticReplicas# 6 o.State# EBS_FULL altruisticReplicas# 6 a.State# EBS_FULL Marker# BPG44 2025-05-29T15:27:57.650007Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:135: [a33ccc40f398d531] Id# [1:0:0:0:0:99:0] considerSlowAsError# 1 Parts# {? ? ? ? ? ? ?+???? ??????} pessimisticReplicas# 1 p.State# EBS_DISINTEGRATED optimisticReplicas# 6 o.State# EBS_FULL altruisticReplicas# 6 a.State# EBS_FULL Marker# BPG44 2025-05-29T15:27:57.650023Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:174: [a33ccc40f398d531] handle result# {EvVGetResult QueryResult Status# OK {[1:0:0:0:0:99:2] OK Size# 32 FullDataSize# 99 BufferData# 32b Cookie# 0} BlockedGeneration# 0} Marker# BPG57 2025-05-29T15:27:57.650028Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:186: [a33ccc40f398d531] Handle TEvVGetResult status# OK From# [0:1:0:7:0] orderNumber# 7 ev {EvVGetResult QueryResult Status# OK {[1:0:0:0:0:99:2] OK Size# 32 FullDataSize# 99 BufferData# 32b Cookie# 0} BlockedGeneration# 0} Marker# BPG12 2025-05-29T15:27:57.650032Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:225: [a33ccc40f398d531] Got# OK orderNumber# 7 vDiskId# [0:1:0:7:0] Marker# BPG58 2025-05-29T15:27:57.650045Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:135: [a33ccc40f398d531] Id# [1:0:0:0:0:99:0] considerSlowAsError# 0 Parts# {? + ? ? ? ? ?+???? ??????} pessimisticReplicas# 1 p.State# EBS_DISINTEGRATED optimisticReplicas# 6 o.State# EBS_FULL altruisticReplicas# 6 a.State# EBS_FULL Marker# BPG44 2025-05-29T15:27:57.650051Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:135: [a33ccc40f398d531] Id# [1:0:0:0:0:99:0] considerSlowAsError# 1 Parts# {? + ? ? ? ? ?+???? 
??????} pessimisticReplicas# 1 p.State# EBS_DISINTEGRATED optimisticReplicas# 6 o.State# EBS_FULL altruisticReplicas# 6 a.State# EBS_FULL Marker# BPG44 2025-05-29T15:27:57.650064Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:174: [a33ccc40f398d531] handle result# {EvVGetResult QueryResult Status# OK {[1:0:0:0:0:99:3] OK Size# 32 FullDataSize# 99 BufferData# 32b Cookie# 0} BlockedGeneration# 0} Marker# BPG57 2025-05-29T15:27:57.650069Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:186: [a33ccc40f398d531] Handle TEvVGetResult status# OK From# [0:1:0:0:0] orderNumber# 0 ev {EvVGetResult QueryResult Status# OK {[1:0:0:0:0:99:3] OK Size# 32 FullDataSize# 99 BufferData# 32b Cookie# 0} BlockedGeneration# 0} Marker# BPG12 2025-05-29T15:27:57.650073Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:225: [a33ccc40f398d531] Got# OK orderNumber# 0 vDiskId# [0:1:0:0:0] Marker# BPG58 2025-05-29T15:27:57.650086Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:135: [a33ccc40f398d531] Id# [1:0:0:0:0:99:0] considerSlowAsError# 0 Parts# {? + + ? ? ? ?+???? ??????} pessimisticReplicas# 2 p.State# EBS_DISINTEGRATED optimisticReplicas# 6 o.State# EBS_FULL altruisticReplicas# 6 a.State# EBS_FULL Marker# BPG44 2025-05-29T15:27:57.650092Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:135: [a33ccc40f398d531] Id# [1:0:0:0:0:99:0] considerSlowAsError# 1 Parts# {? + + ? ? ? ?+???? ??????} pessimisticReplicas# 2 p.State# EBS_DISINTEGRATED optimisticReplicas# 6 o.State# EBS_FULL altruisticReplicas# 6 a.State# EBS_FULL Marker# BPG44 2025-05-29T15:27:57.650101Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:174: [a33ccc40f398d531] handle result# {EvVGetResult QueryResult Status# OK {[1:0:0:0:0:99:1] NODATA Size# 0 FullDataSize# 99 Cookie# 0} BlockedGeneration# 0} Marker# BPG57 2025-05-29T15:27:57.650107Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:186: [a33ccc40f398d531] Handle TEvVGetResult status# OK From# [0:1:0:5:0] orderNumber# 5 ev {EvVGetResult QueryResult Status# OK {[1:0:0:0:0:99:1] NODATA Size# 0 FullDataSize# 99 Cookie# 0} BlockedGeneration# 0} Marker# BPG12 2025-05-29T15:27:57.650113Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:235: [a33ccc40f398d531] Got# NODATA orderNumber# 5 vDiskId# [0:1:0:5:0] Marker# BPG59 2025-05-29T15:27:57.650123Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:135: [a33ccc40f398d531] Id# [1:0:0:0:0:99:0] considerSlowAsError# 0 Parts# {? + + ? ? ? ?+???? -?????} pessimisticReplicas# 2 p.State# EBS_DISINTEGRATED optimisticReplicas# 6 o.State# EBS_FULL altruisticReplicas# 6 a.State# EBS_FULL Marker# BPG44 2025-05-29T15:27:57.650129Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:135: [a33ccc40f398d531] Id# [1:0:0:0:0:99:0] considerSlowAsError# 1 Parts# {? + + ? ? ? ?+???? -?????} pessimisticReplicas# 2 p.State# EBS_DISINTEGRATED optimisticReplicas# 6 o.State# EBS_FULL altruisticReplicas# 6 a.State# EBS_FULL Marker# BPG44 2025-05-29T15:27:57.650143Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:135: [a33ccc40f398d531] Id# [1:0:0:0:0:99:0] considerSlowAsError# 0 Parts# {? + + ? ? ? ?+???? -?????} pessimisticReplicas# 2 p.State# EBS_DISINTEGRATED optimisticReplicas# 6 o.State# EBS_FULL altruisticReplicas# 6 a.State# EBS_FULL Marker# BPG44 2025-05-29T15:27:57.650148Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:135: [a33ccc40f398d531] Id# [1:0:0:0:0:99:0] considerSlowAsError# 1 Parts# {? + + ? ? ? ?+???? 
-?????} pessimisticReplicas# 2 p.State# EBS_DISINTEGRATED optimisticReplicas# 6 o.State# EBS_FULL altruisticReplicas# 6 a.State# EBS_FULL Marker# BPG44 2025-05-29T15:27:57.650162Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:135: [a33ccc40f398d531] Id# [1:0:0:0:0:99:0] considerSlowAsError# 0 Parts# {? + + ? ? ? ?+???? -?????} pessimisticReplicas# 2 p.State# EBS_DISINTEGRATED optimisticReplicas# 6 o.State# EBS_FULL altruisticReplicas# 6 a.State# EBS_FULL Marker# BPG44 2025-05-29T15:27:57.650168Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:135: [a33ccc40f398d531] Id# [1:0:0:0:0:99:0] considerSlowAsError# 1 Parts# {? + + ? ? ? ?+???? -?????} pessimisticReplicas# 2 p.State# EBS_DISINTEGRATED optimisticReplicas# 6 o.State# EBS_FULL altruisticReplicas# 6 a.State# EBS_FULL Marker# BPG44 2025-05-29T15:27:57.650199Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:174: [a33ccc40f398d531] handle result# {EvVGetResult QueryResult Status# OK {[1:0:0:0:0:99:4] OK Size# 32 FullDataSize# 99 BufferData# 32b Cookie# 0} BlockedGeneration# 0} Marker# BPG57 2025-05-29T15:27:57.650204Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:186: [a33ccc40f398d531] Handle TEvVGetResult status# OK From# [0:1:0:1:0] orderNumber# 1 ev {EvVGetResult QueryResult Status# OK {[1:0:0:0:0:99:4] OK Size# 32 FullDataSize# 99 BufferData# 32b Cookie# 0} BlockedGeneration# 0} Marker# BPG12 2025-05-29T15:27:57.650209Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:225: [a33ccc40f398d531] Got# OK orderNumber# 1 vDiskId# [0:1:0:1:0] Marker# BPG58 2025-05-29T15:27:57.650221Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:135: [a33ccc40f398d531] Id# [1:0:0:0:0:99:0] considerSlowAsError# 0 Parts# {? + + + ? ? ?+???? -?????} pessimisticReplicas# 3 p.State# EBS_DISINTEGRATED optimisticReplicas# 6 o.State# EBS_FULL altruisticReplicas# 6 a.State# EBS_FULL Marker# BPG44 2025-05-29T15:27:57.650227Z node 1 :BS_PROXY_GET DEBUG: dsproxy_strategy_base.cpp:135: [a33ccc40f398d531] Id# [1:0:0:0:0:99:0] considerSlowAsError# 1 Parts# {? + + + ? ? ?+???? 
-?????} pessimisticReplicas# 3 p.State# EBS_DISINTEGRATED optimisticReplicas# 6 o.State# EBS_FULL altruisticReplicas# 6 a.State# EBS_FULL Marker# BPG44 2025-05-29T15:27:57.650240Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:174: [a33ccc40f398d531] handle result# {EvVGetResult QueryResult Status# OK {[1:0:0:0:0:99:1] OK Size# 32 FullDataSize# 99 BufferData# 32b Cookie# 0} BlockedGeneration# 0} Marker# BPG57 2025-05-29T15:27:57.650245Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:186: [a33ccc40f398d531] Handle TEvVGetResult status# OK From# [0:1:0:6:0] orderNumber# 6 ev {EvVGetResult QueryResult Status# OK {[1:0:0:0:0:99:1] OK Size# 32 FullDataSize# 99 BufferData# 32b Cookie# 0} BlockedGeneration# 0} Marker# BPG12 2025-05-29T15:27:57.650249Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.h:225: [a33ccc40f398d531] Got# OK orderNumber# 6 vDiskId# [0:1:0:6:0] Marker# BPG58 2025-05-29T15:27:57.650270Z node 1 :BS_PROXY_GET DEBUG: dsproxy_get_impl.cpp:134: [a33ccc40f398d531] Response# TEvGetResult {Status# OK ResponseSz# 1 {[1:0:0:0:0:99:0] OK Size# 99}} Marker# BPG29 2025-05-29T15:27:57.650283Z node 1 :BS_PROXY_GET INFO: dsproxy_get.cpp:407: [a33ccc40f398d531] Result# TEvGetResult {Status# OK ResponseSz# 1 {[1:0:0:0:0:99:0] OK Size# 99}} GroupId# 0 Marker# BPG68 2025-05-29T15:27:57.650322Z node 1 :BS_PROXY_GET DEBUG: {BPG72@dsproxy_get.cpp:425} Query history GroupId# 0 HandleClass# FastRead History# THistory { Entries# [ TEvVGet{ TimestampMs# 0.444 sample PartId# [1:0:0:0:0:99:3] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 1 } TEvVGet{ TimestampMs# 0.444 sample PartId# [1:0:0:0:0:99:4] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 1 } TEvVGet{ TimestampMs# 0.444 sample PartId# [1:0:0:0:0:99:1] QueryCount# 4 VDiskId# [0:1:0:4:0] NodeId# 1 } TEvVGet{ TimestampMs# 0.444 sample PartId# [1:0:0:0:0:99:1] QueryCount# 4 VDiskId# [0:1:0:5:0] NodeId# 1 } TEvVGet{ TimestampMs# 0.444 sample PartId# [1:0:0:0:0:99:1] QueryCount# 1 VDiskId# [0:1:0:6:0] NodeId# 1 } TEvVGet{ TimestampMs# 0.444 sample PartId# [1:0:0:0:0:99:2] QueryCount# 1 VDiskId# [0:1:0:7:0] NodeId# 1 } TEvVGetResult{ TimestampMs# 0.786 VDiskId# [0:1:0:4:0] NodeId# 1 Status# OK } TEvVGetResult{ TimestampMs# 0.831 VDiskId# [0:1:0:7:0] NodeId# 1 Status# OK } TEvVGetResult{ TimestampMs# 0.869 VDiskId# [0:1:0:0:0] NodeId# 1 Status# OK } TEvVGetResult{ TimestampMs# 0.907 VDiskId# [0:1:0:5:0] NodeId# 1 Status# OK } GetAcceleration{ TimestampMs# 0.909 } GetAcceleration{ TimestampMs# 0.926 } TEvVGetResult{ TimestampMs# 1.004 VDiskId# [0:1:0:1:0] NodeId# 1 Status# OK } TEvVGetResult{ TimestampMs# 1.045 VDiskId# [0:1:0:6:0] NodeId# 1 Status# OK } ] } >> TBlobStorageProxySequenceTest::TestGivenStripe42WhenGet2PartsOfBlobThenGetOk >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3Plus2 [GOOD] >> TDSProxyGetTest::TestBlock42GetIntervalsAllOk >> TDSProxyPutTest::TestBlock42PutStatusErrorWith_2_1_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Block >> TBlobStorageProxySequenceTest::TestGivenStripe42WhenGet2PartsOfBlobThenGetOk [GOOD] >> TDSProxyPatchTest::SecuredErrorOnGetItem_ErasureNone >> TBlobStorageProxySequenceTest::TestProtobufSizeWithMultiGet >> TDSProxyPatchTest::SecuredErrorOnGetItem_ErasureNone [GOOD] >> TDSProxyPatchTest::MovedError_Erasure4Plus2Block ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3Plus2 [GOOD] Test command err: 2025-05-29T15:27:58.501386Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [7e4afa7ea38a37be] 
bootstrap ActorId# [3:74:2120] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-05-29T15:27:58.501448Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501452Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501455Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501458Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501460Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501463Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501465Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501468Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501470Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501473Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501475Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501478Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501480Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501483Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501485Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501487Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: 
[7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501490Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501495Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.501499Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:27:58.501509Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:27:58.501514Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:27:58.501518Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:27:58.501520Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:27:58.501524Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:27:58.501526Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:27:58.501529Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2025-05-29T15:27:58.501532Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2025-05-29T15:27:58.501535Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2025-05-29T15:27:58.501537Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2025-05-29T15:27:58.501541Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2025-05-29T15:27:58.501543Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2025-05-29T15:27:58.504275Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# 
[72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2025-05-29T15:27:58.504305Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-05-29T15:27:58.504311Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.504314Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.504318Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.504321Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.504323Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.504326Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504329Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504331Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504334Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504336Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504339Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504341Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504343Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504346Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504348Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# 
ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504351Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504353Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504358Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:27:58.504382Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:27:58.504388Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:27:58.504445Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2025-05-29T15:27:58.504451Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-05-29T15:27:58.504454Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 error Marker# BPG50 2025-05-29T15:27:58.504457Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.504459Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.504462Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.504464Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.504467Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.504469Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504472Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504474Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504477Z node 3 :BS_PROXY_PUT 
DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504479Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504482Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504484Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504487Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504489Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504491Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504495Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.504498Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:27:58.504503Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:27:58.504505Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:27:58.504523Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2025-05-29T15:27:58.504537Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:3:0] Marker# BPP01 2025-05-29T15:27:58.504546Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:4:0] Marker# BPP01 2025-05-29T15:27:58.504558Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:5:0] Marker# BPP01 2025-05-29T15:27:58.504608Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { 
SequenceId: 1 MsgId: 0 }}} from# [0:1:0:6:0] Marker# BPP01 2025-05-29T15:27:58.504619Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:7:0] Marker# BPP01 2025-05-29T15:27:58.504630Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-05-29T15:27:58.504636Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:27:58.504693Z node 3 :BS_PROXY_PUT NOTICE: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.352 sample PartId# [72075186224047637:1:863:1:24576:786:6] QueryCount# 1 VDiskId# [0:1:0:5:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.352 sample PartId# [72075186224047637:1:863:1:24576:786:5] QueryCount# 1 VDiskId# [0:1:0:4:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.352 sample PartId# [72075186224047637:1:863:1:24576:786:4] QueryCount# 1 VDiskId# [0:1:0:3:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.353 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.353 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.353 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 3.082 VDiskId# [0:1:0:0:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 3.194 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:6:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 3.233 VDiskId# [0:1:0:1:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 3.294 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:7:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 3.311 VDiskId# [0:1:0:2:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 3.324 VDiskId# [0:1:0:3:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 3.333 VDiskId# [0:1:0:4:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 3.346 VDiskId# [0:1:0:5:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 3.396 VDiskId# [0:1:0:6:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 3.406 VDiskId# [0:1:0:7:0] NodeId# 3 Status# OK } ] } >> TDSProxyGetTest::TestBlock42GetIntervalsAllOk [GOOD] >> TDSProxyPatchTest::MovedOk_ErasureNone >> TDSProxyPatchTest::MovedOk_ErasureNone [GOOD] >> TDSProxyPatchTest::SecuredErrorOnPut_ErasureMirror3dc >> TDSProxyPatchTest::MovedError_Erasure4Plus2Block [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusErrorWith_1_1_1_VdiskErrors >> TDSProxyPatchTest::SecuredErrorOnPut_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_2_0_0_VdiskErrors >> TBlobStorageProxySequenceTest::TestBlock42PutWithChangingSlowDisk >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Stripe [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusErrorWith_1_1_1_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_2_0_0_VdiskErrors [GOOD] >> 
TBlobStorageProxySequenceTest::TestBlock42PutWithChangingSlowDisk [GOOD]
>> TDSProxyGetTest::TestBlock42VGetCountWithErasure [GOOD]
>> TDSProxyPatchTest::NaiveErrorOnGet_ErasureNone
>> THiveImplTest::BootQueueSpeed [GOOD]
>> THiveImplTest::BalancerSpeedAndDistribution
|70.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut
|70.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut
|70.2%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3dc [GOOD]
Test command err:
2025-05-29T15:27:59.437159Z node 4 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [7e4afa7ea38a37be] bootstrap ActorId# [4:82:2128] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13
2025-05-29T15:27:59.437215Z node 4 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33
2025-05-29T15:27:59.437221Z node 4 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32
2025-05-29T15:27:59.437225Z node 4 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33
2025-05-29T15:27:59.437228Z node 4 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32
2025-05-29T15:27:59.437231Z node 4 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33
2025-05-29T15:27:59.437234Z node 4 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32
2025-05-29T15:27:59.439923Z node 4 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01
2025-05-29T15:27:59.439961Z node 4 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33
2025-05-29T15:27:59.439966Z node 4 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32
2025-05-29T15:27:59.440014Z node 4 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01
2025-05-29T15:27:59.440019Z node 4 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33
2025-05-29T15:27:59.440022Z node 4 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345:
[7e4afa7ea38a37be] Sending missing VPut part# 2 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:27:59.440036Z node 4 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2025-05-29T15:27:59.440047Z node 4 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TPut3dcStrategy failed the Fail Model check" ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-05-29T15:27:59.440054Z node 4 :BS_PROXY_PUT NOTICE: dsproxy_put.cpp:486: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TPut3dcStrategy failed the Fail Model check" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:27:59.440086Z node 4 :BS_PROXY_PUT NOTICE: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.254 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:1:0] NodeId# 4 } TEvVPut{ TimestampMs# 0.255 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:2:1:0] NodeId# 4 } TEvVPut{ TimestampMs# 0.255 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 4 } TEvVPutResult{ TimestampMs# 2.949 VDiskId# [0:1:0:1:0] NodeId# 4 Status# ERROR } TEvVPut{ TimestampMs# 2.981 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 4 } TEvVPutResult{ TimestampMs# 3.018 VDiskId# [0:1:1:1:0] NodeId# 4 Status# ERROR } TEvVPut{ TimestampMs# 3.027 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:2:0] NodeId# 4 } TEvVPutResult{ TimestampMs# 3.039 VDiskId# [0:1:2:1:0] NodeId# 4 Status# ERROR } ] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Stripe [GOOD] Test command err: 2025-05-29T15:27:58.246431Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [7e4afa7ea38a37be] bootstrap ActorId# [2:74:2120] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-05-29T15:27:58.246502Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246508Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246512Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246516Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246520Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# 
[72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246524Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246528Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246532Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246535Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246539Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246543Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246547Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246550Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246554Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246558Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246561Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246565Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246571Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.246578Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:27:58.246592Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:27:58.246598Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob 
Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:27:58.246604Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:27:58.246608Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:27:58.246613Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:27:58.246617Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:27:58.246622Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2025-05-29T15:27:58.246626Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2025-05-29T15:27:58.246630Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2025-05-29T15:27:58.246633Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2025-05-29T15:27:58.246639Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2025-05-29T15:27:58.246643Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2025-05-29T15:27:58.250519Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2025-05-29T15:27:58.250561Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-05-29T15:27:58.250568Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.250573Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.250578Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.250582Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 
2025-05-29T15:27:58.250586Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.250590Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250593Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250597Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250600Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250604Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250607Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250611Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250614Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250618Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250621Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250625Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250629Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250635Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:27:58.250648Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:27:58.250654Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:27:58.250720Z node 2 :BS_PROXY_PUT INFO: 
dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2025-05-29T15:27:58.250728Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-05-29T15:27:58.250732Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 error Marker# BPG50 2025-05-29T15:27:58.250750Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.250755Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.250758Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.250762Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.250766Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.250769Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250772Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250776Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250779Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250783Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250787Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250791Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250794Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250798Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# 
[72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250801Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250806Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250811Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:27:58.250817Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:27:58.250820Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:27:58.250843Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2025-05-29T15:27:58.250849Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-05-29T15:27:58.250852Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 error Marker# BPG50 2025-05-29T15:27:58.250856Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 error Marker# BPG50 2025-05-29T15:27:58.250860Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.250864Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.250867Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.250871Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.250875Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250879Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250882Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# 
BPG51 2025-05-29T15:27:58.250885Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250889Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250892Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250897Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.250901Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250905Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250908Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250911Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.250916Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 5 optimisticState# EBS_DISINTEGRATED Marker# BPG55 2025-05-29T15:27:58.250941Z node 2 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 0 BlobId# [72075186224047637:1:863:1:24576:786:0] Reported ErrorReasons# [ ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUEUUU } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUSU } { OrderNumber# 5 Situations# UUUUUS } { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# USUUUU } ] " ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-05-29T15:27:58.250950Z node 2 :BS_PROXY_PUT NOTICE: dsproxy_put.cpp:486: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 0 BlobId# [72075186224047637:1:863:1:24576:786:0] Reported ErrorReasons# [ ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUEUUU } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUSU } { OrderNumber# 5 Situations# UUUUUS } { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# USUUUU } ] " ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:27:58.250994Z node 2 :BS_PROXY_PUT 
NOTICE: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.449 sample PartId# [72075186224047637:1:863:1:24576:786:6] QueryCount# 1 VDiskId# [0:1:0:5:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.45 sample PartId# [72075186224047637:1:863:1:24576:786:5] QueryCount# 1 VDiskId# [0:1:0:4:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.45 sample PartId# [72075186224047637:1:863:1:24576:786:4] QueryCount# 1 VDiskId# [0:1:0:3:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.45 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.45 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.45 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 4.321 VDiskId# [0:1:0:0:0] NodeId# 2 Status# ERROR } TEvVPut{ TimestampMs# 4.448 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:6:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 4.497 VDiskId# [0:1:0:1:0] NodeId# 2 Status# ERROR } TEvVPut{ TimestampMs# 4.599 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:7:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 4.619 VDiskId# [0:1:0:2:0] NodeId# 2 Status# ERROR } ] } >> TDSProxyFaultTolerancePatchTest::mirror3dc >> TDSProxyPatchTest::NaiveErrorOnGet_ErasureNone [GOOD] >> TDSProxyPutTest::TestBlock42MultiPutAllOk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_2_0_0_VdiskErrors [GOOD] Test command err: 2025-05-29T15:27:59.606339Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [7e4afa7ea38a37be] bootstrap ActorId# [3:82:2128] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-05-29T15:27:59.606404Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:27:59.606410Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:27:59.606414Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:27:59.606417Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:27:59.606421Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:27:59.606423Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:27:59.609273Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# 
[0:1:0:1:0] Marker# BPP01 2025-05-29T15:27:59.609319Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:27:59.609325Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:27:59.609383Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2025-05-29T15:27:59.609404Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2025-05-29T15:27:59.609439Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2025-05-29T15:27:59.609446Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:27:59.609449Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:27:59.609477Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2025-05-29T15:27:59.609491Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-05-29T15:27:59.609499Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:27:59.609535Z node 3 :BS_PROXY_PUT NOTICE: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.273 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.274 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:2:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.274 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 3.132 VDiskId# [0:1:0:1:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 3.173 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 3.218 VDiskId# [0:1:1:1:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 3.238 VDiskId# [0:1:2:1:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 3.274 VDiskId# [0:1:0:2:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 3.284 sample PartId# [72075186224047637:1:863:1:24576:786:2] 
QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 3.31 VDiskId# [0:1:0:0:0] NodeId# 3 Status# OK } ] } >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Block [GOOD] >> StatisticsSaveLoad::Delete [FAIL] >> TBlobStorageProxySequenceTest::TestProtobufSizeWithMultiGet [GOOD] >> TDSProxyPatchTest::SecuredErrorOnGet_Erasure4Plus2Block >> TDSProxyPatchTest::SecuredErrorOnGet_Erasure4Plus2Block [GOOD] >> TDSProxyPatchTest::MovedError_ErasureMirror3dc >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Block [GOOD] Test command err: 2025-05-29T15:27:58.666283Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [7e4afa7ea38a37be] bootstrap ActorId# [3:74:2120] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-05-29T15:27:58.666336Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666340Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666343Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666346Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666348Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666351Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666354Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666356Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666359Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666361Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666364Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666366Z node 3 :BS_PROXY_PUT DEBUG: 
dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666369Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666371Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666374Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666376Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666379Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666382Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.666387Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:27:58.666398Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:27:58.666403Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:27:58.666409Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:27:58.666414Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:27:58.666419Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:27:58.666422Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:27:58.666427Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2025-05-29T15:27:58.666430Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2025-05-29T15:27:58.666434Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] 
partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2025-05-29T15:27:58.666438Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2025-05-29T15:27:58.666443Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2025-05-29T15:27:58.666446Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2025-05-29T15:27:58.670239Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2025-05-29T15:27:58.670286Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-05-29T15:27:58.670295Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.670299Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.670305Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.670309Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.670313Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.670318Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670322Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670325Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670329Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670332Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670336Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 
part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670340Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670344Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670347Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670351Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670354Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670358Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670365Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:27:58.670381Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:27:58.670388Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:27:58.670475Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075 ... 
30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-05-29T15:27:58.670487Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 error Marker# BPG50 2025-05-29T15:27:58.670491Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.670495Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.670499Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.670503Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.670507Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.670511Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670515Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670518Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670523Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670526Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670530Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670534Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670538Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670542Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670546Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# 
ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670552Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670556Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:27:58.670563Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:27:58.670567Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:27:58.670593Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2025-05-29T15:27:58.670606Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:3:0] Marker# BPP01 2025-05-29T15:27:58.670617Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:4:0] Marker# BPP01 2025-05-29T15:27:58.670629Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:5:0] Marker# BPP01 2025-05-29T15:27:58.670685Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:6:0] Marker# BPP01 2025-05-29T15:27:58.670692Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-05-29T15:27:58.670696Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 error Marker# BPG50 2025-05-29T15:27:58.670701Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Present Marker# BPG51 2025-05-29T15:27:58.670705Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Present Marker# BPG51 2025-05-29T15:27:58.670709Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Present Marker# BPG51 2025-05-29T15:27:58.670713Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Present Marker# BPG51 2025-05-29T15:27:58.670717Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# 
[72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 error Marker# BPG50 2025-05-29T15:27:58.670722Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670726Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.670731Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670751Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670756Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670760Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.670765Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 5 optimisticState# EBS_DISINTEGRATED Marker# BPG55 2025-05-29T15:27:58.670794Z node 3 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 0 BlobId# [72075186224047637:1:863:1:24576:786:0] Reported ErrorReasons# [ ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUPUUU } { OrderNumber# 3 Situations# UUUPUU } { OrderNumber# 4 Situations# UUUUPU } { OrderNumber# 5 Situations# UUUUUP } { OrderNumber# 6 Situations# EUUUUU } { OrderNumber# 7 Situations# USUUUU } ] " ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-05-29T15:27:58.670804Z node 3 :BS_PROXY_PUT NOTICE: dsproxy_put.cpp:486: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 0 BlobId# [72075186224047637:1:863:1:24576:786:0] Reported ErrorReasons# [ ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUPUUU } { OrderNumber# 3 Situations# UUUPUU } { OrderNumber# 4 Situations# UUUUPU } { OrderNumber# 5 Situations# UUUUUP } { OrderNumber# 6 Situations# EUUUUU } { OrderNumber# 7 Situations# USUUUU } ] " ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:27:58.670879Z node 3 :BS_PROXY_PUT NOTICE: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.381 sample PartId# [72075186224047637:1:863:1:24576:786:6] QueryCount# 1 VDiskId# [0:1:0:5:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.382 sample PartId# [72075186224047637:1:863:1:24576:786:5] QueryCount# 1 
VDiskId# [0:1:0:4:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.382 sample PartId# [72075186224047637:1:863:1:24576:786:4] QueryCount# 1 VDiskId# [0:1:0:3:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.382 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.382 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.382 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.175 VDiskId# [0:1:0:0:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 4.318 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:6:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.382 VDiskId# [0:1:0:1:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 4.476 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:7:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.499 VDiskId# [0:1:0:2:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.511 VDiskId# [0:1:0:3:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.522 VDiskId# [0:1:0:4:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.534 VDiskId# [0:1:0:5:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.592 VDiskId# [0:1:0:6:0] NodeId# 3 Status# ERROR } ] } >> TDSProxyPutTest::TestBlock42MultiPutAllOk [GOOD] >> TDSProxyRequestReportningTest::CheckLeakyBucketBehaviour >> TDSProxyPatchTest::MovedError_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusErrorWith_2_2_0_VdiskErrors >> TDSProxyFaultTolerancePatchTest::block42 >> TKeyValueTest::TestRenameToLongKey [GOOD] >> TDSProxyGetTest::TestBlock42GetIntervalsWipedError >> TDSProxyRequestReportningTest::CheckLeakyBucketBehaviour [GOOD] >> TBlobStorageProxySequenceTest::TestGivenBlock42GetThenVGetResponseParts2523Nodata4ThenGetOk >> TDSProxyPutTest::TestMirror3dcPutStatusErrorWith_2_2_0_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasureNone [GOOD] >> StatisticsSaveLoad::Simple [FAIL] >> TBlobStorageProxySequenceTest::TestGivenBlock42GetThenVGetResponseParts2523Nodata4ThenGetOk [GOOD] >> TDSProxyGetTest::TestBlock42WipedErrorWithTwoBlobs |70.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyRequestReportningTest::CheckLeakyBucketBehaviour [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
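A reader's note on the TEvPutResult errors above (an annotation, not YDB code): each "Situations#" string carries one character per part for a given disk, and the letters line up with the ESituation values printed elsewhere in this log: E for Error, U for Unknown, S for Sent, P for Present. The sketch below reproduces the optimisticReplicas arithmetic under stated assumptions: a 6-parts-over-8-disks layout in which part i prefers disk i, and the two extra disks (OrderNumber 6 and 7) are handoffs that can each host one redirected part. With main disks 0, 1 and 2 failed and only two free handoffs, 5 of 6 parts stay placeable, hence EBS_DISINTEGRATED.

// Minimal sketch, assuming the layout described above; not dsproxy code.
#include <cstdio>
#include <string>
#include <vector>

int main() {
    // Per-disk "Situations#" strings copied from the failing put above.
    std::vector<std::string> disks = {
        "EUUUUU", "UEUUUU", "UUEUUU", "UUUSUU",
        "UUUUSU", "UUUUUS", "SUUUUU", "USUUUU"};

    const int parts = 6;
    int handoffFree = 0;                      // disks 6 and 7 act as handoffs
    for (int d = parts; d < (int)disks.size(); ++d)
        if (disks[d].find('E') == std::string::npos) ++handoffFree;

    int optimisticReplicas = 0;
    for (int p = 0; p < parts; ++p) {
        if (disks[p][p] != 'E') ++optimisticReplicas;   // main disk usable
        else if (handoffFree > 0) { --handoffFree; ++optimisticReplicas; }
    }
    std::printf("optimisticReplicas# %d optimisticState# %s\n",
                optimisticReplicas,
                optimisticReplicas == parts ? "EBS_FULL" : "EBS_DISINTEGRATED");
    return 0;  // prints 5 and EBS_DISINTEGRATED for this log's data
}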
Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:57:2097]) rebooted! !Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed! new actor is[2:82:2111] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
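The blob ids throughout the dsproxy entries, e.g. [72075186224047637:1:863:1:24576:786:2], appear to follow the LogoBlobID layout [TabletId:Generation:Step:Channel:Cookie:BlobSize:PartId]; read that way, part 0 names the whole blob and parts 1..6 the individual erasure parts sent as VPuts, which is consistent with every put/result pair above. A throwaway parser for eyeballing such ids (the field names are an assumption inferred from this log, not a documented format):

#include <cinttypes>
#include <cstdio>

int main() {
    const char* id = "[72075186224047637:1:863:1:24576:786:2]";
    uint64_t tablet, gen, step, channel, cookie, size, part;
    // Field names assume the [TabletId:Gen:Step:Channel:Cookie:Size:PartId]
    // reading described above.
    if (std::sscanf(id, "[%" SCNu64 ":%" SCNu64 ":%" SCNu64 ":%" SCNu64
                        ":%" SCNu64 ":%" SCNu64 ":%" SCNu64 "]",
                    &tablet, &gen, &step, &channel, &cookie, &size, &part) == 7)
        std::printf("tablet=%" PRIu64 " gen=%" PRIu64 " step=%" PRIu64
                    " channel=%" PRIu64 " cookie=%" PRIu64
                    " size=%" PRIu64 " part=%" PRIu64 "\n",
                    tablet, gen, step, channel, cookie, size, part);
    return 0;
}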
Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:83:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:86:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:87:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:88:2116] sender: [7:89:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! new actor is[7:88:2116] Leader for TabletID 72057594037927937 is [7:88:2116] sender: [7:174:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
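TDSProxyRequestReportningTest::CheckLeakyBucketBehaviour, reported [GOOD] a little earlier in this log, exposes only its name here, so the following is a textbook leaky-bucket sketch rather than the dsproxy's actual reporting throttle; the capacity and drain rate are illustrative numbers:

#include <cstdio>

// Classic leaky bucket: events fill the bucket, elapsed time drains it; an
// event is admitted only if it still fits under capacity after draining.
struct LeakyBucket {
    double level = 0;        // current fill
    double capacity;         // burst size
    double leakPerSec;       // drain rate
    double lastTs = 0;

    bool TryPass(double ts) {
        level -= (ts - lastTs) * leakPerSec;
        if (level < 0) level = 0;
        lastTs = ts;
        if (level + 1 > capacity) return false;   // bucket full: throttle
        level += 1;
        return true;
    }
};

int main() {
    LeakyBucket b{0, /*capacity=*/5, /*leakPerSec=*/2, 0};
    int admitted = 0;
    for (int i = 0; i < 20; ++i)                  // 20 events within one second
        admitted += b.TryPass(i * 0.05);
    std::printf("admitted %d of 20\n", admitted); // the burst plus what drained
    return 0;
}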
Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:174:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:85:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:88:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:89:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:90:2118] sender: [9:91:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:57:2097]) rebooted! !Reboot 72057594037927937 (actor [9:57:2097]) tablet resolver refreshed! new actor is[9:90:2118] Leader for TabletID 72057594037927937 is [9:90:2118] sender: [9:176:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:89:2057] recipient: [10:87:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:87:2117] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:176:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
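The TKeyValueTest transcript around this point repeats one pattern per interceptable event: boot the tablet, run the workload until the named event is seen (the "!Reboot ... on event NKikimr::TEvKeyValue::TEvExecuteTransaction !" lines), restart the tablet at exactly that point, wait for the tablet resolver to refresh to the new leader actor, then let the workload finish. A minimal, hypothetical driver with that shape (toy types throughout; the real harness lives in YDB's test library):

#include <cstdio>
#include <functional>
#include <string>
#include <vector>

struct ToyTablet {
    int generation = 0;
    void Restart() { ++generation; }   // stands in for a real tablet reboot
};

// Runs the workload, reporting every event it emits to the interceptor.
void RunWorkload(ToyTablet& t,
                 const std::function<void(const std::string&)>& onEvent) {
    (void)t;
    for (const char* ev : {"TEvWrite", "TEvRead", "TEvRename", "TEvCommit"})
        onEvent(ev);
}

int main() {
    const std::vector<std::string> rebootOn = {"TEvWrite", "TEvRead", "TEvRename"};
    for (const auto& target : rebootOn) {     // one full run per event type
        ToyTablet tablet;
        bool rebooted = false;
        RunWorkload(tablet, [&](const std::string& ev) {
            if (!rebooted && ev == target) {  // first hit of the target event
                tablet.Restart();             // "!Reboot ... rebooted!"
                rebooted = true;              // resolver refresh happens here
            }
        });
        std::printf("reboot on %s -> generation %d, workload finished\n",
                    target.c_str(), tablet.generation);
    }
    return 0;
}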
Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:91:2057] recipient: [11:89:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:89:2119] !Reboot 72057594037927937 (actor [11:57:2097]) rebooted! !Reboot 72057594037927937 (actor [11:57:2097]) tablet resolver refreshed! new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:178:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] !Re ... is [18:57:2097] sender: [18:96:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:97:2057] recipient: [18:95:2123] Leader for TabletID 72057594037927937 is [18:98:2124] sender: [18:99:2057] recipient: [18:95:2123] !Reboot 72057594037927937 (actor [18:57:2097]) rebooted! !Reboot 72057594037927937 (actor [18:57:2097]) tablet resolver refreshed! new actor is[18:98:2124] Leader for TabletID 72057594037927937 is [18:98:2124] sender: [18:184:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:55:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:55:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:58:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:75:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:55:2057] recipient: [20:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:55:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:58:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:75:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:55:2057] recipient: [21:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:55:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:58:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:75:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:77:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:80:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:81:2057] recipient: [21:79:2110] Leader for TabletID 72057594037927937 is [21:82:2111] sender: [21:83:2057] recipient: [21:79:2110] !Reboot 72057594037927937 (actor [21:57:2097]) rebooted! !Reboot 72057594037927937 (actor [21:57:2097]) tablet resolver refreshed! 
new actor is[21:82:2111] Leader for TabletID 72057594037927937 is [21:82:2111] sender: [21:168:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:55:2057] recipient: [22:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:55:2057] recipient: [22:52:2095] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:58:2057] recipient: [22:52:2095] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:75:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:57:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:77:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:80:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:81:2057] recipient: [22:79:2110] Leader for TabletID 72057594037927937 is [22:82:2111] sender: [22:83:2057] recipient: [22:79:2110] !Reboot 72057594037927937 (actor [22:57:2097]) rebooted! !Reboot 72057594037927937 (actor [22:57:2097]) tablet resolver refreshed! new actor is[22:82:2111] Leader for TabletID 72057594037927937 is [22:82:2111] sender: [22:168:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:55:2057] recipient: [23:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:55:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:58:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:75:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:78:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:80:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:82:2057] recipient: [23:81:2110] Leader for TabletID 72057594037927937 is [23:83:2111] sender: [23:84:2057] recipient: [23:81:2110] !Reboot 72057594037927937 (actor [23:57:2097]) rebooted! !Reboot 72057594037927937 (actor [23:57:2097]) tablet resolver refreshed! new actor is[23:83:2111] Leader for TabletID 72057594037927937 is [23:83:2111] sender: [23:169:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:55:2057] recipient: [24:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:55:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:58:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:75:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:81:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:84:2057] recipient: [24:83:2113] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:85:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:87:2057] recipient: [24:83:2113] !Reboot 72057594037927937 (actor [24:57:2097]) rebooted! !Reboot 72057594037927937 (actor [24:57:2097]) tablet resolver refreshed! 
new actor is[24:86:2114] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:172:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:55:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:55:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:58:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:75:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:81:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:84:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:85:2057] recipient: [25:83:2113] Leader for TabletID 72057594037927937 is [25:86:2114] sender: [25:87:2057] recipient: [25:83:2113] !Reboot 72057594037927937 (actor [25:57:2097]) rebooted! !Reboot 72057594037927937 (actor [25:57:2097]) tablet resolver refreshed! new actor is[25:86:2114] Leader for TabletID 72057594037927937 is [25:86:2114] sender: [25:172:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:55:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:55:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:58:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:75:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:82:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:85:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:86:2057] recipient: [26:84:2113] Leader for TabletID 72057594037927937 is [26:87:2114] sender: [26:88:2057] recipient: [26:84:2113] !Reboot 72057594037927937 (actor [26:57:2097]) rebooted! !Reboot 72057594037927937 (actor [26:57:2097]) tablet resolver refreshed! new actor is[26:87:2114] Leader for TabletID 72057594037927937 is [26:87:2114] sender: [26:173:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:55:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:55:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:58:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:75:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:85:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:88:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:89:2057] recipient: [27:87:2116] Leader for TabletID 72057594037927937 is [27:90:2117] sender: [27:91:2057] recipient: [27:87:2116] !Reboot 72057594037927937 (actor [27:57:2097]) rebooted! !Reboot 72057594037927937 (actor [27:57:2097]) tablet resolver refreshed! 
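When triaging these runs it helps to pull the leader actor id out of the "Leader for TabletID ... is [x:y:z]" lines and assert that it changes across each "!Reboot". A hypothetical helper for that, not part of YDB, matching the line shape shown in this log:

```cpp
// Triage helper (hypothetical): extracts the leader actor id from lines
// shaped like "Leader for TabletID 72057594037927937 is [25:86:2114] ..."
// so a script can assert the leader changed after a reboot.
#include <iostream>
#include <optional>
#include <regex>
#include <string>

std::optional<std::string> ExtractLeader(const std::string& line) {
    static const std::regex re(
        R"(Leader for TabletID (\d+) is \[([0-9]+:[0-9]+:[0-9]+)\])");
    std::smatch m;
    if (std::regex_search(line, m, re)) {
        return m[2].str();  // e.g. "25:86:2114"
    }
    return std::nullopt;
}

int main() {
    // Example lines modeled on the trace above.
    std::string before =
        "Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:58:2057]";
    std::string after =
        "Leader for TabletID 72057594037927937 is [25:86:2114] sender: [25:87:2057]";
    auto a = ExtractLeader(before), b = ExtractLeader(after);
    std::cout << (a && b && *a != *b ? "leader changed after reboot"
                                     : "no change")
              << "\n";
}
```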
new actor is[27:90:2117] Leader for TabletID 72057594037927937 is [27:90:2117] sender: [27:176:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:55:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:55:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:58:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:75:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:85:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:87:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:89:2057] recipient: [28:88:2116] Leader for TabletID 72057594037927937 is [28:90:2117] sender: [28:91:2057] recipient: [28:88:2116] !Reboot 72057594037927937 (actor [28:57:2097]) rebooted! !Reboot 72057594037927937 (actor [28:57:2097]) tablet resolver refreshed! new actor is[28:90:2117] Leader for TabletID 72057594037927937 is [28:90:2117] sender: [28:176:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:55:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:55:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:58:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:75:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:86:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:89:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:90:2057] recipient: [29:88:2116] Leader for TabletID 72057594037927937 is [29:91:2117] sender: [29:92:2057] recipient: [29:88:2116] !Reboot 72057594037927937 (actor [29:57:2097]) rebooted! !Reboot 72057594037927937 (actor [29:57:2097]) tablet resolver refreshed! 
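The dsproxy unittest traces that follow show the mirror-3dc put strategy at work: one VPut per part goes to a disk in each failure domain, every EvVPutResult with Status# ERROR triggers a resend of that part to the next disk in the same domain, and the put fails once a domain runs out of disks under the fail model (the CheckFailModelErasureNone case ends in "TPut3dcStrategy failed the Fail Model check", while the 1_1_0_VdiskErrors case recovers to Status# OK). A simplified model of that retry bookkeeping, with invented types (the real logic lives in dsproxy_strategy_base.cpp):

```cpp
// Simplified model of the per-part retry seen in the BS_PROXY_PUT traces:
// a part that gets ERROR from disk k in its failure domain is re-sent to
// disk k+1 in the same domain; the put succeeds only if every part lands
// somewhere, and fails when any domain exhausts its disks. Invented types.
#include <array>
#include <iostream>
#include <vector>

constexpr int DisksPerDomain = 3;

struct TPartState {
    int Domain;        // which failure domain (data center) owns this part
    int NextDisk = 0;  // next disk to try within that domain
    bool Ok = false;
};

// vdiskFails[domain][disk] == true means that disk answers ERROR.
bool PutBlob(std::vector<TPartState>& parts,
             const std::vector<std::array<bool, DisksPerDomain>>& vdiskFails) {
    for (auto& p : parts) {
        while (p.NextDisk < DisksPerDomain) {
            int disk = p.NextDisk++;
            if (!vdiskFails[p.Domain][disk]) { p.Ok = true; break; }
            std::cout << "part in domain " << p.Domain << ": ERROR from disk "
                      << disk << ", retrying on next disk\n";
        }
        if (!p.Ok) return false;  // fail model exhausted for this domain
    }
    return true;
}

int main() {
    std::vector<TPartState> parts = {{0}, {1}, {2}};
    // Mimics the OK trace: the first disk of domains 0 and 1 fails,
    // the retries succeed, domain 2 succeeds on the first try.
    std::vector<std::array<bool, DisksPerDomain>> fails = {
        {true, false, false}, {true, false, false}, {false, false, false}};
    std::cout << (PutBlob(parts, fails) ? "TEvPutResult Status# OK"
                                        : "TEvPutResult Status# ERROR")
              << "\n";
}
```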
new actor is[29:91:2117] Leader for TabletID 72057594037927937 is [29:91:2117] sender: [29:177:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:55:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:55:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:58:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:75:2057] recipient: [30:14:2061] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasureNone [GOOD] Test command err: 2025-05-29T15:28:00.789307Z node 7 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [7e4afa7ea38a37be] bootstrap ActorId# [7:82:2128] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-05-29T15:28:00.789389Z node 7 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:28:00.789397Z node 7 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:28:00.789402Z node 7 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:28:00.789406Z node 7 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:28:00.789411Z node 7 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:28:00.789415Z node 7 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:28:00.793812Z node 7 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2025-05-29T15:28:00.793875Z node 7 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:28:00.793883Z node 7 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:28:00.793975Z node 7 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2025-05-29T15:28:00.793985Z node 7 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:28:00.793989Z node 7 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing 
VPut part# 2 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:28:00.794011Z node 7 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2025-05-29T15:28:00.794072Z node 7 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2025-05-29T15:28:00.794079Z node 7 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:28:00.794083Z node 7 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:28:00.794111Z node 7 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:2:0] Marker# BPP01 2025-05-29T15:28:00.794128Z node 7 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TPut3dcStrategy failed the Fail Model check" ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-05-29T15:28:00.794138Z node 7 :BS_PROXY_PUT NOTICE: dsproxy_put.cpp:486: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TPut3dcStrategy failed the Fail Model check" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:28:00.794186Z node 7 :BS_PROXY_PUT NOTICE: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.364 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:1:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.364 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:2:1:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.364 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 7 } TEvVPutResult{ TimestampMs# 4.772 VDiskId# [0:1:0:1:0] NodeId# 7 Status# ERROR } TEvVPut{ TimestampMs# 4.823 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 7 } TEvVPutResult{ TimestampMs# 4.897 VDiskId# [0:1:1:1:0] NodeId# 7 Status# ERROR } TEvVPut{ TimestampMs# 4.911 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:2:0] NodeId# 7 } TEvVPutResult{ TimestampMs# 4.931 VDiskId# [0:1:2:1:0] NodeId# 7 Status# OK } TEvVPutResult{ TimestampMs# 4.992 VDiskId# [0:1:0:2:0] NodeId# 7 Status# ERROR } TEvVPut{ TimestampMs# 5.005 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 7 } TEvVPutResult{ TimestampMs# 5.031 VDiskId# [0:1:1:2:0] NodeId# 7 Status# ERROR } ] } >> TDSProxyGetTest::TestBlock42GetBlobCrcCheck >> THiveImplTest::BalancerSpeedAndDistribution [GOOD] >> THiveImplTest::TestShortTabletTypes [GOOD] >> THiveImplTest::TestStDev [GOOD] >> THiveTest::TestBlockCreateTablet >> 
TKeyValueTest::TestCopyRangeWorks [GOOD] >> TKeyValueTest::TestCopyRangeWorksNewApi >> TDSProxyGetTest::TestBlock42GetBlobCrcCheck [GOOD] >> TDSProxyPatchTest::SecuredErrorOnPut_Erasure4Plus2Block >> TDSProxyPatchTest::SecuredErrorOnPut_Erasure4Plus2Block [GOOD] >> TDSProxyPatchTest::MovedOk_ErasureMirror3dc >> DSProxyCounters::PutGeneratedSubrequestBytes >> THiveTest::TestBlockCreateTablet [GOOD] >> THiveTest::DrainWithHiveRestart >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop >> TDSProxyPatchTest::MovedOk_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_1_1_0_VdiskErrors >> DSProxyCounters::PutGeneratedSubrequestBytes [GOOD] >> TDSProxyGetTest::TestBlock42GetSpecific2 [GOOD] >> TDSProxyPatchTest::SecuredOk_ErasureNone >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted-prefixed-false >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_1_1_0_VdiskErrors [GOOD] >> TDSProxyPatchTest::SecuredOk_ErasureNone [GOOD] >> TDSProxyPatchTest::NaiveErrorOnGet_Erasure4Plus2Block ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_1_1_0_VdiskErrors [GOOD] Test command err: 2025-05-29T15:28:02.242783Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [7e4afa7ea38a37be] bootstrap ActorId# [3:82:2128] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-05-29T15:28:02.242866Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:28:02.242875Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:28:02.242882Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:28:02.242887Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:28:02.242893Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:28:02.242898Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:28:02.246042Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2025-05-29T15:28:02.246083Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:28:02.246089Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:28:02.246142Z node 3 :BS_PROXY_PUT INFO: 
dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2025-05-29T15:28:02.246147Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:28:02.246150Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:28:02.246164Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2025-05-29T15:28:02.246199Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2025-05-29T15:28:02.246207Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:2:0] Marker# BPP01 2025-05-29T15:28:02.246219Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-05-29T15:28:02.246227Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:28:02.246257Z node 3 :BS_PROXY_PUT NOTICE: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.314 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.315 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:2:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.315 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 3.458 VDiskId# [0:1:0:1:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 3.493 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 3.533 VDiskId# [0:1:1:1:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 3.542 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 3.555 VDiskId# [0:1:2:1:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 3.59 VDiskId# [0:1:0:2:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 3.599 VDiskId# [0:1:1:2:0] NodeId# 3 Status# OK } ] } >> TDSProxyPatchTest::NaiveErrorOnGet_Erasure4Plus2Block [GOOD] >> TDSProxyPutTest::TestMirror3dcWith3x3MinLatencyMod >> TDSProxyPutTest::TestMirror3dcWith3x3MinLatencyMod [GOOD] >> TTablesWithReboots::CopyTableAndDropWithReboots2 [GOOD] |70.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |70.3%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |70.3%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |70.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcWith3x3MinLatencyMod [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted-prefixed-true ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete [FAIL] Test command err: 2025-05-29T15:27:55.636701Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:27:55.636758Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:27:55.636777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0021fc/r3tmp/tmp7eN4Rv/pdisk_1.dat 2025-05-29T15:27:55.761281Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20752, node 1 2025-05-29T15:27:55.862729Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:27:55.862766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:27:55.862771Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:27:55.862819Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:27:55.863299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:27:55.938995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:55.939029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:55.950425Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8631 2025-05-29T15:27:56.293980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:27:57.076835Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:27:57.085345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:57.085377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:57.144280Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:27:57.145038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:57.289113Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:57.289304Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:57.289483Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-05-29T15:27:57.289520Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:57.289538Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:57.289588Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:57.289608Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:57.289625Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:57.289654Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:57.446808Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:57.446874Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:57.459608Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:57.528591Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:57.537751Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:27:57.537788Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:27:57.549131Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:27:57.549427Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:27:57.549456Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:27:57.549462Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:27:57.549469Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:27:57.549476Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:27:57.549482Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:27:57.549490Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:27:57.549821Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:27:57.567027Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:27:57.567054Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:1863:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:27:57.568164Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1874:2605] 2025-05-29T15:27:57.569542Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2622] 
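The StatisticsSaveLoad::Delete trace below repeatedly runs one parameterized YQL statement through TQueryBase, and every attempt dies at compilation with "yql_expr.h:1874: index out of range" before the UPSERT executes. For readability, here is that statement unwrapped from the log into a raw-string constant; the query text is verbatim from the trace, except that the List element types appear to have been stripped during log capture, and the constant name and main() are invented:

```cpp
// The YQL that TQueryBase runs in the trace, held as a raw-string constant.
#include <cstdio>

static const char* const kUpsertStatistics = R"(
DECLARE $owner_id AS Uint64;
DECLARE $local_path_id AS Uint64;
DECLARE $stat_type AS Uint32;
DECLARE $column_tags AS List;  -- element type missing in the captured log
DECLARE $data AS List;         -- element type missing in the captured log
UPSERT INTO `.metadata/_statistics`
    (owner_id, local_path_id, stat_type, column_tag, data)
VALUES
    ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
    ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);
)";

int main() { std::puts(kUpsertStatistics); }
```

The `$column_tags[0]` / `$data[0]` subscripts in the VALUES clause line up with the "index out of range" failures reported by the compiler in the trace, which is what the test's final assertion (`saveResponse->Get()->Success`) trips over.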
2025-05-29T15:27:57.569794Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1905:2622], schemeshard id = 72075186224037897 2025-05-29T15:27:57.570543Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-05-29T15:27:57.574627Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:27:57.574642Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:27:57.574654Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-05-29T15:27:57.577240Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:27:57.579124Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:27:57.579168Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:27:57.675793Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:27:57.756510Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:27:57.810142Z node 2 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:27:58.278487Z node 1 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:27:58.278572Z node 1 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Database 2025-05-29T15:27:58.280480Z node 1 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:27:58.281146Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2231:3070], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:58.281161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2248:3076], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:58.281169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:58.282394Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037897 2025-05-29T15:27:58.290602Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2252:3079], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:27:58.496570Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:2343:3109] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:58.536799Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService ... tx with tx_id: 2025-05-29T15:27:58.780342Z node 1 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=N2FkNWEzYjUtNDExYmQyMGEtNjVmNTkzYjktYTg0MjIyMGY=, TxId: 2025-05-29T15:27:58.780352Z node 1 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=N2FkNWEzYjUtNDExYmQyMGEtNjVmNTkzYjktYTg0MjIyMGY=, TxId: 2025-05-29T15:27:58.842448Z node 1 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Database 2025-05-29T15:27:58.843247Z node 1 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:27:58.847727Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 6 ], ReplyToActorId[ [1:2443:3190]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:27:58.847786Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2025-05-29T15:27:58.847796Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 6, ReplyToActorId = [1:2443:3190], StatRequests.size() = 1 2025-05-29T15:27:58.857854Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2439:3186], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:58.858826Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTM5NzQ3OTEtMzZiOTgzNjctYzkzZmIwOGEtYmY1ZTUzZmI=, ActorId: [1:2436:3183], ActorState: ExecuteState, TraceId: 01jweaggvv52gkmhscr4jhbdtt, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:27:58.859030Z node 1 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=MTM5NzQ3OTEtMzZiOTgzNjctYzkzZmIwOGEtYmY1ZTUzZmI=, TxId: 2025-05-29T15:27:58.859045Z node 1 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=MTM5NzQ3OTEtMzZiOTgzNjctYzkzZmIwOGEtYmY1ZTUzZmI=, TxId: 2025-05-29T15:27:58.985866Z node 1 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Database 2025-05-29T15:27:58.986616Z node 1 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:27:58.990858Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 7 ], ReplyToActorId[ [1:2461:3205]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:27:58.990921Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] 2025-05-29T15:27:58.990929Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 7, ReplyToActorId = [1:2461:3205], StatRequests.size() = 1 2025-05-29T15:27:58.998678Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2457:3201], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:58.999414Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzU3YWE5ODUtZjc1ZjAyZjktZTg0OThlYWYtMzhlNzAyYzc=, ActorId: [1:2454:3198], ActorState: ExecuteState, TraceId: 01jweagh0a4qehvjr9aher0exr, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:27:58.999651Z node 1 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=YzU3YWE5ODUtZjc1ZjAyZjktZTg0OThlYWYtMzhlNzAyYzc=, TxId: 2025-05-29T15:27:58.999660Z node 1 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=YzU3YWE5ODUtZjc1ZjAyZjktZTg0OThlYWYtMzhlNzAyYzc=, TxId: 2025-05-29T15:27:59.092972Z node 1 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Database 2025-05-29T15:27:59.093663Z node 1 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:27:59.097021Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 8 ], ReplyToActorId[ [1:2475:3217]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:27:59.097067Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 8 ] 2025-05-29T15:27:59.097075Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 8, ReplyToActorId = [1:2475:3217], StatRequests.size() = 1 2025-05-29T15:27:59.104971Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2471:3213], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:59.105784Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjZmNWZjYzUtMjY0OGMwNWItYjY0MWM3MmYtYzhmMWY0NGE=, ActorId: [1:2468:3210], ActorState: ExecuteState, TraceId: 01jweagh3n2eh99ds37g2h65ns, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:27:59.105968Z node 1 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=YjZmNWZjYzUtMjY0OGMwNWItYjY0MWM3MmYtYzhmMWY0NGE=, TxId: 2025-05-29T15:27:59.105982Z node 1 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=YjZmNWZjYzUtMjY0OGMwNWItYjY0MWM3MmYtYzhmMWY0NGE=, TxId: 2025-05-29T15:27:59.542030Z node 1 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Database 2025-05-29T15:27:59.542759Z node 1 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:27:59.546321Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 9 ], ReplyToActorId[ [1:2517:3246]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:27:59.546366Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 9 ] 2025-05-29T15:27:59.546375Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 9, ReplyToActorId = [1:2517:3246], StatRequests.size() = 1 2025-05-29T15:27:59.554998Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2513:3242], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:59.555727Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTQ1YmIyOWQtYTcyOTYzYjctNWIzMGU3YWEtZjEyNTcxODk=, ActorId: [1:2510:3239], ActorState: ExecuteState, TraceId: 01jweaghhp9wtjd71wqrjwxqrs, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:27:59.555903Z node 1 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=OTQ1YmIyOWQtYTcyOTYzYjctNWIzMGU3YWEtZjEyNTcxODk=, TxId: 2025-05-29T15:27:59.555915Z node 1 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=OTQ1YmIyOWQtYTcyOTYzYjctNWIzMGU3YWEtZjEyNTcxODk=, TxId: assertion failed at ydb/core/statistics/database/ut/ut_database.cpp:72, virtual void NKikimr::NStat::NTestSuiteStatisticsSaveLoad::TTestCaseDelete::Execute_(NUnitTest::TTestContext &): (saveResponse->Get()->Success) TBackTrace::Capture()+28 (0x137DB91C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x1398F6A9) NKikimr::NStat::NTestSuiteStatisticsSaveLoad::TTestCaseDelete::Execute_(NUnitTest::TTestContext&)+3417 (0x136D6329) NKikimr::NStat::NTestSuiteStatisticsSaveLoad::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136DA847) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1399155E) NKikimr::NStat::NTestSuiteStatisticsSaveLoad::TCurrentTest::Execute()+425 (0x136DA0A9) NUnitTest::TTestFactory::Execute()+803 (0x13991CD3) NUnitTest::RunMain(int, char**)+3021 (0x1399FFED) ??+0 (0x7F12F030BD90) __libc_start_main+128 (0x7F12F030BE40) _start+41 (0x12830029) >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted-prefixed-false [GOOD] >> IndexBuildTest::BaseCase >> TDsProxyQuorumTracker::CheckFailModelErasure4Plus2Stripe [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted-prefixed-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:02.454110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:02.454140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:02.454146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:02.454152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:02.454168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:02.454172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:02.454182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:02.454198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:02.454309Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:02.454394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:02.466010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:02.466055Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:02.468771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:02.468901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:02.468950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:02.470773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:02.471032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:02.471159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:02.471215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:02.471733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:02.471790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:02.472118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:02.472129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:02.472153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:02.472162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:02.472169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:02.472207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:02.473663Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:02.495517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2025-05-29T15:28:02.495623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:02.495706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:02.495769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:02.495783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:02.496814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:02.496850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:02.496934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:02.496947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:02.496954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:02.496960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:02.497476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:02.497489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:02.497495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:02.497880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:02.497895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:02.497902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:02.497910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:02.498664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:02.499264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:02.499320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:02.499541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:02.499572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:02.499581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:02.499651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:02.499659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:02.499699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:02.499712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:02.500273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:02.500284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:02.500341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
shard: 72057594046678944 2025-05-29T15:28:03.896398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3997: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:28:03.896442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4282: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:28:03.896507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4587: IndexBuild , records: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:03.896519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4645: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-05-29T15:28:03.896546Z node 1 :BUILD_INDEX DEBUG: schemeshard_info_types.h:3706: AddShardStatus id# 102 shard 72057594046678944:11 2025-05-29T15:28:03.896561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4740: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-05-29T15:28:03.896568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4767: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-05-29T15:28:03.896579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4794: LongLocks: records: 4, at schemeshard: 72057594046678944 2025-05-29T15:28:03.898493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:03.898569Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2025-05-29T15:28:03.898587Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: by_embedding, IndexColumn: embedding, DataColumns: covered, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [0:0:0], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976720769, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976720770, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:28:03.898593Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 0 2025-05-29T15:28:03.898982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:03.898990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:03.898999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:03.899004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:03.899008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:03.899022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:3212:5006] sender: [1:3269:2058] recipient: [1:15:2062] 2025-05-29T15:28:03.950184Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/by_embedding" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:28:03.950272Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/vectors/by_embedding" took 113us result status StatusSuccess 2025-05-29T15:28:03.950463Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/by_embedding" PathDescription { Self { Name: "by_embedding" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "by_embedding" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" 
SchemaVersion: 2 PathOwnerId: 72057594046678944 DataColumnNames: "covered" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple [FAIL] Test command err: 2025-05-29T15:27:56.778772Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:27:56.778820Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:27:56.778843Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00217c/r3tmp/tmpAMeAEf/pdisk_1.dat 2025-05-29T15:27:56.891329Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4030, node 1 2025-05-29T15:27:57.000247Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:27:57.000271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:27:57.000276Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:27:57.000351Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:27:57.000979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:27:57.077501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:57.077546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:57.089169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28558 2025-05-29T15:27:57.426568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:27:58.220394Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:27:58.227869Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:58.227907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:58.281481Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:27:58.282066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:58.427290Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:58.427461Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:58.427634Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-05-29T15:27:58.427676Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:58.427693Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:58.427748Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:58.427766Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:58.427782Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:58.427799Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:58.581459Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:58.581504Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:58.592848Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:58.625289Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:58.632301Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:27:58.632323Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:27:58.638354Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:27:58.638548Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:27:58.638570Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:27:58.638576Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:27:58.638582Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:27:58.638589Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:27:58.638594Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:27:58.638601Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:27:58.638776Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:27:58.654593Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:27:58.654619Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:1863:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:27:58.655644Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1874:2605] 2025-05-29T15:27:58.656871Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2622] 
2025-05-29T15:27:58.657154Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1905:2622], schemeshard id = 72075186224037897 2025-05-29T15:27:58.657729Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-05-29T15:27:58.662044Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:27:58.662064Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:27:58.662075Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-05-29T15:27:58.665416Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:27:58.667441Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:27:58.667493Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:27:58.766664Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:27:58.846816Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:27:58.900461Z node 2 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:27:59.367056Z node 1 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:27:59.367156Z node 1 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Database 2025-05-29T15:27:59.369049Z node 1 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:27:59.369661Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2231:3070], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:59.369674Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2248:3076], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:59.369681Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:59.370899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037897 2025-05-29T15:27:59.379218Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2252:3079], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:27:59.614861Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:2346:3112] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:27:59.661145Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService ... tx with tx_id: 2025-05-29T15:27:59.884631Z node 1 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=MTg3NDM5NTEtNzI0MzAwMjYtNWE1NGM0MGQtZGQ0YjRhYzE=, TxId: 2025-05-29T15:27:59.884645Z node 1 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=MTg3NDM5NTEtNzI0MzAwMjYtNWE1NGM0MGQtZGQ0YjRhYzE=, TxId: 2025-05-29T15:27:59.905520Z node 1 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Database 2025-05-29T15:27:59.906215Z node 1 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:27:59.910167Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 6 ], ReplyToActorId[ [1:2443:3190]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:27:59.910219Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2025-05-29T15:27:59.910226Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 6, ReplyToActorId = [1:2443:3190], StatRequests.size() = 1 2025-05-29T15:27:59.919476Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2439:3186], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:27:59.920351Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTk5MDA2MTctMzEzYjJhMmItNjRjYzMwZWMtZTVhNWVjZmU=, ActorId: [1:2436:3183], ActorState: ExecuteState, TraceId: 01jweaghx21z2gdzkpb82f6byg, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:27:59.920513Z node 1 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=YTk5MDA2MTctMzEzYjJhMmItNjRjYzMwZWMtZTVhNWVjZmU=, TxId: 2025-05-29T15:27:59.920523Z node 1 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=YTk5MDA2MTctMzEzYjJhMmItNjRjYzMwZWMtZTVhNWVjZmU=, TxId: 2025-05-29T15:28:00.065850Z node 1 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Database 2025-05-29T15:28:00.066403Z node 1 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:28:00.069411Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 7 ], ReplyToActorId[ [1:2461:3205]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:28:00.069444Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] 2025-05-29T15:28:00.069450Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 7, ReplyToActorId = [1:2461:3205], StatRequests.size() = 1 2025-05-29T15:28:00.076014Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2457:3201], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:00.076626Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODViNWYzNzUtYjE1Zjg4ODItOGRhYjI3MTctOWZlMTcwNDE=, ActorId: [1:2454:3198], ActorState: ExecuteState, TraceId: 01jweagj22410n9xp9z1a49v82, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:28:00.076759Z node 1 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=ODViNWYzNzUtYjE1Zjg4ODItOGRhYjI3MTctOWZlMTcwNDE=, TxId: 2025-05-29T15:28:00.076769Z node 1 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=ODViNWYzNzUtYjE1Zjg4ODItOGRhYjI3MTctOWZlMTcwNDE=, TxId: 2025-05-29T15:28:00.349819Z node 1 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Database 2025-05-29T15:28:00.350453Z node 1 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:28:00.353549Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 8 ], ReplyToActorId[ [1:2499:3231]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:28:00.353585Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 8 ] 2025-05-29T15:28:00.353590Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 8, ReplyToActorId = [1:2499:3231], StatRequests.size() = 1 2025-05-29T15:28:00.360091Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2495:3227], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:00.360684Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YmI4MWU1ODItMjU0Y2JjZjMtYWQzOWNjMDYtMmNmMTQwOTE=, ActorId: [1:2492:3224], ActorState: ExecuteState, TraceId: 01jweagjayet0w4zmqwcnk4d5p, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:28:00.360816Z node 1 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=YmI4MWU1ODItMjU0Y2JjZjMtYWQzOWNjMDYtMmNmMTQwOTE=, TxId: 2025-05-29T15:28:00.360823Z node 1 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=YmI4MWU1ODItMjU0Y2JjZjMtYWQzOWNjMDYtMmNmMTQwOTE=, TxId: 2025-05-29T15:28:00.589205Z node 1 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Database 2025-05-29T15:28:00.590002Z node 1 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:28:00.594097Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 9 ], ReplyToActorId[ [1:2517:3246]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:28:00.594158Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 9 ] 2025-05-29T15:28:00.594166Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 9, ReplyToActorId = [1:2517:3246], StatRequests.size() = 1 2025-05-29T15:28:00.604959Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2513:3242], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:00.606004Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDkyNGUyMjQtNjhlMzQ1ZTQtZGFmZDMwZmQtM2ZmNWMwMjY=, ActorId: [1:2510:3239], ActorState: ExecuteState, TraceId: 01jweagjje10yfb96fqt1p2537, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:28:00.606193Z node 1 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=ZDkyNGUyMjQtNjhlMzQ1ZTQtZGFmZDMwZmQtM2ZmNWMwMjY=, TxId: 2025-05-29T15:28:00.606205Z node 1 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=1&id=ZDkyNGUyMjQtNjhlMzQ1ZTQtZGFmZDMwZmQtM2ZmNWMwMjY=, TxId: assertion failed at ydb/core/statistics/database/ut/ut_database.cpp:34, virtual void NKikimr::NStat::NTestSuiteStatisticsSaveLoad::TTestCaseSimple::Execute_(NUnitTest::TTestContext &): (saveResponse->Get()->Success) TBackTrace::Capture()+28 (0x137DB91C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x1398F6A9) NKikimr::NStat::NTestSuiteStatisticsSaveLoad::TTestCaseSimple::Execute_(NUnitTest::TTestContext&)+5686 (0x136D3D66) NKikimr::NStat::NTestSuiteStatisticsSaveLoad::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136DA847) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1399155E) NKikimr::NStat::NTestSuiteStatisticsSaveLoad::TCurrentTest::Execute()+425 (0x136DA0A9) NUnitTest::TTestFactory::Execute()+803 (0x13991CD3) NUnitTest::RunMain(int, char**)+3021 (0x1399FFED) ??+0 (0x7F5F98825D90) __libc_start_main+128 (0x7F5F98825E40) _start+41 (0x12830029) >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] >> IndexBuildTest::WithFollowers ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasure4Plus2Stripe [GOOD] Test command err: 2025-05-29T15:27:58.375844Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [7e4afa7ea38a37be] bootstrap ActorId# [3:74:2120] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-05-29T15:27:58.375900Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375906Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375909Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375911Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375914Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375917Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375919Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375922Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375924Z node 3 :BS_PROXY_PUT DEBUG: 
dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375927Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375929Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375932Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375934Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375937Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375939Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375942Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375944Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375948Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.375953Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:27:58.375963Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:27:58.375968Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:27:58.375972Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:27:58.375975Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:27:58.375978Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:27:58.375981Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: 
[7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:27:58.375984Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2025-05-29T15:27:58.375987Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2025-05-29T15:27:58.375990Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2025-05-29T15:27:58.375993Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2025-05-29T15:27:58.375997Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2025-05-29T15:27:58.376000Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2025-05-29T15:27:58.378968Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2025-05-29T15:27:58.379004Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-05-29T15:27:58.379010Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.379013Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.379018Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.379020Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.379023Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:27:58.379026Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.379029Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.379031Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 
part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.379034Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.379036Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.379039Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.379042Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.379044Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.379047Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.379049Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.379052Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.379054Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:58.379059Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:27:58.379071Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:27:58.379075Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:27:58.379123Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2025-05-29T15:27:58.379134Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2025-05-29T15:27:58.379141Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:3:0] Marker# BPP01 2025-05-29T15:27:58.379148Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult 
Status# OK ID# [72075186224047637:1:863:1:24576:786:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:4:0] Marker# BPP01 2025-05-29T15:27:58.379159Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:5:0] Marker# BPP01 2025-05-29T15:27:58.379186Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:6:0] Marker# BPP01 2025-05-29T15:27:58.379198Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-05-29T15:27:58.379204Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:27:58.379248Z node 3 :BS_PROXY_PUT NOTICE: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.346 sample PartId# [72075186224047637:1:863:1:24576:786:6] QueryCount# 1 VDiskId# [0:1:0:5:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.347 sample PartId# [72075186224047637:1:863:1:24576:786:5] QueryCount# 1 VDiskId# [0:1:0:4:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.347 sample PartId# [72075186224047637:1:863:1:24576:786:4] QueryCount# 1 VDiskId# [0:1:0:3:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.347 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.347 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.347 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 3.32 VDiskId# [0:1:0:0:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 3.415 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:6:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 3.45 VDiskId# [0:1:0:1:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 3.46 VDiskId# [0:1:0:2:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 3.468 VDiskId# [0:1:0:3:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 3.475 VDiskId# [0:1:0:4:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 3.485 VDiskId# [0:1:0:5:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 3.513 VDiskId# [0:1:0:6:0] NodeId# 3 Status# OK } ] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for 
TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:57:2097]) rebooted! !Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed! new actor is[2:82:2111] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! 
new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:86:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:88:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! 
new actor is[7:87:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:77:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:80:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:81:2057] recipient: [10:79:2110] Leader for TabletID 72057594037927937 is [10:82:2111] sender: [10:83:2057] recipient: [10:79:2110] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! new actor is[10:82:2111] Leader for TabletID 72057594037927937 is [10:82:2111] sender: [10:168:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:57:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:77:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:80:2057] recipient: [11:79:2110] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:81:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:82:2111] sender: [11:83:2057] recipient: [11:79:2110] !Reboot 72057594037927937 (actor [11:57:2097]) rebooted! !Reboot 72057594037927937 (actor [11:57:2097]) tablet resolver refreshed! 
new actor is[11:82:2111] Leader for TabletID 72057594037927937 is [11:82:2111] sender: [11:168:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:78:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:81:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:82:2057] recipient: [12:80:2110] Leader for TabletID 72057594037927937 is [12:83:2111] sender: [12:84:2057] recipient: [12:80:2110] !Reboot 72057594037927937 (actor [12:57:2097]) rebooted! !Reboot 72057594037927937 (actor [12:57:2097]) tablet resolver refreshed! new actor is[12:83:2111] Leader for TabletID 72057594037927937 is [12:83:2111] sender: [12:169:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:55:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:55:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:58:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:75:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:81:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:84:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:85:2057] recipient: [13:83:2113] Leader for TabletID 72057594037927937 is [13:86:2114] sender: [13:87:2057] recipient: [13:83:2113] !Reboot 72057594037927937 (actor [13:57:2097]) rebooted! !Reboot 72057594037927937 (actor [13:57:2097]) tablet resolver refreshed! new actor is[13:86:2114] Leader for TabletID 72057594037927937 is [13:86:2114] sender: [13:172:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:55:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:55:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:58:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:75:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:81:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:84:2057] recipient: [14:83:2113] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:85:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:86:2114] sender: [14:87:2057] recipient: [14:83:2113] !Reboot 72057594037927937 (actor [14:57:2097]) rebooted! !Reboot 72057594037927937 (actor [14:57:2097]) tablet resolver refreshed! 
new actor is[14:86:2114] Leader for TabletID 72057594037927937 is [14:86:2114] sender: [14:172:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:55:2057] recipient: [15:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:55:2057] recipient: [15:52:2095] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:58:2057] recipient: [15:52:2095] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:75:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:82:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:85:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:86:2057] recipient: [15:84:2113] Leader for TabletID 72057594037927937 is [15:87:2114] sender: [15:88:2057] recipient: [15:84:2113] !Reboot 72057594037927937 (actor [15:57:2097]) rebooted! !Reboot 72057594037927937 (actor [15:57:2097]) tablet resolver refreshed! new actor is[15:87:2114] Leader for TabletID 72057594037927937 is [15:87:2114] sender: [15:173:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:55:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:55:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:58:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:75:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:85:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:88:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:89:2057] recipient: [16:87:2116] Leader for TabletID 72057594037927937 is [16:90:2117] sender: [16:91:2057] recipient: [16:87:2116] !Reboot 72057594037927937 (actor [16:57:2097]) rebooted! !Reboot 72057594037927937 (actor [16:57:2097]) tablet resolver refreshed! new actor is[16:90:2117] Leader for TabletID 72057594037927937 is [16:90:2117] sender: [16:176:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:55:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:55:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:58:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:75:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:85:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:87:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:89:2057] recipient: [17:88:2116] Leader for TabletID 72057594037927937 is [17:90:2117] sender: [17:91:2057] recipient: [17:88:2116] !Reboot 72057594037927937 (actor [17:57:2097]) rebooted! !Reboot 72057594037927937 (actor [17:57:2097]) tablet resolver refreshed! 
new actor is[17:90:2117] Leader for TabletID 72057594037927937 is [17:90:2117] sender: [17:176:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:55:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:55:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:58:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:75:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:86:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:89:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:90:2057] recipient: [18:88:2116] Leader for TabletID 72057594037927937 is [18:91:2117] sender: [18:92:2057] recipient: [18:88:2116] !Reboot 72057594037927937 (actor [18:57:2097]) rebooted! !Reboot 72057594037927937 (actor [18:57:2097]) tablet resolver refreshed! new actor is[18:91:2117] Leader for TabletID 72057594037927937 is [18:91:2117] sender: [18:177:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:55:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:55:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:58:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:75:2057] recipient: [19:14:2061]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:57:2097]) rebooted! !Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed!
new actor is[2:82:2111] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:81:2110] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:81:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:84:2113] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:84:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! 
new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:86:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:88:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! new actor is[7:87:2114] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:173:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:88:2057] recipient: [8:86:2115] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:90:2057] recipient: [8:86:2115] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! 
new actor is[8:89:2116] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:175:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:88:2057] recipient: [9:86:2115] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:90:2057] recipient: [9:86:2115] !Reboot 72057594037927937 (actor [9:57:2097]) rebooted! !Reboot 72057594037927937 (actor [9:57:2097]) tablet resolver refreshed! new actor is[9:89:2116] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:175:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:89:2057] recipient: [10:87:2115] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:91:2057] recipient: [10:87:2115] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! 
new actor is[10:90:2116] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:176:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:55:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:55:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:58:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:75:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:77:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:80:2057] recipient: [13:79:2110] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:81:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:82:2111] sender: [13:83:2057] recipient: [13:79:2110] !Reboot 72057594037927937 (actor [13:57:2097]) rebooted! !Reboot 72057594037927937 (actor [13:57:2097]) tablet resolver refreshed! new actor is[13:82:2111] Leader for TabletID 72057594037927937 is [13:82:2111] sender: [13:168:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:55:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:55:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:58:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:75:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:77:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:80:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:81:2057] recipient: [14:79:2110] Leader for TabletID 72057594037927937 is [14:82:2111] sender: [14:83:2057] recipient: [14:79:2110] !Reboot 72057594037927937 (actor [14:57:2097]) rebooted! !Reboot 72057594037927937 (actor [14:57:2097]) tablet resolver refreshed! 
new actor is[14:82:2111] Leader for TabletID 72057594037927937 is [14:82:2111] sender: [14:168:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:55:2057] recipient: [15:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:55:2057] recipient: [15:52:2095] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:58:2057] recipient: [15:52:2095] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:75:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:78:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:81:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:82:2057] recipient: [15:80:2110] Leader for TabletID 72057594037927937 is [15:83:2111] sender: [15:84:2057] recipient: [15:80:2110] !Reboot 72057594037927937 (actor [15:57:2097]) rebooted! !Reboot 72057594037927937 (actor [15:57:2097]) tablet resolver refreshed! new actor is[15:83:2111] Leader for TabletID 72057594037927937 is [15:83:2111] sender: [15:169:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:55:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:55:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:58:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:75:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:81:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:84:2057] recipient: [16:83:2113] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:85:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:86:2114] sender: [16:87:2057] recipient: [16:83:2113] !Reboot 72057594037927937 (actor [16:57:2097]) rebooted! !Reboot 72057594037927937 (actor [16:57:2097]) tablet resolver refreshed! new actor is[16:86:2114] Leader for TabletID 72057594037927937 is [16:86:2114] sender: [16:172:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:55:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:55:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:58:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:75:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:57:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:81:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:84:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:85:2057] recipient: [17:83:2113] Leader for TabletID 72057594037927937 is [17:86:2114] sender: [17:87:2057] recipient: [17:83:2113] !Reboot 72057594037927937 (actor [17:57:2097]) rebooted! !Reboot 72057594037927937 (actor [17:57:2097]) tablet resolver refreshed! 
new actor is[17:86:2114] Leader for TabletID 72057594037927937 is [17:86:2114] sender: [17:172:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:55:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:55:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:58:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:75:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:82:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:85:2057] recipient: [18:84:2113] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:86:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:87:2114] sender: [18:88:2057] recipient: [18:84:2113] !Reboot 72057594037927937 (actor [18:57:2097]) rebooted! !Reboot 72057594037927937 (actor [18:57:2097]) tablet resolver refreshed! new actor is[18:87:2114] Leader for TabletID 72057594037927937 is [18:87:2114] sender: [18:105:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:55:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:55:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:58:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:75:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:84:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:87:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:88:2057] recipient: [19:86:2115] Leader for TabletID 72057594037927937 is [19:89:2116] sender: [19:90:2057] recipient: [19:86:2115] !Reboot 72057594037927937 (actor [19:57:2097]) rebooted! !Reboot 72057594037927937 (actor [19:57:2097]) tablet resolver refreshed! new actor is[19:89:2116] Leader for TabletID 72057594037927937 is [19:89:2116] sender: [19:175:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:55:2057] recipient: [20:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:55:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:58:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:75:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:57:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:84:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:87:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:88:2057] recipient: [20:86:2115] Leader for TabletID 72057594037927937 is [20:89:2116] sender: [20:90:2057] recipient: [20:86:2115] !Reboot 72057594037927937 (actor [20:57:2097]) rebooted! !Reboot 72057594037927937 (actor [20:57:2097]) tablet resolver refreshed! 
new actor is[20:89:2116] Leader for TabletID 72057594037927937 is [20:89:2116] sender: [20:175:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:55:2057] recipient: [21:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:55:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:58:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:75:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:85:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:87:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:89:2057] recipient: [21:88:2115] Leader for TabletID 72057594037927937 is [21:90:2116] sender: [21:91:2057] recipient: [21:88:2115] !Reboot 72057594037927937 (actor [21:57:2097]) rebooted! !Reboot 72057594037927937 (actor [21:57:2097]) tablet resolver refreshed! new actor is[21:90:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:55:2057] recipient: [22:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:55:2057] recipient: [22:52:2095] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:58:2057] recipient: [22:52:2095] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:75:2057] recipient: [22:14:2061]
>> THiveTest::DrainWithHiveRestart [GOOD]
>> THiveTest::TestCheckSubHiveForwarding
>> IndexBuildTest::WithFollowers [GOOD]
>> VectorIndexBuildTest::BaseCase
>> TKeyValueTest::TestCleanUpDataOnEmptyTablet [GOOD]
>> TKeyValueTest::TestCleanUpDataOnEmptyTabletResetGeneration
>> TKeyValueTest::TestCleanUpDataOnEmptyTabletResetGeneration [GOOD]
>> TKeyValueTest::TestCleanUpDataWithMockDisk
>> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted-prefixed-true [GOOD]
>> IndexBuildTest::ShadowDataNotAllowedByDefault
>> TDSProxyFaultTolerancePatchTest::block42 [GOOD]
>> TDSProxyPatchTest::MovedError_ErasureNone
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted-prefixed-true [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:03.742142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:03.742165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:03.742169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:03.742173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:03.742185Z
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:03.742187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:03.742194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:03.742205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:03.742286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:03.742338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:03.752113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:03.752137Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:03.754137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:03.754277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:03.754335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:03.755729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:03.755850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:03.755937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:03.755969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:03.756308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:03.756367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:03.756613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:03.756621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:03.756640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:03.756645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:03.756650Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:03.756679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:03.757788Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:03.771855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:03.771940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:03.771994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:03.772037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:03.772046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:03.772663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:03.772687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:03.772737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:03.772745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:03.772749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:03.772753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:03.773061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:03.773069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:03.773072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:03.773337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:28:03.773346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:03.773350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:03.773355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:03.773847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:03.774228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:03.774262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:03.774428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:03.774448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:03.774455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:03.774498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:03.774503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:03.774528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:03.774537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:03.775031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:03.775038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-05-29T15:28:03.775071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... d_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 0 2025-05-29T15:28:05.093387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:05.093400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:05.093914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:05.093931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:05.093940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:05.094482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:2641:4383] sender: [1:2703:2058] recipient: [1:15:2062] 2025-05-29T15:28:05.135556Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/by_embedding" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:28:05.135656Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/vectors/by_embedding" took 121us result status StatusSuccess 2025-05-29T15:28:05.135912Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/by_embedding" PathDescription { Self { Name: "by_embedding" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable 
Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Children { Name: "indexImplPrefixTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "by_embedding" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "prefix" KeyColumnNames: "embedding" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataColumnNames: "covered" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" 
LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 
101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> IndexBuildTest::ShadowDataNotAllowedByDefault [GOOD]
>> IndexBuildTest::ShadowDataEdgeCases
>> TDSProxyPatchTest::MovedError_ErasureNone [GOOD]
>> TDSProxyPatchTest::SecuredErrorOnGet_ErasureMirror3dc
>> THiveTest::TestCheckSubHiveForwarding [GOOD]
>> THiveTest::TestCheckSubHiveDrain
>> TDSProxyPatchTest::SecuredErrorOnGet_ErasureMirror3dc [GOOD]
>> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_0_0_0_VdiskErrors
>> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD]
>> TKesusTest::TestAcquireSemaphoreViaDecrease
>> IndexBuildTest::ShadowDataEdgeCases [GOOD]
>> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_0_0_0_VdiskErrors [GOOD]
>> TSharedPageCache::ThreeLeveledLRU
>> TTxDataShardMiniKQL::WriteAndReadMany [GOOD]
>> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD]
>> TClockProCache::Touch [GOOD]
>> TClockProCache::UpdateLimit [GOOD]
>> TCompaction::OneMemtable [GOOD]
>> TCompaction::ManyParts
>> BuildStatsHistogram::Three_Serial_Small_2_Levels [GOOD]
>> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CopyTableAndDropWithReboots2 [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154]
sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:26:39.624026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:39.624049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:39.624055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:39.624060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:39.624066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:39.624071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:39.624080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:39.624094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:39.624197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:39.624260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:39.644814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:39.644839Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:39.644949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:39.650319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:39.650350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:39.650388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:39.657595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:39.657675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:39.657820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2025-05-29T15:26:39.658066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:39.659726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:39.659767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:39.660002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:39.660010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:39.660039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:39.660048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:39.660054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:39.660077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:39.661507Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:39.677586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:39.677650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.677710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:39.677771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:39.677783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.678436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 
1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:39.678464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:39.678517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.678527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:39.678533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:39.678538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:39.679093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.679109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:39.679113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:39.679526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.679541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:39.679548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:39.679556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:39.680244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:39.680669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:39.680706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:39.680897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:39.680917Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:39.680922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:39.680964Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... emeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:02.828930Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [232:208:2209], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2025-05-29T15:28:02.828935Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [232:208:2209], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2025-05-29T15:28:02.829043Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:28:02.829050Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [232:549:2509] 2025-05-29T15:28:02.829793Z node 232 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:28:02.829814Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:28:02.829819Z node 232 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:28:02.829825Z node 232 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-29T15:28:02.829831Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:28:02.830046Z node 232 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:28:02.830058Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:28:02.830062Z node 232 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:28:02.830067Z node 232 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 
72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:28:02.830072Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:28:02.830084Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2025-05-29T15:28:02.830194Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-05-29T15:28:02.830201Z node 232 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:02.830260Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:28:02.830286Z node 232 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:28:02.830291Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:28:02.830296Z node 232 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:28:02.830301Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:28:02.830305Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2025-05-29T15:28:02.830314Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [232:408:2380] message: TxId: 1005 2025-05-29T15:28:02.830319Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:28:02.830325Z node 232 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:0 2025-05-29T15:28:02.830330Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:0 2025-05-29T15:28:02.830347Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:28:02.830932Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:28:02.831339Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 332 RawX2: 996432414990 } TabletId: 72075186233409546 State: 4 2025-05-29T15:28:02.831357Z node 232 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:28:02.831836Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 
1005 2025-05-29T15:28:02.831882Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-05-29T15:28:02.831889Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [232:549:2509] 2025-05-29T15:28:02.832337Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:28:02.832583Z node 232 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:28:02.832655Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:02.832706Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409546 2025-05-29T15:28:02.833321Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:28:02.833330Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:28:02.833345Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:28:02.833582Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 443 RawX2: 996432415084 } TabletId: 72075186233409547 State: 4 2025-05-29T15:28:02.833594Z node 232 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:28:02.834231Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:28:02.834247Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:28:02.834274Z node 232 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:28:02.834334Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:28:02.834403Z node 232 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-05-29T15:28:02.834436Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 
2025-05-29T15:28:02.834481Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2025-05-29T15:28:02.834956Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:28:02.834966Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:28:02.834978Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:02.835601Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:28:02.835614Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:28:02.835722Z node 232 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-05-29T15:28:02.835798Z node 232 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:28:02.835809Z node 232 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_0_0_0_VdiskErrors [GOOD]
Test command err: 2025-05-29T15:28:05.941091Z node 25 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [7e4afa7ea38a37be] bootstrap ActorId# [25:82:2128] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-05-29T15:28:05.941157Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:28:05.941162Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:28:05.941166Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:28:05.941168Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:28:05.941172Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:28:05.941174Z node 25 :BS_PROXY_PUT DEBUG:
dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:28:05.944044Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2025-05-29T15:28:05.944097Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2025-05-29T15:28:05.944106Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2025-05-29T15:28:05.944118Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-05-29T15:28:05.944125Z node 25 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:28:05.944153Z node 25 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.258 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:1:0] NodeId# 25 } TEvVPut{ TimestampMs# 0.259 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:2:1:0] NodeId# 25 } TEvVPut{ TimestampMs# 0.259 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 25 } TEvVPutResult{ TimestampMs# 3.135 VDiskId# [0:1:0:1:0] NodeId# 25 Status# OK } TEvVPutResult{ TimestampMs# 3.168 VDiskId# [0:1:1:1:0] NodeId# 25 Status# OK } TEvVPutResult{ TimestampMs# 3.176 VDiskId# [0:1:2:1:0] NodeId# 25 Status# OK } ] }
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::ShadowDataEdgeCases [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:05.492951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:05.492972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:05.492977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:05.492982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing
config: using default configuration 2025-05-29T15:28:05.492994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:05.492998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:05.493006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:05.493017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:05.493092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:05.493138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:05.502052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:05.502067Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:05.503916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:05.503990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:05.504020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:05.505828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:05.506041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:05.506150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:05.506191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:05.506579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:05.506622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:05.506914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:05.506930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:05.506953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:05.506965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 
1] 2025-05-29T15:28:05.506972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:05.507010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:05.508450Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:05.527445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:05.527500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:05.527539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:05.527574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:05.527582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:05.528199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:05.528222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:05.528267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:05.528274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:05.528278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:05.528281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:05.528635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:05.528644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:05.528648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:05.529209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:05.529235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:05.529247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:05.529257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:05.529993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:05.530506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:05.530550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:05.530760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:05.530784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:05.530792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:05.530850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:05.530859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:05.530895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:05.530906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:05.531377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:05.531388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-05-29T15:28:05.531428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 18: TAlterTable TPropose operationId# 109:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:06.066258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 109 ready parts: 1/1 2025-05-29T15:28:06.066290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:06.066632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 109:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:109 msg type: 269090816 2025-05-29T15:28:06.066663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 109, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 109 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 109 at step: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 109 at step: 5000008 2025-05-29T15:28:06.066838Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:06.066863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 109 Coordinator: 72057594046316545 AckTo { RawX1: 127 RawX2: 8589936743 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:06.066884Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_table.cpp:359: TAlterTable TPropose operationId# 109:0 HandleReply TEvOperationPlan, operationId: 109:0, stepId: 5000008, at schemeshard: 72057594046678944 2025-05-29T15:28:06.066957Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 109:0 128 -> 129 2025-05-29T15:28:06.066992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000008 2025-05-29T15:28:06.067911Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:06.067922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 109, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:28:06.067993Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:06.067998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to 
populator: [2:207:2208], at schemeshard: 72057594046678944, txId: 109, path id: 4 2025-05-29T15:28:06.068197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2025-05-29T15:28:06.068205Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 109:0 ProgressState at tablet: 72057594046678944 2025-05-29T15:28:06.068549Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109 2025-05-29T15:28:06.068574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109 2025-05-29T15:28:06.068578Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 109 2025-05-29T15:28:06.068583Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 109, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:28:06.068589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:28:06.068610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 109, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 109 2025-05-29T15:28:06.069519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 109 2025-05-29T15:28:06.069960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6285: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 244 } } CommitVersion { Step: 5000008 TxId: 109 } 2025-05-29T15:28:06.069989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2025-05-29T15:28:06.070014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 244 } } CommitVersion { Step: 5000008 TxId: 109 } 2025-05-29T15:28:06.070030Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 
72075186233409548 CpuTimeUsec: 244 } } CommitVersion { Step: 5000008 TxId: 109 } 2025-05-29T15:28:06.070182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 681 RawX2: 8589937226 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-05-29T15:28:06.070188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2025-05-29T15:28:06.070199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: Source { RawX1: 681 RawX2: 8589937226 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-05-29T15:28:06.070204Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:28:06.070209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 681 RawX2: 8589937226 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-05-29T15:28:06.070219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 109:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:06.070222Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 109:0, at schemeshard: 72057594046678944 2025-05-29T15:28:06.070225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 109:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-05-29T15:28:06.070230Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 109:0 129 -> 240 2025-05-29T15:28:06.070802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-05-29T15:28:06.070896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-05-29T15:28:06.070955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2025-05-29T15:28:06.070961Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 109:0 ProgressState 2025-05-29T15:28:06.070972Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#109:0 progress is 1/1 2025-05-29T15:28:06.070975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-05-29T15:28:06.070979Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#109:0 progress is 1/1 2025-05-29T15:28:06.070981Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation 
IsReadyToDone TxId: 109 ready parts: 1/1 2025-05-29T15:28:06.070985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 109, ready parts: 1/1, is published: true 2025-05-29T15:28:06.070996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:336:2314] message: TxId: 109 2025-05-29T15:28:06.071000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-05-29T15:28:06.071004Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 109:0 2025-05-29T15:28:06.071008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 109:0 2025-05-29T15:28:06.071031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:28:06.071444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-05-29T15:28:06.071456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:787:2730]
TestWaitNotification: OK eventTxId 109
>> TCompaction::ManyParts [GOOD]
>> TCompaction::BootAbort
>> TSharedPageCache::ThreeLeveledLRU [GOOD]
>> TSharedPageCache::S3FIFO
>> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets [GOOD]
>> BuildStatsHistogram::Three_Serial_Small_1_Level [GOOD]
>> BuildStatsHistogram::Three_Serial_Small_0_Levels [GOOD]
>> BuildStatsMixedIndex::Single [GOOD]
>> BuildStatsMixedIndex::Single_Slices [GOOD]
>> BuildStatsMixedIndex::Single_History
>> Bloom::Conf [GOOD]
>> Bloom::Hashes [GOOD]
>> Bloom::Rater
>> TCompaction::BootAbort [GOOD]
>> TCompaction::Defaults [GOOD]
>> TCompaction::Merges [GOOD]
>> TCompactionMulti::ManyParts
>> BuildStatsMixedIndex::Single_History [GOOD]
>> BuildStatsMixedIndex::Single_History_Slices [GOOD]
>> BuildStatsMixedIndex::Single_Groups [GOOD]
>> BuildStatsMixedIndex::Single_Groups_Slices
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD]
Test command err: 2025-05-29T15:27:27.754499Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:27.754528Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:27.757845Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:27.757874Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:27.768985Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:27.769146Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2156], cookie=12745122638402063936, session=0, seqNo=0) 2025-05-29T15:27:27.769182Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:27.790196Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2156], cookie=12745122638402063936, session=1) 2025-05-29T15:27:27.790298Z node 1 :KESUS_TABLET DEBUG:
tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2156], cookie=17264738898679135792, session=0, seqNo=0) 2025-05-29T15:27:27.790329Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:27:27.801043Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2156], cookie=17264738898679135792, session=2) 2025-05-29T15:27:27.801189Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[1:142:2165], cookie=5396692672266811017, name="Sem1", limit=1) 2025-05-29T15:27:27.801222Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-05-29T15:27:27.811966Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[1:142:2165], cookie=5396692672266811017) 2025-05-29T15:27:27.812056Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2156], cookie=111, session=1, semaphore="Sem1" count=1) 2025-05-29T15:27:27.812114Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-05-29T15:27:27.812153Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2156], cookie=222, session=2, semaphore="Sem1" count=1) 2025-05-29T15:27:27.822795Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2156], cookie=111) 2025-05-29T15:27:27.822822Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2156], cookie=222) 2025-05-29T15:27:27.822929Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:150:2173], cookie=11847749711153611845, name="Sem1") 2025-05-29T15:27:27.822949Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:150:2173], cookie=11847749711153611845) 2025-05-29T15:27:27.823003Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:153:2176], cookie=9760591493883446565, name="Sem1") 2025-05-29T15:27:27.823011Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:153:2176], cookie=9760591493883446565) 2025-05-29T15:27:28.239107Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:28.249915Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:28.594512Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:28.605184Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:28.959896Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:28.970569Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:29.304908Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:29.315628Z node 1 
:KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:29.680357Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:29.691190Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:30.035330Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:30.046097Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:30.380494Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:30.391290Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:30.736092Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:30.746899Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:31.091581Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:31.102421Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:31.488153Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:31.498963Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:31.863600Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:31.874282Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:32.228732Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:32.239462Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:32.593970Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:32.604695Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:32.959025Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:32.969742Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:33.355531Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:33.366367Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:33.730950Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:33.741888Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:34.100640Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:34.111420Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:34.456811Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:34.467624Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:34.822656Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: 
[72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:34.833432Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:35.201597Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:35.212428Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:35.577347Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:35.588173Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:35.948989Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:35.959997Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:36.314550Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:36.325270Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:36.680110Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:36.691073Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:37.067615Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:37.078389Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:37.443910Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:37.454759Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:37.809785Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:37.820840Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:38.176060Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:38.186867Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:38.531826Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:38.542759Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:38.938727Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:38.949399Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:39.314298Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:39.343264Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:39.699993Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:39.711080Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:40.074357Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:40.085286Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 
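The long TTxSelfCheck Execute/Complete run above is the tablet's periodic self-check transaction firing roughly every 350 ms; the substantive part of this trace is the semaphore episode before it: "Sem1" is created with limit=1, session 1 is granted first ("next order #1 session 1"), and session 2's acquire is accepted but never granted, so it waits in the queue. The sketch below is a minimal, self-contained model of just that queue discipline, not YDB's implementation; the type names and the FIFO grant loop are assumptions made for illustration (the real logic lives under ydb/core/kesus).

#include <cassert>
#include <cstdint>
#include <deque>
#include <map>

struct TAcquire {
    uint64_t Session;
    uint64_t Count;   // capacity units requested ("count=N" in the trace)
    uint64_t Order;   // grant order ("next order #N")
};

class TSemaphoreModel {
public:
    explicit TSemaphoreModel(uint64_t limit) : Limit(limit) {}

    void Acquire(uint64_t session, uint64_t count) {
        Waiting.push_back({session, count, ++NextOrder});
        Process();
    }

    // Session death (TTxSessionTimeout) drops the owner link and
    // re-runs the queue, promoting waiters.
    void DeleteSession(uint64_t session) {
        Owners.erase(session);
        Process();
    }

    bool IsOwner(uint64_t session) const { return Owners.count(session) > 0; }

private:
    uint64_t Used() const {
        uint64_t used = 0;
        for (const auto& [session, count] : Owners) used += count;
        return used;
    }

    // Grants strictly in arrival order, matching the "Processing semaphore
    // ... queue: next order #N session S" lines. Re-acquires by a session
    // that already owns the semaphore are out of scope of this model.
    void Process() {
        while (!Waiting.empty() && Used() + Waiting.front().Count <= Limit) {
            Owners[Waiting.front().Session] = Waiting.front().Count;
            Waiting.pop_front();
        }
    }

    uint64_t Limit;
    uint64_t NextOrder = 0;
    std::map<uint64_t, uint64_t> Owners;  // session -> held count
    std::deque<TAcquire> Waiting;
};

int main() {
    TSemaphoreModel sem(1);      // CreateSemaphore name="Sem1", limit=1
    sem.Acquire(1, 1);           // granted: "next order #1 session 1"
    sem.Acquire(2, 1);           // no grant line in the trace: it waits
    assert(sem.IsOwner(1) && !sem.IsOwner(2));
    sem.DeleteSession(1);        // "Deleting session 1 ... owner link"
    assert(sem.IsOwner(2));      // the waiter is promoted
    return 0;
}

Running it replays the node-1 episode: once session 1 goes away, session 2 becomes the owner, which is exactly the hand-off the node-4 part of this trace later shows via TTxSessionTimeout.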
2025-05-29T15:27:40.431522Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:40.442405Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck: ... BUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:01.065790Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:01.420900Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:01.431601Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:01.776775Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:01.787526Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:02.207957Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:02.218786Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:02.574151Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:02.585096Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:02.934876Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:02.947080Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:03.303805Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:03.314854Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:03.671013Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:03.682325Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:04.059381Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:04.070545Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:04.427405Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:04.438508Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:04.795019Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:04.806071Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:05.162968Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:05.174057Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:05.530800Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:05.541769Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:05.908711Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-05-29T15:28:05.908748Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] 
Deleting session 1 2025-05-29T15:28:05.908760Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-05-29T15:28:05.919847Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-05-29T15:28:05.930262Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:452:2411], cookie=12752367793533873018, name="Sem1") 2025-05-29T15:28:05.930311Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:452:2411], cookie=12752367793533873018) 2025-05-29T15:28:06.121904Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:28:06.121936Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:28:06.124822Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:28:06.124854Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:28:06.146558Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:28:06.146761Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2156], cookie=12943500792412235810, session=0, seqNo=0) 2025-05-29T15:28:06.146813Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:28:06.157856Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2156], cookie=12943500792412235810, session=1) 2025-05-29T15:28:06.157983Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2156], cookie=17439374851358885187, session=0, seqNo=0) 2025-05-29T15:28:06.158028Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:28:06.169042Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2156], cookie=17439374851358885187, session=2) 2025-05-29T15:28:06.169148Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2156], cookie=8103398314662164445, session=0, seqNo=0) 2025-05-29T15:28:06.169188Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 3 2025-05-29T15:28:06.180332Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2156], cookie=8103398314662164445, session=3) 2025-05-29T15:28:06.180512Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:144:2167], cookie=1467130958094172194, name="Sem1", limit=3) 2025-05-29T15:28:06.180562Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-05-29T15:28:06.191611Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:144:2167], cookie=1467130958094172194) 2025-05-29T15:28:06.191728Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2156], cookie=111, session=1, semaphore="Sem1" count=2) 
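The node-4 fragment above shows the session lifecycle end-to-end: when a session's lease lapses, TTxSessionTimeout deletes the session and, crucially, its per-semaphore owner links, which is what releases "Sem1" for anyone queued behind it. Below is a hypothetical sweep illustrating only that ordering; the field names and the callback are invented for this sketch, and YDB drives this from tablet-local transactions rather than a flat in-memory map.

#include <cassert>
#include <cstdint>
#include <functional>
#include <map>

struct TSessionTable {
    std::map<uint64_t, uint64_t> Deadlines;  // session id -> expiry, ms

    // 'releaseOwnerLinks' stands in for "Deleting session N / semaphore ...
    // owner link"; dropping the link is what lets waiters be granted.
    void Sweep(uint64_t nowMs,
               const std::function<void(uint64_t)>& releaseOwnerLinks) {
        for (auto it = Deadlines.begin(); it != Deadlines.end();) {
            if (it->second <= nowMs) {
                releaseOwnerLinks(it->first);
                it = Deadlines.erase(it);    // "Deleting session N"
            } else {
                ++it;
            }
        }
    }
};

int main() {
    TSessionTable sessions;
    sessions.Deadlines[1] = 1000;            // session 1 expires at t=1000ms
    uint64_t released = 0;
    sessions.Sweep(1500, [&](uint64_t) { ++released; });
    assert(released == 1 && sessions.Deadlines.empty());
    return 0;
}

In the trace the timeout fires on node 4 at 15:28:05.908, and the TTxSemaphoreDescribe that follows confirms the semaphore survived with the dead owner removed; the node-5 episode then rebuilds three fresh sessions against the same tablet id.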
2025-05-29T15:28:06.191782Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-05-29T15:28:06.191840Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2156], cookie=222, session=2, semaphore="Sem1" count=1) 2025-05-29T15:28:06.191854Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-05-29T15:28:06.191869Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2156], cookie=333, session=3, semaphore="Sem1" count=1) 2025-05-29T15:28:06.202806Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2156], cookie=111) 2025-05-29T15:28:06.202844Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2156], cookie=222) 2025-05-29T15:28:06.202850Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2156], cookie=333) 2025-05-29T15:28:06.203011Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:152:2175], cookie=4191186074931026047, name="Sem1") 2025-05-29T15:28:06.203036Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:152:2175], cookie=4191186074931026047) 2025-05-29T15:28:06.203089Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:155:2178], cookie=4318748759617646072, name="Sem1") 2025-05-29T15:28:06.203094Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:155:2178], cookie=4318748759617646072) 2025-05-29T15:28:06.203118Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2156], cookie=444, session=1, semaphore="Sem1" count=1) 2025-05-29T15:28:06.203147Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-05-29T15:28:06.214083Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2156], cookie=444) 2025-05-29T15:28:06.214253Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:160:2183], cookie=2751092072821254881, name="Sem1") 2025-05-29T15:28:06.214271Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:160:2183], cookie=2751092072821254881) 2025-05-29T15:28:06.214313Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:163:2186], cookie=3001138901974579968, name="Sem1") 2025-05-29T15:28:06.214317Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:163:2186], cookie=3001138901974579968) 2025-05-29T15:28:06.217204Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:28:06.217239Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 
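The node-5 acquires are a clean capacity worked example. The limit is 3: session 1 takes count=2 (order #1) and session 2 takes count=1 (order #2), so 2 + 1 = 3 and the semaphore is full; session 3's count=1 would push usage to 4 > 3, so its acquire (cookie=333) completes without a grant and it queues. When session 1 re-acquires with count=1 (cookie=444), its hold shrinks from 2 to 1, usage drops to 1 + 1 = 2, and the freed unit goes to the queue head: "next order #3 session 3", giving 1 + 1 + 1 = 3 <= 3 again. The restart that begins at the end of this fragment (OnActivateExecutor, TTxInitSchema, TTxInit) then replays exactly the same three grants from persisted state in the lines below, which is the point: grant order is durable across tablet generations.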
2025-05-29T15:28:06.217301Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:28:06.217506Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:28:06.260035Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:28:06.260095Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-05-29T15:28:06.260104Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-05-29T15:28:06.260109Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-05-29T15:28:06.260227Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:203:2216], cookie=5660313843523751552, name="Sem1") 2025-05-29T15:28:06.260253Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:203:2216], cookie=5660313843523751552) 2025-05-29T15:28:06.260376Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:212:2224], cookie=11478178385474870386, name="Sem1") 2025-05-29T15:28:06.260384Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:212:2224], cookie=11478178385474870386) >> Bloom::Rater [GOOD] >> Bloom::Dipping >> TCompactionMulti::ManyParts [GOOD] >> TCompactionMulti::MainPageCollectionEdge >> BuildStatsMixedIndex::Single_Groups_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsMixedIndex::Mixed >> Bloom::Dipping [GOOD] >> Bloom::Basics [GOOD] >> Bloom::Stairs [GOOD] >> BuildStatsBTreeIndex::Single [GOOD] >> BuildStatsBTreeIndex::Single_Slices >> BuildStatsMixedIndex::Mixed [GOOD] >> BuildStatsMixedIndex::Mixed_Groups [GOOD] >> BuildStatsMixedIndex::Mixed_Groups_History [GOOD] >> BuildStatsMixedIndex::Serial [GOOD] >> BuildStatsMixedIndex::Serial_Groups >> BuildStatsBTreeIndex::Single_Slices [GOOD] >> BuildStatsBTreeIndex::Single_History [GOOD] >> BuildStatsBTreeIndex::Single_History_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups >> BuildStatsMixedIndex::Serial_Groups [GOOD] >> BuildStatsMixedIndex::Serial_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_LowResolution ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] Test command err: 2025-05-29T15:27:03.106030Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:110:2140], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:27:03.122606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:03.122632Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:03.123375Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:110:2140], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:27:03.123477Z node 1 :TX_DATASHARD 
INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-05-29T15:27:03.123536Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:27:03.124665Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:110:2140], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:27:03.137407Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:27:03.137500Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:27:03.137668Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-05-29T15:27:03.137679Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 9437184 2025-05-29T15:27:03.137687Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 9437184 2025-05-29T15:27:03.137747Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:27:03.137760Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:27:03.137770Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 9437184 persisting started state actor id [1:197:2153] in generation 2 2025-05-29T15:27:03.167121Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:27:03.176216Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 9437184 2025-05-29T15:27:03.176313Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:27:03.176344Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 9437184, actorId: [1:215:2212] 2025-05-29T15:27:03.176349Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 9437184 2025-05-29T15:27:03.176355Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-05-29T15:27:03.176360Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-05-29T15:27:03.176422Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:03.176440Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:03.176526Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-05-29T15:27:03.176555Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-05-29T15:27:03.176565Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-05-29T15:27:03.176572Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:27:03.176579Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-05-29T15:27:03.176585Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 
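From here the datashard trace walks a proposed transaction through the execution-unit pipeline: each unit (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, and later ExecuteDataTx) returns a status, where Executed advances to the next unit, DelayComplete defers side effects to the Complete phase, and Restart re-queues the operation. The Restart path shows up further down as a memory-escalation loop for operation [0:11]; the sketch below reproduces just that arithmetic. The 8x factor is inferred from the two escalation lines in this trace only, so treat it as an observation, not a documented policy.

#include <cstdint>
#include <cstdio>

int main() {
    // "exceeded memory limit 4194304 and requests 33554432 more" and later
    // "exceeded memory limit 37748736 and requests 301989888 more":
    // both fit extra = 8 * limit, i.e. the budget grows 9x per retry.
    uint64_t limit = 4194304;        // 4 MiB initial tx memory limit
    for (int attempt = 1; attempt <= 2; ++attempt) {
        uint64_t extra = 8 * limit;  // "requests ... more for the next try"
        std::printf("attempt %d: limit %llu, requests %llu more\n",
                    attempt, (unsigned long long)limit,
                    (unsigned long long)extra);
        limit += extra;              // status Restart -> retry with bigger budget
    }
    return 0;                        // prints the two pairs seen in the trace
}

The final successful execution reports SelectRangeBytes: 40000268, about 38 MiB read by a single SelectRange, which is why the initial 4 MiB budget could never have sufficed and the operation had to escalate twice before completing with status COMPLETE.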
2025-05-29T15:27:03.176590Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-05-29T15:27:03.176597Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-05-29T15:27:03.176603Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-05-29T15:27:03.176615Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:211:2209], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:03.176621Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:03.176627Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 9437184, clientId# [1:209:2208], serverId# [1:211:2209], sessionId# [0:0:0] 2025-05-29T15:27:03.177093Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:100:2134], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 100 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-05-29T15:27:03.177107Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:27:03.177121Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:27:03.177153Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-05-29T15:27:03.177166Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-05-29T15:27:03.177176Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 1 at tablet 9437184 2025-05-29T15:27:03.177183Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-05-29T15:27:03.177188Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-05-29T15:27:03.177194Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-05-29T15:27:03.177199Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-05-29T15:27:03.177272Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-05-29T15:27:03.177276Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-05-29T15:27:03.177280Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1] at 9437184 to execution unit FinishPropose 2025-05-29T15:27:03.177284Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-05-29T15:27:03.177296Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:1] at 9437184 is DelayComplete 2025-05-29T15:27:03.177299Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1910: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-05-29T15:27:03.177303Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-05-29T15:27:03.177306Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-05-29T15:27:03.177311Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1832: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-05-29T15:27:03.190023Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-05-29T15:27:03.190056Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-05-29T15:27:03.190064Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-05-29T15:27:03.190076Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-05-29T15:27:03.190104Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:467: 9437184 not sending time cast registration request in state WaitScheme 2025-05-29T15:27:03.190229Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:220:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:03.190238Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:27:03.190246Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 9437184, clientId# [1:219:2216], serverId# [1:220:2217], sessionId# [0:0:0] 2025-05-29T15:27:03.190268Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269287424, Sender [1:100:2134], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-05-29T15:27:03.190273Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3147: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-05-29T15:27:03.190324Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1790: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-05-29T15:27:03.190334Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1805: Execution status for [1000001:1] at 9437184 is Executed 2025-05-29T15:27:03.190340Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-05-29T15:27:03.190345Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-05-29T15:27:03.191160Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 100 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-05-29T15:27:03.191181Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-05-29T15:27:03.191249Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:03.191256Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2025-05-29T15:27:03.191266Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-05-29T15:27:03.191274Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-05-29T15:27:03.191279Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-05-29T15:27:03.191287Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-05-29T15:27:03.191293Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [100000 ... 02?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4\000\'?8\003\013?>\003?<\003j\030\001\003?@\000\003?B\000\003?D\007\240%&\003?F\000\006\004?J\003\203\014\000\003\203\014\000\003\003?L\000\377\007\002\000\005?\032\005?\026?x\000\005?\030\003\005? \005?\034?x\000\006 2025-05-29T15:28:03.386980Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:28:03.387008Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:28:03.387191Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:11] at 9437184 on unit CheckDataTx 2025-05-29T15:28:03.388732Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:11] at 9437184 is Executed 2025-05-29T15:28:03.388750Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:11] at 9437184 executing on unit CheckDataTx 2025-05-29T15:28:03.388757Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:11] at 9437184 to execution unit BuildAndWaitDependencies 2025-05-29T15:28:03.388762Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:11] at 9437184 on unit BuildAndWaitDependencies 2025-05-29T15:28:03.388776Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2365: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-05-29T15:28:03.388790Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:11] at 9437184 2025-05-29T15:28:03.388796Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:11] at 9437184 is Executed 2025-05-29T15:28:03.388799Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:11] at 9437184 executing on unit BuildAndWaitDependencies 2025-05-29T15:28:03.388801Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:11] at 9437184 to execution unit ExecuteDataTx 2025-05-29T15:28:03.388804Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-05-29T15:28:03.391472Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:189: Tablet 9437184 is not ready for 
[0:11] execution 2025-05-29T15:28:03.391520Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 11 released its data 2025-05-29T15:28:03.391529Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:11] at 9437184 is Restart 2025-05-29T15:28:03.400588Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:28:03.400611Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-05-29T15:28:03.400803Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 11 at 9437184 restored its data 2025-05-29T15:28:03.401601Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:189: Tablet 9437184 is not ready for [0:11] execution 2025-05-29T15:28:03.401634Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 11 released its data 2025-05-29T15:28:03.401641Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:11] at 9437184 is Restart 2025-05-29T15:28:03.428718Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:28:03.428743Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-05-29T15:28:03.428930Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 11 at 9437184 restored its data 2025-05-29T15:28:03.443445Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:175: Operation [0:11] at 9437184 exceeded memory limit 4194304 and requests 33554432 more for the next try 2025-05-29T15:28:03.443517Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 11 released its data 2025-05-29T15:28:03.443527Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:11] at 9437184 is Restart 2025-05-29T15:28:03.443646Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:28:03.443652Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-05-29T15:28:03.443786Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 11 at 9437184 restored its data 2025-05-29T15:28:03.699164Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:189: Tablet 9437184 is not ready for [0:11] execution 2025-05-29T15:28:03.699297Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 11 released its data 2025-05-29T15:28:03.699308Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:11] at 9437184 is Restart 2025-05-29T15:28:03.722662Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:28:03.722689Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-05-29T15:28:03.722886Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 11 at 9437184 restored its data 2025-05-29T15:28:04.038621Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:175: Operation [0:11] at 9437184 exceeded memory limit 37748736 and requests 301989888 more for the next try 2025-05-29T15:28:04.038797Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 11 released its data 2025-05-29T15:28:04.038811Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:11] at 9437184 
is Restart 2025-05-29T15:28:04.053373Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:28:04.053400Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-05-29T15:28:04.053608Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 11 at 9437184 restored its data 2025-05-29T15:28:04.056529Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:189: Tablet 9437184 is not ready for [0:11] execution 2025-05-29T15:28:04.056589Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 11 released its data 2025-05-29T15:28:04.056600Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:11] at 9437184 is Restart 2025-05-29T15:28:04.060489Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:28:04.060508Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-05-29T15:28:04.060668Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 11 at 9437184 restored its data 2025-05-29T15:28:04.061067Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:189: Tablet 9437184 is not ready for [0:11] execution 2025-05-29T15:28:04.061085Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 11 released its data 2025-05-29T15:28:04.061091Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:11] at 9437184 is Restart 2025-05-29T15:28:04.065272Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:28:04.065295Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-05-29T15:28:04.065458Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 11 at 9437184 restored its data 2025-05-29T15:28:04.066452Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:189: Tablet 9437184 is not ready for [0:11] execution 2025-05-29T15:28:04.066480Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:561: tx 11 released its data 2025-05-29T15:28:04.066486Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:11] at 9437184 is Restart 2025-05-29T15:28:04.116591Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-05-29T15:28:04.116625Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-05-29T15:28:04.116837Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:661: tx 11 at 9437184 restored its data 2025-05-29T15:28:04.987862Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:306: Executed operation [0:11] at tablet 9437184 with status COMPLETE 2025-05-29T15:28:04.987904Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:312: Datashard execution counters for [0:11] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 129871, SelectRangeBytes: 40000268, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-05-29T15:28:04.987927Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:11] at 9437184 is Executed 2025-05-29T15:28:04.987935Z node 3 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1910: Advance execution plan for [0:11] at 9437184 executing on unit ExecuteDataTx 2025-05-29T15:28:04.987940Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:11] at 9437184 to execution unit FinishPropose 2025-05-29T15:28:04.987945Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:11] at 9437184 on unit FinishPropose 2025-05-29T15:28:04.987956Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 11 at tablet 9437184 send to client, exec latency: 64 ms, propose latency: 64 ms, status: COMPLETE 2025-05-29T15:28:04.987979Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:11] at 9437184 is DelayComplete 2025-05-29T15:28:04.987983Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:11] at 9437184 executing on unit FinishPropose 2025-05-29T15:28:04.987986Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:11] at 9437184 to execution unit CompletedOperations 2025-05-29T15:28:04.987989Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:11] at 9437184 on unit CompletedOperations 2025-05-29T15:28:04.988001Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:11] at 9437184 is Executed 2025-05-29T15:28:04.988003Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:11] at 9437184 executing on unit CompletedOperations 2025-05-29T15:28:04.988006Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1922: Execution plan for [0:11] at 9437184 has finished 2025-05-29T15:28:04.994115Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-05-29T15:28:04.994135Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1933: Complete execution for [0:11] at 9437184 on unit FinishPropose 2025-05-29T15:28:04.994148Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> BuildStatsBTreeIndex::Single_Groups [GOOD] >> BuildStatsBTreeIndex::Single_Groups_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi >> BuildStatsMixedIndex::Single_Groups_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution >> THiveTest::TestCheckSubHiveDrain [GOOD] >> THiveTest::TestCheckSubHiveMigration >> TCompactionMulti::MainPageCollectionEdge [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution [GOOD] >> Charge::Lookups [GOOD] >> Charge::ByKeysBasics [GOOD] >> Charge::ByKeysGroups [GOOD] >> Charge::ByKeysGroupsLimits [GOOD] >> Charge::ByKeysLimits [GOOD] >> Charge::ByKeysReverse [GOOD] >> Charge::ByKeysHistory [GOOD] >> Charge::ByKeysIndex [GOOD] >> Charge::ByRows [GOOD] >> Charge::ByRowsReverse [GOOD] >> Charge::ByRowsLimits [GOOD] >> Charge::ByRowsLimitsReverse [GOOD] >> DBase::Basics [GOOD] >> DBase::Select [GOOD] >> DBase::Defaults [GOOD] >> DBase::Subsets [GOOD] >> DBase::Garbage [GOOD] >> DBase::Affects [GOOD] >> DBase::Annex [GOOD] >> DBase::AnnexRollbackChanges [GOOD] >> DBase::Outer [GOOD] >> DBase::VersionBasics [GOOD] >> 
DBase::KIKIMR_15506_MissingSnapshotKeys [GOOD] >> DBase::EraseCacheWithUncommittedChanges [GOOD] >> DBase::EraseCacheWithUncommittedChangesCompacted [GOOD] >> DBase::AlterAndUpsertChangesVisibility [GOOD] >> DBase::UncommittedChangesVisibility [GOOD] >> DBase::UncommittedChangesCommitWithUpdates [GOOD] >> DBase::ReplayNewTable [GOOD] >> DBase::SnapshotNewTable [GOOD] >> DBase::DropModifiedTable [GOOD] >> DBase::KIKIMR_15598_Many_MemTables >> BuildStatsBTreeIndex::Single_Groups_History [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History_Slices >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue >> TCompactionMulti::MainPageCollectionEdgeMany [GOOD] >> TCompactionMulti::MainPageCollectionOverflow [GOOD] >> TCompactionMulti::MainPageCollectionOverflowSmallRefs [GOOD] >> TCompactionMulti::MainPageCollectionOverflowLargeRefs [GOOD] >> TExecutorDb::RandomOps >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue [GOOD] >> TFlatTableExecutor_ExecutorTxLimit::TestExecutorTxLimit [GOOD] >> TFlatTableExecutor_Follower::BasicFollowerRead [GOOD] >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachAfterLoan >> TSharedPageCache::S3FIFO [GOOD] >> TSharedPageCache::ReplacementPolicySwitch >> BuildStatsBTreeIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsBTreeIndex::Mixed >> TFlatTableExecutor_Follower::FollowerAttachAfterLoan [GOOD] >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex >> TSharedPageCache::ReplacementPolicySwitch [GOOD] >> TSharedPageCache::MiddleCache_FlatIndex >> TActorActivity::Basic [GOOD] >> ActorBootstrapped::TestBootstrapped [GOOD] >> ActorBootstrapped::TestBootstrappedParent [GOOD] >> TActorTracker::Basic >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex >> TSharedPageCache::MiddleCache_FlatIndex [GOOD] >> TSharedPageCache::ZeroCache_BTreeIndex >> TActorTracker::Basic [GOOD] >> TInterconnectTest::TestBlobEvent >> DBase::KIKIMR_15598_Many_MemTables [GOOD] >> BuildStatsBTreeIndex::Mixed [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups >> TKeyValueTest::TestConcatWorks [GOOD] >> TKeyValueTest::TestConcatWorksNewApi ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TActorTracker::Basic [GOOD] Test command err: ASYNC_DESTROYER >> THiveTest::TestCheckSubHiveMigration [GOOD] >> THiveTest::TestCheckSubHiveMigrationManyTablets >> TPartSlice::TrivialMerge [GOOD] >> TPartSlice::SupersetByRowId [GOOD] >> TPartSlice::Subtract [GOOD] >> TPartSlice::UnsplitBorrow [GOOD] >> TPartSliceLoader::RestoreMissingSlice >> DataCleanup::CleanupDataNoTables [GOOD] >> DataCleanup::CleanupDataNoTablesWithRestart [GOOD] >> DataCleanup::CleanupDataLog [GOOD] >> DataCleanup::CleanupData [GOOD] >> DataCleanup::CleanupDataMultipleFamilies [GOOD] >> DataCleanup::CleanupDataMultipleTables >> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataGC [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxPartialDataHold [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse [GOOD] >> 
TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldOnRelease [GOOD] >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig [GOOD] >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan >> DataCleanup::CleanupDataMultipleTables [GOOD] >> TPartSliceLoader::RestoreMissingSlice [GOOD] >> TPartSliceLoader::RestoreOneSlice [GOOD] >> DataCleanup::CleanupDataWithFollowers [GOOD] >> DataCleanup::CleanupDataMultipleTimes [GOOD] >> DataCleanup::CleanupDataEmptyTable [GOOD] >> DataCleanup::CleanupDataWithRestarts [GOOD] >> TPartSliceLoader::RestoreMissingSliceFullScreen [GOOD] >> DataCleanup::CleanupDataRetryWithNotGreaterGenerations [GOOD] >> TPartSliceLoader::RestoreFromScreenIndexKeys [GOOD] >> DataCleanup::CleanupDataWithTabletGCErrors [GOOD] >> TPartSliceLoader::RestoreFromScreenDataKeys [GOOD] >> TRowVersionRangesTest::SimpleInserts [GOOD] >> TRowVersionRangesTest::MergeFailLeft [GOOD] >> TRowVersionRangesTest::MergeFailRight [GOOD] >> DataCleanup::CleanupDataWithSysTabletGCErrors >> TRowVersionRangesTest::MergeFailOuter [GOOD] >> TRowVersionRangesTest::MergeFailInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeft [GOOD] >> TRowVersionRangesTest::MergeExtendLeftInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeftComplete [GOOD] >> TRowVersionRangesTest::MergeExtendRight [GOOD] >> TRowVersionRangesTest::MergeExtendRightInner [GOOD] >> TRowVersionRangesTest::MergeExtendRightComplete [GOOD] >> TRowVersionRangesTest::MergeExtendBoth [GOOD] >> TRowVersionRangesTest::MergeHoleExact [GOOD] >> TRowVersionRangesTest::MergeHoleInner [GOOD] >> TRowVersionRangesTest::MergeHoleOuter [GOOD] >> TRowVersionRangesTest::MergeAllOuter [GOOD] >> TRowVersionRangesTest::MergeAllInner [GOOD] >> TRowVersionRangesTest::MergeAllEdges [GOOD] >> TRowVersionRangesTest::ContainsEmpty [GOOD] >> TRowVersionRangesTest::ContainsNonEmpty [GOOD] >> TRowVersionRangesTest::ContainsInvalid [GOOD] >> TRowVersionRangesTest::AdjustDown [GOOD] >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorOrder [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorLowerBound [GOOD] >> TS3FIFOCache::Touch [GOOD] >> TS3FIFOCache::Touch_MainQueue [GOOD] >> TS3FIFOCache::EvictNext [GOOD] >> TS3FIFOCache::UpdateLimit [GOOD] >> TS3FIFOCache::Erase [GOOD] >> TS3FIFOCache::Random [GOOD] >> TS3FIFOGhostQueue::Basics [GOOD] >> TScheme::Shapshot [GOOD] >> TScheme::Delta [GOOD] >> TScheme::Policy [GOOD] >> TScreen::Cuts [GOOD] >> TScreen::Join [GOOD] >> TScreen::Sequential >> TInterconnectTest::TestBlobEvent [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> DBase::KIKIMR_15598_Many_MemTables [GOOD] Test command err: 3 parts: [0:0:1:0:0:0:0] 167 rows, 7 pages, 1 levels: (91, 38) (166, 63) (325, 116) (394, 139) (481, 168) [0:0:2:0:0:0:0] 166 rows, 8 pages, 2 levels: (631, 218) (709, 244) (853, 292) (934, 319) (1087, 370) [0:0:3:0:0:0:0] 167 rows, 8 pages, 2 levels: (1156, 393) (1246, 423) (1396, 473) (1471, 498) (1633, 552) Checking BTree: adding part [0:0:1:0:0:0:0] data size (14.1KiB in total) adding group {0,0} PageId: 8 RowCount: 167 DataSize: 13685 ErasedRowCount: 0 LevelCount: 1 IndexSize: 371 added slice [0, 167) data size (13.4KiB - 0B) => 13.4KiB adding part [0:0:2:0:0:0:0] data size (14.6KiB in total) adding group {0,0} PageId: 11 RowCount: 166 DataSize: 14080 ErasedRowCount: 0 LevelCount: 2 IndexSize: 530 added slice [0, 166) data size (13.8KiB - 0B) => 27.1KiB adding part [0:0:3:0:0:0:0] data size 
(14.8KiB in total) adding group {0,0} PageId: 11 RowCount: 167 DataSize: 14255 ErasedRowCount: 0 LevelCount: 2 IndexSize: 530 added slice [0, 167) data size (13.9KiB - 0B) => 41KiB building histogram with row resolution 0, data size resolution 42B slicing part [0:0:1:0:0:0:0]: { {rows: [0, 166] keys: [{7, 10}, {553, 192}]} } slicing node Part: [0:0:1:0:0:0:0] PageId: 8 Level: 1 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13685 BeginKey: {7, 10} EndKey: {553, 192} State: 0 => take adding node future events -1 Part: [0:0:1:0:0:0:0] PageId: 8 Level: 1 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13685 BeginKey: {7, 10} EndKey: {553, 192} State: 0 slicing part [0:0:2:0:0:0:0]: { {rows: [0, 165] keys: [{556, 193}, {1087, 370}]} } slicing node Part: [0:0:2:0:0:0:0] PageId: 11 Level: 2 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 14080 BeginKey: {556, 193} EndKey: {1087, 370} State: 0 => take adding node future events -1 Part: [0:0:2:0:0:0:0] PageId: 11 Level: 2 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 14080 BeginKey: {556, 193} EndKey: {1087, 370} State: 0 slicing part [0:0:3:0:0:0:0]: { {rows: [0, 166] keys: [{1090, 371}, {1645, 556}]} } slicing node Part: [0:0:3:0:0:0:0] PageId: 11 Level: 2 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14255 BeginKey: {1090, 371} EndKey: {1645, 556} State: 0 => take adding node future events -1 Part: [0:0:3:0:0:0:0] PageId: 11 Level: 2 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14255 BeginKey: {1090, 371} EndKey: {1645, 556} State: 0 iterating stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 0 nextHistogramDataSize: 42 closedRowCount: 0 closedDataSize: 0 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 6 currentKeyPointer: IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 8 Level: 1 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13685 BeginKey: {7, 10} EndKey: {553, 192} State: 0 processing event IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 8 Level: 1 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13685 BeginKey: {7, 10} EndKey: {553, 192} State: 0 loading node by row count triggerPart: [0:0:1:0:0:0:0] PageId: 8 Level: 1 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13685 BeginKey: {7, 10} EndKey: {553, 192} State: 1 closedRowCount: 0 openedRowCount: 167 nextHistogramRowCount: 0 adding part [0:0:1:0:0:0:0] data size (14.1KiB in total) adding group {0,0} PageId: 8 RowCount: 167 DataSize: 13685 ErasedRowCount: 0 LevelCount: 1 IndexSize: 371 added slice [0, 167) data size (13.4KiB - 0B) => 13.4KiB adding part [0:0:2:0:0:0:0] data size (14.6KiB in total) adding group {0,0} PageId: 11 RowCount: 166 DataSize: 14080 ErasedRowCount: 0 LevelCount: 2 IndexSize: 530 added slice [0, 166) data size (13.8KiB - 0B) => 27.1KiB adding part [0:0:3:0:0:0:0] data size (14.8KiB in total) adding group {0,0} PageId: 11 RowCount: 167 DataSize: 14255 ErasedRowCount: 0 LevelCount: 2 IndexSize: 530 added slice [0, 167) data size (13.9KiB - 0B) => 41KiB building histogram with row resolution 0, data size resolution 42B slicing part [0:0:1:0:0:0:0]: { {rows: [0, 166] keys: [{7, 10}, {553, 192}]} } slicing node Part: [0:0:1:0:0:0:0] PageId: 8 Level: 1 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13685 BeginKey: {7, 10} EndKey: {553, 192} State: 0 => take adding node future events -1 Part: [0:0:1:0:0:0:0] PageId: 8 Level: 1 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13685 
BeginKey: {7, 10} EndKey: {553, 192} State: 0 slicing part [0:0:2:0:0:0:0]: { {rows: [0, 165] keys: [{556, 193}, {1087, 370}]} } slicing node Part: [0:0:2:0:0:0:0] PageId: 11 Level: 2 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 14080 BeginKey: {556, 193} EndKey: {1087, 370} State: 0 => take adding node future events -1 Part: [0:0:2:0:0:0:0] PageId: 11 Level: 2 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 14080 BeginKey: {556, 193} EndKey: {1087, 370} State: 0 slicing part [0:0:3:0:0:0:0]: { {rows: [0, 166] keys: [{1090, 371}, {1645, 556}]} } slicing node Part: [0:0:3:0:0:0:0] PageId: 11 Level: 2 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14255 BeginKey: {1090, 371} EndKey: {1645, 556} State: 0 => take adding node future events -1 Part: [0:0:3:0:0:0:0] PageId: 11 Level: 2 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14255 BeginKey: {1090, 371} EndKey: {1645, 556} State: 0 iterating stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 0 nextHistogramDataSize: 42 closedRowCount: 0 closedDataSize: 0 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 6 currentKeyPointer: IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 8 Level: 1 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13685 BeginKey: {7, 10} EndKey: {553, 192} State: 0 processing event IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 8 Level: 1 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13685 BeginKey: {7, 10} EndKey: {553, 192} State: 0 loading node by row count triggerPart: [0:0:1:0:0:0:0] PageId: 8 Level: 1 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13685 BeginKey: {7, 10} EndKey: {553, 192} State: 1 closedRowCount: 0 openedRowCount: 167 nextHistogramRowCount: 0 adding event 0 IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 25 BeginDataSize: 0 EndDataSize: 1974 BeginKey: {7, 10} EndKey: {91, 38} State: 0 processing event IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 25 BeginDataSize: 0 EndDataSize: 1974 BeginKey: {7, 10} EndKey: {91, 38} State: 0 adding event 1 IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 25 BeginDataSize: 0 EndDataSize: 1974 BeginKey: {7, 10} EndKey: {91, 38} State: 1 adding event 1 IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 1 Level: 0 BeginRowId: 25 EndRowId: 50 BeginDataSize: 1974 EndDataSize: 3992 BeginKey: {91, 38} EndKey: {166, 63} State: 0 adding event 1 IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 1 Level: 0 BeginRowId: 25 EndRowId: 50 BeginDataSize: 1974 EndDataSize: 3992 BeginKey: {91, 38} EndKey: {166, 63} State: 0 adding event 1 IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 2 Level: 0 BeginRowId: 50 EndRowId: 74 BeginDataSize: 3992 EndDataSize: 5889 BeginKey: {166, 63} EndKey: {253, 92} State: 0 adding event 1 IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 2 Level: 0 BeginRowId: 50 EndRowId: 74 BeginDataSize: 3992 EndDataSize: 5889 BeginKey: {166, 63} EndKey: {253, 92} State: 0 adding event 1 IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 3 Level: 0 BeginRowId: 74 EndRowId: 96 BeginDataSize: 5889 EndDataSize: 7868 BeginKey: {253, 92} EndKey: {325, 116} State: 0 adding event 1 IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 3 Level: 0 BeginRowId: 74 EndRowId: 96 BeginDataSize: 5889 EndDataSize: 7868 BeginKey: {253, 92} EndKey: {325, 116} State: 0 adding event 1 IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 4 Level: 0 BeginRowId: 96 EndRowId: 119 BeginDataSize: 7868 EndDataSize: 9910 BeginKey: {325, 
116} EndKey: {394, 139} State: 0 adding event 1 IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 4 Level: 0 BeginRowId: 96 EndRowId: 119 BeginDataSize: 7868 EndDataSize: 9910 BeginKey: {325, 116} EndKey: {394, 139} State: 0 adding event 1 IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 5 Level: 0 BeginRowId: 119 EndRowId: 144 BeginDataSize: 9910 EndDataSize: 11938 BeginKey: {394, 139} EndKey: {481, 168} State: 0 adding event 1 IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 5 Level: 0 BeginRowId: 119 EndRowId: 144 BeginDataSize: 9910 EndDataSize: 11938 BeginKey: {394, 139} EndKey: {481, 168} State: 0 adding event 1 IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 6 Level: 0 BeginRowId: 144 EndRowId: 167 BeginDataSize: 11938 EndDataSize: 13685 BeginKey: {481, 168} EndKey: {553, 192} State: 0 adding event 1 IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 6 Level: 0 BeginRowId: 144 EndRowId: 167 BeginDataSize: 11938 EndDataSize: 13685 BeginKey: {481, 168} EndKey: {553, 192} State: 0 loading node by row count triggerPart: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 25 BeginDataSize: 0 EndDataSize: 1974 BeginKey: {7, 10} EndKey: {91, 38} State: 1 closedRowCount: 0 openedRowCount: 25 nextHistogramRowCount: 0 loading node by data size triggerPart: [0:0:1:0:0:0:0] PageId: 8 Level: 1 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13685 BeginKey: {7, 10} EndKey: {553, 192} State: 3 closedDataSize: 0 openedDataSize: 1974 nextHistogramDataSize: 42 loading node by data size triggerPart: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 25 BeginDataSize: 0 EndDataSize: 1974 BeginKey: {7, 10} EndKey: {91, 38} State: 1 closedDataSize: 0 openedDataSize: 1974 nextHistogramDataSize: 42 checking stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 0 nextHistogramDataSize: 42 closedRowCount: 0 closedDataSize: 0 openedRowCount: 25 openedDataSize: 1974 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 18 currentKeyPointer: IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 8 Level: 1 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13685 BeginKey: {7, 10} EndKey: {553, 192} State: 3 iterating stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 0 nextHistogramDataSize: 42 closedRowCount: 0 closedDataSize: 0 openedRowCount: 25 openedDataSize: 1974 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 18 currentKeyPointer: IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 25 BeginDataSize: 0 EndDataSize: 1974 BeginKey: {7, 10} EndKey: {91, 38} State: 1 processing event IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 25 BeginDataSize: 0 EndDataSize: 1974 BeginKey: {7, 10} EndKey: {91, 38} State: 1 checking stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 0 nextHistogramDataSize: 42 closedRowCount: 25 closedDataSize: 1974 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 17 currentKeyPointer: IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 25 BeginDataSize: 0 EndDataSize: 1974 BeginKey: {7, 10} EndKey: {91, 38} State: 2 iterating stats.RowCountHistogram: 1 stats.DataSizeHistogram: 1 nextHistogramRowCount: 26 nextHistogramDataSize: 1975 closedRowCount: 25 closedDataSize: 1974 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 17 currentKeyPointer: IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 1 Level: 0 BeginRowId: 25 EndRowId: 50 
BeginDataSize: 1974 EndDataSize: 3992 BeginKey: {91, 38} EndKey: {166, 63} State: 0 processing event IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 1 Level: 0 BeginRowId: 25 EndRowId: 50 BeginDataSize: 1974 EndDataSize: 3992 BeginKey: {91, 38} EndKey: {166, 63} State: 0 loading node by row count triggerPart: [0:0:1:0:0:0:0] PageId: 1 Level: 0 BeginRowId: 25 EndRowId: 50 BeginDataSize: 1974 EndDataSize: 3992 BeginKey: {91, 38} EndKey: {166, 63} State: 1 closedRowCount: 25 openedRowCount: 25 nextHistogramRowCount: 26 loading node by data size t ... : 41 closedRowCount: 0 closedDataSize: 0 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 6 currentKeyPointer: IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13565 BeginKey: {7, 10} EndKey: {553, 192} State: 0 processing event IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13565 BeginKey: {7, 10} EndKey: {553, 192} State: 0 loading node by row count triggerPart: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13565 BeginKey: {7, 10} EndKey: {553, 192} State: 1 closedRowCount: 0 openedRowCount: 167 nextHistogramRowCount: 0 loading node by data size triggerPart: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13565 BeginKey: {7, 10} EndKey: {553, 192} State: 1 closedDataSize: 0 openedDataSize: 13565 nextHistogramDataSize: 41 checking stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 0 nextHistogramDataSize: 41 closedRowCount: 0 closedDataSize: 0 openedRowCount: 167 openedDataSize: 13565 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 5 currentKeyPointer: IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13565 BeginKey: {7, 10} EndKey: {553, 192} State: 1 iterating stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 0 nextHistogramDataSize: 41 closedRowCount: 0 closedDataSize: 0 openedRowCount: 167 openedDataSize: 13565 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 5 currentKeyPointer: IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13565 BeginKey: {7, 10} EndKey: {553, 192} State: 1 processing event IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13565 BeginKey: {7, 10} EndKey: {553, 192} State: 1 checking stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 0 nextHistogramDataSize: 41 closedRowCount: 167 closedDataSize: 13565 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 4 currentKeyPointer: IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13565 BeginKey: {7, 10} EndKey: {553, 192} State: 2 iterating stats.RowCountHistogram: 1 stats.DataSizeHistogram: 1 nextHistogramRowCount: 168 nextHistogramDataSize: 13566 closedRowCount: 167 closedDataSize: 13565 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 4 currentKeyPointer: IsBegin: 1 Part: [0:0:2:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 13940 BeginKey: {556, 193} EndKey: {1087, 370} State: 0 processing event IsBegin: 1 Part: [0:0:2:0:0:0:0] 
PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 13940 BeginKey: {556, 193} EndKey: {1087, 370} State: 0 loading node by row count triggerPart: [0:0:2:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 13940 BeginKey: {556, 193} EndKey: {1087, 370} State: 1 closedRowCount: 167 openedRowCount: 166 nextHistogramRowCount: 168 loading node by data size triggerPart: [0:0:2:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 13940 BeginKey: {556, 193} EndKey: {1087, 370} State: 1 closedDataSize: 13565 openedDataSize: 13940 nextHistogramDataSize: 13566 checking stats.RowCountHistogram: 1 stats.DataSizeHistogram: 1 nextHistogramRowCount: 168 nextHistogramDataSize: 13566 closedRowCount: 167 closedDataSize: 13565 openedRowCount: 166 openedDataSize: 13940 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 3 currentKeyPointer: IsBegin: 1 Part: [0:0:2:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 13940 BeginKey: {556, 193} EndKey: {1087, 370} State: 1 iterating stats.RowCountHistogram: 1 stats.DataSizeHistogram: 1 nextHistogramRowCount: 168 nextHistogramDataSize: 13566 closedRowCount: 167 closedDataSize: 13565 openedRowCount: 166 openedDataSize: 13940 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 3 currentKeyPointer: IsBegin: 0 Part: [0:0:2:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 13940 BeginKey: {556, 193} EndKey: {1087, 370} State: 1 processing event IsBegin: 0 Part: [0:0:2:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 13940 BeginKey: {556, 193} EndKey: {1087, 370} State: 1 checking stats.RowCountHistogram: 1 stats.DataSizeHistogram: 1 nextHistogramRowCount: 168 nextHistogramDataSize: 13566 closedRowCount: 333 closedDataSize: 27505 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 2 currentKeyPointer: IsBegin: 0 Part: [0:0:2:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 13940 BeginKey: {556, 193} EndKey: {1087, 370} State: 2 iterating stats.RowCountHistogram: 2 stats.DataSizeHistogram: 2 nextHistogramRowCount: 334 nextHistogramDataSize: 27506 closedRowCount: 333 closedDataSize: 27505 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 2 currentKeyPointer: IsBegin: 1 Part: [0:0:3:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14115 BeginKey: {1090, 371} EndKey: {1645, 556} State: 0 processing event IsBegin: 1 Part: [0:0:3:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14115 BeginKey: {1090, 371} EndKey: {1645, 556} State: 0 loading node by row count triggerPart: [0:0:3:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14115 BeginKey: {1090, 371} EndKey: {1645, 556} State: 1 closedRowCount: 333 openedRowCount: 167 nextHistogramRowCount: 334 loading node by data size triggerPart: [0:0:3:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14115 BeginKey: {1090, 371} EndKey: {1645, 556} State: 1 closedDataSize: 27505 openedDataSize: 14115 nextHistogramDataSize: 27506 checking stats.RowCountHistogram: 2 stats.DataSizeHistogram: 2 nextHistogramRowCount: 334 nextHistogramDataSize: 27506 closedRowCount: 333 closedDataSize: 27505 openedRowCount: 167 openedDataSize: 14115 
openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 1 currentKeyPointer: IsBegin: 1 Part: [0:0:3:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14115 BeginKey: {1090, 371} EndKey: {1645, 556} State: 1 iterating stats.RowCountHistogram: 2 stats.DataSizeHistogram: 2 nextHistogramRowCount: 334 nextHistogramDataSize: 27506 closedRowCount: 333 closedDataSize: 27505 openedRowCount: 167 openedDataSize: 14115 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 1 currentKeyPointer: IsBegin: 0 Part: [0:0:3:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14115 BeginKey: {1090, 371} EndKey: {1645, 556} State: 1 processing event IsBegin: 0 Part: [0:0:3:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14115 BeginKey: {1090, 371} EndKey: {1645, 556} State: 1 checking stats.RowCountHistogram: 2 stats.DataSizeHistogram: 2 nextHistogramRowCount: 334 nextHistogramDataSize: 27506 closedRowCount: 500 closedDataSize: 41620 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 0 currentKeyPointer: IsBegin: 0 Part: [0:0:3:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14115 BeginKey: {1090, 371} EndKey: {1645, 556} State: 2 finished stats.RowCountHistogram: 2 stats.DataSizeHistogram: 2 nextHistogramRowCount: 334 nextHistogramDataSize: 27506 closedRowCount: 500 closedDataSize: 41620 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 0
Touched 0% bytes, 0 pages
RowCountHistogram: 33% (actual 33%) key = (553, 192) value = 167 (actual 166 - 0% error) 33% (actual 33%) key = (1087, 370) value = 333 (actual 332 - 0% error) 33% (actual 33%)
DataSizeHistogram: 32% (actual 32%) key = (553, 192) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1087, 370) value = 27505 (actual 27505 - 0% error) 33% (actual 33%)
Checking Flat: Touched 100% bytes, 3 pages
RowCountHistogram: 33% (actual 33%) key = (556, 193) value = 167 (actual 167 - 0% error) 33% (actual 33%) key = (1090, 371) value = 333 (actual 333 - 0% error) 33% (actual 33%)
DataSizeHistogram: 32% (actual 32%) key = (556, 193) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1090, 371) value = 27505 (actual 27505 - 0% error) 33% (actual 33%)
Checking Mixed: Touched 0% bytes, 0 pages
RowCountHistogram: 100% (actual 100%)
DataSizeHistogram: 100% (actual 100%)
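The "Checking ..." summaries above pair each histogram bucket's estimated cumulative value with the value a full scan observes. As a reading aid, here is a minimal sketch of the arithmetic those lines imply, assuming the shares and errors are integer (truncated) percentages of the table totals taken from the "finished" record (500 rows, 41620 bytes); the type and function names below are invented for illustration and are not YDB code.

#include <cstdint>
#include <cstdio>

struct TBucket {
    uint64_t Estimated; // cumulative value the histogram reports at this key
    uint64_t Actual;    // cumulative value a full scan observes at the same key
};

// Prints one bucket the way the trace does, with truncated integer percents;
// the "key = (...)" part of the real output is omitted here.
static void PrintBucket(TBucket prev, TBucket cur, uint64_t total) {
    unsigned share = unsigned((cur.Estimated - prev.Estimated) * 100 / total);
    unsigned actualShare = unsigned((cur.Actual - prev.Actual) * 100 / total);
    uint64_t diff = cur.Estimated > cur.Actual ? cur.Estimated - cur.Actual
                                               : cur.Actual - cur.Estimated;
    unsigned error = unsigned(diff * 100 / total);
    std::printf("%u%% (actual %u%%) value = %llu (actual %llu - %u%% error)\n",
                share, actualShare,
                (unsigned long long)cur.Estimated,
                (unsigned long long)cur.Actual, error);
}

int main() {
    const uint64_t totalRows = 500; // closedRowCount in the "finished" record
    TBucket begin{0, 0}, first{167, 166}, second{333, 332};
    PrintBucket(begin, first, totalRows);  // 33% (actual 33%) value = 167 (actual 166 - 0% error)
    PrintBucket(first, second, totalRows); // 33% (actual 33%) value = 333 (actual 332 - 0% error)
    // The remaining 500 - 333 rows are the trailing "33% (actual 33%)" bucket.
    return 0;
}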
Got : 24000 2106439 49449 38 44 Expected: 24000 2106439 49449 38 44 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) }
Got : 12816 1121048 49449 20 23 Expected: 12816 1121048 49449 20 23
Got : 24000 3547100 81694 64 44 Expected: 24000 3547100 81694 64 44 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) }
Got : 9582 1425198 81694 26 17 Expected: 9582 1425198 81694 26 17
Got : 24000 2460139 23760 42 41 Expected: 24000 2460139 23760 42 41 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) }
Got : 10440 1060798 23760 18 18 Expected: 10440 1060798 23760 18 18
Got : 24000 4054050 46562 68 43 Expected: 24000 4054050 46562 68 43 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) }
Got : 13570 2277890 46562 38 24 Expected: 13570 2277890 46562 38 24
Got : 24000 2106459 49449 38 44 Expected: 24000 2106459 49449 38 44
Got : 24000 2460219 23555 41 41 Expected: 24000 2460219 23555 41 41
Got : 24000 4054270 46543 66 43 Expected: 24000 4054270 46543 66 43
Got : 24000 2106479 49555 38 44 Expected: 24000 2106479 49555 38 44
Got : 24000 2460259 23628 41 41 Expected: 24000 2460259 23628 41 41
Got : 24000 4054290 46640 65 43 Expected: 24000 4054290 46640 65 43
Got : 24000 2106439 66674 3 4 Expected: 24000 2106439 66674 3 4 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) }
Got : 12816 1121048 66674 2 2 Expected: 12816 1121048 66674 2 2
Got : 24000 2460139 33541 4 4 Expected: 24000 2460139 33541 4 4 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) }
Got : 10440 1060798 33541 1 1 Expected: 10440 1060798 33541 1 1
Got : 24000 4054050 64742 7 4 Expected: 24000 4054050 64742 7 4 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) }
Got : 13570 2234982 64742 4 2 Expected: 13570 2234982 64742 4 2
>> TScreen::Sequential [GOOD]
>> TScreen::Random [GOOD]
>> TScreen::Shrink [GOOD]
>> TScreen::Cook [GOOD]
>> TSharedPageCache::Limits
>> DataCleanup::CleanupDataWithSysTabletGCErrors [GOOD]
>> DBase::WideKey
>> TSharedPageCache::ZeroCache_BTreeIndex [GOOD]
>> TSharedPageCache::ZeroCache_FlatIndex
>> DBase::WideKey [GOOD]
>> DBase::VersionPureMem
>> TSharedPageCache::Limits [GOOD]
>> TSharedPageCache::Limits_Config
>> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD]
>> TKesusTest::TestAcquireSemaphore
>> DBase::VersionPureMem [GOOD]
>> DBase::VersionPureParts
>> TSharedPageCache::Limits_Config [GOOD]
>> TSharedPageCache::ClockPro
>> TInterconnectTest::TestBlobEvent220Bytes [GOOD]
>> TInterconnectTest::TestAddressResolve
>> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan [GOOD]
>> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits [GOOD]
>> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex [GOOD]
>> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex
>> DBase::VersionPureParts [GOOD]
>> DBase::VersionCompactedMem
>> TKesusTest::TestAcquireSemaphore [GOOD]
>> TSharedPageCache::ZeroCache_FlatIndex [GOOD]
>> TSharedPageCache_Actor::Request_Basics [GOOD]
>> TSharedPageCache_Actor::Request_Failed [GOOD]
>> TSharedPageCache_Actor::Request_Queue [GOOD]
>> TSharedPageCache_Actor::Request_Queue_Failed [GOOD]
>> TSharedPageCache_Actor::Request_Queue_Fast [GOOD]
>> TSharedPageCache_Actor::Request_Sequential [GOOD]
>> TSharedPageCache_Actor::Request_Cached [GOOD]
>> TSharedPageCache_Actor::Request_Different_Collections
>> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex [GOOD]
>> TFlatTableExecutor_StickyPages::TestSticky [GOOD]
>> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex [GOOD]
>> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex [GOOD]
>> TFlatTableExecutor_StickyPages::TestStickyMain [GOOD]
>> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex [GOOD]
>> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex [GOOD]
>> TFlatTableExecutor_StickyPages::TestStickyAll
>> DBase::VersionCompactedMem [GOOD]
>> DBase::VersionCompactedParts [GOOD]
>> Memtable::Basics [GOOD]
>> Memtable::BasicsReverse [GOOD]
>> Memtable::Markers [GOOD]
>> Memtable::Overlap [GOOD]
>> Memtable::Wreck
>> TSharedPageCache_Actor::Request_Different_Collections [GOOD]
>> TSharedPageCache_Actor::Request_Different_Pages [GOOD]
>> TSharedPageCache_Actor::Request_Different_Pages_Reversed [GOOD]
>> TSharedPageCache_Actor::Request_Subset [GOOD]
>> TSharedPageCache_Actor::Request_Subset_Shuffled [GOOD]
>> TSharedPageCache_Actor::Request_Superset [GOOD]
>> TSharedPageCache_Actor::Request_Superset_Reversed [GOOD]
>> TSharedPageCache_Actor::Request_Crossing [GOOD]
>> TSharedPageCache_Actor::Request_Crossing_Reversed [GOOD]
>> TSharedPageCache_Actor::Request_Crossing_Shuffled [GOOD]
>> TSharedPageCache_Actor::Attach_Basics [GOOD]
>> TSharedPageCache_Actor::Attach_Request [GOOD]
>> TSharedPageCache_Actor::Detach_Basics [GOOD]
>> TSharedPageCache_Actor::Detach_Cached
>> TFlatTableExecutor_StickyPages::TestStickyAll [GOOD]
>> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky [GOOD]
>> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky [GOOD]
>> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs [GOOD]
>> TFlatTableExecutor_VersionedRows::TestVersionedRows [GOOD]
>> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs
>> Memtable::Wreck [GOOD]
>> Memtable::Erased [GOOD]
>> NFwd_TBlobs::MemTableTest [GOOD]
>> NFwd_TBlobs::Lower [GOOD]
>> NFwd_TBlobs::Sieve [GOOD]
>> NFwd_TBlobs::SieveFiltered [GOOD]
>> NFwd_TBlobs::Basics [GOOD]
>> NFwd_TBlobs::Simple [GOOD]
>> NFwd_TBlobs::Shuffle [GOOD]
>> NFwd_TBlobs::Grow [GOOD]
>> NFwd_TBlobs::Trace [GOOD]
>> NFwd_TBlobs::Filtered [GOOD]
>> NFwd_TBTreeIndexCache::Basics [GOOD]
>> NFwd_TBTreeIndexCache::IndexPagesLocator [GOOD]
>> NFwd_TBTreeIndexCache::GetTwice [GOOD]
>> NFwd_TBTreeIndexCache::ForwardTwice [GOOD]
>> NFwd_TBTreeIndexCache::Forward_OnlyUsed [GOOD]
>> NFwd_TBTreeIndexCache::Skip_Done [GOOD]
>> NFwd_TBTreeIndexCache::Skip_Done_None [GOOD]
>> NFwd_TBTreeIndexCache::Skip_Keep [GOOD]
>> NFwd_TBTreeIndexCache::Skip_Wait [GOOD]
>> NFwd_TBTreeIndexCache::Trace_BTree [GOOD]
>> NFwd_TBTreeIndexCache::Trace_Data [GOOD]
>> NFwd_TBTreeIndexCache::End [GOOD]
>> NFwd_TBTreeIndexCache::Slices [GOOD]
>> NFwd_TBTreeIndexCache::ManyApplies [GOOD]
>> NFwd_TFlatIndexCache::Basics [GOOD]
>> NFwd_TFlatIndexCache::IndexPagesLocator [GOOD]
>> NFwd_TFlatIndexCache::GetTwice [GOOD]
>> NFwd_TFlatIndexCache::ForwardTwice [GOOD]
>> NFwd_TFlatIndexCache::Skip_Done [GOOD]
>> NFwd_TFlatIndexCache::Skip_Done_None [GOOD]
>> NFwd_TFlatIndexCache::Skip_Keep [GOOD]
>> NFwd_TFlatIndexCache::End [GOOD]
>> TSharedPageCache_Actor::Detach_Cached [GOOD]
>> TSharedPageCache_Actor::Detach_Expired [GOOD]
>> TSharedPageCache_Actor::Detach_InFly [GOOD]
>> TSharedPageCache_Actor::Detach_Queued [GOOD]
>> TSharedPageCache_Actor::Unregister_Basics [GOOD]
>> TSharedPageCache_Actor::Unregister_Cached [GOOD]
>> TSharedPageCache_Actor::Unregister_Expired [GOOD]
>> TSharedPageCache_Actor::Unregister_InFly [GOOD]
>> TSharedPageCache_Actor::Unregister_Queued [GOOD]
>> TSharedPageCache_Actor::Unregister_Queued_Pending
>> TInterconnectTest::TestManyEvents
------- [TM] {default-linux-x86_64, relwithdebinfo}
ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] Test command err: 2025-05-29T15:27:29.775966Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:29.776000Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:29.779848Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:29.779886Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:29.791171Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:29.791294Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2156], cookie=17865635815564135579, session=0, seqNo=0) 2025-05-29T15:27:29.791345Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:29.812225Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2156], cookie=17865635815564135579, session=1) 2025-05-29T15:27:29.812314Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2156], cookie=1225742609347574809, session=0, seqNo=0) 2025-05-29T15:27:29.812341Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:27:29.822990Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2156], cookie=1225742609347574809, session=2) 2025-05-29T15:27:29.823166Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-05-29T15:27:29.823222Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-05-29T15:27:29.823249Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-05-29T15:27:29.823295Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2156], cookie=222, session=2, semaphore="Lock2" count=1) 2025-05-29T15:27:29.823306Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-05-29T15:27:29.823314Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-05-29T15:27:29.823330Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2156], cookie=333, session=1, semaphore="Lock2" count=1) 2025-05-29T15:27:29.823339Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-05-29T15:27:29.834056Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2156], cookie=111) 2025-05-29T15:27:29.834081Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2156], cookie=222) 2025-05-29T15:27:29.834086Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] 
TTxSemaphoreAcquire::Complete (sender=[1:131:2156], cookie=333) 2025-05-29T15:27:29.834180Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:147:2170], cookie=10669627577899842946, name="Lock1") 2025-05-29T15:27:29.834197Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:147:2170], cookie=10669627577899842946) 2025-05-29T15:27:29.834238Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:150:2173], cookie=8971781552019090056, name="Lock2") 2025-05-29T15:27:29.834242Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:150:2173], cookie=8971781552019090056) 2025-05-29T15:27:29.836510Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:29.836537Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:29.836595Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:29.836716Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:29.868492Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:29.868531Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-05-29T15:27:29.868539Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-05-29T15:27:29.868542Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-05-29T15:27:29.868637Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:190:2203], cookie=2930864109047918310, name="Lock1") 2025-05-29T15:27:29.868653Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:190:2203], cookie=2930864109047918310) 2025-05-29T15:27:29.868731Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:198:2210], cookie=11480276545147173232, name="Lock2") 2025-05-29T15:27:29.868736Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:198:2210], cookie=11480276545147173232) 2025-05-29T15:27:30.304831Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:30.315537Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:30.670494Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:30.681282Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:31.029415Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:31.040432Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:31.385188Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:31.396019Z node 1 
:KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:31.760773Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:31.771625Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:32.116185Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:32.127026Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:32.451242Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:32.462141Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:32.806628Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:32.817358Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:33.162175Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:33.172958Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:33.569532Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:33.580333Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:33.935603Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:33.947517Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:34.302325Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:34.313118Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:34.668248Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:34.679104Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:35.034645Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:35.045521Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:35.453565Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:35.464372Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:35.821288Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:35.832388Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:36.189226Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:36.200179Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:36.555054Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:36.565648Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:36.925023Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: 
[72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:36.936393Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:37.336849Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:37.347567Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:37.702665Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:37.713634Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:38.070504Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:38.081340Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:38.440821Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:38.451579Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:38.796488Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:38.807127Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:39.172110Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [7205759403 ... 2: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:04.565821Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:04.921212Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:04.931979Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:05.277309Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:05.288112Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:05.643739Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:05.654461Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:06.009605Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:06.020384Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:06.389331Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:06.400195Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:06.755406Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:06.766269Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:07.121498Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:07.132194Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:07.487669Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:07.498483Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] 
TTxSelfCheck::Complete 2025-05-29T15:28:07.854485Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:07.865779Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:08.225072Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-05-29T15:28:08.225101Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-05-29T15:28:08.225108Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-05-29T15:28:08.225133Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-05-29T15:28:08.225141Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-05-29T15:28:08.225145Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-05-29T15:28:08.236035Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-05-29T15:28:08.236183Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:363:2344], cookie=15610769156057818243, name="Lock1") 2025-05-29T15:28:08.236200Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:363:2344], cookie=15610769156057818243) 2025-05-29T15:28:08.236249Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:366:2347], cookie=7689248257165645513, name="Lock2") 2025-05-29T15:28:08.236253Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:366:2347], cookie=7689248257165645513) 2025-05-29T15:28:08.236292Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:369:2350], cookie=11810665251807289045) 2025-05-29T15:28:08.236299Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:369:2350], cookie=11810665251807289045) 2025-05-29T15:28:08.238981Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:28:08.239009Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:28:08.239097Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:28:08.239259Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:28:08.291666Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:28:08.291716Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-05-29T15:28:08.291738Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-05-29T15:28:08.291854Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:409:2380], cookie=15827792906780095446) 2025-05-29T15:28:08.291871Z node 4 :KESUS_TABLET DEBUG: 
tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:409:2380], cookie=15827792906780095446) 2025-05-29T15:28:08.291973Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:416:2386], cookie=14559861094473560835, name="Lock1") 2025-05-29T15:28:08.291981Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:416:2386], cookie=14559861094473560835) 2025-05-29T15:28:08.292030Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:419:2389], cookie=16348445565812236956, name="Lock2") 2025-05-29T15:28:08.292034Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:419:2389], cookie=16348445565812236956) 2025-05-29T15:28:08.527670Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:28:08.527711Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:28:08.532088Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:28:08.532128Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:28:08.554023Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:28:08.554203Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2156], cookie=16091081842358175061, session=0, seqNo=0) 2025-05-29T15:28:08.554259Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:28:08.565078Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2156], cookie=16091081842358175061, session=1) 2025-05-29T15:28:08.565167Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2156], cookie=9184672452391378987, session=0, seqNo=0) 2025-05-29T15:28:08.565199Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:28:08.576167Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2156], cookie=9184672452391378987, session=2) 2025-05-29T15:28:08.576267Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2156], cookie=111, session=1, semaphore="Sem1" count=1) 2025-05-29T15:28:08.587244Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2156], cookie=111) 2025-05-29T15:28:08.587389Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:144:2167], cookie=16691876331352079985, name="Sem1", limit=1) 2025-05-29T15:28:08.587434Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-05-29T15:28:08.598386Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:144:2167], cookie=16691876331352079985) 2025-05-29T15:28:08.598506Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute 
(sender=[5:131:2156], cookie=333, session=1, semaphore="Sem1" count=100500)
2025-05-29T15:28:08.609506Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2156], cookie=333)
2025-05-29T15:28:08.609620Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2156], cookie=222, session=1, semaphore="Sem1" count=1)
2025-05-29T15:28:08.609670Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1
2025-05-29T15:28:08.609721Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2156], cookie=333, session=2, semaphore="Sem1" count=1)
2025-05-29T15:28:08.620633Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2156], cookie=222)
2025-05-29T15:28:08.620670Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2156], cookie=333)
2025-05-29T15:28:08.620827Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:154:2177], cookie=8614410964568632601, name="Sem1")
2025-05-29T15:28:08.620852Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:154:2177], cookie=8614410964568632601)
2025-05-29T15:28:08.620920Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:157:2180], cookie=4339188535181795853, name="Sem1")
2025-05-29T15:28:08.620928Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:157:2180], cookie=4339188535181795853)
2025-05-29T15:28:08.620982Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:160:2183], cookie=15651286099661730932, name="Sem1", force=0)
2025-05-29T15:28:08.631873Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:160:2183], cookie=15651286099661730932)
2025-05-29T15:28:08.632044Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:165:2188], cookie=10196494975328195787, name="Sem1", force=1)
2025-05-29T15:28:08.632075Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 1 "Sem1"
2025-05-29T15:28:08.643049Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:165:2188], cookie=10196494975328195787)
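The TKesusTest trace above shows the semaphore life cycle: an acquire within the limit makes the session an owner and the queue is processed in order ("next order #N session S"), an acquire beyond the limit parks the session, and a session timeout deletes its owner links and promotes the next waiter. Below is a toy model consistent with that behaviour, assuming a plain counting semaphore with a FIFO waiter queue; this is an illustrative sketch, not YDB's implementation, and all names are invented.

#include <cstdint>
#include <deque>
#include <map>
#include <utility>

// Toy counting semaphore with a FIFO waiter queue (invented for illustration).
struct TToySemaphore {
    uint64_t Limit = 1;
    uint64_t Used = 0;
    std::map<uint64_t, uint64_t> Owners;               // session -> acquired count
    std::deque<std::pair<uint64_t, uint64_t>> Waiters; // (session, count) in arrival order

    // Returns true if granted immediately ("next order #N session S"),
    // false if the session is parked in the queue.
    bool Acquire(uint64_t session, uint64_t count) {
        if (Used + count <= Limit) {
            Used += count;
            Owners[session] += count;
            return true;
        }
        Waiters.emplace_back(session, count);
        return false;
    }

    // Mirrors "Deleting session N / semaphore ... owner link" on session
    // timeout: drop the owner link, then grant waiters in queue order.
    void ReleaseSession(uint64_t session) {
        auto it = Owners.find(session);
        if (it == Owners.end()) {
            return;
        }
        Used -= it->second;
        Owners.erase(it);
        while (!Waiters.empty() && Used + Waiters.front().second <= Limit) {
            auto [nextSession, count] = Waiters.front();
            Waiters.pop_front();
            Used += count;
            Owners[nextSession] += count;
        }
    }
};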
>> BuildStatsBTreeIndex::Mixed_Groups [GOOD]
>> BuildStatsBTreeIndex::Mixed_Groups_History
>> TInterconnectTest::TestConnectAndDisconnect
>> BuildStatsBTreeIndex::Mixed_Groups_History [GOOD]
>> BuildStatsFlatIndex::Single [GOOD]
>> BuildStatsFlatIndex::Single_Slices
>> TSharedPageCache::ClockPro [GOOD]
>> TSharedPageCache::BigCache_BTreeIndex
>> BuildStatsFlatIndex::Single_Slices [GOOD]
>> BuildStatsFlatIndex::Single_History [GOOD]
>> BuildStatsFlatIndex::Single_History_Slices [GOOD]
>> BuildStatsFlatIndex::Single_Groups
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> NFwd_TFlatIndexCache::End [GOOD]
Test command err:
00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:08.091540Z
00000.003 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.003 II| FAKE_ENV: Starting storage for BS group 0
00000.003 II| FAKE_ENV: Starting storage for BS group 1
00000.003 II| FAKE_ENV: Starting storage for BS group 2
00000.003 II| FAKE_ENV: Starting storage for BS group 3
00000.005 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors
00000.005 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b}
00000.005 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.005 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {146b, 4}
00000.005 II| FAKE_ENV: DS.1 gone, left {105b, 3}, put {105b, 3}
00000.005 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0}
00000.005 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0}
00000.005 II| FAKE_ENV: All BS storage groups are stopped
00000.005 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s
00000.005 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped
00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:08.097831Z
00000.002 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.002 II| FAKE_ENV: Starting storage for BS group 0
00000.003 II| FAKE_ENV: Starting storage for BS group 1
00000.003 II| FAKE_ENV: Starting storage for BS group 2
00000.003 II| FAKE_ENV: Starting storage for BS group 3
00000.003 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors
00000.004 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b}
00000.004 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.004 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {292b, 8}
00000.004 II| FAKE_ENV: DS.1 gone, left {210b, 6}, put {210b, 6}
00000.004 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0}
00000.004 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0}
00000.004 II| FAKE_ENV: All BS storage groups are stopped
00000.004 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s
00000.004 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped
00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:08.102880Z
00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.001 II| FAKE_ENV: Starting storage for BS group 0
00000.001 II| FAKE_ENV: Starting storage for BS group 1
00000.001 II| FAKE_ENV: Starting storage for BS group 2
00000.001 II| FAKE_ENV: Starting storage for BS group 3
00000.004 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors
00000.004 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 76b} miss {0 0b}
00000.004 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.004 II| FAKE_ENV: DS.0 gone,
left {42b, 1}, put {1181b, 13} 00000.004 II| FAKE_ENV: DS.1 gone, left {909b, 3}, put {1913b, 12} 00000.004 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {132b, 2} 00000.004 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {116b, 2} 00000.004 II| FAKE_ENV: All BS storage groups are stopped 00000.004 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.004 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:08.107914Z 00000.002 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.002 II| FAKE_ENV: Starting storage for BS group 0 00000.002 II| FAKE_ENV: Starting storage for BS group 1 00000.002 II| FAKE_ENV: Starting storage for BS group 2 00000.002 II| FAKE_ENV: Starting storage for BS group 3 00000.004 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.004 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} 00000.004 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.004 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11} 00000.004 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 00000.004 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.004 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.004 II| FAKE_ENV: All BS storage groups are stopped 00000.004 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.004 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:08.113303Z 00000.002 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.002 II| FAKE_ENV: Starting storage for BS group 0 00000.002 II| FAKE_ENV: Starting storage for BS group 1 00000.002 II| FAKE_ENV: Starting storage for BS group 2 00000.002 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A cookie 0 00000.017 II| TABLET_SAUSAGECACHE: Wakeup 1 ... 
unblocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A 00000.017 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.018 NN| TABLET_SAUSAGECACHE: Poison cache serviced 11 reqs hit {18 513007b} miss {0 0b} 00000.018 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.018 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {2095b, 23} 00000.018 II| FAKE_ENV: DS.1 gone, left {774b, 4}, put {210604b, 21} 00000.018 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {205178b, 4} 00000.018 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {102690b, 4} 00000.018 II| FAKE_ENV: All BS storage groups are stopped 00000.018 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 15.00s 00000.018 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:08.133190Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.002 II| FAKE_ENV: Starting storage for BS group 0 00000.002 II| FAKE_ENV: Starting storage for BS group 1 00000.002 II| FAKE_ENV: Starting storage for BS group 2 00000.002 II| FAKE_ENV: Starting storage for BS group 3 00000.005 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.005 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {3 307329b} miss {0 0b} 00000.005 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.005 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1828b, 23} 00000.005 II| FAKE_ENV: DS.1 gone, left {1247b, 3}, put {311467b, 22} 00000.005 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.005 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.005 II| FAKE_ENV: All BS storage groups are stopped 00000.005 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.005 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:08.140192Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.007 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 5 actors 00000.007 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4 reqs hit {8 307836b} miss {0 0b} 00000.007 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.007 II| FAKE_ENV: DS.0 gone, left {57b, 2}, put {1436b, 31} 00000.007 II| FAKE_ENV: DS.1 gone, left {629b, 3}, put {310476b, 16} 00000.007 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.007 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.007 II| FAKE_ENV: All BS storage groups are stopped 00000.007 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.007 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:08.148483Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.004 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.004 NN| 
TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 194646b} miss {0 0b} 00000.004 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.004 II| FAKE_ENV: DS.1 gone, left {732b, 6}, put {197813b, 24} 00000.004 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.004 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.004 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1766b, 27} 00000.004 II| FAKE_ENV: All BS storage groups are stopped 00000.004 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.004 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:08.153218Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.002 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.002 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.002 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.002 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {326b, 7} 00000.002 II| FAKE_ENV: DS.1 gone, left {418b, 4}, put {453b, 5} 00000.002 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.002 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.002 II| FAKE_ENV: All BS storage groups are stopped 00000.002 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.002 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:08.156011Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A cookie 0 00000.014 II| TABLET_SAUSAGECACHE: Wakeup 1 ... unblocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A 00000.014 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.014 NN| TABLET_SAUSAGECACHE: Poison cache serviced 6 reqs hit {8 410030b} miss {0 0b} 00000.014 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.014 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1492b, 23} 00000.014 II| FAKE_ENV: DS.1 gone, left {504b, 4}, put {310786b, 20} 00000.014 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.014 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.014 II| FAKE_ENV: All BS storage groups are stopped 00000.014 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 15.00s 00000.014 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:08.171846Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for ... 
Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b
+ FlatIndex{20} Label{3 rev 3, 453b} 21 rec
| Page Row Bytes (Uint32)
| 0 0 50b {0}
| 1 2 50b {2}
| 2 4 50b {4}
| 3 6 50b {6}
| 4 8 50b {8}
| 5 10 50b {10}
| 6 12 50b {12}
| 7 14 50b {14}
| 8 16 50b {16}
| 9 18 50b {18}
| 10 20 50b {20}
| 11 22 50b {22}
| 12 24 50b {24}
| 13 26 50b {26}
| 14 28 50b {28}
| 15 30 50b {30}
| 16 32 50b {32}
| 17 34 50b {34}
| 18 36 50b {36}
| 19 38 50b {38}
| 19 39 50b {39}
+ Rows{0} Label{04 rev 1, 50b}, [0, +2)row
| ERowOp 1: {0} {Set 1 Uint32 : 0}
| ERowOp 1: {1} {Set 1 Uint32 : 100}
+ Rows{1} Label{14 rev 1, 50b}, [2, +2)row
| ERowOp 1: {2} {Set 1 Uint32 : 200}
| ERowOp 1: {3} {Set 1 Uint32 : 300}
+ Rows{2} Label{24 rev 1, 50b}, [4, +2)row
| ERowOp 1: {4} {Set 1 Uint32 : 400}
| ERowOp 1: {5} {Set 1 Uint32 : 500}
+ Rows{3} Label{34 rev 1, 50b}, [6, +2)row
| ERowOp 1: {6} {Set 1 Uint32 : 600}
| ERowOp 1: {7} {Set 1 Uint32 : 700}
+ Rows{4} Label{44 rev 1, 50b}, [8, +2)row
| ERowOp 1: {8} {Set 1 Uint32 : 800}
| ERowOp 1: {9} {Set 1 Uint32 : 900}
+ Rows{5} Label{54 rev 1, 50b}, [10, +2)row
| ERowOp 1: {10} {Set 1 Uint32 : 1000}
| ERowOp 1: {11} {Set 1 Uint32 : 1100}
+ Rows{6} Label{64 rev 1, 50b}, [12, +2)row
| ERowOp 1: {12} {Set 1 Uint32 : 1200}
| ERowOp 1: {13} {Set 1 Uint32 : 1300}
+ Rows{7} Label{74 rev 1, 50b}, [14, +2)row
| ERowOp 1: {14} {Set 1 Uint32 : 1400}
| ERowOp 1: {15} {Set 1 Uint32 : 1500}
+ Rows{8} Label{84 rev 1, 50b}, [16, +2)row
| ERowOp 1: {16} {Set 1 Uint32 : 1600}
| ERowOp 1: {17} {Set 1 Uint32 : 1700}
+ Rows{9} Label{94 rev 1, 50b}, [18, +2)row
| ERowOp 1: {18} {Set 1 Uint32 : 1800}
| ERowOp 1: {19} {Set 1 Uint32 : 1900}
+ Rows{10} Label{104 rev 1, 50b}, [20, +2)row
| ERowOp 1: {20} {Set 1 Uint32 : 2000}
| ERowOp 1: {21} {Set 1 Uint32 : 2100}
+ Rows{11} Label{114 rev 1, 50b}, [22, +2)row
| ERowOp 1: {22} {Set 1 Uint32 : 2200}
| ERowOp 1: {23} {Set 1 Uint32 : 2300}
+ Rows{12} Label{124 rev 1, 50b}, [24, +2)row
| ERowOp 1: {24} {Set 1 Uint32 : 2400}
| ERowOp 1: {25} {Set 1 Uint32 : 2500}
+ Rows{13} Label{134 rev 1, 50b}, [26, +2)row
| ERowOp 1: {26} {Set 1 Uint32 : 2600}
| ERowOp 1: {27} {Set 1 Uint32 : 2700}
+ Rows{14} Label{144 rev 1, 50b}, [28, +2)row
| ERowOp 1: {28} {Set 1 Uint32 : 2800}
| ERowOp 1: {29} {Set 1 Uint32 : 2900}
+ Rows{15} Label{154 rev 1, 50b}, [30, +2)row
| ERowOp 1: {30} {Set 1 Uint32 : 3000}
| ERowOp 1: {31} {Set 1 Uint32 : 3100}
+ Rows{16} Label{164 rev 1, 50b}, [32, +2)row
| ERowOp 1: {32} {Set 1 Uint32 : 3200}
| ERowOp 1: {33} {Set 1 Uint32 : 3300}
+ Rows{17} Label{174 rev 1, 50b}, [34, +2)row
| ERowOp 1: {34} {Set 1 Uint32 : 3400}
| ERowOp 1: {35} {Set 1 Uint32 : 3500}
+ Rows{18} Label{184 rev 1, 50b}, [36, +2)row
| ERowOp 1: {36} {Set 1 Uint32 : 3600}
| ERowOp 1: {37} {Set 1 Uint32 : 3700}
+ Rows{19} Label{194 rev 1, 50b}, [38, +2)row
| ERowOp 1: {38} {Set 1 Uint32 : 3800}
| ERowOp 1: {39} {Set 1 Uint32 : 3900}
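The FlatIndex dump above is readable on its own: each of the first 20 records maps a page to the first row it holds, pages carry two rows each, and the 21st record repeats page 19 with the final row (39), which reads like an end marker so lookups have an upper bound. A sketch of that row-to-page lookup in Python, built only from the numbers in the dump, not from YDB's actual index code:

import bisect

# Records from the FlatIndex dump: (page, first_row), one per page,
# matching the "| Page Row Bytes" table above. Pages hold two rows each,
# so page N starts at row 2*N; the half-open "[N, +2)row" spans in the
# Rows{...} records say the same thing.
records = [(page, 2 * page) for page in range(20)]
first_rows = [row for _, row in records]

def page_for_row(row: int) -> int:
    # Last record with first_row <= row wins, as in any sparse index.
    return records[bisect.bisect_right(first_rows, row) - 1][0]

assert page_for_row(7) == 3     # Rows{3} covers [6, +2)
assert page_for_row(39) == 19   # last row lands on the last page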
>> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD]
>> BuildStatsFlatIndex::Single_Groups [GOOD]
>> BuildStatsFlatIndex::Single_Groups_Slices [GOOD]
>> BuildStatsFlatIndex::Single_Groups_History [GOOD]
>> BuildStatsFlatIndex::Single_Groups_History_Slices
>> TExecutorDb::RandomOps [GOOD]
>> TExecutorDb::FullScan
>> TSharedPageCache::BigCache_BTreeIndex [GOOD]
>> TSharedPageCache::BigCache_FlatIndex
>> BuildStatsFlatIndex::Single_Groups_History_Slices [GOOD]
>> BuildStatsFlatIndex::Mixed [GOOD]
>> BuildStatsFlatIndex::Mixed_Groups [GOOD]
>> BuildStatsFlatIndex::Mixed_Groups_History
>> TInterconnectTest::TestConnectAndDisconnect [GOOD]
>> TInterconnectTest::TestBlobEventPreSerialized
>> TInterconnectTest::TestManyEvents [GOOD]
>> TInterconnectTest::TestCrossConnect
>> VectorIndexBuildTest::BaseCase [GOOD]
>> BuildStatsFlatIndex::Mixed_Groups_History [GOOD]
>> BuildStatsFlatIndex::Serial [GOOD]
>> BuildStatsFlatIndex::Serial_Groups [GOOD]
>> BuildStatsFlatIndex::Serial_Groups_History
>> TSharedPageCache::BigCache_FlatIndex [GOOD]
>> TSharedPageCache::MiddleCache_BTreeIndex
>> BuildStatsFlatIndex::Serial_Groups_History [GOOD]
>> BuildStatsHistogram::Single
>> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs [GOOD]
>> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs
>> TInterconnectTest::TestSimplePingPong
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:27:21.107123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:27:21.107153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:27:21.107160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:27:21.107166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:27:21.107185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:27:21.107189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:27:21.107199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:27:21.107213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10,
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:27:21.107339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:27:21.107414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:27:21.120801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:21.120828Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:21.123745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:27:21.123911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:27:21.123958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:27:21.126121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:27:21.126323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:27:21.126441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:21.126496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:27:21.131205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:21.131275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:27:21.131612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:21.131623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:21.131646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:27:21.131655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:21.131661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:27:21.131732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.133264Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:27:21.154801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain 
{ Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:27:21.154897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.154984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:27:21.155041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:27:21.155055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.156029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:21.156064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:27:21.156123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.156135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:27:21.156141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:27:21.156147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:27:21.156792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.156809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:27:21.156816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:27:21.157339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.157357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.157364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:21.157371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:27:21.158095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:27:21.158612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:27:21.158658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:27:21.158889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:21.158919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:21.158927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:21.158992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:27:21.159000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:21.159035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:27:21.159049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:27:21.159583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:21.159593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:21.159636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
7594046678944, LocalPathId: 2] 2025-05-29T15:28:08.715062Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269553211, Sender [3:309:2295], Recipient [3:124:2149]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 2 } Status: OK 2025-05-29T15:28:08.715073Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4953: StateWork, processing event TEvDataShard::TEvCompactTableResult 2025-05-29T15:28:08.715092Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 0 seconds 2025-05-29T15:28:08.715108Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 3 ms, with status# 0, next wakeup in# 0.997000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-05-29T15:28:08.715492Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268828683, Sender [3:298:2286], Recipient [3:309:2295]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-05-29T15:28:08.726377Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:189: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:19.150000Z 2025-05-29T15:28:09.104233Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:28:09.104263Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:28:09.104293Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:309:2295]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:09.104316Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-05-29T15:28:09.104352Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [3:124:2149], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:28:09.104357Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:28:09.104428Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269553162, Sender [3:309:2295], Recipient [3:124:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 5 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 19 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 4069 Memory: 124088 Storage: 14156 
} ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 40 TableOwnerId: 72057594046678944 FollowerId: 0 2025-05-29T15:28:09.104433Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4914: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-05-29T15:28:09.104448Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.4069 2025-05-29T15:28:09.104462Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:570: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 19 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-05-29T15:28:09.104468Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:610: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-05-29T15:28:09.145171Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:92: Operation queue wakeup 2025-05-29T15:28:09.145225Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 0, Rows# 0, Deletes# 0, Compaction# 1970-01-01T00:00:19.000000Z}, next wakeup in# 0.000000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-05-29T15:28:09.145263Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 30 seconds 2025-05-29T15:28:09.145283Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-05-29T15:28:09.145291Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5053: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-05-29T15:28:09.145295Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-05-29T15:28:09.145316Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:601: Will execute TTxStoreStats, queue# 1 2025-05-29T15:28:09.145323Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:610: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-05-29T15:28:09.145383Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269553210, Sender [3:124:2149], Recipient [3:309:2295]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 2 } CompactSinglePartedShards: true 2025-05-29T15:28:09.145430Z node 3 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 7 of 72075186233409546 tableId# 2 localTid# 1001, requested from [3:124:2149], 
partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-05-29T15:28:09.145489Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-05-29T15:28:09.145508Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-05-29T15:28:09.145511Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:62: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2025-05-29T15:28:09.145526Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:19.000000Z at schemeshard 72057594046678944 2025-05-29T15:28:09.145546Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:477: Do not want to split tablet 72075186233409546 by size, its table already has 1 out of 1 partitions 2025-05-29T15:28:09.145562Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:09.145767Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 6, ts 1970-01-01T00:00:19.150000Z 2025-05-29T15:28:09.145775Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 6, front# 7 2025-05-29T15:28:09.146826Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435080, Sender [3:1268:3203], Recipient [3:309:2295]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-05-29T15:28:09.147578Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268828683, Sender [3:298:2286], Recipient [3:309:2295]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-05-29T15:28:09.148658Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 7, ts 1970-01-01T00:00:20.151000Z 2025-05-29T15:28:09.148679Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 7, front# 7 2025-05-29T15:28:09.148687Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:124:2149]pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:28:09.148752Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269553211, Sender [3:309:2295], Recipient [3:124:2149]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 2 } Status: OK 2025-05-29T15:28:09.148760Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4953: StateWork, processing event TEvDataShard::TEvCompactTableResult 2025-05-29T15:28:09.148780Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 0 seconds 2025-05-29T15:28:09.148795Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.996000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944
2025-05-29T15:28:09.149321Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268828683, Sender [3:298:2286], Recipient [3:309:2295]: NKikimr::TEvTablet::TEvFollowerGcApplied
2025-05-29T15:28:09.159518Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats
2025-05-29T15:28:09.159541Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5053: StateWork, processing event TEvPrivate::TEvPersistTableStats
2025-05-29T15:28:09.159548Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046678944, queue size# 0
2025-05-29T15:28:09.170347Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:189: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:20.151000Z
>> TInterconnectTest::OldNbs [GOOD]
>> TInterconnectTest::TestBlobEventPreSerialized [GOOD]
>> TInterconnectTest::TestBlobEventUpToMebibytes
>> TSharedPageCache::MiddleCache_BTreeIndex [GOOD]
>> TInterconnectTest::OldFormat
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::BaseCase [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:28:04.789546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:28:04.789577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:28:04.789583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:28:04.789590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:28:04.789606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:28:04.789611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:28:04.789622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:28:04.789638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created:
Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:04.789756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:04.789834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:04.805743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:04.805768Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:04.808727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:04.808827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:04.808867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:04.810881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:04.811171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:04.811307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:04.811377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:04.811969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:04.812022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:04.812297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:04.812308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:04.812327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:04.812338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:04.812344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:04.812377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:04.813789Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:04.831436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:04.831498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:04.831553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:04.831605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:04.831617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:04.832381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:04.832407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:04.832470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:04.832482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:04.832487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:04.832493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:04.832971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:04.832982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:04.832986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:04.833310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:04.833321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:04.833330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:04.833337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:04.833833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: 
TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:04.834249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:04.834281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:04.834454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:04.834480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:04.834487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:04.834543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:04.834551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:04.834579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:04.834590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:04.835219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:04.835229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:04.835270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3134: StateWork, processing event TEvDataShard::TEvStateChangedResult 2025-05-29T15:28:09.514878Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2962: Handle TEvStateChangedResult datashard 72075186233409587 state Offline 2025-05-29T15:28:09.514896Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877763, Sender [2:6298:7591], Recipient [2:5758:7092]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409581 ClientId: [2:6298:7591] ServerId: [2:6299:7592] } 2025-05-29T15:28:09.514899Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-05-29T15:28:09.514938Z node 2 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409581 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186233409587 2025-05-29T15:28:09.514975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72075186233409581 ShardLocalIdx: 7, at schemeshard: 72075186233409581 2025-05-29T15:28:09.515024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409581, LocalPathId: 6] was 1 Forgetting tablet 72075186233409587 2025-05-29T15:28:09.515092Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 268829696, Sender [2:5749:7085], Recipient [2:5758:7092]: NKikimr::TEvTablet::TEvTabletDead 2025-05-29T15:28:09.515126Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186233409587 2025-05-29T15:28:09.515149Z node 2 :TX_DATASHARD INFO: datashard.cpp:1301: Change sender killed: at tablet: 72075186233409587 2025-05-29T15:28:09.515424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409581 2025-05-29T15:28:09.515429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409581, LocalPathId: 6], at schemeshard: 72075186233409581 2025-05-29T15:28:09.515439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409581, LocalPathId: 3] was 4 2025-05-29T15:28:09.516096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72075186233409581:7 2025-05-29T15:28:09.516109Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72075186233409581:7 tabletId 72075186233409587 2025-05-29T15:28:09.516213Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409581 2025-05-29T15:28:09.568180Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 1750, transactions count in step: 1, at schemeshard: 72075186233409581 2025-05-29T15:28:09.568218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976740763 AckTo { RawX1: 0 RawX2: 0 } } Step: 1750 MediatorID: 72075186233409583 TabletID: 72075186233409581, at schemeshard: 72075186233409581 2025-05-29T15:28:09.568227Z node 2 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72075186233409581] TDropLock TPropose opId# 281474976740763:0 HandleReply TEvOperationPlan: step# 1750 2025-05-29T15:28:09.568233Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976740763:0 128 -> 240 2025-05-29T15:28:09.568747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976740763:0, at schemeshard: 72075186233409581 2025-05-29T15:28:09.568757Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72075186233409581] TDone opId# 281474976740763:0 ProgressState 2025-05-29T15:28:09.568769Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976740763:0 progress is 1/1 2025-05-29T15:28:09.568773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976740763 ready parts: 1/1 2025-05-29T15:28:09.568776Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976740763:0 progress is 1/1 2025-05-29T15:28:09.568778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976740763 ready parts: 1/1 2025-05-29T15:28:09.568783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976740763, ready parts: 1/1, is published: true 2025-05-29T15:28:09.568793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:5238:6643] message: TxId: 281474976740763 2025-05-29T15:28:09.568798Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976740763 ready parts: 1/1 2025-05-29T15:28:09.568802Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976740763:0 2025-05-29T15:28:09.568804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976740763:0 2025-05-29T15:28:09.568817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409581, LocalPathId: 2] was 4 2025-05-29T15:28:09.569267Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976740763 2025-05-29T15:28:09.569280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976740763 2025-05-29T15:28:09.569292Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2338: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976740763, buildInfoId: 123 2025-05-29T15:28:09.569312Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2341: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976740763, buildInfo: TBuildInfo{ IndexBuildId: 123, Uid: , DomainPathId: [OwnerId: 72075186233409581, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409581, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:5495:6854], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 
281474976740757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976740758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976740762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976740763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 416, upload bytes: 6792, read rows: 404, read bytes: 10148 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:28:09.569727Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 123 Unlocking 2025-05-29T15:28:09.569742Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 123 Unlocking TBuildInfo{ IndexBuildId: 123, Uid: , DomainPathId: [OwnerId: 72075186233409581, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409581, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:5495:6854], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976740757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976740758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976740762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976740763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 416, upload bytes: 6792, read rows: 404, read bytes: 10148 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:28:09.569749Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:25: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-05-29T15:28:09.570065Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 123 Done 2025-05-29T15:28:09.570076Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 123 Done TBuildInfo{ IndexBuildId: 123, Uid: , DomainPathId: [OwnerId: 72075186233409581, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409581, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:5495:6854], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976740757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976740758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976740762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976740763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 416, upload bytes: 6792, read rows: 404, read bytes: 10148 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:28:09.570079Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 123, subscribers count# 1 
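The TBuildInfo dumps above trace the index build through its lock, initiate, apply, and unlock transactions (LockTxDone, InitiateTxDone, ApplyTxDone, UnlockTxDone) before the final "Change state from Unlocking to Done". A toy Python model of that progression, inferred from these log fields only; the intermediate Filling state is an assumed placeholder the log does not name:

from enum import Enum

# States inferred from the TBuildInfo fields above; Filling is an assumed
# stand-in for the scan/upload phase between Initiating and Applying.
class BuildState(Enum):
    LOCKING = 1
    INITIATING = 2
    FILLING = 3
    APPLYING = 4
    UNLOCKING = 5
    DONE = 6

def advance(state: BuildState) -> BuildState:
    # "Change state from Unlocking to Done" is the last transition logged.
    return BuildState(min(state.value + 1, BuildState.DONE.value))

assert advance(BuildState.UNLOCKING) is BuildState.DONE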
2025-05-29T15:28:09.570102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult
2025-05-29T15:28:09.570108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [2:5512:6871]
TestWaitNotification: OK eventTxId 123
2025-05-29T15:28:09.570353Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/CommonDB" IndexBuildId: 123
2025-05-29T15:28:09.570405Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:93: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 123 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { seconds: 1 } EndTime { seconds: 1 } }
BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 123 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { seconds: 1 } EndTime { seconds: 1 } }
|70.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldNbs [GOOD]
>> TInterconnectTest::TestSimplePingPong [GOOD]
>> TInterconnectTest::TestSubscribeByFlag
>> TExecutorDb::FullScan [GOOD]
>> TExecutorDb::CoordinatorSimulation
>> TVersions::WreckHead [GOOD]
>> TVersions::WreckHeadReverse
>> BuildStatsHistogram::Single [GOOD]
>> BuildStatsHistogram::Single_Slices
>> TInterconnectTest::TestBlobEventUpToMebibytes [GOOD]
>> TInterconnectTest::TestBlobEventsThroughSubChannels
>> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs [GOOD]
>> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart
>> TestProtocols::TestConnectProtocol
>> IndexBuildTest::BaseCase [GOOD]
>> IndexBuildTest::CancelBuild
>> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart [GOOD]
>> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 [GOOD]
>> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 [GOOD]
>> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 [GOOD]
>> TGenCompaction::OverloadFactorDuringForceCompaction
>> TInterconnectTest::TestSubscribeByFlag [GOOD]
>> TInterconnectTest::TestReconnect
>> TInterconnectTest::OldFormat [GOOD]
>> TInterconnectTest::OldFormatSuppressVersionCheckOnNew
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TSharedPageCache::MiddleCache_BTreeIndex [GOOD]
Test command err:
SmallQueue: MainQueue: {11 0f 1b}, {14 1f 1b}, {15 2f 1b}, {18 0f 1b}, {19 0f 1b}, {23 0f 1b}, {27 0f 1b} GhostQueue: 9, 12, 13, 16, 17, 20, 21, 24, 25, 28 0.29407
00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:08.331830Z
00000.003 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.003 II| FAKE_ENV: Starting storage for BS group 0
00000.003 II| FAKE_ENV: Starting storage for BS group 1
00000.003 II| FAKE_ENV: Starting storage for BS group 2
00000.003 II| FAKE_ENV: Starting storage for BS group 3
00000.004 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor
00000.004 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done,
Waste{2:0, 0b +(0, 0b), 0 trc} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.004 NN| TABLET_SAUSAGECACHE: Update config MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.004 NN| TABLET_SAUSAGECACHE: Switch replacement policy from S3FIFO to ThreeLeveledLRU 00000.004 NN| TABLET_SAUSAGECACHE: Switch replacement policy done from S3FIFO to ThreeLeveledLRU 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{2, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{3, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{4, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.005 DD| 
TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{5, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{6, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{7, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{8, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{9, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{10, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:12} commited cookie 1 for step 11 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{11, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{12, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:14} commited cookie 1 for step 13 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{13, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 
00000.006 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{14, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxW ... TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.198 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2976:0] 102443 bytes, 1 pages: [4 4] 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 4 pages 00000.198 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2976:0] owner [6:580:2605] cookie 1 class Online from cache [ ] already requested [ ] to request [ 4 ] 00000.198 DD| TABLET_SAUSAGECACHE: Drop page collection [1:2:103:1:12288:2976:0] pages [ 96 ] owner [6:580:2605] 00000.198 TT| TABLET_SAUSAGECACHE: Receive page collection [1:2:103:1:12288:2976:0] status OK pages [ 4 ] 00000.198 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:103:1:12288:2976:0] owner [6:580:2605] class Online pages [ 4 ] cookie 1 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2976:0] ok OK}, category 1 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.198 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2976:0] owner [6:580:2605] pages [ 14 4 117 111 ] 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 796b, 102443b lo load (103239b in total), 0b requested for data (4194304b in total) 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.198 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2976:0] 102443 bytes, 1 pages: [3 4] 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 4 pages 00000.198 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2976:0] owner [6:580:2605] cookie 1 class Online from cache [ ] already requested [ ] to 
request [ 3 ] 00000.198 DD| TABLET_SAUSAGECACHE: Drop page collection [1:2:103:1:12288:2976:0] pages [ 95 ] owner [6:580:2605] 00000.198 TT| TABLET_SAUSAGECACHE: Receive page collection [1:2:103:1:12288:2976:0] status OK pages [ 3 ] 00000.198 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:103:1:12288:2976:0] owner [6:580:2605] class Online pages [ 3 ] cookie 1 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2976:0] ok OK}, category 1 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.198 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2976:0] owner [6:580:2605] pages [ 14 3 117 111 ] 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 796b, 102443b lo load (103239b in total), 0b requested for data (4194304b in total) 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.198 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2976:0] 102443 bytes, 1 pages: [2 4] 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 4 pages 00000.198 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2976:0] owner [6:580:2605] cookie 1 class Online from cache [ ] already requested [ ] to request [ 2 ] 00000.198 DD| TABLET_SAUSAGECACHE: Drop page collection [1:2:103:1:12288:2976:0] pages [ 93 ] owner [6:580:2605] 00000.198 TT| TABLET_SAUSAGECACHE: Receive page collection [1:2:103:1:12288:2976:0] status OK pages [ 2 ] 00000.198 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:103:1:12288:2976:0] owner [6:580:2605] class Online pages [ 2 ] cookie 1 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2976:0] ok OK}, category 1 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.198 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2976:0] owner [6:580:2605] pages [ 14 2 117 111 ] 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 796b, 102443b lo load (103239b in total), 0b requested for data (4194304b in total) 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.198 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2976:0] 102443 bytes, 1 pages: [1 4] 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 4 pages 00000.198 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2976:0] owner [6:580:2605] cookie 1 class Online from cache [ ] already requested [ ] to request [ 1 ] 00000.198 DD| TABLET_SAUSAGECACHE: Drop page collection [1:2:103:1:12288:2976:0] pages [ 92 ] owner [6:580:2605] 00000.198 TT| TABLET_SAUSAGECACHE: Receive page collection [1:2:103:1:12288:2976:0] status OK pages [ 1 ] 00000.198 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:103:1:12288:2976:0] owner [6:580:2605] class Online pages [ 1 ] cookie 1 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2976:0] ok OK}, category 1 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.198 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2976:0] owner [6:580:2605] pages [ 14 1 117 111 ] 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 796b, 102443b lo load (103239b in total), 0b requested for data (4194304b in total) 00000.198 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.198 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2976:0] 102443 bytes, 1 pages: [0 4] 00000.285 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 4 pages 00000.285 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2976:0] owner [6:580:2605] cookie 1 class Online from cache [ ] already requested [ ] to request [ 0 ] 00000.285 DD| TABLET_SAUSAGECACHE: Drop 
page collection [1:2:103:1:12288:2976:0] pages [ 91 ] owner [6:580:2605] 00000.285 TT| TABLET_SAUSAGECACHE: Receive page collection [1:2:103:1:12288:2976:0] status OK pages [ 0 ] 00000.285 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:103:1:12288:2976:0] owner [6:580:2605] class Online pages [ 0 ] cookie 1 00000.285 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2976:0] ok OK}, category 1 00000.285 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.285 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.285 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2976:0] owner [6:580:2605] pages [ 14 0 117 111 ] Counters: Active:8313958/8388608, Passive:0, MemLimit:-1 00000.285 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.286 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 10255801b +(0, 0b), 1 trc, -48685b acc} 00000.286 DD| TABLET_SAUSAGECACHE: Unregister owner [6:580:2605] 00000.286 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:103:1:12288:2976:0] owner [6:580:2605] 00000.286 DD| TABLET_SAUSAGECACHE: Remove owner [6:580:2605] 00000.286 NN| TABLET_SAUSAGECACHE: Poison cache serviced 138 reqs hit {0 0b} miss {139 12197190b} 00000.286 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.286 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10191b, 107} 00000.286 II| FAKE_ENV: DS.1 gone, left {10257096b, 5}, put {10305919b, 107} 00000.286 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.286 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.286 II| FAKE_ENV: All BS storage groups are stopped 00000.286 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.286 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 2741}, stopped >> TGenCompaction::OverloadFactorDuringForceCompaction [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] >> TGenCompaction::ForcedCompactionNoGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithFinalParts [GOOD] >> TGenCompaction::ForcedCompactionByDeletedRows [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccData [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataRestart [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataBorrowed [GOOD] >> TIterator::Basics [GOOD] >> TIterator::External [GOOD] >> TIterator::Single >> TIterator::Single [GOOD] >> TIterator::SingleReverse >> TInterconnectTest::TestBlobEvent220BytesPreSerialized >> BuildStatsHistogram::Single_Slices [GOOD] >> BuildStatsHistogram::Single_History >> TIterator::SingleReverse [GOOD] >> TIterator::Mixed >> TestProtocols::TestConnectProtocol [GOOD] >> TestProtocols::TestHTTPCollected >> TInterconnectTest::TestReconnect [GOOD] >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent >> TestProtocols::TestResolveProtocol >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld |70.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] >> TExecutorDb::CoordinatorSimulation [GOOD] >> TExecutorDb::RandomCoordinatorSimulation >> TestProtocols::TestHTTPCollected [GOOD] >> 
TInterconnectTest::TestTraceIdPassThrough >> TInterconnectTest::TestBlobEvent220BytesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizes >> TestProtocols::TestResolveProtocol [GOOD] >> TestProtocols::TestHTTPCollectedVerySlow >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheck >> TVersions::WreckHeadReverse [GOOD] >> TVersions::Wreck2 >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex >> TIterator::Mixed [GOOD] >> TIterator::MixedReverse >> TInterconnectTest::TestTraceIdPassThrough [GOOD] >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] >> TInterconnectTest::TestNotifyUndelivered >> TInterconnectTest::TestBlobEventDifferentSizes [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] >> BuildStatsHistogram::Single_History [GOOD] >> BuildStatsHistogram::Single_History_Slices |70.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestTraceIdPassThrough [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] Test command err: 2025-05-29T15:28:10.668981Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:180: Proxy [6:9:2048] [node 5] ICP77 @206 (null) -> PendingActivation 2025-05-29T15:28:10.668996Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:43: Proxy [6:9:2048] [node 5] ICP01 ready to work 2025-05-29T15:28:10.669039Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:180: Proxy [5:1:2048] [node 6] ICP77 @206 (null) -> PendingActivation 2025-05-29T15:28:10.669043Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:43: Proxy [5:1:2048] [node 6] ICP01 ready to work 2025-05-29T15:28:10.669226Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:180: Proxy [5:1:2048] [node 6] ICP77 @99 PendingActivation -> PendingNodeInfo 2025-05-29T15:28:10.669557Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:119: Proxy [5:1:2048] [node 6] ICP02 configured for host ::1:29212 2025-05-29T15:28:10.669586Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:180: Proxy [5:1:2048] [node 6] ICP77 @488 PendingNodeInfo -> PendingConnection 2025-05-29T15:28:10.669599Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:677: Handshake [5:19:2057] [node 6] ICH01 starting outgoing handshake 2025-05-29T15:28:10.669614Z node 5 :INTERCONNECT DEBUG: interconnect_resolve.cpp:128: ICR04 Host: ::1, RESOLVED address 2025-05-29T15:28:10.669758Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:52358 2025-05-29T15:28:10.669808Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:878: Handshake [6:21:2057] [node 0] ICH02 starting incoming handshake 2025-05-29T15:28:10.669819Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:682: Handshake [5:19:2057] [node 6] ICH05 connected to peer 2025-05-29T15:28:10.670203Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:612: Handshake [5:19:2057] [node 6] ICH07 SendExBlock ExRequest Protocol: 2 ProgramPID: 3983761 ProgramStartTime: 20728339743598 Serial: 2648910911 ReceiverNodeId: 6 SenderActorId: "[5:2648910911:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 3983761" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 3983761" AcceptUUID: "Cluster for 
process with id: 3983761" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "\235X\254\330\221\013H\357\366T\267b\255ti\244\353\037\273\325\356=\230s\007i\004/\255\032+\356" RequestXxhash: true RequestXdcShuffle: true 2025-05-29T15:28:10.670311Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:612: Handshake [6:21:2057] [node 5] ICH07 ReceiveExBlock ExRequest Protocol: 2 ProgramPID: 3983761 ProgramStartTime: 20728339743598 Serial: 2648910911 ReceiverNodeId: 6 SenderActorId: "[5:2648910911:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 3983761" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 3983761" AcceptUUID: "Cluster for process with id: 3983761" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "\235X\254\330\221\013H\357\366T\267b\255ti\244\353\037\273\325\356=\230s\007i\004/\255\032+\356" RequestXxhash: true RequestXdcShuffle: true 2025-05-29T15:28:10.670330Z node 6 :INTERCONNECT WARN: interconnect_handshake.cpp:501: Handshake [6:21:2057] [node 5] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-05-29T15:28:10.670353Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:180: Proxy [6:9:2048] [node 5] ICP77 @99 PendingActivation -> PendingNodeInfo 2025-05-29T15:28:10.670643Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:119: Proxy [6:9:2048] [node 5] ICP02 configured for host ::1:4821 2025-05-29T15:28:10.670651Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:262: Proxy [6:9:2048] [node 5] ICP17 incoming handshake (actor [6:21:2057]) 2025-05-29T15:28:10.670655Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:180: Proxy [6:9:2048] [node 5] ICP77 @488 PendingNodeInfo -> PendingConnection 2025-05-29T15:28:10.670662Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:210: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply 2025-05-29T15:28:10.670666Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:219: Proxy [6:9:2048] [node 5] ICP08 No active sessions, becoming PendingConnection 2025-05-29T15:28:10.670669Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:180: Proxy [6:9:2048] [node 5] ICP77 @220 PendingConnection -> PendingConnection 2025-05-29T15:28:10.670698Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:612: Handshake [6:21:2057] [node 5] ICH07 SendExBlock ExReply Success { Protocol: 2 ProgramPID: 3983761 ProgramStartTime: 20728343806548 Serial: 249148113 SenderActorId: "[6:249148113:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 3983761" AcceptUUID: "Cluster for process with id: 3983761" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true } 2025-05-29T15:28:10.670797Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:612: Handshake [5:19:2057] [node 6] ICH07 ReceiveExBlock ExReply Success { Protocol: 2 ProgramPID: 3983761 ProgramStartTime: 20728343806548 Serial: 249148113 SenderActorId: "[6:249148113:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 3983761" AcceptUUID: "Cluster for process with id: 3983761" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true } 2025-05-29T15:28:10.670805Z node 5 :INTERCONNECT WARN: 
interconnect_handshake.cpp:501: Handshake [5:19:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-05-29T15:28:10.670824Z node 5 :INTERCONNECT DEBUG: interconnect_resolve.cpp:128: ICR04 Host: ::1, RESOLVED address 2025-05-29T15:28:10.670903Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:52372 2025-05-29T15:28:10.670944Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:878: Handshake [6:23:2058] [node 0] ICH02 starting incoming handshake 2025-05-29T15:28:10.670973Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:612: Handshake [5:19:2057] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "\235X\254\330\221\013H\357\366T\267b\255ti\244\353\037\273\325\356=\230s\007i\004/\255\032+\356" 2025-05-29T15:28:10.670996Z node 5 :INTERCONNECT INFO: interconnect_handshake.cpp:375: Handshake [5:19:2057] [node 6] ICH04 handshake succeeded 2025-05-29T15:28:10.671070Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:338: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2025-05-29T15:28:10.671079Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:460: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:19:2057] poison: false 2025-05-29T15:28:10.671084Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:180: Proxy [5:1:2048] [node 6] ICP77 @350 PendingConnection -> StateWork 2025-05-29T15:28:10.671100Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:377: Proxy [5:1:2048] [node 6] ICP22 created new session: [5:24:2048] 2025-05-29T15:28:10.671116Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:259: Session [5:24:2048] [node 6] ICS09 handshake done sender: [5:19:2057] self: [5:2648910911:0] peer: [6:249148113:0] socket: 24 2025-05-29T15:28:10.671124Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:281: Session [5:24:2048] [node 6] ICS10 traffic start 2025-05-29T15:28:10.671143Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:297: Session [5:24:2048] [node 6] ICS11 registering socket in PollerActor 2025-05-29T15:28:10.671149Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2025-05-29T15:28:10.671161Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:336: Session [5:24:2048] [node 6] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2025-05-29T15:28:10.671169Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2025-05-29T15:28:10.671176Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:222: Session [5:24:2048] [node 6] ICS04 subscribe for session state for [5:17:2056] 2025-05-29T15:28:10.671394Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:143: InputSession [5:25:2048] [node 6] ICIS01 InputSession created 2025-05-29T15:28:10.671404Z node 6 :INTERCONNECT INFO: interconnect_handshake.cpp:375: Handshake [6:21:2057] [node 5] ICH04 handshake succeeded 2025-05-29T15:28:10.671472Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:334: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded 2025-05-29T15:28:10.671479Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:445: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:21:2057] poison: false 2025-05-29T15:28:10.671484Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:180: Proxy [6:9:2048] 
[node 5] ICP77 @350 PendingConnection -> StateWork 2025-05-29T15:28:10.671512Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:377: Proxy [6:9:2048] [node 5] ICP22 created new session: [6:26:2048] 2025-05-29T15:28:10.671517Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:259: Session [6:26:2048] [node 5] ICS09 handshake done sender: [6:21:2057] self: [6:249148113:0] peer: [5:2648910911:0] socket: 25 2025-05-29T15:28:10.671524Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:281: Session [6:26:2048] [node 5] ICS10 traffic start 2025-05-29T15:28:10.671532Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:297: Session [6:26:2048] [node 5] ICS11 registering socket in PollerActor 2025-05-29T15:28:10.671537Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-05-29T15:28:10.671540Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:336: Session [6:26:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2025-05-29T15:28:10.671543Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-05-29T15:28:10.671548Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:219: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called 2025-05-29T15:28:10.671558Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:704: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-05-29T15:28:10.671579Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:143: InputSession [6:27:2048] [node 5] ICIS01 InputSession created 2025-05-29T15:28:10.671585Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:219: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called 2025-05-29T15:28:10.671599Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:704: InputSession [6:27:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-05-29T15:28:10.671603Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:219: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called 2025-05-29T15:28:10.671606Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:704: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-05-29T15:28:10.671610Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:219: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called 2025-05-29T15:28:10.671613Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:704: InputSession [5:25:2 ... 
RCONNECT_SESSION INFO: interconnect_tcp_session.cpp:245: Session [6:26:2048] [node 5] ICS08 incoming handshake Self# [5:2648910911:0] Peer# [6:249148113:0] Counter# 1 LastInputSerial# 1 2025-05-29T15:28:10.672492Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:210: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply 2025-05-29T15:28:10.672539Z node 5 :INTERCONNECT DEBUG: interconnect_resolve.cpp:128: ICR04 Host: ::1, RESOLVED address 2025-05-29T15:28:10.672605Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:52386 2025-05-29T15:28:10.672635Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:878: Handshake [6:35:2061] [node 0] ICH02 starting incoming handshake 2025-05-29T15:28:10.672661Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:612: Handshake [5:29:2058] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "\213y\236\256\240\375\272L\364\232\341\333\326\220\361TV\314B\374\031\242\213Z\021\212\371\223\000vuu" 2025-05-29T15:28:10.672682Z node 5 :INTERCONNECT INFO: interconnect_handshake.cpp:375: Handshake [5:29:2058] [node 6] ICH04 handshake succeeded 2025-05-29T15:28:10.672772Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:338: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2025-05-29T15:28:10.672779Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:445: Proxy [5:1:2048] [node 6] ICP111 dropped incoming handshake: [5:32:2059] poison: true 2025-05-29T15:28:10.672784Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:460: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:29:2058] poison: false 2025-05-29T15:28:10.672787Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:180: Proxy [5:1:2048] [node 6] ICP77 @350 StateWork -> StateWork 2025-05-29T15:28:10.672792Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:259: Session [5:24:2048] [node 6] ICS09 handshake done sender: [5:29:2058] self: [5:2648910911:0] peer: [6:249148113:0] socket: 29 2025-05-29T15:28:10.672796Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:281: Session [5:24:2048] [node 6] ICS10 traffic start 2025-05-29T15:28:10.672803Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:297: Session [5:24:2048] [node 6] ICS11 registering socket in PollerActor 2025-05-29T15:28:10.672807Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2025-05-29T15:28:10.672810Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:336: Session [5:24:2048] [node 6] ICS06 rewind SendQueue size# 1 LastConfirmed# 1 NextSerial# 2 2025-05-29T15:28:10.672826Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2025-05-29T15:28:10.672961Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:143: InputSession [5:36:2048] [node 6] ICIS01 InputSession created 2025-05-29T15:28:10.673154Z node 6 :INTERCONNECT NOTICE: interconnect_tcp_proxy.cpp:408: Proxy [6:9:2048] [node 5] ICP25 outgoing handshake failed, temporary: 0 explanation: outgoing handshake Peer# ::1(::1:4821) Socket error# connection unexpectedly closed state# ReceiveResponse processed# 0 remain# 52 incoming: [6:33:2060] held: no 2025-05-29T15:28:10.673160Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:460: Proxy [6:9:2048] [node 5] ICP052 dropped outgoing handshake: [6:28:2059] poison: false 2025-05-29T15:28:10.673164Z node 6 
:INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:431: Proxy [6:9:2048] [node 5] ICP28 other handshake is still going on 2025-05-29T15:28:10.673177Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:219: InputSession [5:36:2048] [node 6] ICIS02 ReceiveData called 2025-05-29T15:28:10.673185Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:704: InputSession [5:36:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-05-29T15:28:10.673193Z node 6 :INTERCONNECT INFO: interconnect_handshake.cpp:375: Handshake [6:33:2060] [node 5] ICH04 handshake succeeded 2025-05-29T15:28:10.673241Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:334: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded 2025-05-29T15:28:10.673245Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:445: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:33:2060] poison: false 2025-05-29T15:28:10.673248Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:180: Proxy [6:9:2048] [node 5] ICP77 @350 StateWork -> StateWork 2025-05-29T15:28:10.673252Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:259: Session [6:26:2048] [node 5] ICS09 handshake done sender: [6:33:2060] self: [6:249148113:0] peer: [5:2648910911:0] socket: 31 2025-05-29T15:28:10.673255Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:281: Session [6:26:2048] [node 5] ICS10 traffic start 2025-05-29T15:28:10.673262Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:297: Session [6:26:2048] [node 5] ICS11 registering socket in PollerActor 2025-05-29T15:28:10.673266Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-05-29T15:28:10.673269Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_channel.cpp:59: OutputChannel 0 [node 5] ICOCH98 Dropping confirmed messages 2025-05-29T15:28:10.673273Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:991: Session [6:26:2048] [node 5] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets 2025-05-29T15:28:10.673278Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:336: Session [6:26:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 1 NextSerial# 2 2025-05-29T15:28:10.673281Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-05-29T15:28:10.673285Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:219: InputSession [5:36:2048] [node 6] ICIS02 ReceiveData called 2025-05-29T15:28:10.673289Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:704: InputSession [5:36:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-05-29T15:28:10.673300Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:143: InputSession [6:37:2048] [node 5] ICIS01 InputSession created 2025-05-29T15:28:10.673336Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:219: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called 2025-05-29T15:28:10.673341Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:704: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err# 2025-05-29T15:28:10.673349Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:704: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-05-29T15:28:10.673352Z node 6 :INTERCONNECT_SESSION DEBUG: 
interconnect_tcp_input_session.cpp:704: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-05-29T15:28:10.673355Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:219: InputSession [5:36:2048] [node 6] ICIS02 ReceiveData called 2025-05-29T15:28:10.673358Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:704: InputSession [5:36:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-05-29T15:28:10.673420Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2025-05-29T15:28:10.673425Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2025-05-29T15:28:10.673429Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:219: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called 2025-05-29T15:28:10.673432Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:704: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-05-29T15:28:10.673435Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:219: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called 2025-05-29T15:28:10.673438Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:704: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-05-29T15:28:10.673464Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2025-05-29T15:28:10.673468Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2025-05-29T15:28:10.673473Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-05-29T15:28:10.673477Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-05-29T15:28:10.673483Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:147: Session [6:26:2048] [node 5] ICS02 send event from: [6:18:2056] to: [5:17:2056] 2025-05-29T15:28:10.673495Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:929: Session [6:26:2048] [node 5] ICS22 outgoing packet Serial# 2 Confirm# 2 DataSize# 84 InflightDataAmount# 84 2025-05-29T15:28:10.673519Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-05-29T15:28:10.673524Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-05-29T15:28:10.673528Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-05-29T15:28:10.673586Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:219: InputSession [5:36:2048] [node 6] ICIS02 ReceiveData called 2025-05-29T15:28:10.673591Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:704: InputSession [5:36:2048] [node 6] ICIS12 Read recvres# 106 num# 1 err# 2025-05-29T15:28:10.673599Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:704: InputSession [5:36:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-05-29T15:28:10.673604Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [5:24:2048] [node 6] ICS23 confirm count: 2 2025-05-29T15:28:10.673607Z 
node 5 :INTERCONNECT_SESSION DEBUG: interconnect_channel.cpp:59: OutputChannel 0 [node 6] ICOCH98 Dropping confirmed messages 2025-05-29T15:28:10.673610Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:991: Session [5:24:2048] [node 6] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets 2025-05-29T15:28:10.673613Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:940: Session [5:24:2048] [node 6] ICS23 confirm count: 2 2025-05-29T15:28:10.673621Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:96: Session [5:24:2048] [node 6] ICS01 socket: 29 reason# 2025-05-29T15:28:10.673625Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:542: Proxy [5:1:2048] [node 6] ICP30 unregister session Session# [5:24:2048] VirtualId# [5:2648910911:0] 2025-05-29T15:28:10.673629Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:180: Proxy [5:1:2048] [node 6] ICP77 @206 StateWork -> PendingActivation 2025-05-29T15:28:10.673631Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:544: Session [5:24:2048] [node 6] ICS25 shutdown socket, reason# 2025-05-29T15:28:10.673657Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_channel.cpp:337: OutputChannel 0 [node 6] ICOCH89 Notyfying about Undelivered messages! NotYetConfirmed size: 0, Queue size: 0 >> TIterator::MixedReverse [GOOD] >> TIterator::Serial >> TExecutorDb::RandomCoordinatorSimulation [GOOD] >> TExecutorDb::MultiPage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] Test command err: 2025-05-29T15:28:10.698376Z node 4 :INTERCONNECT WARN: interconnect_handshake.cpp:501: Handshake [4:20:2056] [node 3] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-05-29T15:28:11.107445Z node 5 :INTERCONNECT WARN: interconnect_handshake.cpp:501: Handshake [5:18:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-05-29T15:28:11.518499Z node 8 :INTERCONNECT WARN: interconnect_handshake.cpp:501: Handshake [8:20:2056] [node 7] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-05-29T15:28:11.518829Z node 7 :INTERCONNECT WARN: interconnect_handshake.cpp:501: Handshake [7:18:2057] [node 8] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw >> TInterconnectTest::TestNotifyUndelivered [GOOD] >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor >> TExecutorDb::MultiPage [GOOD] >> TExecutorDb::EncodedPage >> TIterator::Serial [GOOD] >> TIterator::SerialReverse >> TExecutorDb::EncodedPage [GOOD] >> TFlatCxxDatabaseTest::BasicSchemaTest [GOOD] >> TFlatCxxDatabaseTest::RenameColumnSchemaTest [GOOD] >> TFlatCxxDatabaseTest::SchemaFillerTest [GOOD] >> TFlatDatabaseDecimal::UpdateRead [GOOD] >> TFlatEraseCacheTest::BasicUsage [GOOD] >> TFlatEraseCacheTest::BasicUsageReverse [GOOD] >> TFlatEraseCacheTest::CacheEviction [GOOD] >> TFlatEraseCacheTest::StressGarbageCollection [GOOD] >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings [GOOD] >> TFlatExecutorLeases::Basics >> TIterator::SerialReverse [GOOD] >> TIterator::GetKey [GOOD] >> TIterator::GetKeyWithEraseCache [GOOD] >> TIterator::GetKeyWithVersionSkips [GOOD] >> TLegacy::IndexIter 
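The handshake traces above all follow one shape: the connecting side sends an ExRequest carrying Protocol, Serial, SenderActorId and ClusterUUID plus the requested options; the peer answers ExReply Success echoing what was negotiated (UseModernFrame, UseExternalDataChannel, UseXxhash, UseXdcShuffle); and since neither CompatibilityInfo nor VersionTag is present, the ICH09 warning fires and the peer is accepted by default. A compressed sketch of that acceptance decision, with the structure and field names assumed for illustration rather than taken from the interconnect sources:

    #include <optional>
    #include <string>

    // Hypothetical shape; the real handshake messages are serialized ExBlocks.
    struct TExRequest {
        std::optional<std::string> CompatibilityInfo;  // absent in this test run
        std::optional<std::string> VersionTag;         // absent in this test run
        std::string ClusterUUID;                       // "Cluster for process with id: ..."
    };

    // Mirrors the ICH09 behaviour logged above: with nothing to validate,
    // the handshake warns and accepts the peer by default.
    bool AcceptPeer(const TExRequest& req, const std::string& ownClusterUUID) {
        if (!req.CompatibilityInfo && !req.VersionTag) {
            return true;  // "Neither CompatibilityInfo nor VersionTag ... accepting by default"
        }
        return req.ClusterUUID == ownClusterUUID;  // simplified stand-in check
    }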
>> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor [GOOD] >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes >> IndexBuildTest::CancelBuild [GOOD] >> TBtreeIndexBuilder::NoNodes [GOOD] >> TBtreeIndexBuilder::OneNode [GOOD] >> TBtreeIndexBuilder::FewNodes [GOOD] >> TBtreeIndexBuilder::SplitBySize [GOOD] >> TBtreeIndexNode::TIsNullBitmap [GOOD] >> TBtreeIndexNode::CompareTo [GOOD] >> TBtreeIndexNode::Basics [GOOD] >> TBtreeIndexNode::Group [GOOD] >> TBtreeIndexNode::History [GOOD] >> TBtreeIndexNode::OneKey [GOOD] >> TBtreeIndexNode::Reusable [GOOD] >> TBtreeIndexNode::CutKeys [GOOD] >> TBtreeIndexTPart::Conf [GOOD] >> TBtreeIndexTPart::NoNodes [GOOD] >> TBtreeIndexTPart::OneNode [GOOD] >> TBtreeIndexTPart::FewNodes [GOOD] >> TBtreeIndexTPart::Erases [GOOD] >> TBtreeIndexTPart::Groups [GOOD] >> TBtreeIndexTPart::History [GOOD] >> TBtreeIndexTPart::External >> TLegacy::IndexIter [GOOD] >> TLegacy::ScreenedIndexIter [GOOD] >> TLegacy::StatsIter [GOOD] >> TPageHandleTest::Uninitialized [GOOD] >> TPageHandleTest::NormalUse [GOOD] >> TPageHandleTest::HandleRef [GOOD] >> TPageHandleTest::PinnedRef [GOOD] >> TPageHandleTest::PinnedRefPure [GOOD] >> TPart::BasicColumnGroups [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] >> TVersions::Wreck2 [GOOD] >> TVersions::Wreck2Reverse >> IndexBuildTest::CheckLimitWithDroppedIndex >> BuildStatsHistogram::Single_History_Slices [GOOD] >> BuildStatsHistogram::Ten_Mixed >> TBtreeIndexTPart::External [GOOD] >> TChargeBTreeIndex::NoNodes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:57:2097]) rebooted! !Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed! 
new actor is[2:82:2111] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:84:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! 
new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:86:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:88:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! new actor is[7:87:2114] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:173:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:88:2057] recipient: [8:86:2115] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:90:2057] recipient: [8:86:2115] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! 
new actor is[8:89:2116] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:175:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:88:2057] recipient: [9:87:2115] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:90:2057] recipient: [9:87:2115] !Reboot 72057594037927937 (actor [9:57:2097]) rebooted! !Reboot 72057594037927937 (actor [9:57:2097]) tablet resolver refreshed! new actor is[9:89:2116] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:175:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:89:2057] recipient: [10:87:2115] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:91:2057] recipient: [10:87:2115] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! new actor is[10:90:2116] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:176:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:91:2057] recipient: [11:89:2117] Leader for TabletID 72057594037927937 is [11:92:2118] sender: [11:93:2057] recipient: [11:89:2117] !Reboot 72057594037927937 (actor [11:57:2097]) rebooted! !Reboot 72057594037927937 (actor [11:57:2097]) tablet resolver refreshed! 
new actor is[11:92:2118] Leader for TabletID 72057594037927937 is [11:92:2118] sender: [11:178:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... TabletID 72057594037927937 is [13:57:2097] sender: [13:91:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:92:2057] recipient: [13:90:2117] Leader for TabletID 72057594037927937 is [13:93:2118] sender: [13:94:2057] recipient: [13:90:2117] !Reboot 72057594037927937 (actor [13:57:2097]) rebooted! !Reboot 72057594037927937 (actor [13:57:2097]) tablet resolver refreshed! new actor is[13:93:2118] Leader for TabletID 72057594037927937 is [13:93:2118] sender: [13:179:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:55:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:55:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:58:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:75:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:55:2057] recipient: [15:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:55:2057] recipient: [15:52:2095] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:58:2057] recipient: [15:52:2095] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:75:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:55:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:55:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:58:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:75:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:77:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:80:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:81:2057] recipient: [16:79:2110] Leader for TabletID 72057594037927937 is [16:82:2111] sender: [16:83:2057] recipient: [16:79:2110] !Reboot 72057594037927937 (actor [16:57:2097]) rebooted! !Reboot 72057594037927937 (actor [16:57:2097]) tablet resolver refreshed! new actor is[16:82:2111] Leader for TabletID 72057594037927937 is [16:82:2111] sender: [16:168:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:55:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:55:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:58:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:75:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:77:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:80:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:81:2057] recipient: [17:79:2110] Leader for TabletID 72057594037927937 is [17:82:2111] sender: [17:83:2057] recipient: [17:79:2110] !Reboot 72057594037927937 (actor [17:57:2097]) rebooted! !Reboot 72057594037927937 (actor [17:57:2097]) tablet resolver refreshed! new actor is[17:82:2111] Leader for TabletID 72057594037927937 is [17:82:2111] sender: [17:168:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:55:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:55:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:58:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:75:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:78:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:80:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:82:2057] recipient: [18:81:2110] Leader for TabletID 72057594037927937 is [18:83:2111] sender: [18:84:2057] recipient: [18:81:2110] !Reboot 72057594037927937 (actor [18:57:2097]) rebooted! !Reboot 72057594037927937 (actor [18:57:2097]) tablet resolver refreshed! new actor is[18:83:2111] Leader for TabletID 72057594037927937 is [18:83:2111] sender: [18:169:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:55:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:55:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:58:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:75:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:81:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:84:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:85:2057] recipient: [19:83:2113] Leader for TabletID 72057594037927937 is [19:86:2114] sender: [19:87:2057] recipient: [19:83:2113] !Reboot 72057594037927937 (actor [19:57:2097]) rebooted! !Reboot 72057594037927937 (actor [19:57:2097]) tablet resolver refreshed! new actor is[19:86:2114] Leader for TabletID 72057594037927937 is [19:86:2114] sender: [19:172:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:55:2057] recipient: [20:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:55:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:58:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:75:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:81:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:84:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:85:2057] recipient: [20:83:2113] Leader for TabletID 72057594037927937 is [20:86:2114] sender: [20:87:2057] recipient: [20:83:2113] !Reboot 72057594037927937 (actor [20:57:2097]) rebooted! !Reboot 72057594037927937 (actor [20:57:2097]) tablet resolver refreshed! new actor is[20:86:2114] Leader for TabletID 72057594037927937 is [20:86:2114] sender: [20:172:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:55:2057] recipient: [21:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:55:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:58:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:75:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:82:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:85:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:86:2057] recipient: [21:84:2113] Leader for TabletID 72057594037927937 is [21:87:2114] sender: [21:88:2057] recipient: [21:84:2113] !Reboot 72057594037927937 (actor [21:57:2097]) rebooted! !Reboot 72057594037927937 (actor [21:57:2097]) tablet resolver refreshed! new actor is[21:87:2114] Leader for TabletID 72057594037927937 is [21:87:2114] sender: [21:173:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:55:2057] recipient: [22:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:55:2057] recipient: [22:52:2095] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:58:2057] recipient: [22:52:2095] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:75:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:85:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:87:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:89:2057] recipient: [22:88:2116] Leader for TabletID 72057594037927937 is [22:90:2117] sender: [22:91:2057] recipient: [22:88:2116] !Reboot 72057594037927937 (actor [22:57:2097]) rebooted! !Reboot 72057594037927937 (actor [22:57:2097]) tablet resolver refreshed! new actor is[22:90:2117] Leader for TabletID 72057594037927937 is [22:90:2117] sender: [22:176:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:55:2057] recipient: [23:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:55:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:58:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:75:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:85:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:88:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:89:2057] recipient: [23:87:2116] Leader for TabletID 72057594037927937 is [23:90:2117] sender: [23:91:2057] recipient: [23:87:2116] !Reboot 72057594037927937 (actor [23:57:2097]) rebooted! !Reboot 72057594037927937 (actor [23:57:2097]) tablet resolver refreshed! new actor is[23:90:2117] Leader for TabletID 72057594037927937 is [23:90:2117] sender: [23:176:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:55:2057] recipient: [24:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:55:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:58:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:75:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:86:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:89:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:90:2057] recipient: [24:88:2116] Leader for TabletID 72057594037927937 is [24:91:2117] sender: [24:92:2057] recipient: [24:88:2116] !Reboot 72057594037927937 (actor [24:57:2097]) rebooted! !Reboot 72057594037927937 (actor [24:57:2097]) tablet resolver refreshed! new actor is[24:91:2117] Leader for TabletID 72057594037927937 is [24:91:2117] sender: [24:177:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:55:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:55:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:58:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:75:2057] recipient: [25:14:2061]
|70.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TPart::BasicColumnGroups [GOOD]
Test command err:
00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:08.156940Z 00000.003 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.003 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.003 II| FAKE_ENV: Starting storage for BS group 0 00000.003 II| FAKE_ENV: Starting storage for BS group 1 00000.003 II| FAKE_ENV: Starting storage for BS group 2 00000.003 II| FAKE_ENV: Starting storage for BS group 3 00000.004 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.004 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.004 DD|
TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 302b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.004 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxSetResourceProfile} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxSetResourceProfile 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxSetResourceProfile} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxSetResourceProfile} hope 1 -> done Change{2, redo 0b alter 15b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxSetResourceProfile} release 4194304b of static, Memory{0 dyn 0} 00000.004 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 104856577b requested for data (104857601b in total) 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release 1024b of static, Memory{0 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release tx data 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} request Res{1 104857601b} type large_transaction 00000.004 DD| RESOURCE_BROKER: Submitted new unknown task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:30:2062]) priority=5 resources={0, 104857601} 00000.004 EE| RESOURCE_BROKER: Assigning waiting task 'Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:30:2062])' of unknown type 'large_transaction' to default queue 00000.004 DD| RESOURCE_BROKER: Allocate resources {0, 104857601} for 
task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:30:2062]) from queue queue_default 00000.004 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:30:2062])' of unknown type 'large_transaction' to default queue 00000.004 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.000000 to 12.207031 (insert task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:30:2062])) 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} acquired dyn mem Res{1 104857601b}, Memory{0 dyn 104857601} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} hope 2 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release Res{1 104857601b}, Memory{0 dyn 0} 00000.004 DD| RESOURCE_BROKER: Finish task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:30:2062]) (release resources {0, 104857601}) 00000.004 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 12.207031 to 0.000000 (remove task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:30:2062])) 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 104856577b requested for data (104857601b in total) 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} took 104857601b of static mem, Memory{104857601 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} hope 2 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release 104857601b of static, Memory{0 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 209714177b requested for data (209715201b in total) 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release 1024b of static, Memory{0 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release tx data 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} request Res{2 209715201b} type large_transaction 00000.004 DD| RESOURCE_BROKER: Submitted new unknown task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (2 by [1:30:2062]) priority=5 resources={0, 209715201} 00000.004 EE| RESOURCE_BROKER: Assigning waiting task 'Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (2 by [1:30:2062])' of unknown type 'large_transaction' to default queue 00000.004 DD| RESOURCE_BROKER: Allocate resources {0, 209715201} for task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (2 by [1:30:2062]) from queue queue_default 00000.004 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (2 by [1:30:2062])' of unknown type 'large_transaction' to default queue 00000.004 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.000000 to 23.193359 (insert task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (2 by [1:30:2062])) 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} acquired dyn mem Res{2 209715201b}, Memory{0 dyn 209715201} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} hope 2 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release Res{2 209715201b}, Memory{0 dyn 0} 00000.004 DD| RESOURCE_BROKER: Finish task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (2 by [1:30:2062]) (release resources {0, 209715201}) 00000.004 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 23.193359 to 0.000000 (remove task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (2 by [1:30:2062])) 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} queued, type 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 0} 00000.004 DD| TABLET_EXE ... 76:97:0], [1:2:54:1:24576:97:0], [1:2:55:1:24576:97:0], [1:2:56:1:24576:97:0], [1:2:57:1:24576:97:0], [1:2:58:1:24576:97:0], [1:2:59:1:24576:97:0], [1:2:60:1:24576:97:0], [1:2:61:1:24576:97:0], [1:2:62:1:24576:97:0], [1:2:63:1:24576:97:0], [1:2:64:1:24576:97:0], [1:2:65:1:24576:97:0], [1:2:66:1:24576:97:0], [1:2:67:1:24576:97:0], [1:2:68:1:24576:97:0], [1:2:69:1:24576:97:0], [1:2:70:1:24576:97:0], [1:2:71:1:24576:97:0], [1:2:72:1:24576:97:0], [1:2:73:1:24576:101:0], [1:2:74:1:24576:102:0], [1:2:75:1:24576:101:0], [1:2:76:1:24576:102:0], [1:2:77:1:24576:104:0], [1:2:78:1:24576:104:0], [1:2:79:1:24576:104:0], [1:2:80:1:24576:104:0], [1:2:81:1:24576:103:0], [1:2:82:1:24576:101:0], [1:2:83:1:24576:104:0], [1:2:84:1:24576:104:0], [1:2:85:1:24576:104:0], [1:2:86:1:24576:104:0], [1:2:87:1:24576:104:0], [1:2:88:1:24576:104:0], [1:2:89:1:24576:104:0], [1:2:90:1:24576:101:0], [1:2:91:1:24576:104:0], [1:2:92:1:24576:104:0], [1:2:93:1:24576:98:0], [1:2:94:1:24576:104:0], [1:2:95:1:24576:104:0], [1:2:96:1:24576:104:0], [1:2:97:1:24576:104:0], [1:2:98:1:24576:104:0], [1:2:99:1:24576:104:0], [1:2:100:1:24576:104:0], [1:2:101:1:24576:97:0], [1:2:102:1:24576:100:0], [1:2:103:1:24576:104:0], [1:2:104:1:24576:104:0], [1:2:105:1:24576:104:0], [1:2:106:1:24576:104:0], [1:2:107:1:24576:104:0], [1:2:108:1:24576:104:0], [1:2:109:1:24576:104:0], [1:2:110:1:24576:104:0], [1:2:111:1:24576:104:0], [1:2:112:1:24576:104:0], [1:2:113:1:24576:104:0], [1:2:114:1:24576:104:0], [1:2:115:1:24576:104:0], [1:2:116:1:24576:104:0], [1:2:117:1:24576:104:0], [1:2:118:1:24576:104:0], [1:2:119:1:24576:104:0], [1:2:120:1:24576:104:0], [1:2:121:1:24576:104:0], [1:2:122:1:24576:104:0], [1:2:123:1:24576:104:0], [1:2:124:1:24576:104:0], [1:2:125:1:24576:104:0], [1:2:126:1:24576:104:0], [1:2:127:1:24576:104:0], [1:2:128:1:24576:104:0], [1:2:129:1:24576:104:0], [1:2:130:1:24576:104:0], [1:2:131:1:24576:104:0], [1:2:132:1:24576:104:0], [1:2:133:1:24576:104:0], [1:2:134:1:24576:104:0], [1:2:135:1:24576:104:0], [1:2:136:1:24576:104:0], [1:2:137:1:24576:104:0], [1:2:138:1:24576:104:0], [1:2:139:1:24576:104:0], [1:2:140:1:24576:104:0], [1:2:141:1:24576:104:0], [1:2:142:1:24576:104:0], [1:2:145:1:24576:60:0], [1:2:146:1:24576:60:0] } 00000.010 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:143:1:12288:758:0] 00000.010 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:143:1:12288:758:0] owner [20:212:2237] 00000.010 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] owner [20:212:2237] cookie 4 class Online from cache [ ] already requested [ ] to request [ 22 23 24 25 ] 00000.010 TT| TABLET_SAUSAGECACHE: Receive page collection [1:2:143:1:12288:758:0] status OK pages [ 22 23 24 25 ] 00000.010 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:143:1:12288:758:0] owner [20:212:2237] class Online pages [ 22 23 24 25 ] cookie 4 00000.010 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.010 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 1880b, wait} done, Waste{2:0, 141856b +(140, 14018b), 146 trc} 00000.010 DD| TABLET_SAUSAGECACHE: Attach page collection [1:2:143:1:12288:758:0] owner [20:212:2237] 00000.010 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] owner [20:212:2237] cookie 2 
class AsyncLoad from cache [ 22 23 24 25 ] already requested [ ] to request [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 ] 00000.010 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] async queue pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 ] 00000.010 TT| TABLET_SAUSAGECACHE: Receive page collection [1:2:143:1:12288:758:0] status OK pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 ] 00000.010 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:143:1:12288:758:0] owner [20:212:2237] class AsyncLoad pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 ] cookie 2 00000.010 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:143:1:12288:758:0] owner [20:212:2237] pages [ 22 23 24 25 ] 00000.010 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{26 pages [1:2:143:1:12288:758:0] ok OK}, category 2 00000.010 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:143:1:12288:758:0] owner [20:212:2237] pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 ] 00000.010 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 00000.010 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan 00000.010 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} hope 1 -> done Change{145, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.010 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} release 4194304b of static, Memory{0 dyn 0} 00000.010 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.010 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 141856b +(0, 0b), 1 trc, -14018b acc} 00000.010 DD| TABLET_SAUSAGECACHE: Unregister owner [20:212:2237] 00000.010 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:143:1:12288:758:0] owner [20:212:2237] 00000.010 DD| TABLET_SAUSAGECACHE: Remove owner [20:212:2237] 00000.010 DD| TABLET_SAUSAGECACHE: Drop expired page collection [1:2:143:1:12288:758:0] 00000.010 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {6 1077b} miss {50 281387b} 00000.010 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.010 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14354b, 149} 00000.010 II| FAKE_ENV: DS.1 gone, left {143736b, 8}, put {157893b, 150} 00000.010 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.010 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.010 II| FAKE_ENV: All BS storage groups are stopped 00000.010 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.010 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 795}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:08.855742Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.003 II| FAKE_ENV: Model starts 
hard shutdown on level 7 of 8, left 3 actors 00000.003 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {3 512b} miss {0 0b} 00000.003 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.003 II| FAKE_ENV: DS.0 gone, left {1356b, 12}, put {1376b, 13} 00000.003 II| FAKE_ENV: DS.1 gone, left {6814b, 23}, put {6814b, 23} 00000.003 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.003 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.003 II| FAKE_ENV: All BS storage groups are stopped 00000.003 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.003 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:08.859979Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.025 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.025 NN| TABLET_SAUSAGECACHE: Poison cache serviced 10 reqs hit {860 5551893b} miss {0 0b} 00000.026 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.026 II| FAKE_ENV: DS.0 gone, left {1201b, 13}, put {1221b, 14} 00000.026 II| FAKE_ENV: DS.1 gone, left {6751256b, 17}, put {6751256b, 17} 00000.026 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.026 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.026 II| FAKE_ENV: All BS storage groups are stopped 00000.026 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.026 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:08.887541Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.840 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.840 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4109 reqs hit {2091 2366986b} miss {6144 6340608b} 00000.843 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.843 II| FAKE_ENV: DS.0 gone, left {1761b, 14}, put {1781b, 15} 00000.843 II| FAKE_ENV: DS.1 gone, left {6927727b, 27}, put {6927727b, 27} 00000.843 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.843 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.843 II| FAKE_ENV: All BS storage groups are stopped 00000.843 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.843 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:09.735281Z 00000.002 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.002 II| FAKE_ENV: Starting storage for BS group 0 00000.002 II| FAKE_ENV: Starting storage for BS group 1 00000.002 II| FAKE_ENV: Starting storage for BS group 2 00000.002 II| FAKE_ENV: Starting storage for BS group 3 00000.890 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.890 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4106 reqs hit {43 253450b} miss {4096 4227072b} 00000.891 II| 
FAKE_ENV: Shut order, stopping 4 BS groups 00000.891 II| FAKE_ENV: DS.0 gone, left {44744b, 2}, put {164747b, 16} 00000.891 II| FAKE_ENV: DS.1 gone, left {2764621b, 2068}, put {2764621b, 2068} 00000.892 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.892 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.892 II| FAKE_ENV: All BS storage groups are stopped 00000.892 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.892 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:10.632326Z 00000.002 II| FAKE_ENV: Starting storage for BS group 0 00000.002 II| FAKE_ENV: Starting storage for BS group 1 00000.002 II| FAKE_ENV: Starting storage for BS group 2 00000.002 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:10.647722Z 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.002 II| FAKE_ENV: Starting storage for BS group 1 00000.002 II| FAKE_ENV: Starting storage for BS group 2 00000.002 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:10.663977Z 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.002 II| FAKE_ENV: Starting storage for BS group 1 00000.002 II| FAKE_ENV: Starting storage for BS group 2 00000.002 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:10.679527Z 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuild [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:04.210260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:04.210284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:04.210291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:04.210297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:04.210311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:04.210316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:04.210327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:04.210343Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:04.210469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:04.210535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:04.224027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:04.224044Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:04.226357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:04.226468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:04.226522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:04.228067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:04.228247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:04.228376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:04.228427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:04.228883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:04.228935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:04.229232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:04.229244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:04.229269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:04.229278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:04.229286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:04.229323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:04.230668Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:04.245251Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:04.245327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:04.245388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:04.245429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:04.245437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:04.246025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:04.246047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:04.246104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:04.246115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:04.246121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:04.246127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:04.246447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:04.246457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:04.246460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:04.246713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:04.246722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:04.246726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:04.246731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready 
parts: 1/1 2025-05-29T15:28:04.247262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:04.247635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:04.247669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:04.247818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:04.247838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:04.247844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:04.247887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:04.247892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:04.247921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:04.247930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:04.248259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:04.248265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:04.248306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
LAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710760:0 progress is 1/1 2025-05-29T15:28:12.483782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-05-29T15:28:12.483788Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710760:0 progress is 1/1 2025-05-29T15:28:12.483795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-05-29T15:28:12.483800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-05-29T15:28:12.483812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:136:2157] message: TxId: 281474976710760 2025-05-29T15:28:12.483820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-05-29T15:28:12.483825Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710760:0 2025-05-29T15:28:12.483829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710760:0 2025-05-29T15:28:12.483843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 13 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-05-29T15:28:12.484266Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-05-29T15:28:12.484282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710760 2025-05-29T15:28:12.484295Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2338: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-05-29T15:28:12.484309Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2341: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1172:3023], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:28:12.484735Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 
Cancellation_Unlocking 2025-05-29T15:28:12.484754Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1172:3023], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:28:12.484766Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:25: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-05-29T15:28:12.485125Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled 2025-05-29T15:28:12.485142Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1172:3023], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:28:12.485147Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-05-29T15:28:12.485172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:28:12.485179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1268:3108] TestWaitNotification: OK eventTxId 102 2025-05-29T15:28:12.485581Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-05-29T15:28:12.485654Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:93: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: 
SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2025-05-29T15:28:12.485960Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:12.486036Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 79us result status StatusSuccess 2025-05-29T15:28:12.486145Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:12.486407Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { 
ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:28:12.486437Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 34us result status StatusPathDoesNotExist 2025-05-29T15:28:12.486469Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000005, drop txId: 281474976710759, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/index1" PathId: 3 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes [GOOD]
>> TInterconnectTest::TestPingPongThroughSubChannel
>> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD]
>> TKesusTest::TestAcquireSemaphoreViaRelease
>> TChargeBTreeIndex::NoNodes [GOOD]
>> TChargeBTreeIndex::NoNodes_Groups
>> IndexBuildTest::Lock
>> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD]
>> TInterconnectTest::TestPingPongThroughSubChannel [GOOD]
>> IndexBuildTest::CheckLimitWithDroppedIndex [GOOD]
>> IndexBuildTest::DropIndex
>> IndexBuildTest::Lock [GOOD]
>> IndexBuildTest::IndexPartitioningIsPersisted
>> TPart::State [GOOD]
>> TPart::Trivials [GOOD]
>> TPart::Basics [GOOD]
>> TPart::CellDefaults [GOOD]
>> TPart::Matter [GOOD]
>> TPart::External [GOOD]
>> TPart::Outer [GOOD]
>> TPart::MassCheck
>> TFlatExecutorLeases::Basics [GOOD]
>> TFlatExecutorLeases::BasicsLeaseTimeout
>> TPart::MassCheck [GOOD]
>> TPart::WreckPart
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD]
Test command err:
2025-05-29T15:27:27.354823Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:27.354859Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:27.358092Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:27.358120Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:27.369249Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:27.369371Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2156], cookie=18011914500912167074, session=0, seqNo=0) 2025-05-29T15:27:27.369403Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:27.390337Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2156], cookie=18011914500912167074, session=1) 2025-05-29T15:27:27.390502Z node 1
:KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2156], cookie=111, session=1, semaphore="Lock1" count=1) 2025-05-29T15:27:27.390540Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-05-29T15:27:27.390551Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-05-29T15:27:27.401329Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2156], cookie=111) 2025-05-29T15:27:27.401416Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2156], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-05-29T15:27:27.412126Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2156], cookie=222) 2025-05-29T15:27:27.412253Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:147:2170], cookie=1908738076548518257, name="Lock1") 2025-05-29T15:27:27.412277Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:147:2170], cookie=1908738076548518257) 2025-05-29T15:27:27.591467Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:27.591494Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:27.594322Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:27.594349Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:27.615349Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:27.615497Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2158], cookie=10035978365133948311, session=0, seqNo=0) 2025-05-29T15:27:27.615527Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:27.626061Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2158], cookie=10035978365133948311, session=1) 2025-05-29T15:27:27.626144Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:134:2159], cookie=5495534803404295501, session=0, seqNo=0) 2025-05-29T15:27:27.626172Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:27:27.636897Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:134:2159], cookie=5495534803404295501, session=2) 2025-05-29T15:27:27.637061Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-05-29T15:27:27.637100Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-05-29T15:27:27.637115Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order 
#1 session 1 2025-05-29T15:27:27.647677Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2158], cookie=111) 2025-05-29T15:27:27.647737Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2158], cookie=112, session=1, semaphore="Lock2" count=1) 2025-05-29T15:27:27.647761Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-05-29T15:27:27.647771Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-05-29T15:27:27.658290Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2158], cookie=112) 2025-05-29T15:27:27.658362Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:134:2159], cookie=222, session=2, semaphore="Lock1" count=1) 2025-05-29T15:27:27.658394Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:134:2159], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-05-29T15:27:27.668882Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2159], cookie=222) 2025-05-29T15:27:27.668903Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2159], cookie=223) 2025-05-29T15:27:27.668953Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:134:2159], cookie=333, session=2, semaphore="Lock1" count=1) 2025-05-29T15:27:27.669030Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:134:2159], cookie=334, session=2, semaphore="Lock2" count=18446744073709551615) 2025-05-29T15:27:27.679621Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2159], cookie=333) 2025-05-29T15:27:27.679647Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2159], cookie=334) 2025-05-29T15:27:28.105328Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:28.115916Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:28.470545Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:28.481238Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:28.825614Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:28.836486Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:29.181230Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:29.192068Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:29.546734Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:29.557606Z node 2 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:29.891810Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:29.902508Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:30.237075Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:30.247815Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:30.592567Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:30.603267Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:30.958394Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:30.969246Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:31.364840Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:31.375631Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:31.740496Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:31.751364Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:32.116272Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:32.127026Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:32.491950Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:32.502654Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:32.867454Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:32.878301Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:33.253909Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:33.264711Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:33.619377Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:33.630372Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:33.985886Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:33.996665Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:34.351583Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:34.362357Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:34.717576Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:34.728289Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:35.115656Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] 
TTxSelfCheck::Execute 2025-05-29T15:27:35.127303Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:35.483854Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [720575940379 ... 5-05-29T15:28:07.010456Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:07.396856Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:07.407650Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:07.763363Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:07.774245Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:08.130223Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:08.141303Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:08.497981Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:08.508914Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:08.864117Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:08.874871Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:09.250879Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:09.261650Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:09.606919Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:09.617651Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:09.976012Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:09.986935Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:10.322352Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:10.333200Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:10.678787Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:10.689553Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:11.044890Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:11.055766Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:11.401172Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:11.412318Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:11.747590Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:11.758394Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:12.093603Z node 4 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:12.104474Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:12.449766Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:12.460587Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:12.907912Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_timeout.cpp:30: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=1) 2025-05-29T15:28:12.907941Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-05-29T15:28:12.918793Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_timeout.cpp:71: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=1) 2025-05-29T15:28:12.939198Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:596:2533], cookie=1988937205242542144) 2025-05-29T15:28:12.939235Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:596:2533], cookie=1988937205242542144) 2025-05-29T15:28:12.939304Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:599:2536], cookie=5257211092468746733) 2025-05-29T15:28:12.939309Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:599:2536], cookie=5257211092468746733) 2025-05-29T15:28:12.939355Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:602:2539], cookie=1536947267769371938, name="Lock1") 2025-05-29T15:28:12.939363Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:602:2539], cookie=1536947267769371938) 2025-05-29T15:28:12.939429Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:605:2542], cookie=14636849653550498524, name="Lock1") 2025-05-29T15:28:12.939447Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:605:2542], cookie=14636849653550498524) 2025-05-29T15:28:13.129171Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:28:13.129200Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:28:13.131851Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:28:13.131875Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:28:13.153177Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:28:13.153285Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2156], cookie=5014190114628707057, session=0, seqNo=0) 2025-05-29T15:28:13.153313Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:28:13.164032Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2156], cookie=5014190114628707057, session=1) 2025-05-29T15:28:13.164106Z node 5 :KESUS_TABLET 
DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2156], cookie=12020307923017705371, session=0, seqNo=0) 2025-05-29T15:28:13.164133Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-05-29T15:28:13.174811Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2156], cookie=12020307923017705371, session=2) 2025-05-29T15:28:13.174903Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2156], cookie=4235818955466699846, session=0, seqNo=0) 2025-05-29T15:28:13.174940Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 3 2025-05-29T15:28:13.185684Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2156], cookie=4235818955466699846, session=3) 2025-05-29T15:28:13.185803Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:144:2167], cookie=11081380251590457571, name="Sem1", limit=3) 2025-05-29T15:28:13.185835Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-05-29T15:28:13.196492Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:144:2167], cookie=11081380251590457571) 2025-05-29T15:28:13.196560Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2156], cookie=111, session=1, semaphore="Sem1" count=2) 2025-05-29T15:28:13.196590Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-05-29T15:28:13.196636Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2156], cookie=222, session=2, semaphore="Sem1" count=2) 2025-05-29T15:28:13.196667Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2156], cookie=333, session=3, semaphore="Sem1" count=1) 2025-05-29T15:28:13.207434Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2156], cookie=111) 2025-05-29T15:28:13.207459Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2156], cookie=222) 2025-05-29T15:28:13.207464Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2156], cookie=333) 2025-05-29T15:28:13.207567Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:153:2176], cookie=6394088919824368435, name="Sem1") 2025-05-29T15:28:13.207585Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:153:2176], cookie=6394088919824368435) 2025-05-29T15:28:13.207638Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:156:2179], cookie=477249689865172609, name="Sem1") 2025-05-29T15:28:13.207646Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:156:2179], 
cookie=477249689865172609) 2025-05-29T15:28:13.207689Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:131:2156], cookie=444, name="Sem1") 2025-05-29T15:28:13.207710Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-05-29T15:28:13.207717Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-05-29T15:28:13.207723Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-05-29T15:28:13.218314Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:131:2156], cookie=444) 2025-05-29T15:28:13.218424Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:161:2184], cookie=8450628492353410851, name="Sem1") 2025-05-29T15:28:13.218440Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:161:2184], cookie=8450628492353410851) 2025-05-29T15:28:13.218480Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:164:2187], cookie=17498140233577569564, name="Sem1") 2025-05-29T15:28:13.218485Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:164:2187], cookie=17498140233577569564)
|70.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD]
>> TestProtocols::TestHTTPCollectedVerySlow [GOOD]
>> TestProtocols::TestHTTPRequest
>> TVersions::Wreck2Reverse [GOOD]
>> TVersions::Wreck1
>> TPart::WreckPart [GOOD]
>> TPart::PageFailEnv
>> IndexBuildTest::CancellationNotEnoughRetries
>> IndexBuildTest::RejectsCreate
>> TestProtocols::TestHTTPRequest [GOOD]
>> IndexBuildTest::DropIndex [GOOD]
>> TPart::PageFailEnv [GOOD]
>> TPart::ForwardEnv [GOOD]
>> TPart::WreckPartColumnGroups
>> IndexBuildTest::IndexPartitioningIsPersisted [GOOD]
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean
>> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady
>> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD]
>> TPart::WreckPartColumnGroups [GOOD]
>> TPart::PageFailEnvColumnGroups
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::DropIndex [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:12.881180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:12.881200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:12.881205Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:12.881209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:12.881225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:12.881229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:12.881235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:12.881246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:12.881323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:12.881385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:12.891906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:12.891932Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:12.895375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:12.895540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:12.895596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:12.897285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:12.897456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:12.897581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:12.897637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:12.898133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:12.898188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:12.898496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:12.898507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:12.898530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2025-05-29T15:28:12.898541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:12.898548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:12.898585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:12.900146Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:12.922290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:12.922380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:12.922449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:12.922501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:12.922511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:12.923294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:12.923314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:12.923366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:12.923373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:12.923377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:12.923381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:12.923759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:12.923766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at 
schemeshard: 72057594046678944 2025-05-29T15:28:12.923769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:12.924136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:12.924150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:12.924158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:12.924165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:12.924712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:12.925286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:12.925347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:12.925532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:12.925572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:12.925580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:12.925654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:12.925661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:12.925702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:12.925713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:12.926206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:12.926214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:12.926256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 5 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:13.882210Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:28:13.882215Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-05-29T15:28:13.882220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-05-29T15:28:13.882288Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:13.882297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:13.882301Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:28:13.882304Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 18446744073709551615 2025-05-29T15:28:13.882308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2025-05-29T15:28:13.882517Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:13.882531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:13.882535Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:28:13.882539Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-05-29T15:28:13.882543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2025-05-29T15:28:13.882855Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:13.882875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:13.882880Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:28:13.882983Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:13.882995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:13.882998Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:28:13.883097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-05-29T15:28:13.883105Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 105:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:13.883177Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-05-29T15:28:13.883210Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#105:0 progress is 2/3 2025-05-29T15:28:13.883215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2025-05-29T15:28:13.883219Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#105:0 progress is 2/3 2025-05-29T15:28:13.883222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2025-05-29T15:28:13.883225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: false 2025-05-29T15:28:13.883432Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:13.883450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:13.883454Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:28:13.883458Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2025-05-29T15:28:13.883462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 4 2025-05-29T15:28:13.883476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: true 2025-05-29T15:28:13.883851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2025-05-29T15:28:13.883863Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 105:2 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:13.883913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-05-29T15:28:13.883936Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#105:2 progress is 3/3 2025-05-29T15:28:13.883940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-05-29T15:28:13.883944Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#105:2 progress is 3/3 2025-05-29T15:28:13.883947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-05-29T15:28:13.883954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 105, ready parts: 3/3, is published: true 2025-05-29T15:28:13.883965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:417:2373] message: TxId: 105 2025-05-29T15:28:13.883970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-05-29T15:28:13.883975Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 105:0 2025-05-29T15:28:13.883979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 105:0 2025-05-29T15:28:13.884000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-05-29T15:28:13.884005Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 105:1 2025-05-29T15:28:13.884008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 105:1 2025-05-29T15:28:13.884013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2025-05-29T15:28:13.884017Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, 
operation id: 105:2 2025-05-29T15:28:13.884019Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 105:2 2025-05-29T15:28:13.884025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-05-29T15:28:13.884177Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:28:13.884367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:28:13.884457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:28:13.884468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:28:13.884473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:28:13.885068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:28:13.885172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-29T15:28:13.885183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:936:2858] TestWaitNotification: OK eventTxId 105
|70.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TestProtocols::TestHTTPRequest [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:57:2097]) rebooted! !Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed!
new actor is[2:82:2111] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! 
new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:86:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:88:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! new actor is[7:87:2114] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:173:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:88:2057] recipient: [8:86:2115] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:90:2057] recipient: [8:86:2115] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! 
new actor is[8:89:2116] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:175:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:88:2057] recipient: [9:86:2115] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:90:2057] recipient: [9:86:2115] !Reboot 72057594037927937 (actor [9:57:2097]) rebooted! !Reboot 72057594037927937 (actor [9:57:2097]) tablet resolver refreshed! new actor is[9:89:2116] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:175:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:89:2057] recipient: [10:87:2115] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:91:2057] recipient: [10:87:2115] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! new actor is[10:90:2116] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:176:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:88:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:91:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:92:2057] recipient: [11:90:2118] Leader for TabletID 72057594037927937 is [11:93:2119] sender: [11:94:2057] recipient: [11:90:2118] !Reboot 72057594037927937 (actor [11:57:2097]) rebooted! !Reboot 72057594037927937 (actor [11:57:2097]) tablet resolver refreshed! 
new actor is[11:93:2119] Leader for TabletID 72057594037927937 is [11:93:2119] sender: [11:179:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7927937 (actor [16:57:2097]) tablet resolver refreshed! new actor is[16:82:2111] Leader for TabletID 72057594037927937 is [16:82:2111] sender: [16:168:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:55:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:55:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:58:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:75:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:77:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:80:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:81:2057] recipient: [17:79:2110] Leader for TabletID 72057594037927937 is [17:82:2111] sender: [17:83:2057] recipient: [17:79:2110] !Reboot 72057594037927937 (actor [17:57:2097]) rebooted! !Reboot 72057594037927937 (actor [17:57:2097]) tablet resolver refreshed! new actor is[17:82:2111] Leader for TabletID 72057594037927937 is [17:82:2111] sender: [17:168:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:55:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:55:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:58:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:75:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:78:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:80:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:82:2057] recipient: [18:81:2110] Leader for TabletID 72057594037927937 is [18:83:2111] sender: [18:84:2057] recipient: [18:81:2110] !Reboot 72057594037927937 (actor [18:57:2097]) rebooted! !Reboot 72057594037927937 (actor [18:57:2097]) tablet resolver refreshed! new actor is[18:83:2111] Leader for TabletID 72057594037927937 is [18:83:2111] sender: [18:169:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:55:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:55:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:58:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:75:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:81:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:84:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:85:2057] recipient: [19:83:2113] Leader for TabletID 72057594037927937 is [19:86:2114] sender: [19:87:2057] recipient: [19:83:2113] !Reboot 72057594037927937 (actor [19:57:2097]) rebooted! !Reboot 72057594037927937 (actor [19:57:2097]) tablet resolver refreshed! new actor is[19:86:2114] Leader for TabletID 72057594037927937 is [19:86:2114] sender: [19:172:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:55:2057] recipient: [20:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:55:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:58:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:75:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:57:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:81:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:84:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:85:2057] recipient: [20:83:2113] Leader for TabletID 72057594037927937 is [20:86:2114] sender: [20:87:2057] recipient: [20:83:2113] !Reboot 72057594037927937 (actor [20:57:2097]) rebooted! !Reboot 72057594037927937 (actor [20:57:2097]) tablet resolver refreshed! new actor is[20:86:2114] Leader for TabletID 72057594037927937 is [20:86:2114] sender: [20:172:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:55:2057] recipient: [21:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:55:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:58:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:75:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:82:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:85:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:86:2057] recipient: [21:84:2113] Leader for TabletID 72057594037927937 is [21:87:2114] sender: [21:88:2057] recipient: [21:84:2113] !Reboot 72057594037927937 (actor [21:57:2097]) rebooted! !Reboot 72057594037927937 (actor [21:57:2097]) tablet resolver refreshed! new actor is[21:87:2114] Leader for TabletID 72057594037927937 is [21:87:2114] sender: [21:105:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:55:2057] recipient: [22:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:55:2057] recipient: [22:52:2095] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:58:2057] recipient: [22:52:2095] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:75:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:84:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:87:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:88:2057] recipient: [22:86:2115] Leader for TabletID 72057594037927937 is [22:89:2116] sender: [22:90:2057] recipient: [22:86:2115] !Reboot 72057594037927937 (actor [22:57:2097]) rebooted! !Reboot 72057594037927937 (actor [22:57:2097]) tablet resolver refreshed! new actor is[22:89:2116] Leader for TabletID 72057594037927937 is [22:89:2116] sender: [22:175:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:55:2057] recipient: [23:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:55:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:58:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:75:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:84:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:87:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:88:2057] recipient: [23:86:2115] Leader for TabletID 72057594037927937 is [23:89:2116] sender: [23:90:2057] recipient: [23:86:2115] !Reboot 72057594037927937 (actor [23:57:2097]) rebooted! !Reboot 72057594037927937 (actor [23:57:2097]) tablet resolver refreshed! new actor is[23:89:2116] Leader for TabletID 72057594037927937 is [23:89:2116] sender: [23:175:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:55:2057] recipient: [24:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:55:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:58:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:75:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:85:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:88:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:89:2057] recipient: [24:87:2115] Leader for TabletID 72057594037927937 is [24:90:2116] sender: [24:91:2057] recipient: [24:87:2115] !Reboot 72057594037927937 (actor [24:57:2097]) rebooted! !Reboot 72057594037927937 (actor [24:57:2097]) tablet resolver refreshed! new actor is[24:90:2116] Leader for TabletID 72057594037927937 is [24:90:2116] sender: [24:176:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:55:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:55:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:58:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:75:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:88:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:90:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:92:2057] recipient: [25:91:2118] Leader for TabletID 72057594037927937 is [25:93:2119] sender: [25:94:2057] recipient: [25:91:2118] !Reboot 72057594037927937 (actor [25:57:2097]) rebooted! !Reboot 72057594037927937 (actor [25:57:2097]) tablet resolver refreshed! new actor is[25:93:2119] Leader for TabletID 72057594037927937 is [25:93:2119] sender: [25:179:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:55:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:55:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:58:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:75:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:57:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:88:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:91:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:92:2057] recipient: [26:90:2118] Leader for TabletID 72057594037927937 is [26:93:2119] sender: [26:94:2057] recipient: [26:90:2118] !Reboot 72057594037927937 (actor [26:57:2097]) rebooted! !Reboot 72057594037927937 (actor [26:57:2097]) tablet resolver refreshed! new actor is[26:93:2119] Leader for TabletID 72057594037927937 is [26:93:2119] sender: [26:179:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:55:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:55:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:58:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:75:2057] recipient: [27:14:2061] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:13.166977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:13.166999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:13.167003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:13.167007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:13.167019Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:13.167022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:13.167029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:13.167040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:13.167121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:13.167173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:13.176227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:13.176249Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:13.178154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:13.178239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:13.178280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:13.179491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:13.179661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:13.179742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:13.179780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:13.180133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:13.180183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:13.180414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:13.180421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:13.180438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:13.180443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:13.180447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:13.180473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:13.181427Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:13.194252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:13.194328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:13.194385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:13.194425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:13.194432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:13.195185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:13.195214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:13.195277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:13.195287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:13.195293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:13.195299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:13.195789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:13.195800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:13.195805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:13.196179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:28:13.196191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:13.196199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:13.196206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:13.196762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:13.197119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:13.197151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:13.197296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:13.197314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:13.197320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:13.197362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:13.197368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:13.197395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:13.197404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:13.197705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:13.197711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-05-29T15:28:13.197750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 8 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:13.923388Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:28:13.923413Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index" took 28us result status StatusSuccess 2025-05-29T15:28:13.923509Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index" PathDescription { Self { Name: "Index" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:13.923557Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:28:13.923588Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable" took 33us result status StatusSuccess 
2025-05-29T15:28:13.923674Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "alice" } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "bob" } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: 
"\002\000\005\000\000\000alice\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\003\000\000\000bob\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPart::PageFailEnvColumnGroups [GOOD] >> TPart::ForwardEnvColumnGroups [GOOD] >> TPart::Versions [GOOD] >> TPart::ManyVersions [GOOD] >> TPart::ManyDeltas [GOOD] >> TPart::CutKeys_Lz4 [GOOD] >> TPart::CutKeys_Seek [GOOD] >> TPart::CutKeys_SeekPages [GOOD] >> TPart::CutKeys_SeekSlices [GOOD] >> TPart::CutKeys_CutString [GOOD] >> TPart::CutKeys_CutUtf8String [GOOD] >> TPartBtreeIndexIteration::NoNodes >> TBlobStorageProxySequenceTest::TestGivenBlock42GroupGenerationGreaterThanVDiskGenerations [GOOD] >> TDSProxyGetTest::TestBlock42WipedOneDiskAndErrorDurringGet [GOOD] >> TDSProxyPatchTest::NaiveErrorOnPut_ErasureMirror3dc >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry >> IndexBuildTest::RejectsCreate [GOOD] >> IndexBuildTest::RejectsDropIndex >> TDSProxyPatchTest::NaiveErrorOnPut_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusErrorWith_1_2_VdiskErrors >> TFlatExecutorLeases::BasicsLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLease >> TDSProxyPutTest::TestBlock42PutStatusErrorWith_1_2_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus1Stripe >> TPartBtreeIndexIteration::NoNodes [GOOD] >> TPartBtreeIndexIteration::NoNodes_Groups >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus1Stripe [GOOD] >> TBlobStorageProxySequenceTest::TestGivenMirror3DCGetWithFirstSlowDisk >> IndexBuildTest::RejectsDropIndex [GOOD] >> BuildStatsHistogram::Ten_Mixed [GOOD] >> BuildStatsHistogram::Ten_Serial >> TBlobStorageProxySequenceTest::TestGivenMirror3DCGetWithFirstSlowDisk [GOOD] >> TDSProxyLooksLikeLostTheBlob::TDSProxyLooksLikeLostTheBlobBlock42 |70.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus1Stripe [GOOD] Test command err: 2025-05-29T15:28:14.801477Z node 2 :BS_PROXY_PUT INFO: 
dsproxy_put.cpp:645: [7e4afa7ea38a37be] bootstrap ActorId# [2:74:2120] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-05-29T15:28:14.801553Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801560Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801564Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801568Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801572Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801577Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801582Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801586Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801590Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801594Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801597Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801601Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801605Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801609Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801613Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801617Z node 2 :BS_PROXY_PUT 
DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801621Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801628Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.801635Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:28:14.801651Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:28:14.801657Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:28:14.801663Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:28:14.801667Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:28:14.801671Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:28:14.801675Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:28:14.801680Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2025-05-29T15:28:14.801684Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2025-05-29T15:28:14.801689Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2025-05-29T15:28:14.801693Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2025-05-29T15:28:14.801699Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2025-05-29T15:28:14.801703Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2025-05-29T15:28:14.805883Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received 
{EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2025-05-29T15:28:14.805921Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-05-29T15:28:14.805930Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:28:14.805935Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:28:14.805941Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:28:14.805946Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:28:14.805950Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:28:14.805955Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.805959Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.805964Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.805968Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.805973Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.805977Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.805982Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.805996Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.806000Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.806005Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore 
disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.806009Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.806014Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.806020Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:28:14.806036Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:28:14.806042Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:28:14.806112Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2025-05-29T15:28:14.806128Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2025-05-29T15:28:14.806142Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:3:0] Marker# BPP01 2025-05-29T15:28:14.806154Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:4:0] Marker# BPP01 2025-05-29T15:28:14.806171Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:5:0] Marker# BPP01 2025-05-29T15:28:14.806214Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:6:0] Marker# BPP01 2025-05-29T15:28:14.806223Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-05-29T15:28:14.806228Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Present Marker# BPG51 2025-05-29T15:28:14.806232Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Present Marker# BPG51 2025-05-29T15:28:14.806237Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Present Marker# BPG51 2025-05-29T15:28:14.806241Z node 2 
:BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Present Marker# BPG51 2025-05-29T15:28:14.806245Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Present Marker# BPG51 2025-05-29T15:28:14.806250Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 error Marker# BPG50 2025-05-29T15:28:14.806254Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.806259Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.806263Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.806268Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.806272Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.806278Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:14.806283Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:28:14.806292Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:28:14.806297Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:28:14.806344Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:7:0] Marker# BPP01 2025-05-29T15:28:14.806352Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-05-29T15:28:14.806357Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Present Marker# BPG51 2025-05-29T15:28:14.806361Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Present Marker# BPG51 2025-05-29T15:28:14.806365Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: 
[7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Present Marker# BPG51 2025-05-29T15:28:14.806370Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Present Marker# BPG51 2025-05-29T15:28:14.806374Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Present Marker# BPG51 2025-05-29T15:28:14.806379Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 error Marker# BPG50 2025-05-29T15:28:14.806383Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 error Marker# BPG50 2025-05-29T15:28:14.806388Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 5 optimisticState# EBS_DISINTEGRATED Marker# BPG55 2025-05-29T15:28:14.806414Z node 2 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 0 BlobId# [72075186224047637:1:863:1:24576:786:0] Reported ErrorReasons# [ ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UPUUUU } { OrderNumber# 2 Situations# UUPUUU } { OrderNumber# 3 Situations# UUUPUU } { OrderNumber# 4 Situations# UUUUPU } { OrderNumber# 5 Situations# UUUUUP } { OrderNumber# 6 Situations# EUUUUU } { OrderNumber# 7 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-05-29T15:28:14.806424Z node 2 :BS_PROXY_PUT NOTICE: dsproxy_put.cpp:486: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 0 BlobId# [72075186224047637:1:863:1:24576:786:0] Reported ErrorReasons# [ ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UPUUUU } { OrderNumber# 2 Situations# UUPUUU } { OrderNumber# 3 Situations# UUUPUU } { OrderNumber# 4 Situations# UUUUPU } { OrderNumber# 5 Situations# UUUUUP } { OrderNumber# 6 Situations# EUUUUU } { OrderNumber# 7 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:28:14.806477Z node 2 :BS_PROXY_PUT NOTICE: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.45 sample PartId# [72075186224047637:1:863:1:24576:786:6] QueryCount# 1 VDiskId# [0:1:0:5:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.45 sample PartId# [72075186224047637:1:863:1:24576:786:5] QueryCount# 1 VDiskId# [0:1:0:4:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.45 sample PartId# [72075186224047637:1:863:1:24576:786:4] QueryCount# 1 VDiskId# [0:1:0:3:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.45 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.45 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 2 } TEvVPut{ TimestampMs# 
0.451 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 4.622 VDiskId# [0:1:0:0:0] NodeId# 2 Status# ERROR } TEvVPut{ TimestampMs# 4.778 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:6:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 4.83 VDiskId# [0:1:0:1:0] NodeId# 2 Status# OK } TEvVPutResult{ TimestampMs# 4.844 VDiskId# [0:1:0:2:0] NodeId# 2 Status# OK } TEvVPutResult{ TimestampMs# 4.857 VDiskId# [0:1:0:3:0] NodeId# 2 Status# OK } TEvVPutResult{ TimestampMs# 4.87 VDiskId# [0:1:0:4:0] NodeId# 2 Status# OK } TEvVPutResult{ TimestampMs# 4.887 VDiskId# [0:1:0:5:0] NodeId# 2 Status# OK } TEvVPutResult{ TimestampMs# 4.932 VDiskId# [0:1:0:6:0] NodeId# 2 Status# ERROR } TEvVPut{ TimestampMs# 5.018 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:7:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 5.062 VDiskId# [0:1:0:7:0] NodeId# 2 Status# ERROR } ] } >> TBlobStorageProxySequenceTest::TestGivenBlock42Put6PartsOnOneVDiskWhenDiscoverThenRecoverFirst >> TContinuousBackupTests::Basic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsDropIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:13.969323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:13.969344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:13.969349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:13.969352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:13.969363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:13.969366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:13.969374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:13.969385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:13.969470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:13.969528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:13.979354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:13.979377Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:13.981542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:13.981639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:13.981694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:13.983235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:13.983384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:13.983508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:13.983557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:13.983994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:13.984043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:13.984318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:13.984329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:13.984348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:13.984358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:13.984365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:13.984398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:13.985792Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:14.001622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:14.001702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at 
schemeshard: 72057594046678944 2025-05-29T15:28:14.001774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:14.001815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:14.001823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.002514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:14.002539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:14.002599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.002609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:14.002629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:14.002635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:14.003043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.003052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:14.003056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:14.003314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.003320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.003326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:14.003332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:14.003813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:14.004116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 
1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:14.004152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:14.004293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:14.004311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:14.004317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:14.004362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:14.004367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:14.004392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:14.004401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:14.004767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:14.004775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:14.004820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
HEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0 2025-05-29T15:28:15.008179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 135 } } CommitVersion { Step: 5000004 TxId: 107 } 2025-05-29T15:28:15.008188Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 135 } } CommitVersion { Step: 5000004 TxId: 107 } FAKE_COORDINATOR: Erasing txId 107 2025-05-29T15:28:15.008332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 8589936900 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 Generation: 2 2025-05-29T15:28:15.008338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0 2025-05-29T15:28:15.008352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 8589936900 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 Generation: 2 2025-05-29T15:28:15.008357Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:332: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-05-29T15:28:15.008851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.008860Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:368: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-05-29T15:28:15.008865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1059: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-05-29T15:28:15.008869Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1103: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-05-29T15:28:15.008876Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:344: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-05-29T15:28:15.008899Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 107:0 137 -> 129 2025-05-29T15:28:15.008915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:15.008924Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:28:15.009011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.009330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.009632Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:15.009643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:15.009677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:28:15.009702Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:15.009707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:207:2208], at schemeshard: 72057594046678944, txId: 107, path id: 1 2025-05-29T15:28:15.009713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:207:2208], at schemeshard: 72057594046678944, txId: 107, path id: 2 2025-05-29T15:28:15.009793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.009799Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2025-05-29T15:28:15.009816Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.009824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 107:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-29T15:28:15.009829Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 107:0 129 -> 240 2025-05-29T15:28:15.009928Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 107 2025-05-29T15:28:15.009937Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 107 2025-05-29T15:28:15.009940Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-05-29T15:28:15.009943Z 
node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:28:15.009946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:15.010093Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-05-29T15:28:15.010102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-05-29T15:28:15.010105Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-05-29T15:28:15.010108Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:28:15.010111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:28:15.010119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-05-29T15:28:15.010464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.010473Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:15.010524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:28:15.010550Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#107:0 progress is 1/1 2025-05-29T15:28:15.010553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-05-29T15:28:15.010556Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#107:0 progress is 1/1 2025-05-29T15:28:15.010561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-05-29T15:28:15.010564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-05-29T15:28:15.010573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:378:2345] message: TxId: 107 2025-05-29T15:28:15.010577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-05-29T15:28:15.010580Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 107:0 2025-05-29T15:28:15.010583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 107:0 2025-05-29T15:28:15.010596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:28:15.010971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-05-29T15:28:15.011011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-05-29T15:28:15.011288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-05-29T15:28:15.011300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:585:2544] TestWaitNotification: OK eventTxId 107 >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex >> TChargeBTreeIndex::NoNodes_Groups [GOOD] >> TChargeBTreeIndex::NoNodes_History >> TBlobStorageProxySequenceTest::TestGivenBlock42Put6PartsOnOneVDiskWhenDiscoverThenRecoverFirst [GOOD] >> TDSProxyGetTest::TestMirror32GetIntervalsWipedAllOk >> TKesusTest::TestSessionTimeoutAfterReboot [GOOD] >> TKesusTest::TestSessionStealingSameKey >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex >> TContinuousBackupTests::Basic [GOOD] >> BuildStatsHistogram::Ten_Serial [GOOD] >> BuildStatsHistogram::Ten_Crossed >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex >> TFlatExecutorLeases::BasicsInitialLease [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout >> TPartBtreeIndexIteration::NoNodes_Groups [GOOD] >> TPartBtreeIndexIteration::NoNodes_History >> TVersions::Wreck1 [GOOD] >> TVersions::Wreck1Reverse >> TKesusTest::TestSessionStealingSameKey [GOOD] >> TKesusTest::TestSessionStealingDifferentKey >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex >> TKesusTest::TestSessionStealingDifferentKey [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::Basic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:15.484386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:15.484408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:15.484412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:15.484415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:15.484424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:15.484427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:15.484433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:15.484443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:15.484515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:15.484574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:15.493324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:15.493347Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:15.495411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:15.495509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:15.495542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:15.497014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:15.497140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:15.497240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:15.497280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:15.497636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:15.497674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:15.497862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:15.497869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:15.497884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:15.497889Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:15.497893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:15.497918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.498973Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:15.512627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:15.512699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.512760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:15.512802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:15.512810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.513447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:15.513468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:15.513516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.513523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:15.513527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:15.513531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:15.513895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.513910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-05-29T15:28:15.513916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:15.514422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.514442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.514449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:15.514457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:15.515067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:15.515575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:15.515621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:15.515848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:15.515876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:15.515884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:15.515956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:15.515965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:15.516000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:15.516012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:15.516482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard 
DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:15.516491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:15.516537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... e TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 173 } } CommitVersion { Step: 5000005 TxId: 104 } 2025-05-29T15:28:15.760733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-05-29T15:28:15.760757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 173 } } CommitVersion { Step: 5000005 TxId: 104 } 2025-05-29T15:28:15.760768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 173 } } CommitVersion { Step: 5000005 TxId: 104 } FAKE_COORDINATOR: Erasing txId 104 2025-05-29T15:28:15.760979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-05-29T15:28:15.760984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-05-29T15:28:15.761008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-05-29T15:28:15.761016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:28:15.761025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 310 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-05-29T15:28:15.761037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, 
datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:15.761040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.761043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:28:15.761048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 104:0 129 -> 240 2025-05-29T15:28:15.761425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.761588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.761640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:28:15.761645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 104:0 ProgressState 2025-05-29T15:28:15.761656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 3/3 2025-05-29T15:28:15.761659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-05-29T15:28:15.761663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 3/3 2025-05-29T15:28:15.761665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-05-29T15:28:15.761671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-05-29T15:28:15.761681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2316] message: TxId: 104 2025-05-29T15:28:15.761687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-05-29T15:28:15.761694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:0 2025-05-29T15:28:15.761700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:0 2025-05-29T15:28:15.761724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:28:15.761728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:1 2025-05-29T15:28:15.761730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:1 2025-05-29T15:28:15.761734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:28:15.761737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:2 2025-05-29T15:28:15.761739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:2 2025-05-29T15:28:15.761744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:28:15.761830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:28:15.761834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:28:15.761841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:28:15.761845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:28:15.761849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:28:15.762362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-29T15:28:15.762380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:729:2642] 2025-05-29T15:28:15.762534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-05-29T15:28:15.762650Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:28:15.762692Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl" took 53us result status StatusPathDoesNotExist 2025-05-29T15:28:15.762719Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 
18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:28:15.762793Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:28:15.762805Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 15us result status StatusPathDoesNotExist 2025-05-29T15:28:15.762817Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TChargeBTreeIndex::NoNodes_History [GOOD] >> TChargeBTreeIndex::NoNodes_Groups_History |70.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest |70.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar >> TestProgram::YqlKernelEndsWithScalar [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorks [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] Test command err: 2025-05-29T15:27:26.874879Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:27:26.874914Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:27:26.879354Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:27:26.879391Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:27:26.890775Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:27:26.890902Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2156], cookie=8655725414837974384, session=0, seqNo=0) 2025-05-29T15:27:26.890937Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:27:26.911840Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2156], cookie=8655725414837974384, session=1) 2025-05-29T15:27:26.912025Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[1:131:2156], 
cookie=16381676041404419163 2025-05-29T15:27:26.912098Z node 1 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:144:2167], cookie=16074961185624698545) 2025-05-29T15:27:26.912117Z node 1 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:144:2167], cookie=16074961185624698545) 2025-05-29T15:27:27.342644Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:27.353345Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:27.708119Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:27.719104Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:28.053516Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:28.064200Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:28.408711Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:28.419487Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:28.784570Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:28.795340Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:29.140039Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:29.150850Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:29.485490Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:29.496280Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:29.840891Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:29.851583Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:30.196239Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:30.207007Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:30.602662Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:30.613566Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:30.968507Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:30.979316Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:31.334233Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:31.345076Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:31.699849Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:31.710546Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 
2025-05-29T15:27:32.065355Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:32.076203Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:32.471787Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:32.482545Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:32.836919Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:32.848064Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:33.207613Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:33.218423Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:33.573140Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:33.583755Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:33.939540Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:33.950321Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:34.325959Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:34.336792Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:34.701751Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:34.712432Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:35.068409Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:35.079268Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:35.436729Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:35.447469Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:35.811309Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:35.823015Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:36.178835Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:36.189489Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:36.554435Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:36.565121Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:36.925711Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:36.939063Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:37.300930Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:37.311695Z node 1 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:37.666676Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:37.677514Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:38.076083Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:38.086800Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:38.451811Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:38.462515Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:38.818816Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:38.829578Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:39.184289Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:39.195104Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:39.540428Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:39.551166Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:39.916585Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:39.927375Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:40.294003Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:40.304890Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:40.660418Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:40.671341Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:41.026364Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:41.037092Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:41.382354Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:41.393110Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:41.789397Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:41.800094Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:42.159231Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:42.169949Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:42.525361Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:27:42.536153Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:27:42.897991Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] 
TTxSelfCheck::Execute 2025-05-29T15:27:42.908860Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck: ... UG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:02.604579Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:02.952963Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:02.964026Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:03.320686Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:03.331564Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:03.690966Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:03.701740Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:04.046905Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:04.057780Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:04.444671Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:04.455558Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:04.810959Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:04.821872Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:05.177690Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:05.188713Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:05.545122Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:05.556166Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:05.912801Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:05.923650Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:06.300193Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:06.311159Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:06.677594Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:06.688584Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:07.046014Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:07.056851Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:07.412632Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:07.423594Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:07.780680Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: 
[72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:07.791620Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:08.151009Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:08.161805Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:08.517927Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:08.528712Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:08.874075Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:08.884849Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:09.230242Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:09.241038Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:09.586550Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:09.597279Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:09.944080Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:09.954951Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:10.310755Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:10.321515Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:10.656459Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:10.667381Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:11.002895Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:11.013782Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:11.359471Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:11.370243Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:11.838253Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:11.849186Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:12.205102Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:12.216006Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:12.571421Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:12.582439Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:12.939212Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:12.950222Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 
2025-05-29T15:28:13.306896Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:13.317944Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:13.674510Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:13.685528Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:14.042264Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:14.053296Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:14.420286Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:14.431247Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:14.787918Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:14.799111Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:15.155746Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-05-29T15:28:15.166790Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-05-29T15:28:15.492841Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-05-29T15:28:15.492870Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-05-29T15:28:15.503641Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-05-29T15:28:15.513925Z node 2 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[2:640:2566], cookie=5669422916164994959) 2025-05-29T15:28:15.513956Z node 2 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[2:640:2566], cookie=5669422916164994959) 2025-05-29T15:28:15.765840Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:28:15.765869Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:28:15.769292Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:28:15.769407Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:28:15.790662Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:28:15.790894Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:133:2158], cookie=12345, session=0, seqNo=0) 2025-05-29T15:28:15.790933Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:28:15.801769Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:133:2158], cookie=12345, session=1) 2025-05-29T15:28:15.801949Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:140:2163], cookie=23456, session=1, seqNo=0) 2025-05-29T15:28:15.812671Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: 
[72057594037927937] TTxSessionAttach::Complete (sender=[3:140:2163], cookie=23456, session=1) 2025-05-29T15:28:16.004150Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-05-29T15:28:16.004180Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-05-29T15:28:16.007083Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-05-29T15:28:16.007159Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-05-29T15:28:16.028456Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-05-29T15:28:16.028664Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:133:2158], cookie=12345, session=0, seqNo=0) 2025-05-29T15:28:16.028705Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-05-29T15:28:16.039388Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:133:2158], cookie=12345, session=1) 2025-05-29T15:28:16.039522Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:140:2163], cookie=23456, session=1, seqNo=0) 2025-05-29T15:28:16.050172Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:140:2163], cookie=23456, session=1) >> BuildStatsHistogram::Ten_Crossed [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log >> TPartBtreeIndexIteration::NoNodes_History [GOOD] >> TPartBtreeIndexIteration::OneNode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"amet.\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"7,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"7,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"amet."},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE; |70.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex |70.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |70.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |70.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |70.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex >> TFlatExecutorLeases::BasicsInitialLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseSleep |70.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexTtl [GOOD] >> TestProgram::YqlKernelEndsWith [GOOD] >> TPartBtreeIndexIteration::OneNode [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups |70.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest |70.6%| [TA] $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=timestamp;fline=native.cpp:71;event=parsing;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_tx_id;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_type;fline=native.cpp:71;event=parsing;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_id;fline=native.cpp:71;event=parsing;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=uid;fline=native.cpp:71;event=parsing;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8312;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=message;fline=native.cpp:71;event=parsing;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=174;data_size=146;sum=174;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=462;data_size=450;sum=462;count=2;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=534;data_size=522;sum=534;count=1;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=timestamp;fline=native.cpp:71;event=parsing;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_tx_id;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_type;fline=native.cpp:71;event=parsing;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_id;fline=native.cpp:71;event=parsing;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=uid;fline=native.cpp:71;event=parsing;size=8312;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=message;fline=native.cpp:71;event=parsing;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=174;data_size=146;sum=348;count=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=462;data_size=450;sum=924;count=4;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=534;data_size=522;sum=1068;count=2;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=timestamp;fline=native.cpp:71;event=parsing;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_tx_id;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_type;fline=native.cpp:71;event=parsing;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_id;fline=native.cpp:71;event=parsing;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8312;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=uid;fline=native.cpp:71;event=parsing;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=message;fline=native.cpp:71;event=parsing;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8312;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=174;data_size=146;sum=522;count=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=462;data_size=450;sum=1386;count=6;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=534;data_size=522;sum=1602;count=3;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native ... 
;fline=constructor_meta.cpp:54;memory_size=174;data_size=146;sum=5742;count=65; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=462;data_size=450;sum=15246;count=66;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=534;data_size=522;sum=17622;count=33;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=174;data_size=146;sum=5916;count=67; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=462;data_size=450;sum=15708;count=68;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=534;data_size=522;sum=18156;count=34;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=174;data_size=146;sum=6090;count=69; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=462;data_size=450;sum=16170;count=70;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=534;data_size=522;sum=18690;count=35;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=174;data_size=146;sum=6264;count=71; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=462;data_size=450;sum=16632;count=72;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=534;data_size=522;sum=19224;count=36;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=174;data_size=146;sum=6438;count=73; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=462;data_size=450;sum=17094;count=74;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=534;data_size=522;sum=19758;count=37;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=174;data_size=146;sum=6612;count=75; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=462;data_size=450;sum=17556;count=76;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=534;data_size=522;sum=20292;count=38;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=174;data_size=146;sum=6786;count=77; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=462;data_size=450;sum=18018;count=78;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=534;data_size=522;sum=20826;count=39;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=174;data_size=146;sum=6960;count=79; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=462;data_size=450;sum=18480;count=80;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=534;data_size=522;sum=21360;count=40;size_of_portion=216; 
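The IsVisible entries that follow check each portion against the read snapshot plan_step=3;tx_id=1: portions 1-10 carry remove_snapshot plan_step=1;tx_id=1 from the TTL pass and report visible=0, while portions 11-20 have no remove snapshot, report visible=1, and are then selected. A minimal C++ sketch of that rule, reconstructed from these log lines alone (TSnapshot, TPortionSketch, and IsVisible here are hypothetical names for illustration, not YDB's actual portion_info.h types):

#include <cstdint>
#include <optional>

// Snapshots order lexicographically by (plan_step, tx_id), matching the
// "plan_step=3;tx_id=1" notation in the log.
struct TSnapshot {
    uint64_t PlanStep = 0;
    uint64_t TxId = 0;

    bool operator<=(const TSnapshot& rhs) const {
        return PlanStep < rhs.PlanStep ||
               (PlanStep == rhs.PlanStep && TxId <= rhs.TxId);
    }
};

struct TPortionSketch {
    // Set once TTL (or a drop) has removed the portion.
    std::optional<TSnapshot> RemoveSnapshot;
};

// A portion stays visible to a reader unless it was removed at or before the
// reader's snapshot: remove_snapshot (1,1) <= read snapshot (3,1) hides
// portions 1-10 below; portions 11-20 have no remove snapshot and stay visible.
bool IsVisible(const TPortionSketch& portion, const TSnapshot& readSnapshot) {
    return !portion.RemoveSnapshot || !(*portion.RemoveSnapshot <= readSnapshot);
}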
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37944;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37944;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37944;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37944;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37944;index_size:0;meta:((produced=INSERTED;)););
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;)););
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37912;index_size:0;meta:((produced=INSERTED;)););
>> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex [GOOD]
>> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty [GOOD]
>> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers [GOOD]
>> TFlatTableExecutor_LongTx::MemTableLongTx [GOOD]
>> TFlatTableExecutor_LongTx::CompactUncommittedLongTx [GOOD]
>> TFlatTableExecutor_LongTx::CompactCommittedLongTx [GOOD]
>> TFlatTableExecutor_LongTx::CompactedLongTxRestart [GOOD]
>> TFlatTableExecutor_LongTx::CompactMultipleChanges [GOOD]
>> TFlatTableExecutor_LongTx::LongTxBorrow
>> BuildStatsHistogram::Ten_Mixed_Log [GOOD]
>> BuildStatsHistogram::Ten_Serial_Log
>> TFlatTableExecutor_LongTx::LongTxBorrow [GOOD]
>> TFlatTableExecutor_LongTx::MemTableLongTxRead [GOOD]
>> TFlatTableExecutor_LongTx::CompactedTxIdReuse [GOOD]
>> TFlatTableExecutor_LongTx::MergeSkewedCommitted [GOOD]
>> TFlatTableExecutor_LongTxAndBlobs::SmallValues [GOOD]
>> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues [GOOD]
>> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues [GOOD]
>> TFlatTableExecutor_LowPriorityTxs::TestEnqueueCancel [GOOD]
>> TFlatTableExecutor_LowPriorityTxs::TestLowPriority [GOOD]
>> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityCancel [GOOD]
>> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel
>> TDSProxyLooksLikeLostTheBlob::TDSProxyLooksLikeLostTheBlobBlock42 [GOOD]
>> TDSProxyPatchTest::SecuredErrorOnGetItem_Erasure4Plus2Block
>> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel [GOOD]
>> TFlatTableExecutor_MoveTableData::TestMoveSnapshot [GOOD]
>> TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower [GOOD]
>> TFlatTableExecutor_PostponedScan::TestPostponedScan [GOOD]
>> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan [GOOD]
>> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan [GOOD]
>> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD]
>> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign [GOOD]
>> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; };
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}};
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE;
|70.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TDSProxyPatchTest::SecuredErrorOnGetItem_Erasure4Plus2Block [GOOD]
>> TDSProxyPutTest::TestBlock42PutStatusOkWith_1_1_VdiskErrors
>> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability [GOOD]
>> TFlatTableExecutor_RejectProbability::SomeRejectProbability
>> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD]
>> TFlatTableExecutor_RejectProbability::SomeRejectProbability [GOOD]
>> TFlatTableExecutor_RejectProbability::ZeroRejectProbability [GOOD]
>> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables
>> BuildStatsHistogram::Ten_Serial_Log [GOOD]
>> BuildStatsHistogram::Ten_Crossed_Log
>> TDSProxyPutTest::TestBlock42PutStatusOkWith_1_1_VdiskErrors [GOOD]
>> TDsProxyQuorumTracker::CheckFailModelErasureMirror3 [GOOD]
>> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables [GOOD]
>> TFlatTableExecutor_Reschedule::TestExecuteReschedule [GOOD]
>> TFlatTableExecutor_ResourceProfile::TestExecutorSetResourceProfile [GOOD]
>> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData [GOOD]
>> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory [GOOD]
>> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages [GOOD]
>> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded [GOOD]
>> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory [GOOD]
>> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower
>> TTablesWithReboots::CopyIndexedTableWithReboots [GOOD]
>> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower [GOOD]
>> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded [GOOD]
>> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData [GOOD]
>> TVersions::Wreck1Reverse [GOOD]
>> TVersions::Wreck0
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3912;columns=5;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=2;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71;event=parsing;size=944;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=944;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=timestamp;fline=native.cpp:71;event=parsing;size=944;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=944;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_tx_id;fline=native.cpp:71;event=parsing;size=944;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=944;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_type;fline=native.cpp:71;event=parsing;size=1072;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=1072;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_id;fline=native.cpp:71;event=parsing;size=760;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=760;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=uid;fline=native.cpp:71;event=parsing;size=760;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=760;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=message;fline=native.cpp:71;event=parsing;size=760;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=760;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=94;data_size=66;sum=94;count=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=254;data_size=242;sum=254;count=2;size_of_meta=144;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=326;data_size=314;sum=326;count=1;size_of_portion=216;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;schema_version:1;level:0;;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;schema_version:1;level:0;;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;schema_version:1;level:0;;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;schema_version:1;level:0;;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;schema_version:1;level:0;;column_size:6184;index_size:0;meta:((produced=INSERTED;)););
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3 [GOOD]
Test command err:
2025-05-29T15:28:17.936279Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [7e4afa7ea38a37be] bootstrap ActorId# [3:74:2120] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13
2025-05-29T15:28:17.936345Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51
2025-05-29T15:28:17.936353Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51
2025-05-29T15:28:17.936357Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51
2025-05-29T15:28:17.936361Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51
2025-05-29T15:28:17.936366Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.936370Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.936373Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.936377Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.936381Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.936385Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.936389Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.936394Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.936398Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.936402Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.936406Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.936410Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.936414Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.936420Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.936425Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:28:17.936435Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:28:17.936440Z node 3 
:BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:28:17.936444Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:28:17.936446Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:28:17.936449Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:28:17.936452Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:28:17.936455Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2025-05-29T15:28:17.936457Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2025-05-29T15:28:17.936460Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2025-05-29T15:28:17.936462Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2025-05-29T15:28:17.936466Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2025-05-29T15:28:17.936468Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2025-05-29T15:28:17.940062Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2025-05-29T15:28:17.940094Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-05-29T15:28:17.940100Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:28:17.940103Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:28:17.940108Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:28:17.940112Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# 
[72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:28:17.940116Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2025-05-29T15:28:17.940120Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940124Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940128Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940133Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940137Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940141Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940145Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940149Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940152Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940154Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940157Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940159Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940162Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:28:17.940174Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:28:17.940178Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 6 blob Id# 
[72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:28:17.940223Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2025-05-29T15:28:17.940234Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2025-05-29T15:28:17.940241Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:3:0] Marker# BPP01 2025-05-29T15:28:17.940247Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:4:0] Marker# BPP01 2025-05-29T15:28:17.940257Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:5:0] Marker# BPP01 2025-05-29T15:28:17.940295Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:6:0] Marker# BPP01 2025-05-29T15:28:17.940302Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-05-29T15:28:17.940306Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Present Marker# BPG51 2025-05-29T15:28:17.940311Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Present Marker# BPG51 2025-05-29T15:28:17.940315Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Present Marker# BPG51 2025-05-29T15:28:17.940319Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Present Marker# BPG51 2025-05-29T15:28:17.940323Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Present Marker# BPG51 2025-05-29T15:28:17.940325Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 error Marker# BPG50 2025-05-29T15:28:17.940328Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940330Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940332Z node 3 :BS_PROXY_PUT DEBUG: 
dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940335Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940337Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940341Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:17.940344Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:28:17.940348Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:28:17.940350Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:28:17.940382Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:7:0] Marker# BPP01 2025-05-29T15:28:17.940427Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-05-29T15:28:17.940436Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:28:17.940524Z node 3 :BS_PROXY_PUT NOTICE: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.789 sample PartId# [72075186224047637:1:863:1:24576:786:6] QueryCount# 1 VDiskId# [0:1:0:5:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.789 sample PartId# [72075186224047637:1:863:1:24576:786:5] QueryCount# 1 VDiskId# [0:1:0:4:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.789 sample PartId# [72075186224047637:1:863:1:24576:786:4] QueryCount# 1 VDiskId# [0:1:0:3:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.789 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.789 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.789 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.378 VDiskId# [0:1:0:0:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 4.485 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:6:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.519 VDiskId# [0:1:0:1:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.528 
VDiskId# [0:1:0:2:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.535 VDiskId# [0:1:0:3:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.541 VDiskId# [0:1:0:4:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.552 VDiskId# [0:1:0:5:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.591 VDiskId# [0:1:0:6:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 4.647 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:7:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.678 VDiskId# [0:1:0:7:0] NodeId# 3 Status# OK } ] }
>> TestProgram::NumRowsWithNulls [GOOD]
>> TColumnEngineTestInsertTable::TestInsertCommit [GOOD]
|70.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD]
>> TestProgram::YqlKernelContains [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::NumRowsWithNulls [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"i\":\"2\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"10001\",\"t\":\"Filter\"}\nREMOVE:10001",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N5(8):{\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10002\",\"t\":\"Calculation\"}\n"]; N5[shape=box, label="N6(8):{\"i\":\"10002\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N6[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N6->N1->N2->N0->N3->N4->N5[color=red]; };
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":6}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]},{"owner_id":5,"inputs":[{"from":4}]},{"owner_id":6,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Filter"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"6":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"10002","t":"Projection"},"w":8,"id":5},"4":{"p":{"p":{"kernel":{"class_name":"SIMPLE"}},"o":"10002","t":"Calculation"},"w":8,"id":4},"0":{"p":{"i":"2","p":{"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}};
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10UInt64TypeE;
|70.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut
|70.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut
>> TPartBtreeIndexIteration::OneNode_Groups [GOOD]
>> TPartBtreeIndexIteration::OneNode_History
>> BuildStatsHistogram::Ten_Crossed_Log [GOOD]
>> BuildStatsHistogram::Five_Five_Mixed
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=insert_table.cpp:43;event=commit_insertion;path_id=0;blob_range={ Blob: DS:0:[2222:1:1:2:100:1:0] Offset: 0 Size: 0 };
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1;
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; };
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}};
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE;
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095]
Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095]
Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095]
Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095]
Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061]
!Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083]
Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061]
Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110]
Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110]
!Reboot 72057594037927937 (actor [2:57:2097]) rebooted!
!Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed!
new actor is[2:82:2111]
Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095]
Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095]
Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061]
!Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083]
Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110]
Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061]
Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110]
!Reboot 72057594037927937 (actor [3:57:2097]) rebooted!
!Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed!
new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:81:2110] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:81:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! 
new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:86:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:88:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! new actor is[7:87:2114] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:173:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:88:2057] recipient: [8:86:2115] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:90:2057] recipient: [8:86:2115] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! new actor is[8:89:2116] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:175:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:88:2057] recipient: [9:86:2115] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:90:2057] recipient: [9:86:2115] !Reboot 72057594037927937 (actor [9:57:2097]) rebooted! !Reboot 72057594037927937 (actor [9:57:2097]) tablet resolver refreshed! 
new actor is[9:89:2116] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:175:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:89:2057] recipient: [10:87:2115] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:91:2057] recipient: [10:87:2115] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! new actor is[10:90:2116] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:176:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:91:2057] recipient: [11:89:2117] Leader for TabletID 72057594037927937 is [11:92:2118] sender: [11:93:2057] recipient: [11:89:2117] !Reboot 72057594037927937 (actor [11:57:2097]) rebooted! !Reboot 72057594037927937 (actor [11:57:2097]) tablet resolver refreshed! new actor is[11:92:2118] Leader for TabletID 72057594037927937 is [11:92:2118] sender: [11:178:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... bletID 72057594037927937 is [13:57:2097] sender: [13:88:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:90:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:57:2097] sender: [13:92:2057] recipient: [13:91:2117] Leader for TabletID 72057594037927937 is [13:93:2118] sender: [13:94:2057] recipient: [13:91:2117] !Reboot 72057594037927937 (actor [13:57:2097]) rebooted! !Reboot 72057594037927937 (actor [13:57:2097]) tablet resolver refreshed! 
new actor is[13:93:2118] Leader for TabletID 72057594037927937 is [13:93:2118] sender: [13:179:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:55:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:55:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:58:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:57:2097] sender: [14:75:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:55:2057] recipient: [15:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:55:2057] recipient: [15:52:2095] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:58:2057] recipient: [15:52:2095] Leader for TabletID 72057594037927937 is [15:57:2097] sender: [15:75:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:55:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:55:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:58:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:75:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:77:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:80:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:57:2097] sender: [16:81:2057] recipient: [16:79:2110] Leader for TabletID 72057594037927937 is [16:82:2111] sender: [16:83:2057] recipient: [16:79:2110] !Reboot 72057594037927937 (actor [16:57:2097]) rebooted! !Reboot 72057594037927937 (actor [16:57:2097]) tablet resolver refreshed! new actor is[16:82:2111] Leader for TabletID 72057594037927937 is [16:82:2111] sender: [16:168:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:55:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:55:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:58:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:75:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:77:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:80:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:57:2097] sender: [17:81:2057] recipient: [17:79:2110] Leader for TabletID 72057594037927937 is [17:82:2111] sender: [17:83:2057] recipient: [17:79:2110] !Reboot 72057594037927937 (actor [17:57:2097]) rebooted! !Reboot 72057594037927937 (actor [17:57:2097]) tablet resolver refreshed! 
new actor is[17:82:2111] Leader for TabletID 72057594037927937 is [17:82:2111] sender: [17:168:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:55:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:55:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:58:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:75:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:78:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:81:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:57:2097] sender: [18:82:2057] recipient: [18:80:2110] Leader for TabletID 72057594037927937 is [18:83:2111] sender: [18:84:2057] recipient: [18:80:2110] !Reboot 72057594037927937 (actor [18:57:2097]) rebooted! !Reboot 72057594037927937 (actor [18:57:2097]) tablet resolver refreshed! new actor is[18:83:2111] Leader for TabletID 72057594037927937 is [18:83:2111] sender: [18:169:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:55:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:55:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:58:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:75:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:81:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:84:2057] recipient: [19:83:2113] Leader for TabletID 72057594037927937 is [19:57:2097] sender: [19:85:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:86:2114] sender: [19:87:2057] recipient: [19:83:2113] !Reboot 72057594037927937 (actor [19:57:2097]) rebooted! !Reboot 72057594037927937 (actor [19:57:2097]) tablet resolver refreshed! new actor is[19:86:2114] Leader for TabletID 72057594037927937 is [19:86:2114] sender: [19:172:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:55:2057] recipient: [20:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:55:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:58:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:75:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:57:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:81:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:84:2057] recipient: [20:83:2113] Leader for TabletID 72057594037927937 is [20:57:2097] sender: [20:85:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:86:2114] sender: [20:87:2057] recipient: [20:83:2113] !Reboot 72057594037927937 (actor [20:57:2097]) rebooted! !Reboot 72057594037927937 (actor [20:57:2097]) tablet resolver refreshed! 
new actor is[20:86:2114] Leader for TabletID 72057594037927937 is [20:86:2114] sender: [20:172:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:55:2057] recipient: [21:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:55:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:58:2057] recipient: [21:51:2095] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:75:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:82:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:85:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:57:2097] sender: [21:86:2057] recipient: [21:84:2113] Leader for TabletID 72057594037927937 is [21:87:2114] sender: [21:88:2057] recipient: [21:84:2113] !Reboot 72057594037927937 (actor [21:57:2097]) rebooted! !Reboot 72057594037927937 (actor [21:57:2097]) tablet resolver refreshed! new actor is[21:87:2114] Leader for TabletID 72057594037927937 is [21:87:2114] sender: [21:105:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:55:2057] recipient: [22:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:55:2057] recipient: [22:52:2095] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:58:2057] recipient: [22:52:2095] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:75:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:84:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:87:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:57:2097] sender: [22:88:2057] recipient: [22:86:2115] Leader for TabletID 72057594037927937 is [22:89:2116] sender: [22:90:2057] recipient: [22:86:2115] !Reboot 72057594037927937 (actor [22:57:2097]) rebooted! !Reboot 72057594037927937 (actor [22:57:2097]) tablet resolver refreshed! new actor is[22:89:2116] Leader for TabletID 72057594037927937 is [22:89:2116] sender: [22:175:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:55:2057] recipient: [23:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:55:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:58:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:75:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:57:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:84:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:87:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:88:2057] recipient: [23:86:2115] Leader for TabletID 72057594037927937 is [23:89:2116] sender: [23:90:2057] recipient: [23:86:2115] !Reboot 72057594037927937 (actor [23:57:2097]) rebooted! !Reboot 72057594037927937 (actor [23:57:2097]) tablet resolver refreshed! 
new actor is[23:89:2116] Leader for TabletID 72057594037927937 is [23:89:2116] sender: [23:175:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:55:2057] recipient: [24:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:55:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:58:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:75:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:85:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:88:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:89:2057] recipient: [24:87:2115] Leader for TabletID 72057594037927937 is [24:90:2116] sender: [24:91:2057] recipient: [24:87:2115] !Reboot 72057594037927937 (actor [24:57:2097]) rebooted! !Reboot 72057594037927937 (actor [24:57:2097]) tablet resolver refreshed! new actor is[24:90:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:55:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:55:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:58:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:75:2057] recipient: [25:14:2061] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:07.191324Z 00000.004 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.004 II| FAKE_ENV: Starting storage for BS group 0 00000.004 II| FAKE_ENV: Starting storage for BS group 1 00000.004 II| FAKE_ENV: Starting storage for BS group 2 00000.004 II| FAKE_ENV: Starting storage for BS group 3 00000.053 C1| TABLET_EXECUTOR: Tablet 1 unhandled exception std::runtime_error: test ??+0 (0xCD01D32) ??+0 (0xCD01CD7) NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Exceptions::TTxExecuteThrowException::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&)+57 (0xC83C179) NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*)+1312 (0xF2D4750) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+167 (0xF2BF537) NActors::IActor::Receive(TAutoPtr&)+85 (0xD3F8D45) 00000.053 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.053 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.053 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.053 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {62b, 2} 00000.053 II| FAKE_ENV: DS.1 gone, left {35b, 1}, put {35b, 1} 00000.053 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.053 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.053 II| FAKE_ENV: All BS storage groups are stopped 00000.053 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.053 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 1 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:07.245581Z 00000.002 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 
262144 AsyncQueueInFlyLimit: 262144 00000.002 II| FAKE_ENV: Starting storage for BS group 0 00000.002 II| FAKE_ENV: Starting storage for BS group 1 00000.002 II| FAKE_ENV: Starting storage for BS group 2 00000.002 II| FAKE_ENV: Starting storage for BS group 3 00000.003 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.003 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.003 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.003 II| FAKE_ENV: DS.0 gone, left {111b, 2}, put {131b, 3} 00000.003 II| FAKE_ENV: DS.1 gone, left {42b, 2}, put {42b, 2} 00000.003 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.003 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.003 II| FAKE_ENV: All BS storage groups are stopped 00000.003 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.003 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:07.249510Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.002 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 4 actors 00000.002 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.002 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.002 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.002 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.002 II| FAKE_ENV: DS.0 gone, left {561b, 14}, put {623b, 16} 00000.002 II| FAKE_ENV: DS.1 gone, left {693b, 8}, put {693b, 8} 00000.002 II| FAKE_ENV: All BS storage groups are stopped 00000.002 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.002 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:07.260532Z 00000.002 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.002 II| FAKE_ENV: Starting storage for BS group 0 00000.002 II| FAKE_ENV: Starting storage for BS group 1 00000.002 II| FAKE_ENV: Starting storage for BS group 2 00000.002 II| FAKE_ENV: Starting storage for BS group 3 00000.003 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 4 actors 00000.003 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.003 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.003 II| FAKE_ENV: DS.0 gone, left {141b, 4}, put {669b, 13} 00000.003 II| FAKE_ENV: DS.1 gone, left {868b, 8}, put {987b, 10} 00000.003 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.003 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.003 II| FAKE_ENV: All BS storage groups are stopped 00000.003 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.003 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:07.264696Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting 
storage for BS group 3 00000.001 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.001 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 ... initializing schema 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 ... inserting rows 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{2, redo 512b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 ... starting follower ... waiting for follower attach ... blocking NKikimr::TEvTablet::TEvNewFollowerAttached from TABLET_ACTOR to NKikimr::NTabletFlatExecutor::TTestFlatTablet cookie 0 ... waiting for follower attach (done) ... 
spamming QueueScan transactions 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.002 II| TABLET_EXECUTOR: Leader{1:2:5} starting Scan{2 on 101, TEmptyScan{}} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} hope 1 -> done Change{3, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} release 4194304b of static, Memory{0 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 8 for step 4 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.002 II| TABLET_EXECUTOR: Leader{1:2:6} starting Scan{4 on 101, TEmptyScan{}} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} hope 1 -> done Change{3, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} release 4194304b of static, Memory{0 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 8 for step 5 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.002 II| TABLET_EXECUTOR: Leader{1:2:7} starting Scan{6 on 101, TEmptyScan{}} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} hope 1 -> done Change{3, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} release 4194304b of static, Memory{0 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 8 for step 6 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.002 II| TABLET_EXECUTOR: Leader{1:2:8} starting Scan{8 on 101, TEmptyScan{}} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} hope 1 -> done Change{3, redo 0b alter 
0b annex 0, ~{ } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} release 4194304b of static, Memory{0 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 8 for step 7 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.002 II| TABLET_EXECUTOR: Leader{1:2:9} starting Scan{10 on 101, TEmptyScan{}} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Follower::TTxQueueScan} hope 1 -> done Change{3, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1 ... ange{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} found attached Res{10 20480b} 00000.004 DD| TABLET_EXECUTOR: release 10240b of static tx data due to attached res 10, Memory{0 dyn 20480} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{24, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 524267520b requested for data (524288000b in total) 00000.004 EE| TABLET_EXECUTOR: Leader{1:2:4} Tx{24, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} mem 524288000b terminated, limit 314572800b is exceeded 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{24, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release Res{10 20480b}, Memory{0 dyn 0} 00000.004 DD| RESOURCE_BROKER: Update cookie for task Tx{23, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (10 by [56:30:2062]) 00000.004 DD| RESOURCE_BROKER: Finish task Tx{23, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (10 by [56:30:2062]) (release resources {0, 20480}) 00000.004 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.001311 to 0.000000 (remove task Tx{23, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (10 by [56:30:2062])) 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 19456b requested for data (20480b in total) 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{25, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release 1024b of static, Memory{0 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release tx data 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} request Res{11 20480b} type small_transaction 00000.004 DD| RESOURCE_BROKER: Submitted new unknown task Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (11 by [56:30:2062]) priority=5 resources={0, 20480} 00000.004 EE| RESOURCE_BROKER: Assigning waiting task 'Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (11 by [56:30:2062])' of unknown type 'small_transaction' to default queue 00000.004 DD| RESOURCE_BROKER: Allocate resources {0, 20480} for task Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (11 by [56:30:2062]) from queue queue_default 00000.004 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (11 by [56:30:2062])' of unknown type 'small_transaction' to default queue 00000.004 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.000000 to 0.001192 (insert task Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (11 by [56:30:2062])) 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} acquired dyn mem Res{11 20480b}, Memory{0 dyn 20480} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} hope 2 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} update resource task 11 releasing 0b, Memory{0 dyn 20480} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} captured Res{11 20480b} 00000.004 DD| RESOURCE_BROKER: Update task Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (11 by [56:30:2062]) (priority=5 type=small_transaction resources={0, 20480} resubmit=0) 00000.004 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (11 by [56:30:2062])' of unknown type 'small_transaction' to default queue 00000.004 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.000000 to 0.001192 (insert task Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (11 by [56:30:2062])) 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{26, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 20480} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 19456b requested for data (20480b in total) 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release 1024b of static, Memory{0 dyn 20480} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release tx data 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} request Res{12 20480b} type small_transaction 00000.004 DD| RESOURCE_BROKER: Submitted new unknown task Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (12 by [56:30:2062]) priority=5 resources={0, 20480} 00000.004 EE| RESOURCE_BROKER: Assigning waiting task 'Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (12 by [56:30:2062])' of unknown type 'small_transaction' to default queue 00000.004 DD| RESOURCE_BROKER: Allocate resources {0, 20480} for task Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (12 by [56:30:2062]) from queue queue_default 00000.004 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (12 by [56:30:2062])' of unknown type 'small_transaction' to default queue 00000.004 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.001192 to 0.002384 (insert task Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (12 by [56:30:2062])) 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} acquired dyn mem Res{12 20480b}, Memory{0 dyn 40960} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} hope 2 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} found attached Res{11 20480b} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} moving tx data from attached Res{11 20480b} to Res{12 ...} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 524267520b requested for data (524288000b in total) 00000.004 EE| TABLET_EXECUTOR: Leader{1:2:4} Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} mem 524288000b terminated, limit 314572800b is exceeded 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} release Res{12 40960b}, Memory{0 dyn 0} 
00000.004 DD| RESOURCE_BROKER: Update task Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (12 by [56:30:2062]) (priority=5 type=small_transaction resources={0, 40960} resubmit=0) 00000.004 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (12 by [56:30:2062])' of unknown type 'small_transaction' to default queue 00000.004 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.001192 to 0.003576 (insert task Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (12 by [56:30:2062])) 00000.004 DD| RESOURCE_BROKER: Finish task Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (11 by [56:30:2062]) (release resources {0, 20480}) 00000.004 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.003576 to 0.002384 (remove task Tx{25, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (11 by [56:30:2062])) 00000.004 DD| RESOURCE_BROKER: Finish task Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (12 by [56:30:2062]) (release resources {0, 40960}) 00000.004 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.002384 to 0.000000 (remove task Tx{26, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_ResourceProfile::TTxRequestMemory} at tablet 1 (12 by [56:30:2062])) 00000.004 II| TABLET_EXECUTOR: Leader{1:2:4} suiciding, Waste{2:0, 317b +(0, 0b), 3 trc, -0b acc} 00000.004 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.004 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.004 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.004 II| FAKE_ENV: DS.0 gone, left {180b, 3}, put {200b, 4} 00000.004 II| FAKE_ENV: DS.1 gone, left {352b, 3}, put {352b, 3} 00000.004 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.004 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.004 II| FAKE_ENV: All BS storage groups are stopped 00000.004 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.004 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 45 Left 401}, stopped |70.6%| [TA] {RESULT} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |70.6%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> TestProgram::JsonExists [GOOD] |70.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest |70.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE; 
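For reference, the program traced above is the compiled form of a jsonpath existence check: a Const node carrying the path "$.key", a Calculation node invoking the Json2.SqlExists YQL kernel over the json_string column, and a final Projection of the result. A minimal YQL sketch of a query with that shape follows; the table name my_table is a hypothetical placeholder (the trace fixes only the column, the path, and the kernel), and depending on the column type a cast to Json may be required:

  -- Illustrative sketch only; my_table is not from the log above
  SELECT JSON_EXISTS(json_string, "$.key") FROM my_table;

Against the two sample inputs printed in the trace, {"key":"value"} and [], such a predicate would hold for the first value and not for the second, consistent with the UInt8 (boolean-like) result column reported by the test.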
|70.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TPartBtreeIndexIteration::OneNode_History [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices |70.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest |70.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |70.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TFlatExecutorLeases::BasicsInitialLeaseSleep [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseSleepTimeout |70.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |70.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |70.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |70.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |70.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::AlterReplicationConfig >> TReplicationWithRebootsTests::CreateDropRecreate |70.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TVersions::Wreck0 [GOOD] >> TVersions::Wreck0Reverse >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi |70.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |70.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |70.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TTopicApiDescribes::GetPartitionDescribe >> TTablesWithReboots::ChainedCopyTableAndDropWithReboots [GOOD] >> TTopicApiDescribes::DescribeConsumer |70.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> BuildStatsHistogram::Five_Five_Mixed [GOOD] >> BuildStatsHistogram::Five_Five_Serial >> TChargeBTreeIndex::NoNodes_Groups_History [GOOD] >> TChargeBTreeIndex::OneNode >> TIcNodeCache::GetNodesInfoTest >> TPartBtreeIndexIteration::OneNode_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_Slices |70.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest |70.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] >> TTopicApiDescribes::GetPartitionDescribe [FAIL] >> BuildStatsHistogram::Five_Five_Serial [GOOD] >> BuildStatsHistogram::Five_Five_Crossed >> TTopicApiDescribes::DescribeTopic >> TTopicApiDescribes::DescribeConsumer [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 
72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:57:2097]) rebooted! !Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed! new actor is[2:82:2111] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! 
new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:84:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:86:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:88:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! 
new actor is[7:87:2114] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:105:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:88:2057] recipient: [8:86:2115] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:90:2057] recipient: [8:86:2115] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! new actor is[8:89:2116] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:175:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:57:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:88:2057] recipient: [9:87:2115] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:90:2057] recipient: [9:87:2115] !Reboot 72057594037927937 (actor [9:57:2097]) rebooted! !Reboot 72057594037927937 (actor [9:57:2097]) tablet resolver refreshed! new actor is[9:89:2116] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:175:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:89:2057] recipient: [10:87:2115] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:91:2057] recipient: [10:87:2115] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! 
new actor is[10:90:2116] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:108:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:91:2057] recipient: [11:89:2117] Leader for TabletID 72057594037927937 is [11:92:2118] sender: [11:93:2057] recipient: [11:89:2117] !Reboot 72057594037927937 (actor [11:57:2097]) rebooted! !Reboot 72057594037927937 (actor [11:57:2097]) tablet resolver refreshed! new actor is[11:92:2118] Leader for TabletID 72057594037927937 is [11:92:2118] sender: [11:178:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (a ... 594037927937 (actor [23:57:2097]) tablet resolver refreshed! new actor is[23:86:2114] Leader for TabletID 72057594037927937 is [23:86:2114] sender: [23:172:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:55:2057] recipient: [24:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:55:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:58:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:75:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:82:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:85:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:86:2057] recipient: [24:84:2113] Leader for TabletID 72057594037927937 is [24:87:2114] sender: [24:88:2057] recipient: [24:84:2113] !Reboot 72057594037927937 (actor [24:57:2097]) rebooted! !Reboot 72057594037927937 (actor [24:57:2097]) tablet resolver refreshed! new actor is[24:87:2114] Leader for TabletID 72057594037927937 is [24:87:2114] sender: [24:173:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:55:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:55:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:58:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:75:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:85:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:88:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:89:2057] recipient: [25:87:2116] Leader for TabletID 72057594037927937 is [25:90:2117] sender: [25:91:2057] recipient: [25:87:2116] !Reboot 72057594037927937 (actor [25:57:2097]) rebooted! !Reboot 72057594037927937 (actor [25:57:2097]) tablet resolver refreshed! new actor is[25:90:2117] Leader for TabletID 72057594037927937 is [25:90:2117] sender: [25:176:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:55:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:55:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:58:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:75:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:85:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:88:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:89:2057] recipient: [26:87:2116] Leader for TabletID 72057594037927937 is [26:90:2117] sender: [26:91:2057] recipient: [26:87:2116] !Reboot 72057594037927937 (actor [26:57:2097]) rebooted! !Reboot 72057594037927937 (actor [26:57:2097]) tablet resolver refreshed! new actor is[26:90:2117] Leader for TabletID 72057594037927937 is [26:90:2117] sender: [26:176:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:55:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:55:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:58:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:75:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:86:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:89:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:90:2057] recipient: [27:88:2116] Leader for TabletID 72057594037927937 is [27:91:2117] sender: [27:92:2057] recipient: [27:88:2116] !Reboot 72057594037927937 (actor [27:57:2097]) rebooted! !Reboot 72057594037927937 (actor [27:57:2097]) tablet resolver refreshed! new actor is[27:91:2117] Leader for TabletID 72057594037927937 is [27:91:2117] sender: [27:177:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:55:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:55:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:58:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:75:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:89:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:92:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:93:2057] recipient: [28:91:2119] Leader for TabletID 72057594037927937 is [28:94:2120] sender: [28:95:2057] recipient: [28:91:2119] !Reboot 72057594037927937 (actor [28:57:2097]) rebooted! !Reboot 72057594037927937 (actor [28:57:2097]) tablet resolver refreshed! new actor is[28:94:2120] Leader for TabletID 72057594037927937 is [28:94:2120] sender: [28:180:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:55:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:55:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:58:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:75:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:89:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:91:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:93:2057] recipient: [29:92:2119] Leader for TabletID 72057594037927937 is [29:94:2120] sender: [29:95:2057] recipient: [29:92:2119] !Reboot 72057594037927937 (actor [29:57:2097]) rebooted! !Reboot 72057594037927937 (actor [29:57:2097]) tablet resolver refreshed! new actor is[29:94:2120] Leader for TabletID 72057594037927937 is [29:94:2120] sender: [29:180:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:55:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:55:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:58:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:75:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:90:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:93:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:94:2057] recipient: [30:92:2119] Leader for TabletID 72057594037927937 is [30:95:2120] sender: [30:96:2057] recipient: [30:92:2119] !Reboot 72057594037927937 (actor [30:57:2097]) rebooted! !Reboot 72057594037927937 (actor [30:57:2097]) tablet resolver refreshed! new actor is[30:95:2120] Leader for TabletID 72057594037927937 is [30:95:2120] sender: [30:181:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:55:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:55:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:58:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:75:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:92:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:95:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:96:2057] recipient: [31:94:2121] Leader for TabletID 72057594037927937 is [31:97:2122] sender: [31:98:2057] recipient: [31:94:2121] !Reboot 72057594037927937 (actor [31:57:2097]) rebooted! !Reboot 72057594037927937 (actor [31:57:2097]) tablet resolver refreshed! new actor is[31:97:2122] Leader for TabletID 72057594037927937 is [31:97:2122] sender: [31:183:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:55:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:55:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:58:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:75:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:92:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:95:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:96:2057] recipient: [32:94:2121] Leader for TabletID 72057594037927937 is [32:97:2122] sender: [32:98:2057] recipient: [32:94:2121] !Reboot 72057594037927937 (actor [32:57:2097]) rebooted! !Reboot 72057594037927937 (actor [32:57:2097]) tablet resolver refreshed! new actor is[32:97:2122] Leader for TabletID 72057594037927937 is [32:97:2122] sender: [32:183:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:55:2057] recipient: [33:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:55:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:58:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:75:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:93:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:96:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:97:2057] recipient: [33:95:2121] Leader for TabletID 72057594037927937 is [33:98:2122] sender: [33:99:2057] recipient: [33:95:2121] !Reboot 72057594037927937 (actor [33:57:2097]) rebooted! !Reboot 72057594037927937 (actor [33:57:2097]) tablet resolver refreshed! 
new actor is[33:98:2122] Leader for TabletID 72057594037927937 is [33:98:2122] sender: [33:184:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:55:2057] recipient: [34:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:55:2057] recipient: [34:52:2095] Leader for TabletID 72057594037927937 is [34:57:2097] sender: [34:58:2057] recipient: [34:52:2095] Leader for TabletID 72057594037927937 is [34:57:2097] sender: [34:75:2057] recipient: [34:14:2061] >> TKeyValueTest::TestCopyRangeWorksNewApi [GOOD] >> TKeyValueTest::TestCopyRangeToLongKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateExternalTablet [GOOD] Test command err: 2025-05-29T15:25:57.987681Z node 1 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:25:57.987724Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:25:57.988552Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:25:57.988629Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-05-29T15:25:57.988802Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:25:57.989142Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-05-29T15:25:57.989157Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:25:57.989362Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:29:2075] ControllerId# 72057594037932033 2025-05-29T15:25:57.989367Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:25:57.989397Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:25:57.989424Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:25:58.005831Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:25:58.005897Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:25:58.009341Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:25:58.009362Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:294: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:25:58.009779Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-05-29T15:25:58.009823Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:40:2083] 
targetNodeId# 1 Marker# DSP01 2025-05-29T15:25:58.009856Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-05-29T15:25:58.009888Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-05-29T15:25:58.009920Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-05-29T15:25:58.009954Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:44:2087] targetNodeId# 1 Marker# DSP01 2025-05-29T15:25:58.009989Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:45:2088] targetNodeId# 1 Marker# DSP01 2025-05-29T15:25:58.009994Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:25:58.010011Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037932033] ::Bootstrap [1:29:2075] 2025-05-29T15:25:58.010016Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037932033] lookup [1:29:2075] 2025-05-29T15:25:58.010025Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:25:58.010060Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:29:2075] 2025-05-29T15:25:58.010073Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:25:58.010078Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:25:58.010086Z node 1 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:25:58.010252Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:25:58.010274Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:25:58.010281Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-05-29T15:25:58.020063Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-05-29T15:25:58.020797Z node 1 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-05-29T15:25:58.020865Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-05-29T15:25:58.020885Z node 1 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:25:58.020891Z node 1 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:25:58.020929Z node 1 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:50:2091] 2025-05-29T15:25:58.047025Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:29:2075] 2025-05-29T15:25:58.047485Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037936129] ::Bootstrap [1:33:2063] 2025-05-29T15:25:58.047500Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: 
TClient[72057594037936129] lookup [1:33:2063] 2025-05-29T15:25:58.047554Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:25:58.047596Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:25:58.047624Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-05-29T15:25:58.047632Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-05-29T15:25:58.047639Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-05-29T15:25:58.047646Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-05-29T15:25:58.047709Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:25:58.047722Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037927937] ::Bootstrap [1:50:2091] 2025-05-29T15:25:58.047725Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037927937] lookup [1:50:2091] 2025-05-29T15:25:58.047745Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-05-29T15:25:58.047750Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:25:58.047784Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-05-29T15:25:58.047800Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-05-29T15:25:58.047849Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:25:58.047883Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:25:58.047896Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037927937] queue send [1:50:2091] 2025-05-29T15:25:58.047906Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-05-29T15:25:58.047937Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:610: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 
72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:24343667:0] : 2}, {[1:2199047599219:0] : 8}, {[1:1099535971443:0] : 5}}}} 2025-05-29T15:25:58.047943Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:361: DropEntry tabletId: 72057594037932033 followers: 0 2025-05-29T15:25:58.048218Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:172: TClient[72057594037932033] forward result error, check reconnect [1:29:2075] 2025-05-29T15:25:58.048230Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:562: TClient[72057594037932033] schedule retry [1:29:2075] 2025-05-29T15:25:58.048241Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-05-29T15:25:58.048251Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:361} StateFunc Type# 2146435075 Sender# [1:47:2090] SessionId# [0:0:0] Cookie# 0 2025-05-29T15:25:58.048260Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.037946s 2025-05-29T15:25:58.048307Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:361} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-05-29T15:25:58.049049Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:25:58.058997Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:29:2075] 2025-05-29T15:25:58.059178Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-05-29T15:25:58.059190Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-05-29T15:25:58.059218Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 720575 ... 
98 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037932033] Accept Connect Originator# [98:304:2286] 2025-05-29T15:27:52.585454Z node 98 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72057594037932033] connected with status OK role: Leader [98:304:2286] 2025-05-29T15:27:52.585457Z node 98 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72057594037932033] send queued [98:304:2286] 2025-05-29T15:27:52.585459Z node 98 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:629: TClient[72057594037932033] push event to server [98:304:2286] 2025-05-29T15:27:52.585463Z node 98 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [98:274:2265] EventType# 268637702 2025-05-29T15:27:52.585477Z node 98 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2025-05-29T15:27:52.585481Z node 98 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:27:52.585501Z node 98 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{20, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-05-29T15:27:52.585505Z node 98 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:27:52.585518Z node 98 :HIVE DEBUG: hive_impl.cpp:72: HIVE#72057594037927937 Connected to tablet 72057594037932033 from tablet 72057594037927937 2025-05-29T15:27:52.585542Z node 98 :HIVE DEBUG: hive_impl.cpp:433: HIVE#72057594037927937 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2147483648 StoragePoolName: "def1" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2147483649 StoragePoolName: "def2" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2147483650 StoragePoolName: "def3" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2025-05-29T15:27:52.585555Z node 98 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-05-29T15:27:52.585558Z node 98 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:27:52.585563Z node 98 :HIVE DEBUG: tx__update_tablet_groups.cpp:63: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{21603628414752}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-05-29T15:27:52.585573Z node 98 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{21603628414752}: tablet 72075186224037888 channel 0 assigned to group 2147483648 2025-05-29T15:27:52.585591Z node 98 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{21603628414752}: tablet 72075186224037888 channel 1 assigned to group 2147483649 2025-05-29T15:27:52.585599Z node 98 :HIVE DEBUG: 
tx__update_tablet_groups.cpp:151: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{21603628414752}: tablet 72075186224037888 channel 2 assigned to group 2147483650 2025-05-29T15:27:52.585608Z node 98 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{21603628414752}(72075186224037888)::Execute - TryToBoot was not successfull 2025-05-29T15:27:52.585614Z node 98 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2025-05-29T15:27:52.585617Z node 98 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:27:52.595950Z node 98 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [aeed6b7f2709b4c0] bootstrap ActorId# [98:307:2289] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:698:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-05-29T15:27:52.596008Z node 98 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [aeed6b7f2709b4c0] Id# [72057594037927937:2:4:0:0:698:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:27:52.596018Z node 98 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [aeed6b7f2709b4c0] restore Id# [72057594037927937:2:4:0:0:698:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:27:52.596029Z node 98 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [aeed6b7f2709b4c0] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG33 2025-05-29T15:27:52.596036Z node 98 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [aeed6b7f2709b4c0] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG32 2025-05-29T15:27:52.596072Z node 98 :BS_PROXY DEBUG: group_sessions.h:165: Send to queueActorId# [98:35:2079] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:698:1] FDS# 698 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-05-29T15:27:52.596486Z node 98 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [aeed6b7f2709b4c0] received {EvVPutResult Status# OK ID# [72057594037927937:2:4:0:0:698:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 18 } Cost# 85496 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 19 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-05-29T15:27:52.596513Z node 98 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [aeed6b7f2709b4c0] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-05-29T15:27:52.596522Z node 98 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [aeed6b7f2709b4c0] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:27:52.596544Z node 98 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.167 sample PartId# [72057594037927937:2:4:0:0:698:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 98 } TEvVPutResult{ TimestampMs# 0.588 VDiskId# [0:1:0:0:0] NodeId# 98 Status# OK } ] 
} 2025-05-29T15:27:52.596567Z node 98 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-05-29T15:27:52.596594Z node 98 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2025-05-29T15:27:52.596620Z node 98 :HIVE DEBUG: tx__create_tablet.cpp:503: HIVE#72057594037927937 THive::TTxCreateTablet::Complete (72057594037927937,0) TabletId: 72075186224037888 SideEffects: {Notifications: 0x10040201 [98:263:2259] {EvCreateTabletReply Status: OK Owner: 72057594037927937 OwnerIdx: 0 TabletID: 72075186224037888 Origin: 72057594037927937}} 2025-05-29T15:27:52.596649Z node 98 :HIVE DEBUG: tx__update_tablet_groups.cpp:332: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{21603628414752}(72075186224037888)::Complete SideEffects: {Notifications: 0x10040207 [98:263:2259] {EvTabletCreationResult Status: OK TabletID: 72075186224037888} Callbacks: 1 Actions: NKikimr::TTabletKillRequest} 2025-05-29T15:27:52.596698Z node 98 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:27:52.596718Z node 98 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-05-29T15:27:52.596726Z node 98 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-05-29T15:27:52.596730Z node 98 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-05-29T15:27:52.596737Z node 98 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-05-29T15:27:52.596747Z node 98 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-05-29T15:27:52.596755Z node 98 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-05-29T15:27:52.596812Z node 98 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72075186224037888] ::Bootstrap [98:311:2292] 2025-05-29T15:27:52.596820Z node 98 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72075186224037888] lookup [98:311:2292] 2025-05-29T15:27:52.596841Z node 98 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:27:52.596857Z node 98 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:27:52.596867Z node 98 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-05-29T15:27:52.596872Z node 98 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-05-29T15:27:52.596877Z node 98 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-05-29T15:27:52.596885Z node 98 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: 
ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-05-29T15:27:52.596891Z node 98 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-05-29T15:27:52.596895Z node 98 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-05-29T15:27:52.596908Z node 98 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:610: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[98:1099535971443:0] : 5}, {[98:2199047599219:0] : 8}, {[98:24343667:0] : 2}}}} 2025-05-29T15:27:52.596915Z node 98 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:361: DropEntry tabletId: 72075186224037888 followers: 0 2025-05-29T15:27:52.596925Z node 98 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:172: TClient[72075186224037888] forward result error, check reconnect [98:311:2292] 2025-05-29T15:27:52.596929Z node 98 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:498: TClient[72075186224037888] connect failed [98:311:2292] >> BuildStatsHistogram::Five_Five_Crossed [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Single_Small_1_Level [GOOD] >> BuildStatsHistogram::Single_Small_0_Levels >> TChargeBTreeIndex::OneNode [GOOD] >> TChargeBTreeIndex::OneNode_Groups >> TTopicApiDescribes::GetLocalDescribe >> TFlatExecutorLeases::BasicsInitialLeaseSleepTimeout [GOOD] >> TFlatTableDatetime::TestDate [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime >> BuildStatsHistogram::Single_Small_0_Levels [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_0_Levels [GOOD] >> BuildStatsHistogram::Mixed_Groups_History >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestGetStatusWorks >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime |70.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest [FAIL] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True >> TPartBtreeIndexIteration::OneNode_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices >> 
TVersions::Wreck0Reverse [GOOD] |70.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |70.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |70.8%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False >> TTopicApiDescribes::DescribeTopic [FAIL] >> TReplicationWithRebootsTests::CreateInParallelWithoutInitialController |70.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False >> BuildStatsHistogram::Mixed_Groups_History [GOOD] >> BuildStatsHistogram::Serial_Groups_History >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False >> BuildStatsHistogram::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Benchmark ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TVersions::Wreck0Reverse [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:06.198620Z 00000.004 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.004 II| FAKE_ENV: Starting storage for BS group 0 00000.004 II| FAKE_ENV: Starting storage for BS group 1 00000.004 II| FAKE_ENV: Starting storage for BS group 2 00000.004 II| FAKE_ENV: Starting storage for BS group 3 00000.004 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.004 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.005 NN| TABLET_SAUSAGECACHE: Update config MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.005 NN| TABLET_SAUSAGECACHE: Switch replacement policy from S3FIFO to ThreeLeveledLRU 00000.005 NN| TABLET_SAUSAGECACHE: Switch replacement policy done from S3FIFO to ThreeLeveledLRU 00000.005 II| TABLET_SAUSAGECACHE: Limit memory consumer with 16777216TiB 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{2, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{3, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{4, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{5, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{6, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} 
release 4194304b of static, Memory{0 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{7, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{8, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{9, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{10, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:12} commited cookie 1 for step 11 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{11, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{12, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:14} commited cookie 1 for step 13 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{13, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{14, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{ ... ageCollection: [1:0:256:0:0:0:1] Pages: [ 1 3 ] Cookie: 4 Actual: PageCollection: [1:0:256:0:0:0:1] Pages: [ 1 3 ] Cookie: 4 2025-05-29T15:28:08.949366Z node 35 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1265: Bootstrap with config MemoryLimit: 456 AsyncQueueInFlyLimit: 19 ... 
waiting for NKikimr::NSharedCache::TEvRequest 2025-05-29T15:28:08.949512Z node 35 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:385: Add page collection [1:0:256:0:0:0:1] 2025-05-29T15:28:08.949521Z node 35 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:400: Add page collection [1:0:256:0:0:0:1] owner [35:5:2052] 2025-05-29T15:28:08.949533Z node 35 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:597: Request page collection [1:0:256:0:0:0:1] owner [35:5:2052] cookie 1 class AsyncLoad from cache [ ] already requested [ ] to request [ 1 2 3 4 5 ] 2025-05-29T15:28:08.949538Z node 35 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:681: Request page collection [1:0:256:0:0:0:1] async queue pages [ 1 2 ] ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest 2025-05-29T15:28:08.949558Z node 35 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:597: Request page collection [1:0:256:0:0:0:1] owner [35:5:2052] cookie 2 class AsyncLoad from cache [ ] already requested [ ] to request [ 6 7 ] ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 2025-05-29T15:28:08.949575Z node 35 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:385: Add page collection [1:0:256:0:0:0:2] 2025-05-29T15:28:08.949578Z node 35 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:400: Add page collection [1:0:256:0:0:0:2] owner [35:5:2052] 2025-05-29T15:28:08.949583Z node 35 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:597: Request page collection [1:0:256:0:0:0:2] owner [35:5:2052] cookie 3 class AsyncLoad from cache [ ] already requested [ ] to request [ 10 11 12 ] ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest 2025-05-29T15:28:08.949593Z node 35 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:400: Add page collection [1:0:256:0:0:0:1] owner [35:6:2053] 2025-05-29T15:28:08.949598Z node 35 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:597: Request page collection [1:0:256:0:0:0:1] owner [35:6:2053] cookie 4 class AsyncLoad from cache [ ] already requested [ 1 5 ] to request [ 9 10 ] ... waiting for NKikimr::NSharedCache::TEvRequest (done) Checking fetches#4 Expected: PageCollection: [1:0:256:0:0:0:1] Pages: [ 1 2 ] Cookie: 20 Actual: PageCollection: [1:0:256:0:0:0:1] Pages: [ 1 2 ] Cookie: 20 ... 
waiting for NKikimr::NSharedCache::TEvUnregister 2025-05-29T15:28:08.949629Z node 35 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:803: Unregister owner [35:5:2052] 2025-05-29T15:28:08.949634Z node 35 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1072: Send page collection error [1:0:256:0:0:0:2] owner [35:5:2052] class AsyncLoad error RACE cookie 3 2025-05-29T15:28:08.949638Z node 35 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:816: Remove page collection [1:0:256:0:0:0:2] owner [35:5:2052] 2025-05-29T15:28:08.949641Z node 35 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1072: Send page collection error [1:0:256:0:0:0:1] owner [35:5:2052] class AsyncLoad error RACE cookie 1 2025-05-29T15:28:08.949644Z node 35 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1072: Send page collection error [1:0:256:0:0:0:1] owner [35:5:2052] class AsyncLoad error RACE cookie 2 2025-05-29T15:28:08.949646Z node 35 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:816: Remove page collection [1:0:256:0:0:0:1] owner [35:5:2052] 2025-05-29T15:28:08.949649Z node 35 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:823: Remove owner [35:5:2052] ... waiting for NKikimr::NSharedCache::TEvUnregister (done) ... waiting for results #4 ... waiting for results #4 (done) Checking results#4 Expected: PageCollection: [1:0:256:0:0:0:1] Pages: [ ] Cookie: 1 PageCollection: [1:0:256:0:0:0:1] Pages: [ ] Cookie: 2 PageCollection: [1:0:256:0:0:0:2] Pages: [ ] Cookie: 3 Actual: PageCollection: [1:0:256:0:0:0:1] Pages: [ ] Cookie: 1 PageCollection: [1:0:256:0:0:0:1] Pages: [ ] Cookie: 2 PageCollection: [1:0:256:0:0:0:2] Pages: [ ] Cookie: 3 ... waiting for fetches #4 2025-05-29T15:28:08.949686Z node 35 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:867: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 1 2 ] 2025-05-29T15:28:08.949691Z node 35 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:730: Drop pending page collection request [1:0:256:0:0:0:1] class AsyncLoad cookie 1 2025-05-29T15:28:08.949695Z node 35 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:681: Request page collection [1:0:256:0:0:0:1] async queue pages [ 5 9 ] ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #4 (done) Checking fetches#4 Expected: PageCollection: [1:0:256:0:0:0:1] Pages: [ 5 9 ] Cookie: 20 Actual: PageCollection: [1:0:256:0:0:0:1] Pages: [ 5 9 ] Cookie: 20 ... waiting for fetches #4 2025-05-29T15:28:08.949720Z node 35 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:867: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 5 9 ] 2025-05-29T15:28:08.949725Z node 35 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:730: Drop pending page collection request [1:0:256:0:0:0:1] class AsyncLoad cookie 2 2025-05-29T15:28:08.949729Z node 35 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:681: Request page collection [1:0:256:0:0:0:1] async queue pages [ 10 ] 2025-05-29T15:28:08.949736Z node 35 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:730: Drop pending page collection request [1:0:256:0:0:0:2] class AsyncLoad cookie 3 2025-05-29T15:28:08.949752Z node 35 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1012: Drop page collection [1:0:256:0:0:0:1] pages [ 2 ] owner [35:6:2053] ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... 
waiting for fetches #4 (done)
Checking fetches#4
Expected:
PageCollection: [1:0:256:0:0:0:1] Pages: [ 10 ] Cookie: 10
Actual:
PageCollection: [1:0:256:0:0:0:1] Pages: [ 10 ] Cookie: 10
... waiting for results #4
2025-05-29T15:28:08.949783Z node 35 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:867: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 10 ]
2025-05-29T15:28:08.949787Z node 35 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:1050: Send page collection result [1:0:256:0:0:0:1] owner [35:6:2053] class AsyncLoad pages [ 1 5 9 10 ] cookie 4
... waiting for results #4 (done)
Checking results#4
Expected:
PageCollection: [1:0:256:0:0:0:1] Pages: [ 1 5 9 10 ] Cookie: 4
Actual:
PageCollection: [1:0:256:0:0:0:1] Pages: [ 1 5 9 10 ] Cookie: 4
2025-05-29T15:28:08.957484Z node 36 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1265: Bootstrap with config MemoryLimit: 456 AsyncQueueInFlyLimit: 19
... waiting for NKikimr::NSharedCache::TEvRequest
2025-05-29T15:28:08.957544Z node 36 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:385: Add page collection [1:0:256:0:0:0:1]
2025-05-29T15:28:08.957551Z node 36 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:400: Add page collection [1:0:256:0:0:0:1] owner [36:5:2052]
2025-05-29T15:28:08.957565Z node 36 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:597: Request page collection [1:0:256:0:0:0:1] owner [36:5:2052] cookie 1 class AsyncLoad from cache [ ] already requested [ ] to request [ 1 ]
2025-05-29T15:28:08.957571Z node 36 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:681: Request page collection [1:0:256:0:0:0:1] async queue pages [ 1 ]
... waiting for NKikimr::NSharedCache::TEvRequest (done)
... waiting for NKikimr::NSharedCache::TEvRequest
... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0
2025-05-29T15:28:08.957602Z node 36 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:385: Add page collection [1:0:256:0:0:0:2]
2025-05-29T15:28:08.957605Z node 36 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:400: Add page collection [1:0:256:0:0:0:2] owner [36:6:2053]
2025-05-29T15:28:08.957611Z node 36 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:597: Request page collection [1:0:256:0:0:0:2] owner [36:6:2053] cookie 2 class AsyncLoad from cache [ ] already requested [ ] to request [ 10 11 ]
2025-05-29T15:28:08.957614Z node 36 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:681: Request page collection [1:0:256:0:0:0:2] async queue pages [ 10 ]
... waiting for NKikimr::NSharedCache::TEvRequest (done)
... waiting for NKikimr::NSharedCache::TEvRequest
... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0
2025-05-29T15:28:08.957631Z node 36 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:597: Request page collection [1:0:256:0:0:0:2] owner [36:6:2053] cookie 3 class AsyncLoad from cache [ ] already requested [ ] to request [ 12 ]
... waiting for NKikimr::NSharedCache::TEvRequest (done)
Checking fetches#3
Expected:
PageCollection: [1:0:256:0:0:0:1] Pages: [ 1 ] Cookie: 10
PageCollection: [1:0:256:0:0:0:2] Pages: [ 10 ] Cookie: 10
Actual:
PageCollection: [1:0:256:0:0:0:1] Pages: [ 1 ] Cookie: 10
PageCollection: [1:0:256:0:0:0:2] Pages: [ 10 ] Cookie: 10
... waiting for NKikimr::NSharedCache::TEvUnregister
2025-05-29T15:28:08.957664Z node 36 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:803: Unregister owner [36:6:2053]
2025-05-29T15:28:08.957670Z node 36 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1072: Send page collection error [1:0:256:0:0:0:2] owner [36:6:2053] class AsyncLoad error RACE cookie 2
2025-05-29T15:28:08.957673Z node 36 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1072: Send page collection error [1:0:256:0:0:0:2] owner [36:6:2053] class AsyncLoad error RACE cookie 3
2025-05-29T15:28:08.957676Z node 36 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:816: Remove page collection [1:0:256:0:0:0:2] owner [36:6:2053]
2025-05-29T15:28:08.957679Z node 36 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:823: Remove owner [36:6:2053]
... waiting for NKikimr::NSharedCache::TEvUnregister (done)
... waiting for results #3
... waiting for results #3 (done)
Checking results#3
Expected:
PageCollection: [1:0:256:0:0:0:2] Pages: [ ] Cookie: 2
PageCollection: [1:0:256:0:0:0:2] Pages: [ ] Cookie: 3
Actual:
PageCollection: [1:0:256:0:0:0:2] Pages: [ ] Cookie: 2
PageCollection: [1:0:256:0:0:0:2] Pages: [ ] Cookie: 3
... waiting for results #3
2025-05-29T15:28:08.957710Z node 36 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:867: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 1 ]
2025-05-29T15:28:08.957716Z node 36 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:1050: Send page collection result [1:0:256:0:0:0:1] owner [36:5:2052] class AsyncLoad pages [ 1 ] cookie 1
2025-05-29T15:28:08.957721Z node 36 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:730: Drop pending page collection request [1:0:256:0:0:0:2] class AsyncLoad cookie 2
2025-05-29T15:28:08.957723Z node 36 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:730: Drop pending page collection request [1:0:256:0:0:0:2] class AsyncLoad cookie 3
... waiting for results #3 (done)
Checking results#3
Expected:
PageCollection: [1:0:256:0:0:0:1] Pages: [ 1 ] Cookie: 1
Actual:
PageCollection: [1:0:256:0:0:0:1] Pages: [ 1 ] Cookie: 1
Checking fetches#3
Expected:
Actual:
2025-05-29T15:28:08.967897Z node 36 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:867: Receive page collection [1:0:256:0:0:0:2] status OK pages [ 10 ]
Checking results#3
Expected:
Actual:
Checking fetches#3
Expected:
Actual:
|70.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False [GOOD]
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff
>> BuildStatsHistogram::Benchmark [GOOD]
>> BuildStatsHistogram::Many_Mixed
>> TDSProxyGetTest::TestMirror32GetIntervalsWipedAllOk [GOOD]
>> TDSProxyPatchTest::NaiveOk_ErasureMirror3dc
>> TTopicApiDescribes::GetLocalDescribe [FAIL]
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff [GOOD]
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations
>> TDSProxyPatchTest::NaiveOk_ErasureMirror3dc [GOOD]
>> TDSProxyPutTest::TestBlock42PutStatusOkWith_0_0_VdiskErrors
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations [GOOD]
>> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache [GOOD]
>> TFlatTableExecutor_Cold::ColdBorrowScan [GOOD]
>> TFlatTableExecutor_ColumnGroups::TestManyRows
>> TConsistentOpsWithReboots::CreateIndexedTableAndForceDropSimultaneously
>> TPartBtreeIndexIteration::OneNode_History_Slices [GOOD]
>> TPartBtreeIndexIteration::OneNode_Groups_History_Slices
>> TDSProxyPutTest::TestBlock42PutStatusOkWith_0_0_VdiskErrors [GOOD]
>> TDsProxyQuorumTracker::CheckFailModelErasure4Plus2Block
>> TFlatTableExecutor_ColumnGroups::TestManyRows [GOOD]
>> TFlatTableExecutor_CompactionScan::TestCompactionScan [GOOD]
>> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows [GOOD]
>> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect
>> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect [GOOD]
>> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD]
>> IntermediateDirsReboots::CreateTableWithIntermediateDirsForceDrop
>> TConsistentOpsWithReboots::CreateNotNullColumnTableWithReboots
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CopyIndexedTableWithReboots [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:26:40.859881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:26:40.859906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:26:40.859912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:26:40.859917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:26:40.859923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:26:40.859927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:26:40.859936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:26:40.859950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:26:40.860055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:26:40.860136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:26:40.874341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true }
2025-05-29T15:26:40.874362Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:26:40.874456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062]
2025-05-29T15:26:40.877172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:26:40.877202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:26:40.877239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:26:40.880012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:26:40.880081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:26:40.880195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:26:40.880330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:26:40.880841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:26:40.880872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:26:40.881099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:26:40.881107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:26:40.881140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:26:40.881147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:26:40.881152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:26:40.881169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212]
IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212]
Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212]
2025-05-29T15:26:40.882292Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062]
2025-05-29T15:26:40.904976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:26:40.905044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:40.905107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:26:40.905165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:26:40.905177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:40.905803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:26:40.905824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:26:40.905862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:40.905870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:26:40.905874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:26:40.905878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:26:40.906422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:40.906447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:26:40.906456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:26:40.906954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:40.906969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:40.906975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:26:40.906983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:26:40.907744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:26:40.908256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:26:40.908304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062]
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:26:40.908505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:26:40.908534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:26:40.908542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:26:40.908602Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "NewTable2" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 13 PathsLimit: 10000 ShardsInside: 12 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:28:18.204648Z node 251 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:28:18.204676Z node 251 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "MyRoot/Table" took 28us result status StatusSuccess
2025-05-29T15:28:18.204780Z node 251 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "String" TypeId: 4097 Id: 5 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableIndexes { Name: "VectorIndexByValue0CoveringValue1" LocalPathId: 4 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "value1" DataSize: 0 IndexImplTableDescriptions { } IndexImplTableDescriptions { } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_INT8 vector_dimension: 2 } clusters: 3 levels: 5 } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 13 PathsLimit: 10000 ShardsInside: 12 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:28:18.204823Z node 251 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "MyRoot/NewTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:28:18.204838Z node 251 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "MyRoot/NewTable1" took 15us result status StatusSuccess
2025-05-29T15:28:18.204894Z node 251 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "MyRoot/NewTable1" PathDescription { Self { Name: "NewTable1" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "NewTable1" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "String" TypeId: 4097 Id: 5 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableIndexes { Name: "VectorIndexByValue0CoveringValue1" LocalPathId: 8 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "value1" DataSize: 0 IndexImplTableDescriptions { } IndexImplTableDescriptions { } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_INT8 vector_dimension: 2 } clusters: 3 levels: 5 } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 13 PathsLimit: 10000 ShardsInside: 12 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:28:18.204941Z node 251 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "MyRoot/NewTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:28:18.204959Z node 251 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "MyRoot/NewTable2" took 19us result status StatusSuccess
2025-05-29T15:28:18.205016Z node 251 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "MyRoot/NewTable2" PathDescription { Self { Name: "NewTable2" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "NewTable2" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "String" TypeId: 4097 Id: 5 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3
TableIndexes { Name: "VectorIndexByValue0CoveringValue1" LocalPathId: 12 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "value1" DataSize: 0 IndexImplTableDescriptions { } IndexImplTableDescriptions { } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_INT8 vector_dimension: 2 } clusters: 3 levels: 5 } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 13 PathsLimit: 10000 ShardsInside: 12 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 11 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::ChainedCopyTableAndDropWithReboots [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:26:50.805304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:26:50.805329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:26:50.805335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:26:50.805341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:26:50.805347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:26:50.805351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:26:50.805359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:26:50.805374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:26:50.805484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:26:50.805556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:26:50.819789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true }
2025-05-29T15:26:50.819808Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:26:50.819914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062]
2025-05-29T15:26:50.822708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:26:50.822758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:26:50.822799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:26:50.825898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:26:50.825973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:26:50.826089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:26:50.826253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:26:50.826939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:26:50.826979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:26:50.827216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:26:50.827226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:26:50.827257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:26:50.827264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:26:50.827270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:26:50.827289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212]
IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212]
Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212]
2025-05-29T15:26:50.828546Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062]
2025-05-29T15:26:50.848255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:26:50.848310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:50.848359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:26:50.848404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:26:50.848414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:50.849044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:26:50.849069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:26:50.849117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:50.849127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:26:50.849132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:26:50.849138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:26:50.849594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:50.849604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:26:50.849609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:26:50.849984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:50.849995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:26:50.850001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:26:50.850008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:26:50.850685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:26:50.851122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:26:50.851157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062]
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:26:50.851343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:26:50.851366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:26:50.851372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:26:50.851423Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... emeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409548, state: Offline, at schemeshard: 72057594046678944
2025-05-29T15:28:21.314495Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 333 RawX2: 712964573455 } TabletId: 72075186233409546 State: 4
2025-05-29T15:28:21.314502Z node 166 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944
2025-05-29T15:28:21.315254Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 689 RawX2: 712964573770 } TabletId: 72075186233409549 State: 4
2025-05-29T15:28:21.315269Z node 166 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409549, state: Offline, at schemeshard: 72057594046678944
2025-05-29T15:28:21.315459Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944
2025-05-29T15:28:21.315590Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944
2025-05-29T15:28:21.315837Z node 166 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548
2025-05-29T15:28:21.319933Z node 166 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546
2025-05-29T15:28:21.319982Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944
2025-05-29T15:28:21.320038Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1
2025-05-29T15:28:21.320198Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 438 RawX2: 712964573543 } TabletId: 72075186233409547 State: 4
2025-05-29T15:28:21.320206Z node 166 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944
Forgetting tablet 72075186233409548
Forgetting tablet 72075186233409546
2025-05-29T15:28:21.320632Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944
2025-05-29T15:28:21.320681Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-05-29T15:28:21.320804Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944
2025-05-29T15:28:21.320809Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944
2025-05-29T15:28:21.320819Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5
2025-05-29T15:28:21.320823Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-05-29T15:28:21.320827Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4
2025-05-29T15:28:21.320906Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944
2025-05-29T15:28:21.320929Z node 166 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549
2025-05-29T15:28:21.320965Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944
2025-05-29T15:28:21.320985Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1
Forgetting tablet 72075186233409549
2025-05-29T15:28:21.322142Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3
2025-05-29T15:28:21.322153Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548
2025-05-29T15:28:21.322195Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944
2025-05-29T15:28:21.322232Z node 166 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547
2025-05-29T15:28:21.322259Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944
2025-05-29T15:28:21.322288Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1
Forgetting tablet 72075186233409547
2025-05-29T15:28:21.322331Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1
2025-05-29T15:28:21.322335Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546
2025-05-29T15:28:21.322667Z node 166 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 2 candidates, at schemeshard: 72057594046678944
2025-05-29T15:28:21.322703Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944
2025-05-29T15:28:21.322706Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944
2025-05-29T15:28:21.322715Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-05-29T15:28:21.322719Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944
2025-05-29T15:28:21.322722Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-05-29T15:28:21.322795Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4
2025-05-29T15:28:21.322802Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549
2025-05-29T15:28:21.323149Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2
2025-05-29T15:28:21.323157Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547
2025-05-29T15:28:21.323168Z node 166 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 1009, wait until txId: 1009
TestWaitNotification wait txId: 1009
2025-05-29T15:28:21.323233Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1009: send EvNotifyTxCompletion
2025-05-29T15:28:21.323238Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1009
2025-05-29T15:28:21.323290Z node 166 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1009, at schemeshard: 72057594046678944
2025-05-29T15:28:21.323302Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1009: got EvNotifyTxCompletionResult
2025-05-29T15:28:21.323308Z node 166 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1009: satisfy waiter [166:1066:3002]
TestWaitNotification: OK eventTxId 1009
wait until 72075186233409546 is deleted
wait until 72075186233409547 is deleted
wait until 72075186233409548 is deleted
wait until 72075186233409549 is deleted
wait until 72075186233409550 is deleted
wait until 72075186233409551 is deleted
wait until 72075186233409552 is deleted
wait until 72075186233409553 is deleted
wait until 72075186233409554 is deleted
2025-05-29T15:28:21.323361Z node 166 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546
2025-05-29T15:28:21.323368Z node 166 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897]
TEvSubscribeToTabletDeletion, 72075186233409547
2025-05-29T15:28:21.323373Z node 166 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548
2025-05-29T15:28:21.323379Z node 166 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549
2025-05-29T15:28:21.323384Z node 166 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550
2025-05-29T15:28:21.323391Z node 166 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551
2025-05-29T15:28:21.323394Z node 166 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552
2025-05-29T15:28:21.323399Z node 166 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409553
2025-05-29T15:28:21.323406Z node 166 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409554
Deleted tabletId 72075186233409546
Deleted tabletId 72075186233409547
Deleted tabletId 72075186233409548
Deleted tabletId 72075186233409549
Deleted tabletId 72075186233409550
Deleted tabletId 72075186233409551
Deleted tabletId 72075186233409552
Deleted tabletId 72075186233409553
Deleted tabletId 72075186233409554
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD]
Test command err:
00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:07.221409Z
00000.004 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.004 II| FAKE_ENV: TNanny initiates TDummy tablet 72057594037927937 birth
00000.004 II| FAKE_ENV: Starting storage for BS group 0
00000.004 II| FAKE_ENV: Starting storage for BS group 1
00000.004 II| FAKE_ENV: Starting storage for BS group 2
00000.004 II| FAKE_ENV: Starting storage for BS group 3
00000.005 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor
00000.005 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc}
00000.019 II| TABLET_EXECUTOR: LSnap{1:2, on 2:301, 5699b, wait} done, Waste{2:0, 621702b +(0, 0b), 300 trc}
00000.021 II| TABLET_EXECUTOR: Leader{1:2:334} starting compaction
00000.021 II| TABLET_EXECUTOR: Leader{1:2:335} starting Scan{1 on 3, Compact{1.2.334, eph 1}}
00000.021 II| TABLET_EXECUTOR: Leader{1:2:335} started compaction 1
00000.021 II| TABLET_OPS_HOST: Scan{1 on 3, Compact{1.2.334, eph 1}} begin on TSubset{head 2, 1m 0p 0c}
00000.021 II| TABLET_OPS_HOST: Scan{1 on 3, Compact{1.2.334, eph 1}} end=Done, 111r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 14 of 15 ~1p
00000.021 II| OPS_COMPACT: Compact{1.2.334, eph 1} end=Done, 6 blobs 87r (max 111), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 14 +4, (140434 0 166392)b }, ecr=1.000
00000.022 II| TABLET_EXECUTOR: Leader{1:2:335} Compact 1 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 334, product {1 parts epoch 2} done
00000.023 II| TABLET_EXECUTOR: Leader{1:2:359} starting compaction
00000.023 II| TABLET_EXECUTOR: Leader{1:2:360} starting Scan{3 on 2, Compact{1.2.359, eph 1}}
00000.023 II| TABLET_EXECUTOR: Leader{1:2:360} started compaction 3
00000.023 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.359, eph 1}} begin on TSubset{head 2, 1m 0p 0c}
00000.024 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.359, eph 1}} end=Done, 115r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 11 of 16 ~1p
00000.024 II| OPS_COMPACT: Compact{1.2.359, eph 1} end=Done, 12 blobs 85r (max 115), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 11 +8, (189118 20413 169608)b }, ecr=1.000
00000.024 II| TABLET_EXECUTOR: Leader{1:2:361} Compact 3 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 359, product {1 parts epoch 2} done
00000.038 II| TABLET_EXECUTOR: LSnap{1:2, on 2:601, 8573b, wait} done, Waste{2:0, 1592430b +(149, 68055b), 300 trc}
00000.044 II| TABLET_EXECUTOR: Leader{1:2:696} starting compaction
00000.044 II| TABLET_EXECUTOR: Leader{1:2:697} starting Scan{5 on 3, Compact{1.2.696, eph 2}}
00000.044 II| TABLET_EXECUTOR: Leader{1:2:697} started compaction 5
00000.044 II| TABLET_OPS_HOST: Scan{5 on 3, Compact{1.2.696, eph 2}} begin on TSubset{head 3, 1m 0p 0c}
00000.045 II| TABLET_OPS_HOST: Scan{5 on 3, Compact{1.2.696, eph 2}} end=Done, 106r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 15 of 18 ~1p
00000.045 II| OPS_COMPACT: Compact{1.2.696, eph 2} end=Done, 14 blobs 106r (max 106), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 15 +10, (154429 7005 223046)b }, ecr=1.000
00000.045 II| TABLET_EXECUTOR: Leader{1:2:698} Compact 5 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 696, product {1 parts epoch 3} done
00000.045 II| TABLET_EXECUTOR: Leader{1:2:700} starting compaction
00000.045 II| TABLET_EXECUTOR: Leader{1:2:701} starting Scan{7 on 3, Compact{1.2.700, eph 2}}
00000.045 II| TABLET_EXECUTOR: Leader{1:2:701} started compaction 7
00000.045 II| TABLET_OPS_HOST: Scan{7 on 3, Compact{1.2.700, eph 2}} begin on TSubset{head 0, 0m 2p 0c}
00000.046 II| TABLET_OPS_HOST: Scan{7 on 3, Compact{1.2.700, eph 2}} end=Done, 165r seen, TFwd{fetch=287KiB,saved=287KiB,usage=287KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=6}, trace 39 of 43 ~3p
00000.046 II| OPS_COMPACT: Compact{1.2.700, eph 2} end=Done, 4 blobs 142r (max 193), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 39 +0, (261300 7005 356930)b }, ecr=1.000
00000.046 II| TABLET_EXECUTOR: Leader{1:2:704} Compact 7 on TGenCompactionParams{3: gen 1 epoch 0, 2 parts} step 700, product {1 parts epoch 0} done
00000.049 II| TABLET_EXECUTOR: Leader{1:2:716} starting compaction
00000.049 II| TABLET_EXECUTOR: Leader{1:2:717} starting Scan{9 on 2, Compact{1.2.716, eph 2}}
00000.049 II| TABLET_EXECUTOR: Leader{1:2:717} started compaction 9
00000.049 II| TABLET_OPS_HOST: Scan{9 on 2, Compact{1.2.716, eph 2}} begin on TSubset{head 3, 1m 0p 0c}
00000.050 II| TABLET_OPS_HOST: Scan{9 on 2, Compact{1.2.716, eph 2}} end=Done, 109r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 21 of 21 ~1p
00000.050 II| OPS_COMPACT: Compact{1.2.716, eph 2} end=Done, 10 blobs 109r (max 109), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 21 +6, (142118 20104 244685)b }, ecr=1.000
00000.050 II| TABLET_EXECUTOR: Leader{1:2:718} Compact 9 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 716, product {1 parts epoch 3} done
00000.050 II| TABLET_EXECUTOR: Leader{1:2:720} starting compaction
00000.050 II| TABLET_EXECUTOR: Leader{1:2:721} starting Scan{11 on 2, Compact{1.2.720, eph 2}}
00000.050 II| TABLET_EXECUTOR: Leader{1:2:721} started compaction 11
00000.050 II| TABLET_OPS_HOST: Scan{11 on 2, Compact{1.2.720, eph 2}} begin on TSubset{head 0, 0m 2p 0c}
00000.051 II| TABLET_OPS_HOST: Scan{11 on 2, Compact{1.2.720, eph 2}} end=Done, 162r seen, TFwd{fetch=322KiB,saved=322KiB,usage=322KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=8}, trace 42 of 46 ~3p
00000.051 II| OPS_COMPACT: Compact{1.2.720, eph 2} end=Done, 4 blobs 133r (max 194), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 42 +0, (257587 40517 377861)b }, ecr=1.000
00000.051 II| TABLET_EXECUTOR: Leader{1:2:722} Compact 11 on TGenCompactionParams{2: gen 1 epoch 0, 2 parts} step 720, product {1 parts epoch 0} done
00000.067 II| TABLET_EXECUTOR: LSnap{1:2, on 2:901, 10230b, wait} done, Waste{2:0, 2448182b +(167, 746035b), 300 trc}
00000.075 II| TABLET_EXECUTOR: Leader{1:2:1010} starting compaction
00000.075 II| TABLET_EXECUTOR: Leader{1:2:1011} starting Scan{13 on 3, Compact{1.2.1010, eph 3}}
00000.075 II| TABLET_EXECUTOR: Leader{1:2:1011} started compaction 13
00000.075 II| TABLET_OPS_HOST: Scan{13 on 3, Compact{1.2.1010, eph 3}} begin on TSubset{head 4, 1m 0p 0c}
00000.075 II| TABLET_OPS_HOST: Scan{13 on 3, Compact{1.2.1010, eph 3}} end=Done, 109r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 15 of 19 ~1p
00000.075 II| OPS_COMPACT: Compact{1.2.1010, eph 3} end=Done, 7 blobs 109r (max 109), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 15 +5, (104385 0 179117)b }, ecr=1.000
00000.076 II| TABLET_EXECUTOR: Leader{1:2:1012} Compact 13 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 1010, product {1 parts epoch 4} done
00000.079 II| TABLET_EXECUTOR: Leader{1:2:1053} starting compaction
00000.079 II| TABLET_EXECUTOR: Leader{1:2:1054} starting Scan{15 on 2, Compact{1.2.1053, eph 3}}
00000.079 II| TABLET_EXECUTOR: Leader{1:2:1054} started compaction 15
00000.079 II| TABLET_OPS_HOST: Scan{15 on 2, Compact{1.2.1053, eph 3}} begin on TSubset{head 4, 1m 0p 0c}
00000.080 II| TABLET_OPS_HOST: Scan{15 on 2, Compact{1.2.1053, eph 3}} end=Done, 112r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 16 of 22 ~1p
00000.080 II| OPS_COMPACT: Compact{1.2.1053, eph 3} end=Done, 10 blobs 112r (max 112), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 16 +6, (193399 46972 189745)b }, ecr=1.000
00000.080 II| TABLET_EXECUTOR: Leader{1:2:1055} Compact 15 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 1053, product {1 parts epoch 4} done
00000.093 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1201, 11337b, wait} done, Waste{2:0, 3304626b +(158, 102907b), 300 trc}
00000.101 II| TABLET_EXECUTOR: Leader{1:2:1304} starting compaction
00000.101 II| TABLET_EXECUTOR: Leader{1:2:1305} starting Scan{17 on 3, Compact{1.2.1304, eph 4}}
00000.101 II| TABLET_EXECUTOR: Leader{1:2:1305} started compaction 17
00000.101 II| TABLET_OPS_HOST: Scan{17 on 3, Compact{1.2.1304, eph 4}} begin on TSubset{head 5, 1m 0p 0c}
00000.102 II| TABLET_OPS_HOST: Scan{17 on 3, Compact{1.2.1304, eph 4}} end=Done, 96r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 17 of 20 ~1p
00000.102 II| OPS_COMPACT: Compact{1.2.1304, eph 4} end=Done, 9 blobs 96r (max 96), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 17 +5, (117971 13569 204374)b }, ecr=1.000
00000.102 II| TABLET_EXECUTOR: Leader{1:2:1306} Compact 17 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 1304, product {1 parts epoch 5} done
00000.102 II| TABLET_EXECUTOR: Leader{1:2:1307} starting compaction
00000.102 II| TABLET_EXECUTOR: Leader{1:2:1308} starting Scan{19 on 3, Compact{1.2.1307, eph 4}}
00000.102 II| TABLET_EXECUTOR: Leader{1:2:1308} started compaction 19
00000.102 II| TABLET_OPS_HOST: Scan{19 on 3, Compact{1.2.1307, eph 4}} begin on TSubset{head 0, 0m 2p 0c}
00000.103 II| TABLET_OPS_HOST: Scan{19 on 3, Compact{1.2.1307, eph 4}} end=Done, 166r seen, TFwd{fetch=216KiB,saved=216KiB,usage=216KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=4}, trace 34 of 42 ~3p
00000.103 II| OPS_COMPACT: Compact{1.2.1307, eph 4} end=Done, 4 blobs 166r (max 205), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 34 +0, (189544 13569 307371)b }, ecr=1.000
00000.103 II| TABLET_EXECUTOR: Leader{1:2:1310} Compact 19 on TGenCompactionParams{3: gen 1 epoch 0, 2 parts} step 1307, product {1 parts epoch 0} done
00000.115 II| TABLET_EXECUTOR: Leader{1:2:1415} starting compaction
00000.115 II| TABLET_EXECUTOR: Leader{1:2:1416} starting Scan{21 on 2, Compact{1.2.1415, eph 4}}
00000.115 II| TABLET_EXECUTOR: Leader{1:2:1416} started compaction 21
00000.115 II| TABLET_OPS_HOST: Scan{21 on 2, Compact{1.2.1415, eph 4}} begin on TSubset{head 5, 1m 0p 0c}
00000.116 II| TABLET_OPS_HOST: Scan{21 on 2, Compact{1.2.1415, eph 4}} end=Done, 106r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 15 of 17 ~1p
00000.116 II| OPS_COMPACT: Compact{1.2.1415, eph 4} end=Done, 9 blobs 106r (max 106), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 15 +5, (123834 13392 181297)b }, ecr=1.000
00000.116 II| TABLET_EXECUTOR: Leader{1:2:1417} Compact 21 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 1415, product {1 parts epoch 5} done
00000.116 II| TABLET_EXECUTOR: Leader{1:2:1419} starting compaction
00000.116 II| TABLET_EXECUTOR: Leader{1:2:1420} starting Scan{23 on 2, Compact{1.2.1419, eph 4}}
00000.116 II| TABLET_EXECUTOR: Leader{1:2:1420} started compaction 23
00000.116 II| TABLET_OPS_HOST: Scan{23 on 2, Compact{1.2.1419, eph 4}} begin on TSubset{head 0, 0m 2p 0c}
00000.117 II| TABLET_OPS_HOST: Scan{23 on 2, Compact{1.2.1419, eph 4}} end=Done, 176r seen, TFwd{fetch=308KiB,saved=308KiB,usage=308KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=7}, trace 31 of 42 ~3p
00000.117 II| OPS_COMPACT: Compact{1.2.1419, eph 4} end=Done, 4 blobs 176r (max 218), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 31 +0, (259234 60364 277307)b }, ecr=1.000
00000.117 II| TABLET_EXECUTOR: Leader{1:2:1420} Compact 23 on TGenCompactionParams{2: gen 1 epoch 0, 2 parts} step 1419, product {1 parts epoch 0} done
00000.126 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1501, 12774b, wait} done, Waste{2:0, 3768225b + ... ]
00000.019 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:250:1:12288:161:0] owner [37:418:2424] class Scan pages [ 0 ] cookie 0
00000.019 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:313:1:12288:161:0] owner [37:418:2424]
00000.019 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:313:1:12288:161:0] owner [37:418:2424] cookie 0 class Scan from cache [ 0 ]
00000.019 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:313:1:12288:161:0] owner [37:418:2424] class Scan pages [ 0 ] cookie 0
00000.019 DD| TABLET_SAUSAGECACHE: Save page collection [1:2:315:1:12288:163:0] owner [37:419:2424] compacted pages [ 2 ]
00000.019 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:315:1:12288:163:0]
00000.019 DD| TABLET_SAUSAGECACHE: Unregister owner [37:418:2424]
00000.019 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:313:1:12288:161:0] owner [37:418:2424]
00000.019 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:188:1:12288:161:0] owner [37:418:2424]
00000.019 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:126:1:12288:161:0] owner [37:418:2424]
00000.019 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:250:1:12288:161:0] owner [37:418:2424]
00000.019 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:64:1:12288:161:0] owner [37:418:2424]
00000.019 DD| TABLET_SAUSAGECACHE: Remove owner [37:418:2424]
00000.019 II| TABLET_EXECUTOR: Leader{1:2:316} Compact 63 on TGenCompactionParams{101: gen 2 epoch 0, 5 parts} step 315, product {1 parts epoch 0} done
00000.019 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 63, generation 2
00000.019 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 2, state Free, final id 0, final level 2
00000.019 DD| RESOURCE_BROKER: Finish task gen2-table-101-tablet-1 (32 by [37:30:2062]) (release resources {1, 0})
00000.019 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen2 from 4.687500 to 0.000000 (remove task gen2-table-101-tablet-1 (32 by [37:30:2062]))
00000.019 DD| TABLET_SAUSAGECACHE: Attach page collection [1:2:315:1:12288:163:0] owner [37:30:2062]
00000.019 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:315:1:12288:163:0] owner [37:30:2062]
00000.019 DD| TABLET_SAUSAGECACHE: Detach page collection [1:2:313:1:12288:161:0] owner [37:30:2062]
00000.019 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:313:1:12288:161:0] owner [37:30:2062]
00000.019 DD| TABLET_SAUSAGECACHE: Detach page collection [1:2:250:1:12288:161:0] owner [37:30:2062]
00000.019 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:250:1:12288:161:0] owner [37:30:2062]
00000.019 DD| TABLET_SAUSAGECACHE: Detach page collection [1:2:188:1:12288:161:0] owner [37:30:2062]
00000.019 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:188:1:12288:161:0] owner [37:30:2062]
00000.019 DD| TABLET_EXECUTOR: Leader{1:2:317} commited cookie 3 for step 316
00000.019 DD| TABLET_SAUSAGECACHE: Detach page collection [1:2:126:1:12288:161:0] owner [37:30:2062]
00000.019 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:126:1:12288:161:0] owner [37:30:2062]
00000.019 DD| TABLET_SAUSAGECACHE: Detach page collection [1:2:64:1:12288:161:0] owner [37:30:2062]
00000.019 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:64:1:12288:161:0] owner [37:30:2062]
00000.019 DD| TABLET_EXECUTOR: Leader{1:2:317} switch applied on followers, step 316
00000.019 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:315:1:12288:163:0] owner [37:30:2062] pages [ 2 ]
00000.019 DD|
TABLET_SAUSAGECACHE: Add page collection [1:2:64:1:12288:161:0] owner [37:405:2414] 00000.019 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:64:1:12288:161:0] owner [37:405:2414] cookie 0 class Scan from cache [ 0 ] 00000.019 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:64:1:12288:161:0] owner [37:405:2414] class Scan pages [ 0 ] cookie 0 00000.019 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:126:1:12288:161:0] owner [37:405:2414] 00000.019 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:126:1:12288:161:0] owner [37:405:2414] cookie 0 class Scan from cache [ 0 ] 00000.019 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:126:1:12288:161:0] owner [37:405:2414] class Scan pages [ 0 ] cookie 0 00000.019 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:188:1:12288:161:0] owner [37:405:2414] 00000.019 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:188:1:12288:161:0] owner [37:405:2414] cookie 0 class Scan from cache [ 0 ] 00000.019 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:188:1:12288:161:0] owner [37:405:2414] class Scan pages [ 0 ] cookie 0 00000.019 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:250:1:12288:161:0] owner [37:405:2414] 00000.019 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:250:1:12288:161:0] owner [37:405:2414] cookie 0 class Scan from cache [ 0 ] 00000.019 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:250:1:12288:161:0] owner [37:405:2414] class Scan pages [ 0 ] cookie 0 00000.019 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:261:1:12288:161:0] owner [37:405:2414] 00000.019 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:261:1:12288:161:0] owner [37:405:2414] cookie 0 class Scan from cache [ 0 ] 00000.019 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:261:1:12288:161:0] owner [37:405:2414] class Scan pages [ 0 ] cookie 0 00000.019 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:273:1:12288:161:0] owner [37:405:2414] 00000.019 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:273:1:12288:161:0] owner [37:405:2414] cookie 0 class Scan from cache [ 0 ] 00000.019 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:273:1:12288:161:0] owner [37:405:2414] class Scan pages [ 0 ] cookie 0 00000.019 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:285:1:12288:161:0] owner [37:405:2414] 00000.019 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:285:1:12288:161:0] owner [37:405:2414] cookie 0 class Scan from cache [ 0 ] 00000.019 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:285:1:12288:161:0] owner [37:405:2414] class Scan pages [ 0 ] cookie 0 00000.019 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:297:1:12288:161:0] owner [37:405:2414] 00000.019 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:297:1:12288:161:0] owner [37:405:2414] cookie 0 class Scan from cache [ 0 ] 00000.019 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:297:1:12288:161:0] owner [37:405:2414] class Scan pages [ 0 ] cookie 0 00000.019 DD| TABLET_SAUSAGECACHE: Unregister owner [37:405:2414] 00000.019 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:285:1:12288:161:0] owner [37:405:2414] 00000.020 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:261:1:12288:161:0] owner [37:405:2414] 00000.020 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:126:1:12288:161:0] owner [37:405:2414] 00000.020 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:64:1:12288:161:0] owner [37:405:2414] 00000.020 DD| TABLET_SAUSAGECACHE: Remove page collection 
[1:2:250:1:12288:161:0] owner [37:405:2414] 00000.020 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:297:1:12288:161:0] owner [37:405:2414] 00000.020 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:188:1:12288:161:0] owner [37:405:2414] 00000.020 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:273:1:12288:161:0] owner [37:405:2414] 00000.020 DD| TABLET_SAUSAGECACHE: Remove owner [37:405:2414] 00000.020 DD| RESOURCE_BROKER: Finish task Scan{58 on 101}::1 (29 by [37:30:2062]) (release resources {1, 0}) 00000.020 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_scan from 11.718750 to 0.000000 (remove task Scan{58 on 101}::1 (29 by [37:30:2062])) 00000.020 II| TABLET_EXECUTOR: Leader{1:2:317} suiciding, Waste{2:0, 7661b +(30, 11928b), 16 trc, -42337b acc} 00000.020 DD| TABLET_SAUSAGECACHE: Unregister owner [37:30:2062] 00000.020 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:315:1:12288:163:0] owner [37:30:2062] 00000.020 DD| TABLET_SAUSAGECACHE: Remove owner [37:30:2062] 00000.020 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.020 NN| TABLET_SAUSAGECACHE: Poison cache serviced 38 reqs hit {38 21480b} miss {0 0b} 00000.020 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.020 II| FAKE_ENV: DS.0 gone, left {1961b, 17}, put {31662b, 317} 00000.020 II| FAKE_ENV: DS.1 gone, left {23848b, 37}, put {57238b, 346} 00000.020 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.020 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.020 II| FAKE_ENV: All BS storage groups are stopped 00000.020 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.020 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 2287}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:25.037232Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.010 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.010 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.010 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.010 II| FAKE_ENV: DS.0 gone, left {536b, 6}, put {556b, 7} 00000.010 II| FAKE_ENV: DS.1 gone, left {30495b, 8}, put {30495b, 8} 00000.010 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.010 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.010 II| FAKE_ENV: All BS storage groups are stopped 00000.010 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.010 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-05-29T15:28:25.048688Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.060 CC| TABLET_EXECUTOR: Tablet 1 unhandled exception std::runtime_error: test ??+0 (0xCD01D32) ??+0 (0xCD01CD7) NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Exceptions::TTxExecuteThrowException::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, 
NActors::TActorContext const&)+57 (0xC83C179) NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*)+1312 (0xF2D4750) NKikimr::NTabletFlatExecutor::TExecutor::DoExecute(TAutoPtr, NKikimr::NTabletFlatExecutor::TExecutor::ETxMode)+3356 (0xF2D2BFC) non-virtual thunk to NKikimr::NTabletFlatExecutor::TExecutor::Execute(TAutoPtr, NActors::TActorContext const&)+35 (0xF2D5C73) ??+0 (0xC83C090) NKikimr::NFake::TDummy::Inbox(TAutoPtr&)+480 (0xC7EDC90) NActors::IActor::Receive(TAutoPtr&)+85 (0xD3F8D45) 00000.061 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.061 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.061 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.061 II| FAKE_ENV: DS.1 gone, left {35b, 1}, put {35b, 1} 00000.061 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.061 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.061 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {62b, 2} 00000.061 II| FAKE_ENV: All BS storage groups are stopped 00000.061 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.061 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 1 Error 0 Left 15}, stopped ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe [FAIL] Test command err: 2025-05-29T15:28:21.253521Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889909152038204:2077];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:21.253729Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:28:21.257087Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889910580587655:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:21.257171Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000df1/r3tmp/tmpLQfeut/pdisk_1.dat 2025-05-29T15:28:21.281105Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:28:21.282276Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:28:21.310976Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2324, node 1 2025-05-29T15:28:21.323280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/000df1/r3tmp/yandexzBbJLn.tmp 2025-05-29T15:28:21.323290Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/000df1/r3tmp/yandexzBbJLn.tmp 2025-05-29T15:28:21.330613Z INFO: TTestServer started on Port 13872 GrpcPort 2324 2025-05-29T15:28:21.332186Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/000df1/r3tmp/yandexzBbJLn.tmp 2025-05-29T15:28:21.332243Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration TClient is connected to server localhost:13872 PQClient connected to localhost:2324 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:28:21.353608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:21.353629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:21.355266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:21.382066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:21.382113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:21.383061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:28:21.383651Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:28:21.383889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:28:21.397294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:28:21.582767Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889909152039192:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:21.582770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889909152039206:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:21.582791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:21.583493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2025-05-29T15:28:21.583702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889909152039241:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:21.583717Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:21.587429Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889909152039212:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976720662 completed, doublechecking } 2025-05-29T15:28:21.611229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:21.611396Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509889910580588038:2315], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:21.611492Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=ZmE3MGM0NzUtYzI4NTk1Y2QtMzc5MDU5N2UtMTJjM2ViOTM=, ActorId: [2:7509889910580587999:2309], ActorState: ExecuteState, TraceId: 01jweah72s5raeecm9szw1s5ng, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:21.612027Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:21.669503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:21.671539Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889909152039487:2877] txid# 281474976720665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:21.674955Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889909152039516:2358], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
<main>:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:21.675045Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDNhMzQ1ZTgtYWM5ZGEwNi03ZmIxYTVhNS1iOTUxOTlm, ActorId: [1:7509889909152039180:2334], ActorState: ExecuteState, TraceId: 01jweah72ca69ettx3etbeqrxa, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:21.675205Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:21.733468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:28:21.764414Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889909152039714:2382], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:21.764548Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NmNmMzA4MDAtNjZhNTcxMzEtNGJhZDk2NGItMzYzNTBjYjk=, ActorId: [1:7509889909152039711:2380], ActorState: ExecuteState, TraceId: 01jweah77kdyd60bc827j4mjdv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x1390E95C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13AC1CE9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x137F8E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x137F7CC8) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x137F6F12) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x137F4E47) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x137F3A78) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x137EC50B) NPersQueue::TTestServer::TTestServer(bool)+134 (0x137E6B16) NKikimr::NPersQueueTests::TDescribeTestServer::TDescribeTestServer(unsigned int)+36 (0x137FE364) NKikimr::NPersQueueTests::NTestSuiteTTopicApiDescribes::TTestCaseGetPartitionDescribe::Execute_(NUnitTest::TTestContext&)+31 (0x1380031F) NKikimr::NPersQueueTests::NTestSuiteTTopicApiDescribes::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13809B17) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13AC3B9E) NKikimr::NPersQueueTests::NTestSuiteTTopicApiDescribes::TCurrentTest::Execute()+425 (0x138094D9) NUnitTest::TTestFactory::Execute()+803 (0x13AC4313) NUnitTest::RunMain(int, char**)+3021 (0x13AD5EBD) ??+0 (0x7F22BEDA7D90) __libc_start_main+128 (0x7F22BEDA7E40) _start+41 (0x12944029) >> TConsistentOpsWithReboots::DropIndexedTableWithReboots |70.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer [FAIL] Test command err: 2025-05-29T15:28:21.480370Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889912002110012:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:21.480389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:28:21.483628Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889909777919430:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:21.483644Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dea/r3tmp/tmpY0PnBj/pdisk_1.dat 2025-05-29T15:28:21.521624Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:28:21.529766Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:28:21.546947Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15032, node 1 2025-05-29T15:28:21.558803Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/000dea/r3tmp/yandexxESOwE.tmp 2025-05-29T15:28:21.558814Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/000dea/r3tmp/yandexxESOwE.tmp 2025-05-29T15:28:21.558891Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/000dea/r3tmp/yandexxESOwE.tmp 2025-05-29T15:28:21.558936Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:21.563582Z INFO: TTestServer started on Port 19721 GrpcPort 15032 TClient is connected to server localhost:19721 PQClient connected to localhost:15032 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:28:21.581145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:21.581184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:21.582905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:21.619327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:21.619351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:21.620578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:28:21.620778Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:28:21.621030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:28:21.633774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-05-29T15:28:21.812651Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889909777919789:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:21.812678Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:21.812692Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889909777919800:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:21.813949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-05-29T15:28:21.816653Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889912002111015:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:21.816740Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:21.818595Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509889909777919804:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710657 completed, doublechecking } 2025-05-29T15:28:21.846857Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889912002111060:2341], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:21.846936Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzZlMjFmZWItMzMwMjUyZDktY2IxYzk3NWEtNTY1NmY3OTI=, ActorId: [1:7509889912002110996:2334], ActorState: ExecuteState, TraceId: 01jweah79p6xjvk8ns4n3fkr83, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:21.847050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:21.847297Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:21.890216Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509889909777919832:2165] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:21.894043Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509889909777919846:2318], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:21.894115Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=ZDY3OThkNzItZjUzNTYyZTItY2YzMDFhMWItYmI3YzI5NQ==, ActorId: [2:7509889909777919773:2309], ActorState: ExecuteState, TraceId: 01jweah79mate9xqp1tysqxcad, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:21.894257Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:21.906408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:21.973620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:28:22.001175Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889912002111480:2377], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:22.001273Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTJiZjI5ODAtNmNjYmUxY2MtNjNlNTZjZjMtOTFjYTc3ZWU=, ActorId: [1:7509889912002111477:2375], ActorState: ExecuteState, TraceId: 01jweah7f57nsxnp4s6d78yrjp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x1390E95C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13AC1CE9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x137F8E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x137F7CC8) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x137F6F12) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x137F4E47) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x137F3A78) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x137EC50B) NPersQueue::TTestServer::TTestServer(bool)+134 (0x137E6B16) NKikimr::NPersQueueTests::TDescribeTestServer::TDescribeTestServer(unsigned int)+36 (0x137FE364) NKikimr::NPersQueueTests::NTestSuiteTTopicApiDescribes::TTestCaseDescribeConsumer::Execute_(NUnitTest::TTestContext&)+31 (0x13804B6F) NKikimr::NPersQueueTests::NTestSuiteTTopicApiDescribes::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13809B17) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13AC3B9E) NKikimr::NPersQueueTests::NTestSuiteTTopicApiDescribes::TCurrentTest::Execute()+425 (0x138094D9) NUnitTest::TTestFactory::Execute()+803 (0x13AC4313) NUnitTest::RunMain(int, char**)+3021 (0x13AD5EBD) ??+0 (0x7F5534365D90) __libc_start_main+128 (0x7F5534365E40) _start+41 (0x12944029) |70.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |70.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |70.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> IntermediateDirsReboots::CreateKesusWithIntermediateDirs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest [FAIL] Test command err: 2025-05-29T15:28:22.056580Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889915034664027:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:22.056600Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:28:22.060100Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889915449402495:2266];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:22.090999Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000dae/r3tmp/tmpZGQ4kx/pdisk_1.dat 2025-05-29T15:28:22.103103Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:28:22.103138Z node 2 :METADATA_PROVIDER ERROR: 
log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:28:22.123746Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13501, node 1 2025-05-29T15:28:22.134280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/000dae/r3tmp/yandexNwOq82.tmp 2025-05-29T15:28:22.134295Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/000dae/r3tmp/yandexNwOq82.tmp 2025-05-29T15:28:22.134369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/000dae/r3tmp/yandexNwOq82.tmp 2025-05-29T15:28:22.134418Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:22.138724Z INFO: TTestServer started on Port 6739 GrpcPort 13501 TClient is connected to server localhost:6739 PQClient connected to localhost:13501 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:28:22.156982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:22.157016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:22.158735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:22.161646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:28:22.173593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:28:22.194128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:22.194154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:22.195791Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:28:22.196086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:28:22.466608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889915034665101:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:22.466636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889915034665075:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:22.466657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:22.467454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-05-29T15:28:22.467657Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889915034665132:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:22.467679Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:22.472058Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889915034665104:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-05-29T15:28:22.514146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:22.542623Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509889915449402650:2313], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:22.542730Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=NzY5ZDg1N2MtMzRhMjViMGItY2E2ZDJlMDMtNmY4NTEyNjM=, ActorId: [2:7509889915449402590:2306], ActorState: ExecuteState, TraceId: 01jweah7zy5a9h743na5y1jc7e, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:22.543315Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:22.560230Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889915034665246:2811] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:22.563971Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889915034665264:2355], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:22.564042Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjA3MGU4MDAtMzMxZDY5M2MtMTczNGIzNTctNmM5NWUxN2Q=, ActorId: [1:7509889915034665072:2338], ActorState: ExecuteState, TraceId: 01jweah7y21sfcmwce84rwx637, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:22.564173Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:22.581778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:22.652492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:28:22.687762Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889915034665532:2381], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:22.687886Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWQzMWQ0Mi0yODU4Njk4ZS0xODY0OGE0Ni02OTJmN2EwZg==, ActorId: [1:7509889915034665529:2379], ActorState: ExecuteState, TraceId: 01jweah84d3ebj1zzr55cemgx1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x1390E95C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13AC1CE9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x137F8E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x137F7CC8) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x137F6F12) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x137F4E47) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x137F3A78) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x137EC50B) NPersQueue::TTestServer::TTestServer(bool)+134 (0x137E6B16) NKikimr::NPersQueueTests::NTestSuiteTIcNodeCache::TTestCaseGetNodesInfoTest::Execute_(NUnitTest::TTestContext&)+31 (0x137E683F) NKikimr::NPersQueueTests::NTestSuiteTIcNodeCache::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x137EAC97) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13AC3B9E) NKikimr::NPersQueueTests::NTestSuiteTIcNodeCache::TCurrentTest::Execute()+429 (0x137EA4FD) NUnitTest::TTestFactory::Execute()+803 (0x13AC4313) NUnitTest::RunMain(int, char**)+3021 (0x13AD5EBD) ??+0 (0x7FA3921F4D90) __libc_start_main+128 (0x7FA3921F4E40) _start+41 (0x12944029) |70.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |70.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |70.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |70.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeTopic [FAIL] Test command err: 2025-05-29T15:28:22.618107Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889914279599617:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:22.618137Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:28:22.625191Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889914705719369:2157];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d99/r3tmp/tmpd5DmSE/pdisk_1.dat 2025-05-29T15:28:22.656376Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:28:22.658609Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2025-05-29T15:28:22.659251Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:28:22.685927Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20948, node 1 2025-05-29T15:28:22.703039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/000d99/r3tmp/yandexCB8hGu.tmp 2025-05-29T15:28:22.703059Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/000d99/r3tmp/yandexCB8hGu.tmp 2025-05-29T15:28:22.703142Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/000d99/r3tmp/yandexCB8hGu.tmp 2025-05-29T15:28:22.703173Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:22.706924Z INFO: TTestServer started on Port 1617 GrpcPort 20948 2025-05-29T15:28:22.718829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:22.718883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:1617 2025-05-29T15:28:22.723098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:20948 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:22.752323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:28:22.753740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:22.753772Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-29T15:28:22.754807Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:28:22.755068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-29T15:28:22.768814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:28:23.015309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889918574567924:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:23.015336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889918574567904:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:23.015359Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:23.016321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2025-05-29T15:28:23.016519Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889918574567956:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:23.016553Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:23.021690Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889918574567933:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2025-05-29T15:28:23.051144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:23.070834Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509889919000686954:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:23.070947Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=OGFhMTI0YmUtZjA1ZThkNTUtMTJjNzUxZDktZDBmYjEwYTA=, ActorId: [2:7509889919000686915:2309], ActorState: ExecuteState, TraceId: 01jweah8ge1s1n68780grnz6yp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:23.071448Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:23.100449Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889918574568113:2801] txid# 281474976720664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:23.104617Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889918574568132:2354], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:23.104693Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OGFjMGE1NjUtMTczMGRjZTktMjRlN2Y1MTAtMzllZmFkNDE=, ActorId: [1:7509889918574567901:2334], ActorState: ExecuteState, TraceId: 01jweah8f63f0q944dqh4ef366, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:23.105111Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:23.110307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:23.133507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:28:23.166668Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889918574568410:2381], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:23.166784Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzI1MjljZTUtMzJkMjExMGYtNTAxMTdmZjAtNzUyOTNhYmY=, ActorId: [1:7509889918574568407:2379], ActorState: ExecuteState, TraceId: 01jweah8kf0k49b1tqxzr1pdsk, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x1390E95C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13AC1CE9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x137F8E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x137F7CC8) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x137F6F12) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x137F4E47) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x137F3A78) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x137EC50B) NPersQueue::TTestServer::TTestServer(bool)+134 (0x137E6B16) NKikimr::NPersQueueTests::TDescribeTestServer::TDescribeTestServer(unsigned int)+36 (0x137FE364) NKikimr::NPersQueueTests::NTestSuiteTTopicApiDescribes::TTestCaseDescribeTopic::Execute_(NUnitTest::TTestContext&)+29 (0x13802C4D) NKikimr::NPersQueueTests::NTestSuiteTTopicApiDescribes::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13809B17) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13AC3B9E) NKikimr::NPersQueueTests::NTestSuiteTTopicApiDescribes::TCurrentTest::Execute()+425 (0x138094D9) NUnitTest::TTestFactory::Execute()+803 (0x13AC4313) NUnitTest::RunMain(int, char**)+3021 (0x13AD5EBD) ??+0 (0x7FEE00A56D90) __libc_start_main+128 (0x7FEE00A56E40) _start+41 (0x12944029) |70.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge |70.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |70.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe [FAIL] Test command err: 2025-05-29T15:28:23.235816Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889918311066361:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:23.236170Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:28:23.238272Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889917756720659:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:23.238342Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d5e/r3tmp/tmpJvgV0x/pdisk_1.dat 2025-05-29T15:28:23.264726Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:28:23.270665Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:28:23.287990Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14187, node 1 2025-05-29T15:28:23.298985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/000d5e/r3tmp/yandexambxYr.tmp 2025-05-29T15:28:23.299002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/000d5e/r3tmp/yandexambxYr.tmp 2025-05-29T15:28:23.299087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/000d5e/r3tmp/yandexambxYr.tmp 2025-05-29T15:28:23.299158Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:23.306145Z INFO: TTestServer started on Port 21696 GrpcPort 14187 TClient is connected to server localhost:21696 PQClient connected to localhost:14187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:28:23.336450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:23.336482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:23.338001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:23.339612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:28:23.348692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:28:23.363647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:23.363670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:23.364637Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:28:23.364878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:28:23.603102Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889917756720965:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:23.603123Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889917756720957:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:23.603142Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:23.604521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-05-29T15:28:23.609101Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509889917756720971:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-05-29T15:28:23.664742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:23.665015Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889918311067483:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:23.665116Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2QwOTNhNzUtZDg1NGNhODYtNjZkODIyNWEtMjkxZjU2NWQ=, ActorId: [1:7509889918311067443:2338], ActorState: ExecuteState, TraceId: 01jweah9210vbdtdp8yrmm9pqx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:23.665541Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:23.710885Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509889917756721040:2152] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:23.715942Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509889917756721055:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:23.716032Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=NjczZDJjYzEtMTg2OGJmOTgtYzBmYmUxNGUtMTY2NDdjZmQ=, ActorId: [2:7509889917756720955:2305], ActorState: ExecuteState, TraceId: 01jweah91j65b1cr5r69ekqfp7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:23.716192Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:23.725312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:23.744614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:28:23.774493Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889918311067819:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:23.774602Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzNlMjRmNjItZjI0M2U3ZDEtM2VlMTA4NGMtYTE4ZWZmMGI=, ActorId: [1:7509889918311067816:2374], ActorState: ExecuteState, TraceId: 01jweah96f425zgtp0qep6tex5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x1390E95C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13AC1CE9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x137F8E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x137F7CC8) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x137F6F12) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x137F4E47) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x137F3A78) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x137EC50B) NPersQueue::TTestServer::TTestServer(bool)+134 (0x137E6B16) NKikimr::NPersQueueTests::TDescribeTestServer::TDescribeTestServer(unsigned int)+36 (0x137FE364) NKikimr::NPersQueueTests::NTestSuiteTTopicApiDescribes::TTestCaseGetLocalDescribe::Execute_(NUnitTest::TTestContext&)+37 (0x137FCE35) NKikimr::NPersQueueTests::NTestSuiteTTopicApiDescribes::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13809B17) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13AC3B9E) NKikimr::NPersQueueTests::NTestSuiteTTopicApiDescribes::TCurrentTest::Execute()+425 (0x138094D9) NUnitTest::TTestFactory::Execute()+803 (0x13AC4313) NUnitTest::RunMain(int, char**)+3021 (0x13AD5EBD) ??+0 (0x7FE090200D90) __libc_start_main+128 (0x7FE090200E40) _start+41 (0x12944029) |70.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TChargeBTreeIndex::OneNode_Groups [GOOD] >> TChargeBTreeIndex::OneNode_History >> BsControllerConfig::OverlayMap >> THiveTest::TestCheckSubHiveMigrationManyTablets [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots >> BsControllerConfig::OverlayMap [GOOD] |70.9%| [TA] $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BsControllerConfig::OverlayMapCrossReferences >> BsControllerConfig::PDiskCreate >> BsControllerConfig::ExtendByCreatingSeparateBox |70.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMap [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] >> TPartBtreeIndexIteration::FewNodes [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups >> BsControllerConfig::OverlayMapCrossReferences [GOOD] >> BsControllerConfig::MergeIntersectingBoxes >> BsControllerConfig::SelectAllGroups |70.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMapCrossReferences [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:27:21.452177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:27:21.452215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:21.452222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:27:21.452228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:27:21.452247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:27:21.452252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:27:21.452263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:21.452276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:27:21.452401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:27:21.452473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:27:21.466628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:21.466653Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:21.469443Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:27:21.469545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:27:21.469601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:27:21.471272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:27:21.471432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:27:21.471552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:21.471601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:27:21.472041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:21.472082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:27:21.472313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:21.472320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:21.472337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:27:21.472343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:21.472347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:27:21.472376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.473508Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:27:21.491598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:27:21.491687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.491751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:27:21.491808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:27:21.491820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.492600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:21.492631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:27:21.492683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.492694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:27:21.492700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:27:21.492706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:27:21.493144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.493154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:27:21.493160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:27:21.493528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.493537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.493542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:21.493550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:27:21.494226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:27:21.494655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:27:21.494694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:27:21.494913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:21.494938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:21.494946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:21.495010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:27:21.495017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:21.495052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:27:21.495064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:27:21.495465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:21.495474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:21.495520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
T15:28:28.142679Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4937: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-05-29T15:28:28.142683Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6632: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-05-29T15:28:28.142704Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:56: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-05-29T15:28:28.142722Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-05-29T15:28:28.245096Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:770:2655]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:28.245131Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3155: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:28.245162Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409552 outdated step 200 last cleanup 0 2025-05-29T15:28:28.245186Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409552 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:28:28.245195Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409552 2025-05-29T15:28:28.245202Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409552 has no attached operations 2025-05-29T15:28:28.245207Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409552 2025-05-29T15:28:28.245246Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:770:2655]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:28.245299Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186233409552, FollowerId 0, tableId 2 2025-05-29T15:28:28.245412Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269553162, Sender [3:770:2655], Recipient [3:900:2757]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409552 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 9 Memory: 119208 } ShardState: 2 UserTablePartOwners: 72075186233409552 NodeId: 3 StartTime: 121 TableOwnerId: 72075186233409549 FollowerId: 0 2025-05-29T15:28:28.245421Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4914: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-05-29T15:28:28.245439Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0009 2025-05-29T15:28:28.245458Z node 3 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard__table_stats.cpp:570: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-05-29T15:28:28.245467Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:610: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-05-29T15:28:28.255655Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:775:2659]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:28.255691Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3155: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:28.255720Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409553 outdated step 200 last cleanup 0 2025-05-29T15:28:28.255750Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409553 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:28:28.255760Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409553 2025-05-29T15:28:28.255766Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409553 has no attached operations 2025-05-29T15:28:28.255772Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409553 2025-05-29T15:28:28.255827Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:775:2659]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:28.255866Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186233409553, FollowerId 0, tableId 2 2025-05-29T15:28:28.255980Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269553162, Sender [3:775:2659], Recipient [3:900:2757]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409553 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 9 Memory: 119208 } ShardState: 2 UserTablePartOwners: 72075186233409553 NodeId: 3 StartTime: 121 TableOwnerId: 72075186233409549 FollowerId: 0 2025-05-29T15:28:28.255990Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4914: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-05-29T15:28:28.256008Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 
followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0009 2025-05-29T15:28:28.256025Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:570: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-05-29T15:28:28.266507Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:900:2757]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:28:28.266536Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:28:28.266552Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [3:900:2757], Recipient [3:900:2757]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:28:28.266558Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:28:28.276704Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435096, Sender [0:0:0], Recipient [3:900:2757]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-05-29T15:28:28.276733Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5062: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-05-29T15:28:28.276825Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:900:2757]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-05-29T15:28:28.276830Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4937: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-05-29T15:28:28.276833Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6632: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-05-29T15:28:28.276854Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:56: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-05-29T15:28:28.276867Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-05-29T15:28:28.276894Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269746180, Sender [3:2030:3847], Recipient [3:900:2757]: NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-05-29T15:28:28.276898Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5061: StateWork, processing event TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-05-29T15:28:28.297342Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [3:2033:3850], Recipient [3:770:2655]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:28:28.297366Z node 3 :TX_DATASHARD TRACE: 
datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:28:28.297374Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186233409552, clientId# [3:2032:3849], serverId# [3:2033:3850], sessionId# [0:0:0] 2025-05-29T15:28:28.297432Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269553213, Sender [3:2031:3848], Recipient [3:770:2655]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } 2025-05-29T15:28:28.297511Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [3:2036:3853], Recipient [3:775:2659]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:28:28.297514Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:28:28.297518Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186233409553, clientId# [3:2035:3852], serverId# [3:2036:3853], sessionId# [0:0:0] 2025-05-29T15:28:28.297527Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269553213, Sender [3:2034:3851], Recipient [3:775:2659]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } |70.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |70.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |70.9%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} |70.9%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts >> BsControllerConfig::ManyPDisksRestarts >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] >> BsControllerConfig::ReassignGroupDisk >> BsControllerConfig::Basic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:27:20.667598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:27:20.667626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:20.667631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:27:20.667637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 
2025-05-29T15:27:20.667658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:27:20.667663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:27:20.667673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:20.667688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:27:20.667836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:27:20.667907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:27:20.678859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:20.678887Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:20.681539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:27:20.681684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:27:20.681733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:27:20.683790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:27:20.684007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:27:20.684134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:20.684201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:27:20.684786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:20.684829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:27:20.685136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:20.685145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:20.685166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:27:20.685175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:20.685181Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:27:20.685219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:27:20.686764Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:27:20.709102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:27:20.709198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:20.709274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:27:20.709334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:27:20.709346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:20.710257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:20.710288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:27:20.710346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:20.710358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:27:20.710364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:27:20.710371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:27:20.710783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:20.710793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:27:20.710799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:27:20.711153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-05-29T15:27:20.711162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:20.711168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:20.711177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:27:20.711918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:27:20.712337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:27:20.712379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:27:20.712587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:20.712613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:20.712624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:20.712680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:27:20.712688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:20.712724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:27:20.712737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:27:20.713154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:20.713163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-05-29T15:27:20.713210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... shard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409546 2025-05-29T15:28:28.929861Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:317:2300]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:28.929904Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-05-29T15:28:28.929998Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269553162, Sender [3:317:2300], Recipient [3:124:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 9 Memory: 124088 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 40 TableOwnerId: 72057594046678944 FollowerId: 0 2025-05-29T15:28:28.930004Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4914: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-05-29T15:28:28.930029Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0009 2025-05-29T15:28:28.930043Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:570: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-05-29T15:28:28.930049Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:610: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-05-29T15:28:28.940229Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:322:2303]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:28.940262Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3155: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:28.940285Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409547 outdated step 5000002 last 
cleanup 0 2025-05-29T15:28:28.940304Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409547 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:28:28.940311Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409547 2025-05-29T15:28:28.940315Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409547 has no attached operations 2025-05-29T15:28:28.940319Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409547 2025-05-29T15:28:28.940361Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:322:2303]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:28.940398Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 2 2025-05-29T15:28:28.940491Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269553162, Sender [3:322:2303], Recipient [3:124:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 8 Memory: 119208 } ShardState: 2 UserTablePartOwners: 72075186233409547 NodeId: 3 StartTime: 40 TableOwnerId: 72057594046678944 FollowerId: 0 2025-05-29T15:28:28.940500Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4914: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-05-29T15:28:28.940518Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0008 2025-05-29T15:28:28.940555Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:570: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-05-29T15:28:28.981525Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-05-29T15:28:28.981553Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5053: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-05-29T15:28:28.981558Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started 
TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-05-29T15:28:28.981584Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:601: Will execute TTxStoreStats, queue# 2 2025-05-29T15:28:28.981590Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:610: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-05-29T15:28:28.981619Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-05-29T15:28:28.981641Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-05-29T15:28:28.981646Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:62: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2025-05-29T15:28:28.981663Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:29.000000Z at schemeshard 72057594046678944 2025-05-29T15:28:28.981686Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:477: Do not want to split tablet 72075186233409546 by size, its table already has 2 out of 2 partitions 2025-05-29T15:28:28.981699Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 2 shard idx 72057594046678944:2 data size 0 row count 0 2025-05-29T15:28:28.981703Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 0, DataSize 0 2025-05-29T15:28:28.981706Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:62: BuildStatsForCollector: datashardId 72075186233409547, followerId 0 2025-05-29T15:28:28.981710Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:2 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2025-05-29T15:28:28.981714Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:477: Do not want to split tablet 72075186233409547 by size, its table already has 2 out of 2 partitions 2025-05-29T15:28:28.981729Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:28.991933Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-05-29T15:28:28.991967Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5053: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-05-29T15:28:28.991972Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-05-29T15:28:29.022773Z node 3 :TX_DATASHARD TRACE: 
datashard_impl.h:3129: StateWork, received event# 269877761, Sender [3:1323:3243], Recipient [3:317:2300]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:28:29.022797Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:28:29.022805Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186233409546, clientId# [3:1322:3242], serverId# [3:1323:3243], sessionId# [0:0:0] 2025-05-29T15:28:29.022855Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269553213, Sender [3:1321:3241], Recipient [3:317:2300]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } 2025-05-29T15:28:29.023029Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [3:1326:3246], Recipient [3:322:2303]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:28:29.023034Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:28:29.023039Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186233409547, clientId# [3:1325:3245], serverId# [3:1326:3246], sessionId# [0:0:0] 2025-05-29T15:28:29.023057Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269553213, Sender [3:1324:3244], Recipient [3:322:2303]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } >> TChargeBTreeIndex::OneNode_History [GOOD] >> TChargeBTreeIndex::OneNode_Groups_History >> TDsProxyQuorumTracker::CheckFailModelErasure4Plus2Block [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes_History ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasure4Plus2Block [GOOD] Test command err: 2025-05-29T15:28:24.495944Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [7e4afa7ea38a37be] bootstrap ActorId# [3:74:2120] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-05-29T15:28:24.496006Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496014Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496018Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496023Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496027Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496031Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# 
[72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496035Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496040Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496044Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496049Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496053Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496057Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496062Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496066Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496070Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496074Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496078Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496084Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-05-29T15:28:24.496091Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:28:24.496104Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:28:24.496111Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:28:24.496118Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown 
to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:28:24.496122Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:28:24.496127Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:28:24.496131Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:28:24.496136Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2025-05-29T15:28:24.496140Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2025-05-29T15:28:24.496144Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2025-05-29T15:28:24.496147Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2025-05-29T15:28:24.496153Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2025-05-29T15:28:24.496158Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2025-05-29T15:28:24.500609Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2025-05-29T15:28:24.500682Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2025-05-29T15:28:24.500696Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2025-05-29T15:28:24.500708Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:3:0] Marker# BPP01 2025-05-29T15:28:24.500732Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:4:0] Marker# BPP01 2025-05-29T15:28:24.500744Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:5:0] Marker# BPP01 2025-05-29T15:28:24.500762Z node 3 
:BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-05-29T15:28:24.500771Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:28:24.500822Z node 3 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.294 sample PartId# [72075186224047637:1:863:1:24576:786:6] QueryCount# 1 VDiskId# [0:1:0:5:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.294 sample PartId# [72075186224047637:1:863:1:24576:786:5] QueryCount# 1 VDiskId# [0:1:0:4:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.295 sample PartId# [72075186224047637:1:863:1:24576:786:4] QueryCount# 1 VDiskId# [0:1:0:3:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.295 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.295 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.295 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.745 VDiskId# [0:1:0:0:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.787 VDiskId# [0:1:0:1:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.8 VDiskId# [0:1:0:2:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.814 VDiskId# [0:1:0:3:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.836 VDiskId# [0:1:0:4:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.849 VDiskId# [0:1:0:5:0] NodeId# 3 Status# OK } ] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:57:2097]) rebooted! !Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed! 
new actor is[2:82:2111] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! 
new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:86:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:88:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! new actor is[7:87:2114] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:173:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:88:2057] recipient: [8:86:2115] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:90:2057] recipient: [8:86:2115] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! 
new actor is[8:89:2116] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:175:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:88:2057] recipient: [9:86:2115] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:90:2057] recipient: [9:86:2115] !Reboot 72057594037927937 (actor [9:57:2097]) rebooted! !Reboot 72057594037927937 (actor [9:57:2097]) tablet resolver refreshed! new actor is[9:89:2116] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:175:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:89:2057] recipient: [10:87:2115] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:91:2057] recipient: [10:87:2115] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! new actor is[10:90:2116] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:176:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:91:2057] recipient: [11:89:2117] Leader for TabletID 72057594037927937 is [11:92:2118] sender: [11:93:2057] recipient: [11:89:2117] !Reboot 72057594037927937 (actor [11:57:2097]) rebooted! !Reboot 72057594037927937 (actor [11:57:2097]) tablet resolver refreshed! 
new actor is[11:92:2118] Leader for TabletID 72057594037927937 is [11:92:2118] sender: [11:178:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... boot 72057594037927937 (actor [22:57:2097]) tablet resolver refreshed! new actor is[22:86:2114] Leader for TabletID 72057594037927937 is [22:86:2114] sender: [22:172:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:55:2057] recipient: [23:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:55:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:58:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:75:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:57:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:81:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:84:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:57:2097] sender: [23:85:2057] recipient: [23:83:2113] Leader for TabletID 72057594037927937 is [23:86:2114] sender: [23:87:2057] recipient: [23:83:2113] !Reboot 72057594037927937 (actor [23:57:2097]) rebooted! !Reboot 72057594037927937 (actor [23:57:2097]) tablet resolver refreshed! new actor is[23:86:2114] Leader for TabletID 72057594037927937 is [23:86:2114] sender: [23:172:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:55:2057] recipient: [24:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:55:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:58:2057] recipient: [24:50:2095] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:75:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:82:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:85:2057] recipient: [24:84:2113] Leader for TabletID 72057594037927937 is [24:57:2097] sender: [24:86:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:87:2114] sender: [24:88:2057] recipient: [24:84:2113] !Reboot 72057594037927937 (actor [24:57:2097]) rebooted! !Reboot 72057594037927937 (actor [24:57:2097]) tablet resolver refreshed! new actor is[24:87:2114] Leader for TabletID 72057594037927937 is [24:87:2114] sender: [24:105:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:55:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:55:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:58:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:75:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:84:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:87:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:57:2097] sender: [25:88:2057] recipient: [25:86:2115] Leader for TabletID 72057594037927937 is [25:89:2116] sender: [25:90:2057] recipient: [25:86:2115] !Reboot 72057594037927937 (actor [25:57:2097]) rebooted! !Reboot 72057594037927937 (actor [25:57:2097]) tablet resolver refreshed! new actor is[25:89:2116] Leader for TabletID 72057594037927937 is [25:89:2116] sender: [25:175:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:55:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:55:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:58:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:75:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:57:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:84:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:87:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:57:2097] sender: [26:88:2057] recipient: [26:86:2115] Leader for TabletID 72057594037927937 is [26:89:2116] sender: [26:90:2057] recipient: [26:86:2115] !Reboot 72057594037927937 (actor [26:57:2097]) rebooted! !Reboot 72057594037927937 (actor [26:57:2097]) tablet resolver refreshed! new actor is[26:89:2116] Leader for TabletID 72057594037927937 is [26:89:2116] sender: [26:175:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:55:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:55:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:58:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:75:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:85:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:88:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:89:2057] recipient: [27:87:2115] Leader for TabletID 72057594037927937 is [27:90:2116] sender: [27:91:2057] recipient: [27:87:2115] !Reboot 72057594037927937 (actor [27:57:2097]) rebooted! !Reboot 72057594037927937 (actor [27:57:2097]) tablet resolver refreshed! new actor is[27:90:2116] Leader for TabletID 72057594037927937 is [27:90:2116] sender: [27:108:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:55:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:55:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:58:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:75:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:87:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:89:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:91:2057] recipient: [28:90:2117] Leader for TabletID 72057594037927937 is [28:92:2118] sender: [28:93:2057] recipient: [28:90:2117] !Reboot 72057594037927937 (actor [28:57:2097]) rebooted! !Reboot 72057594037927937 (actor [28:57:2097]) tablet resolver refreshed! new actor is[28:92:2118] Leader for TabletID 72057594037927937 is [28:92:2118] sender: [28:178:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:55:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:55:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:58:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:75:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:87:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:90:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:91:2057] recipient: [29:89:2117] Leader for TabletID 72057594037927937 is [29:92:2118] sender: [29:93:2057] recipient: [29:89:2117] !Reboot 72057594037927937 (actor [29:57:2097]) rebooted! !Reboot 72057594037927937 (actor [29:57:2097]) tablet resolver refreshed! new actor is[29:92:2118] Leader for TabletID 72057594037927937 is [29:92:2118] sender: [29:178:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:55:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:55:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:58:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:75:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:88:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:90:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:92:2057] recipient: [30:91:2117] Leader for TabletID 72057594037927937 is [30:93:2118] sender: [30:94:2057] recipient: [30:91:2117] !Reboot 72057594037927937 (actor [30:57:2097]) rebooted! !Reboot 72057594037927937 (actor [30:57:2097]) tablet resolver refreshed! new actor is[30:93:2118] Leader for TabletID 72057594037927937 is [30:93:2118] sender: [30:179:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:55:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:55:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:58:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:75:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:91:2057] recipient: [31:36:2083]
Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:94:2057] recipient: [31:14:2061]
Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:95:2057] recipient: [31:93:2120]
Leader for TabletID 72057594037927937 is [31:96:2121] sender: [31:97:2057] recipient: [31:93:2120]
!Reboot 72057594037927937 (actor [31:57:2097]) rebooted!
!Reboot 72057594037927937 (actor [31:57:2097]) tablet resolver refreshed! new actor is[31:96:2121]
Leader for TabletID 72057594037927937 is [31:96:2121] sender: [31:182:2057] recipient: [31:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:55:2057] recipient: [32:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:55:2057] recipient: [32:51:2095]
Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:58:2057] recipient: [32:51:2095]
Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:75:2057] recipient: [32:14:2061]
!Reboot 72057594037927937 (actor [32:57:2097]) on event NKikimr::TEvKeyValue::TEvRead !
Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:91:2057] recipient: [32:36:2083]
Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:94:2057] recipient: [32:14:2061]
Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:95:2057] recipient: [32:93:2120]
Leader for TabletID 72057594037927937 is [32:96:2121] sender: [32:97:2057] recipient: [32:93:2120]
!Reboot 72057594037927937 (actor [32:57:2097]) rebooted!
!Reboot 72057594037927937 (actor [32:57:2097]) tablet resolver refreshed! new actor is[32:96:2121]
Leader for TabletID 72057594037927937 is [32:96:2121] sender: [32:182:2057] recipient: [32:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:55:2057] recipient: [33:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:55:2057] recipient: [33:50:2095]
Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:58:2057] recipient: [33:50:2095]
Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:75:2057] recipient: [33:14:2061]
>> YdbTableSplit::SplitByLoadWithDeletes [FAIL]
>> BsControllerConfig::SelectAllGroups [GOOD]
>> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::SelectAllGroups [GOOD]
Test command err:
2025-05-29T15:28:28.706875Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot
2025-05-29T15:28:28.707488Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored
2025-05-29T15:28:28.707552Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started
2025-05-29T15:28:28.707802Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-05-29T15:28:28.707872Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig
2025-05-29T15:28:28.707908Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo
2025-05-29T15:28:28.707911Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo
2025-05-29T15:28:28.707944Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute
2025-05-29T15:28:28.708637Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete
2025-05-29T15:28:28.708656Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx
2025-05-29T15:28:28.708678Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false
2025-05-29T15:28:28.708690Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots
2025-05-29T15:28:28.708698Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots
2025-05-29T15:28:28.708704Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop [GOOD]
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex
>> TSchemeShardViewTest::ReadOnlyMode
>> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs
|71.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage
|71.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage
|71.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage
>> TPartBtreeIndexIteration::FewNodes_History [GOOD]
>> TPartBtreeIndexIteration::FewNodes_Sticky
>> TSchemeShardViewTest::ReadOnlyMode [GOOD]
>> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [FAIL]
>> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:28:31.264409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:28:31.264447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:28:31.264453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:28:31.264459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:28:31.264472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:28:31.264477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:28:31.264487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:28:31.264502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:28:31.264633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:28:31.264715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:28:31.279389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:28:31.279417Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:28:31.282179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:28:31.282296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:28:31.282339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:28:31.283876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:28:31.284028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:28:31.284145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:28:31.284190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:28:31.284656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:28:31.284702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:28:31.284961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:28:31.284970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:28:31.284990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:28:31.284998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:28:31.285004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:28:31.285037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:28:31.286481Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:28:31.307222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:28:31.307325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:28:31.307406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:28:31.307456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:28:31.307469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:28:31.308487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:28:31.308522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:28:31.308587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:28:31.308599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:28:31.308606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:28:31.308613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:28:31.309220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:28:31.309237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:28:31.309242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:28:31.309653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:28:31.309664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:28:31.309670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:28:31.309676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:28:31.310297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:28:31.310724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:28:31.310779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:28:31.310958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:28:31.310982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:28:31.310989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:28:31.311070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:28:31.311080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:28:31.311120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:28:31.311134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:28:31.311616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:28:31.311626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:28:31.311676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... ng: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:28:31.392841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:28:31.393019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
TestModificationResults wait txId: 103
Leader for TabletID 72057594046678944 is [1:383:2351] sender: [1:441:2058] recipient: [1:15:2062]
2025-05-29T15:28:31.434406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: "Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:28:31.434461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0
2025-05-29T15:28:31.434468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query"
2025-05-29T15:28:31.434487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-05-29T15:28:31.434515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0
2025-05-29T15:28:31.434522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:28:31.435267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944
2025-05-29T15:28:31.435292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView
2025-05-29T15:28:31.435328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944
2025-05-29T15:28:31.435334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:30: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState
2025-05-29T15:28:31.435342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1
2025-05-29T15:28:31.435361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:28:31.435743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816
2025-05-29T15:28:31.435789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 103 at step: 5000003
FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000003
2025-05-29T15:28:31.435902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:28:31.435922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:28:31.435932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003
2025-05-29T15:28:31.435956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 103:0 128 -> 240
2025-05-29T15:28:31.435982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-05-29T15:28:31.435991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
FAKE_COORDINATOR: Erasing txId 103
2025-05-29T15:28:31.436340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:28:31.436347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:28:31.436377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-05-29T15:28:31.436396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:28:31.436401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:433:2390], at schemeshard: 72057594046678944, txId: 103, path id: 1
2025-05-29T15:28:31.436407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:433:2390], at schemeshard: 72057594046678944, txId: 103, path id: 3
2025-05-29T15:28:31.436462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944
2025-05-29T15:28:31.436467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 103:0 ProgressState
2025-05-29T15:28:31.436478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1
2025-05-29T15:28:31.436481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-05-29T15:28:31.436485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1
2025-05-29T15:28:31.436487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-05-29T15:28:31.436491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false
2025-05-29T15:28:31.436494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-05-29T15:28:31.436497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0
2025-05-29T15:28:31.436501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0
2025-05-29T15:28:31.436510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-05-29T15:28:31.436518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 103, publications: 2, subscribers: 0
2025-05-29T15:28:31.436521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 5
2025-05-29T15:28:31.436524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 2
2025-05-29T15:28:31.436625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103
2025-05-29T15:28:31.436637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103
2025-05-29T15:28:31.436640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103
2025-05-29T15:28:31.436644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5
2025-05-29T15:28:31.436647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-05-29T15:28:31.436725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103
2025-05-29T15:28:31.436732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103
2025-05-29T15:28:31.436735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103
2025-05-29T15:28:31.436737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2
2025-05-29T15:28:31.436740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-05-29T15:28:31.436746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0
2025-05-29T15:28:31.437159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-05-29T15:28:31.437287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
TestModificationResult got TxId: 103, wait until txId: 103
>> TSchemeShardViewTest::CreateView
>> BsControllerConfig::PDiskCreate [GOOD]
|71.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge
|71.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge
|71.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge
>> TSchemeShardViewTest::CreateView [GOOD]
>> TPartBtreeIndexIteration::FewNodes_Sticky [GOOD]
>> TPartBtreeIndexIteration::FewNodes_Slices
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::PDiskCreate [GOOD]
Test command err:
Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:215:2066] recipient: [1:193:2076]
IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:215:2066] recipient: [1:193:2076]
Leader for TabletID 72057594037932033 is [1:223:2078] sender: [1:224:2066] recipient: [1:193:2076]
2025-05-29T15:28:28.064773Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot
2025-05-29T15:28:28.065764Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored
2025-05-29T15:28:28.065837Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started
2025-05-29T15:28:28.065946Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-05-29T15:28:28.066271Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig
2025-05-29T15:28:28.066322Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo
2025-05-29T15:28:28.066327Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo
2025-05-29T15:28:28.066365Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute
2025-05-29T15:28:28.067341Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete
2025-05-29T15:28:28.067363Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx
2025-05-29T15:28:28.067382Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false
2025-05-29T15:28:28.067394Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots
2025-05-29T15:28:28.067402Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots
2025-05-29T15:28:28.067409Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion
Leader for TabletID 72057594037932033 is [1:223:2078] sender: [1:248:2066] recipient: [1:20:2067]
2025-05-29T15:28:28.077778Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion
2025-05-29T15:28:28.077826Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId
2025-05-29T15:28:28.088100Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId
2025-05-29T15:28:28.088142Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo
2025-05-29T15:28:28.088155Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo
2025-05-29T15:28:28.088167Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk
2025-05-29T15:28:28.088189Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk
2025-05-29T15:28:28.088197Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus
2025-05-29T15:28:28.088203Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus
2025-05-29T15:28:28.088215Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo
2025-05-29T15:28:28.098496Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo
2025-05-29T15:28:28.098541Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2
2025-05-29T15:28:28.108844Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2
2025-05-29T15:28:28.108917Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute
2025-05-29T15:28:28.109137Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete
2025-05-29T15:28:28.109147Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished
2025-05-29T15:28:28.109209Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started
2025-05-29T15:28:28.109216Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed
2025-05-29T15:28:28.112050Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } } } Command { QueryBaseConfig { } } }
2025-05-29T15:28:28.112187Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1
2025-05-29T15:28:28.112198Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2
2025-05-29T15:28:28.112202Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3
2025-05-29T15:28:28.112206Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1
2025-05-29T15:28:28.112211Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2
2025-05-29T15:28:28.112215Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3
2025-05-29T15:28:28.112220Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1
2025-05-29T15:28:28.112224Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2
2025-05-29T15:28:28.112229Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3
2025-05-29T15:28:28.112233Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1
2025-05-29T15:28:28.112239Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2
2025-05-29T15:28:28.112244Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3
2025-05-29T15:28:28.112252Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1
2025-05-29T15:28:28.112256Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2
:BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:28:28.109137Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:28:28.109147Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:28:28.109209Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:28:28.109216Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:28:28.112050Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } } } Command { QueryBaseConfig { } } } 2025-05-29T15:28:28.112187Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-05-29T15:28:28.112198Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-05-29T15:28:28.112202Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-05-29T15:28:28.112206Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-05-29T15:28:28.112211Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-05-29T15:28:28.112215Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-05-29T15:28:28.112220Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-05-29T15:28:28.112224Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-05-29T15:28:28.112229Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-05-29T15:28:28.112233Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-05-29T15:28:28.112239Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-05-29T15:28:28.112244Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-05-29T15:28:28.112252Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-05-29T15:28:28.112256Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 
2025-05-29T15:28:28.112261Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-05-29T15:28:28.112266Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-05-29T15:28:28.112271Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-05-29T15:28:28.112277Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-05-29T15:28:28.112282Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-05-29T15:28:28.112286Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-05-29T15:28:28.112291Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-05-29T15:28:28.112295Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2025-05-29T15:28:28.112299Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-05-29T15:28:28.112304Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2025-05-29T15:28:28.112308Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2025-05-29T15:28:28.112312Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-05-29T15:28:28.112316Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2025-05-29T15:28:28.112327Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2025-05-29T15:28:28.112331Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2025-05-29T15:28:28.112335Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:215:2066] recipient: [11:186:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:215:2066] recipient: [11:186:2076] Leader for TabletID 72057594037932033 is [11:224:2078] sender: [11:226:2066] recipient: [11:186:2076] 2025-05-29T15:28:30.066136Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:28:30.066287Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:28:30.066323Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:28:30.066441Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:28:30.066539Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:28:30.066570Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:28:30.066573Z node 11 :BS_CONTROLLER DEBUG: 
{BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:28:30.066600Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:28:30.067237Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:28:30.067254Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:28:30.067273Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:28:30.067284Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:28:30.067296Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:28:30.067302Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:224:2078] sender: [11:248:2066] recipient: [11:20:2067] 2025-05-29T15:28:30.077659Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:28:30.077696Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:28:30.087936Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:28:30.087973Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:28:30.087982Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:28:30.087991Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:28:30.088012Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:28:30.088017Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:28:30.088021Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:28:30.088041Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:28:30.098273Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:28:30.098309Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 
2025-05-29T15:28:30.108541Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2
2025-05-29T15:28:30.108579Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute
2025-05-29T15:28:30.108692Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete
2025-05-29T15:28:30.108697Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished
2025-05-29T15:28:30.108722Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started
2025-05-29T15:28:30.108727Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed
2025-05-29T15:28:30.108868Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } } } Command { QueryBaseConfig { } } }
2025-05-29T15:28:30.108961Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1
2025-05-29T15:28:30.108966Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2
2025-05-29T15:28:30.108969Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 11:1002 Path# /dev/disk3
2025-05-29T15:28:30.108972Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 12:1000 Path# /dev/disk1
2025-05-29T15:28:30.108975Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 12:1001 Path# /dev/disk2
2025-05-29T15:28:30.108979Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 12:1002 Path# /dev/disk3
2025-05-29T15:28:30.108992Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 13:1000 Path# /dev/disk1
2025-05-29T15:28:30.108995Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 13:1001 Path# /dev/disk2
2025-05-29T15:28:30.108998Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 13:1002 Path# /dev/disk3
2025-05-29T15:28:30.109001Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 14:1000 Path# /dev/disk1
2025-05-29T15:28:30.109005Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 14:1001 Path# /dev/disk2
2025-05-29T15:28:30.109011Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 14:1002 Path# /dev/disk3
2025-05-29T15:28:30.109014Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 15:1000 Path# /dev/disk1
2025-05-29T15:28:30.109017Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 15:1001 Path# /dev/disk2
2025-05-29T15:28:30.109020Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 15:1002 Path# /dev/disk3
2025-05-29T15:28:30.109023Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 16:1000 Path# /dev/disk1
2025-05-29T15:28:30.109025Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 16:1001 Path# /dev/disk2
2025-05-29T15:28:30.109030Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 16:1002 Path# /dev/disk3
2025-05-29T15:28:30.109033Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 17:1000 Path# /dev/disk1
2025-05-29T15:28:30.109038Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 17:1001 Path# /dev/disk2
2025-05-29T15:28:30.109041Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 17:1002 Path# /dev/disk3
2025-05-29T15:28:30.109044Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 18:1000 Path# /dev/disk1
2025-05-29T15:28:30.109047Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 18:1001 Path# /dev/disk2
2025-05-29T15:28:30.109051Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 18:1002 Path# /dev/disk3
2025-05-29T15:28:30.109054Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 19:1000 Path# /dev/disk1
2025-05-29T15:28:30.109058Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 19:1001 Path# /dev/disk2
2025-05-29T15:28:30.109061Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 19:1002 Path# /dev/disk3
2025-05-29T15:28:30.109063Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 20:1000 Path# /dev/disk1
2025-05-29T15:28:30.109066Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 20:1001 Path# /dev/disk2
2025-05-29T15:28:30.109069Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 20:1002 Path# /dev/disk3
>> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [FAIL]
>> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:28:32.133728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:28:32.133757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:28:32.133763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:28:32.133769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:28:32.133791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:28:32.133797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:28:32.133808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:28:32.133823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:28:32.133942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:28:32.134036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:28:32.149189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs
2025-05-29T15:28:32.149221Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:28:32.153048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:28:32.153177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:28:32.153224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:28:32.155079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:28:32.155275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:28:32.155400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:28:32.155458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:28:32.156039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:28:32.156089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:28:32.156350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:28:32.156359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:28:32.156379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:28:32.156387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:28:32.156393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:28:32.156425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-29T15:28:32.157818Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062]
2025-05-29T15:28:32.180701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:28:32.180787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:28:32.180854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:28:32.180898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:28:32.180908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:28:32.181701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:28:32.181727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:28:32.181781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:28:32.181792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:28:32.181798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:28:32.181804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:28:32.182204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:28:32.182217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:28:32.182223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:28:32.182573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:28:32.182583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:28:32.182590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:28:32.182598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:28:32.183399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:28:32.183823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:28:32.183861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:28:32.184059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:28:32.184085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:28:32.184093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:28:32.184161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240
2025-05-29T15:28:32.184169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:28:32.184203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:28:32.184216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-05-29T15:28:32.184626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:28:32.184635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:28:32.184677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29 ... peration.cpp:1650: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1
2025-05-29T15:28:32.190483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:28:32.190648Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617
2025-05-29T15:28:32.190879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816
2025-05-29T15:28:32.190907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 101 at step: 5000002
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002
2025-05-29T15:28:32.190977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:28:32.191000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:28:32.191009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002
2025-05-29T15:28:32.191029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 101:0 128 -> 240
2025-05-29T15:28:32.191057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-05-29T15:28:32.191068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
FAKE_COORDINATOR: Erasing txId 101
2025-05-29T15:28:32.191444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:28:32.191452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:28:32.191486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-05-29T15:28:32.191503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:28:32.191509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1
2025-05-29T15:28:32.191517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2
2025-05-29T15:28:32.191580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944
2025-05-29T15:28:32.191588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState
2025-05-29T15:28:32.191601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1
2025-05-29T15:28:32.191607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-05-29T15:28:32.191613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1
2025-05-29T15:28:32.191617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-05-29T15:28:32.191623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false
2025-05-29T15:28:32.191629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-05-29T15:28:32.191634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0
2025-05-29T15:28:32.191639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0
2025-05-29T15:28:32.191653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-05-29T15:28:32.191659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 2, subscribers: 0
2025-05-29T15:28:32.191665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4
2025-05-29T15:28:32.191669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2
2025-05-29T15:28:32.191792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId:
1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:28:32.191805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:28:32.191811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:28:32.191817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-05-29T15:28:32.191822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:32.191929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:28:32.191940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:28:32.191945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:28:32.191950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-05-29T15:28:32.191955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:28:32.191965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-05-29T15:28:32.192607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:28:32.192679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-05-29T15:28:32.192725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:28:32.192733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-05-29T15:28:32.192794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:28:32.192829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:28:32.192835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests 
-- TTxNotificationSubscriber for txId 101: satisfy waiter [1:302:2292] TestWaitNotification: OK eventTxId 101 2025-05-29T15:28:32.192922Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:32.192951Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 37us result status StatusSuccess 2025-05-29T15:28:32.193048Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardViewTest::EmptyQueryText |71.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |71.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |71.0%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |71.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [FAIL] >> TKeyValueTest::TestConcatWorksNewApi [GOOD] >> TKeyValueTest::TestConcatToLongKey |71.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |71.0%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration >> TSchemeShardViewTest::EmptyQueryText [GOOD] >> TSchemeShardViewTest::AsyncCreateSameView >> TReplicationWithRebootsTests::AlterReplicationConfig [GOOD] >> TSchemeShardViewTest::AsyncCreateDifferentViews >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] 
recipient: [1:111:2141] 2025-05-29T15:28:33.205614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:33.205642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:33.205648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:33.205653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:33.205667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:33.205672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:33.205682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:33.205697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:33.205820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:33.205901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:33.217520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:33.217544Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:33.220074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:33.220209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:33.220256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:33.221935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:33.222084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:33.222199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:33.222241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:33.222693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:33.222733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:33.222973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:33.222981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:33.222998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:33.223003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:33.223007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:33.223037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.224720Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:33.238659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:33.238771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.238839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:33.238879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:33.238887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.239694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:33.239720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:33.239766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.239775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:33.239778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:33.239784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:33.240248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.240264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:33.240269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:33.240599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.240609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.240614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:33.240620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:33.241123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:33.241535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:33.241575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:33.241726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:33.241748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:33.241753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:33.241812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:33.241817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-05-29T15:28:33.241844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:33.241853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:33.242240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:33.242245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:33.242285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 44 TestModificationResults wait txId: 101 2025-05-29T15:28:33.243741Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [1:272:2262] Bootstrap 2025-05-29T15:28:33.245209Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [1:272:2262] Become StateWork (SchemeCache [1:277:2267]) 2025-05-29T15:28:33.245737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:33.245777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2025-05-29T15:28:33.245783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2025-05-29T15:28:33.245799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:28:33.245813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-05-29T15:28:33.245819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:33.245926Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:272:2262] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:28:33.246552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-05-29T15:28:33.246577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-05-29T15:28:33.246602Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.246608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:30: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2025-05-29T15:28:33.246614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-05-29T15:28:33.246631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:33.246784Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:28:33.247089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-05-29T15:28:33.247123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-05-29T15:28:33.247193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:33.247216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:33.247224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-05-29T15:28:33.247249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 101:0 128 -> 240 2025-05-29T15:28:33.247278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:33.247288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-05-29T15:28:33.247726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:33.247734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:33.247776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:28:33.247794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:33.247799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-05-29T15:28:33.247804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-05-29T15:28:33.247875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.247881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState 2025-05-29T15:28:33.247893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:28:33.247897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:28:33.247902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:28:33.247905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:28:33.247909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-05-29T15:28:33.247915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:28:33.247920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:28:33.247924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:28:33.247938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:28:33.247943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-05-29T15:28:33.247947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-05-29T15:28:33.247950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-05-29T15:28:33.248071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:28:33.248084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, 
cookie: 101 2025-05-29T15:28:33.248089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:28:33.248094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-05-29T15:28:33.248111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:33.248232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:28:33.248252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:28:33.248256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:28:33.248261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-05-29T15:28:33.248265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:28:33.248273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-05-29T15:28:33.249137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:28:33.249260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [FAIL] >> BsControllerConfig::Basic [GOOD] >> BsControllerConfig::DeleteStoragePool >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::AlterReplicationConfig [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is 
[1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:28:20.616332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:20.616359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:20.616365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:20.616371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:20.616387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:20.616391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:20.616400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:20.616414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:20.616530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:20.616607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:20.629345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:28:20.629369Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:20.629462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:28:20.632293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:20.632328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:20.632359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:20.635187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:20.635250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:20.635352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:20.635555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:20.636226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:20.636282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:20.636539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:20.636550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:20.636587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:20.636595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:20.636600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:20.636615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:28:20.637851Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:28:20.657224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:20.657320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:20.657381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:20.657426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:20.657434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:20.658410Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:20.658443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:20.658512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:20.658523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:20.658528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:20.658532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:20.659229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:20.659259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:20.659269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:20.659874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:20.659891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:20.659898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:20.659907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:20.660717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:20.661305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:20.661350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:20.661572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, 
transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:20.661600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:20.661606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:20.661676Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... X1: 336 RawX2: 219043334418 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-05-29T15:28:33.229247Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4909: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-05-29T15:28:33.229252Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 336 RawX2: 219043334418 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-05-29T15:28:33.229255Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-05-29T15:28:33.229262Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 336 RawX2: 219043334418 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-05-29T15:28:33.229266Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:28:33.229271Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 336 RawX2: 219043334418 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-05-29T15:28:33.229278Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:33.229280Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.229283Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:28:33.229286Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:0 129 -> 240 2025-05-29T15:28:33.229305Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:28:33.229808Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:33.229849Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:33.229852Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:33.229874Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.229877Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:33.229891Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.229894Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:33.229896Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 1003:0 2025-05-29T15:28:33.229906Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [51:336:2322] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1003 at schemeshard: 72057594046678944 2025-05-29T15:28:33.229917Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [51:123:2148], Recipient [51:123:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:28:33.229922Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:28:33.229928Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.229934Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-05-29T15:28:33.229944Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:28:33.229948Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:28:33.229952Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:28:33.229955Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:28:33.229957Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:28:33.229960Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-05-29T15:28:33.229966Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [51:412:2384] message: TxId: 1003 2025-05-29T15:28:33.229970Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:28:33.229973Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation 
id: 1003:0 2025-05-29T15:28:33.229976Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:28:33.229990Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:28:33.230331Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:33.230344Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [51:412:2384] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1003 at schemeshard: 72057594046678944 2025-05-29T15:28:33.230368Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:28:33.230372Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:417:2389] 2025-05-29T15:28:33.230392Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [51:420:2392], Recipient [51:123:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:28:33.230396Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:28:33.230398Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 2025-05-29T15:28:33.230412Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [51:450:2422], Recipient [51:123:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:28:33.230415Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:28:33.230417Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 2025-05-29T15:28:33.230470Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [51:460:2432], Recipient [51:123:2148]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-05-29T15:28:33.230476Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:28:33.230484Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:33.230509Z node 51 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 21us result status StatusSuccess 2025-05-29T15:28:33.230591Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 
5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_NONE } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:33.424427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:33.424455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:33.424461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:33.424467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:33.424479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:33.424484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:33.424493Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:33.424508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:33.424631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:33.424708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:33.436770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:33.436796Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:33.439498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:33.439606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:33.439643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:33.441925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:33.442114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:33.442238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:33.442288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:33.442797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:33.442855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:33.443146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:33.443158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:33.443183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:33.443190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:33.443196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:33.443238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.444651Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:33.460698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:33.460782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.460844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:33.460886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:33.460896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.461733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:33.461759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:33.461805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.461814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:33.461820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:33.461825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:33.462214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.462224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:33.462229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:33.462535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.462546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.462552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:33.462558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:33.463115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:33.463514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:33.463554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:33.463726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:33.463746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:33.463752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:33.463829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:33.463835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:33.463865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:33.463878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:33.464282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:33.464289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:33.464332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-05-29T15:28:33.470448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 101:0 128 -> 240 2025-05-29T15:28:33.470475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:33.470484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-05-29T15:28:33.470836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:33.470844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:33.470881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:28:33.470897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:33.470902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-05-29T15:28:33.470907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-05-29T15:28:33.470962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.470971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState 2025-05-29T15:28:33.470982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:28:33.470987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:28:33.470991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:28:33.470994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:28:33.470998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-05-29T15:28:33.471004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:28:33.471008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:28:33.471013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:28:33.471026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:28:33.471031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-05-29T15:28:33.471036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-05-29T15:28:33.471039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-05-29T15:28:33.471158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:28:33.471169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:28:33.471173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:28:33.471177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-05-29T15:28:33.471181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:33.471291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:28:33.471300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:28:33.471304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:28:33.471309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-05-29T15:28:33.471312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:28:33.471320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-05-29T15:28:33.472051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:28:33.472092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-05-29T15:28:33.472153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:28:33.472160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-05-29T15:28:33.472173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:28:33.472176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-05-29T15:28:33.472184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:28:33.472188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:28:33.472241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:28:33.472265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:28:33.472273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:28:33.472277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:308:2298] 2025-05-29T15:28:33.472312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:28:33.472316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:308:2298] 2025-05-29T15:28:33.472328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:28:33.472343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:28:33.472346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:308:2298] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-05-29T15:28:33.472404Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:33.472429Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 32us result status StatusSuccess 2025-05-29T15:28:33.472502Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> BsControllerConfig::ReassignGroupDisk [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:33.662319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:33.662339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:33.662343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:33.662346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:33.662355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:33.662359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:33.662365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:33.662375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval#
604800.000000s, IsManualStartup# false 2025-05-29T15:28:33.662459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:33.662530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:33.672511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:33.672535Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:33.674850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:33.674986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:33.675030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:33.676672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:33.676808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:33.676926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:33.676971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:33.677414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:33.677460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:33.677710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:33.677722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:33.677743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:33.677752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:33.677759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:33.677795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.679296Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:33.700676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: 
"pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:33.700760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.700828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:33.700873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:33.700885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.701742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:33.701769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:33.701821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.701833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:33.701839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:33.701845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:33.702273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.702286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:33.702292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:33.702649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.702662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:33.702669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:33.702676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:33.703389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 
2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:33.703995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:33.704033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:33.704230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:33.704259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:33.704267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:33.704335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:33.704343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:33.704375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:33.704388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:33.704858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:33.704869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:33.704910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
dateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:28:33.716366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:28:33.716370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:28:33.716373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-05-29T15:28:33.716376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:28:33.716453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:28:33.716459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:28:33.716462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:28:33.716464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:28:33.716467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:28:33.716472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-05-29T15:28:33.716895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:28:33.716920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-05-29T15:28:33.716967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:28:33.716973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-05-29T15:28:33.716983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- 
TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:28:33.716985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-05-29T15:28:33.716991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:28:33.716993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:28:33.717060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:28:33.717073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:28:33.717081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:28:33.717084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:339:2329] 2025-05-29T15:28:33.717109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:28:33.717111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:339:2329] 2025-05-29T15:28:33.717120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:28:33.717131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:28:33.717133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:339:2329] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-05-29T15:28:33.717182Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:33.717201Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 26us result status StatusSuccess 2025-05-29T15:28:33.717273Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } 
Children { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:33.717321Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:33.717334Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 14us result status StatusSuccess 2025-05-29T15:28:33.717367Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:33.717397Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:33.717406Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 9us result status StatusSuccess 2025-05-29T15:28:33.717426Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { Self { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ReassignGroupDisk [GOOD]
Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:279:2068] recipient: [1:265:2078] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:279:2068] recipient: [1:265:2078] Leader for TabletID 72057594037932033 is [1:291:2080] sender: [1:293:2068] recipient: [1:265:2078] 2025-05-29T15:28:29.623393Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:28:29.624010Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:28:29.624061Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:28:29.624299Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:28:29.624361Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:28:29.624376Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:28:29.624379Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:28:29.624410Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:28:29.625100Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:28:29.625120Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:28:29.625136Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:28:29.625148Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:28:29.625156Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type#
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:28:29.625163Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:291:2080] sender: [1:314:2068] recipient: [1:22:2069] 2025-05-29T15:28:29.635519Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:28:29.635559Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:28:29.645823Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:28:29.645862Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:28:29.645873Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:28:29.645881Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:28:29.645912Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:28:29.645919Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:28:29.645923Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:28:29.645930Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:28:29.656172Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:28:29.656207Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:28:29.666473Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:28:29.666523Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:28:29.666788Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:28:29.666799Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:28:29.666838Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:28:29.666845Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:28:29.668663Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2025-05-29T15:28:29.668764Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1000 Path# /dev/disk 2025-05-29T15:28:29.668769Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 2:1000 Path# /dev/disk 2025-05-29T15:28:29.668772Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 3:1000 Path# /dev/disk 2025-05-29T15:28:29.668774Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 4:1000 Path# /dev/disk 2025-05-29T15:28:29.668777Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 5:1000 Path# /dev/disk 2025-05-29T15:28:29.668780Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 6:1000 Path# /dev/disk 2025-05-29T15:28:29.668783Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 7:1000 Path# /dev/disk 2025-05-29T15:28:29.668786Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 8:1000 Path# /dev/disk 2025-05-29T15:28:29.668788Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 9:1000 Path# /dev/disk 2025-05-29T15:28:29.668791Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 10:1000 Path# /dev/disk 2025-05-29T15:28:29.668794Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 11:1000 Path# /dev/disk 2025-05-29T15:28:29.668796Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 12:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-05-29T15:28:29.680897Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { Success: true } Success: true ConfigTxSeqNo: 2 Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:286:2068] recipient: [13:265:2078] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:286:2068] recipient: [13:265:2078] Leader for TabletID 72057594037932033 is [13:292:2080] sender: [13:293:2068] recipient: [13:265:2078] 
2025-05-29T15:28:31.817864Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:28:31.817990Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:28:31.818038Z node 13 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:28:31.818163Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:28:31.818216Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:28:31.818238Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:28:31.818242Z node 13 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:28:31.818272Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:28:31.818898Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:28:31.818921Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:28:31.818938Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:28:31.818951Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:28:31.818959Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:28:31.818965Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [13:292:2080] sender: [13:314:2068] recipient: [13:22:2069] 2025-05-29T15:28:31.829307Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:28:31.829349Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:28:31.839594Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:28:31.839632Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:28:31.839642Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:28:31.839651Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:28:31.839670Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:28:31.839676Z node 13 :BS_CONTROLLER 
DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:28:31.839680Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:28:31.839688Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:28:31.849894Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:28:31.849929Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:28:31.860125Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:28:31.860152Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:28:31.860267Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:28:31.860272Z node 13 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:28:31.860299Z node 13 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:28:31.860304Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:28:31.860472Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 2 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2025-05-29T15:28:31.860559Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 13:1000 Path# /dev/disk 2025-05-29T15:28:31.860567Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 14:1000 Path# /dev/disk 2025-05-29T15:28:31.860572Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 15:1000 Path# /dev/disk 2025-05-29T15:28:31.860575Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 16:1000 Path# /dev/disk 2025-05-29T15:28:31.860579Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new 
pdisk PDiskId# 17:1000 Path# /dev/disk 2025-05-29T15:28:31.860582Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 18:1000 Path# /dev/disk 2025-05-29T15:28:31.860585Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 19:1000 Path# /dev/disk 2025-05-29T15:28:31.860588Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 20:1000 Path# /dev/disk 2025-05-29T15:28:31.860591Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 21:1000 Path# /dev/disk 2025-05-29T15:28:31.860594Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 22:1000 Path# /dev/disk 2025-05-29T15:28:31.860597Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 23:1000 Path# /dev/disk 2025-05-29T15:28:31.860599Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 24:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-05-29T15:28:31.872582Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" >> BuildStatsHistogram::Many_Mixed [GOOD] >> BuildStatsHistogram::Many_Serial >> TSchemeShardViewTest::AsyncDropSameView >> TKeyValueTest::TestGetStatusWorks [GOOD] >> TPartBtreeIndexIteration::FewNodes_Slices [GOOD] >> TSchemeShardViewTest::EmptyName >> TSchemeShardViewTest::DropView >> TPartBtreeIndexIteration::FewNodes_Groups_Slices >> TStorageBalanceTest::TestScenario3 [GOOD] >> TSchemeShardViewTest::AsyncDropSameView [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:57:2097]) rebooted! 
!Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed! new actor is[2:82:2111] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! 
new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:86:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:88:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! new actor is[7:87:2114] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:173:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:88:2057] recipient: [8:86:2115] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:90:2057] recipient: [8:86:2115] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! 
new actor is[8:89:2116] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:175:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:88:2057] recipient: [9:86:2115] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:90:2057] recipient: [9:86:2115] !Reboot 72057594037927937 (actor [9:57:2097]) rebooted! !Reboot 72057594037927937 (actor [9:57:2097]) tablet resolver refreshed! new actor is[9:89:2116] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:175:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:89:2057] recipient: [10:87:2115] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:91:2057] recipient: [10:87:2115] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! new actor is[10:90:2116] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:176:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:91:2057] recipient: [11:89:2117] Leader for TabletID 72057594037927937 is [11:92:2118] sender: [11:93:2057] recipient: [11:89:2117] !Reboot 72057594037927937 (actor [11:57:2097]) rebooted! !Reboot 72057594037927937 (actor [11:57:2097]) tablet resolver refreshed! 
new actor is[11:92:2118] Leader for TabletID 72057594037927937 is [11:92:2118] sender: [11:178:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... or TabletID 72057594037927937 is [29:57:2097] sender: [29:90:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:91:2057] recipient: [29:89:2117] Leader for TabletID 72057594037927937 is [29:92:2118] sender: [29:93:2057] recipient: [29:89:2117] !Reboot 72057594037927937 (actor [29:57:2097]) rebooted! !Reboot 72057594037927937 (actor [29:57:2097]) tablet resolver refreshed! new actor is[29:92:2118] Leader for TabletID 72057594037927937 is [29:92:2118] sender: [29:178:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:55:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:55:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:58:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:75:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:88:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:90:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:92:2057] recipient: [30:91:2117] Leader for TabletID 72057594037927937 is [30:93:2118] sender: [30:94:2057] recipient: [30:91:2117] !Reboot 72057594037927937 (actor [30:57:2097]) rebooted! !Reboot 72057594037927937 (actor [30:57:2097]) tablet resolver refreshed! new actor is[30:93:2118] Leader for TabletID 72057594037927937 is [30:93:2118] sender: [30:179:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:55:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:55:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:58:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:75:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:91:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:94:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:95:2057] recipient: [31:93:2120] Leader for TabletID 72057594037927937 is [31:96:2121] sender: [31:97:2057] recipient: [31:93:2120] !Reboot 72057594037927937 (actor [31:57:2097]) rebooted! !Reboot 72057594037927937 (actor [31:57:2097]) tablet resolver refreshed! 
new actor is[31:96:2121] Leader for TabletID 72057594037927937 is [31:96:2121] sender: [31:182:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:55:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:55:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:58:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:75:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:57:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:91:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:94:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:95:2057] recipient: [32:93:2120] Leader for TabletID 72057594037927937 is [32:96:2121] sender: [32:97:2057] recipient: [32:93:2120] !Reboot 72057594037927937 (actor [32:57:2097]) rebooted! !Reboot 72057594037927937 (actor [32:57:2097]) tablet resolver refreshed! new actor is[32:96:2121] Leader for TabletID 72057594037927937 is [32:96:2121] sender: [32:182:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:55:2057] recipient: [33:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:55:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:58:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:75:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:55:2057] recipient: [34:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:55:2057] recipient: [34:52:2095] Leader for TabletID 72057594037927937 is [34:57:2097] sender: [34:58:2057] recipient: [34:52:2095] Leader for TabletID 72057594037927937 is [34:57:2097] sender: [34:75:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:55:2057] recipient: [35:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:55:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:58:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:75:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:77:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:80:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:81:2057] recipient: [35:79:2110] Leader for TabletID 72057594037927937 is [35:82:2111] sender: [35:83:2057] recipient: [35:79:2110] !Reboot 72057594037927937 (actor [35:57:2097]) rebooted! !Reboot 72057594037927937 (actor [35:57:2097]) tablet resolver refreshed! 
new actor is[35:82:2111] Leader for TabletID 72057594037927937 is [35:82:2111] sender: [35:168:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:55:2057] recipient: [36:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:55:2057] recipient: [36:52:2095] Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:58:2057] recipient: [36:52:2095] Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:75:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:77:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:79:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:81:2057] recipient: [36:80:2110] Leader for TabletID 72057594037927937 is [36:82:2111] sender: [36:83:2057] recipient: [36:80:2110] !Reboot 72057594037927937 (actor [36:57:2097]) rebooted! !Reboot 72057594037927937 (actor [36:57:2097]) tablet resolver refreshed! new actor is[36:82:2111] Leader for TabletID 72057594037927937 is [36:82:2111] sender: [36:168:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:55:2057] recipient: [37:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:55:2057] recipient: [37:51:2095] Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:58:2057] recipient: [37:51:2095] Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:75:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:78:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:81:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:82:2057] recipient: [37:80:2110] Leader for TabletID 72057594037927937 is [37:83:2111] sender: [37:84:2057] recipient: [37:80:2110] !Reboot 72057594037927937 (actor [37:57:2097]) rebooted! !Reboot 72057594037927937 (actor [37:57:2097]) tablet resolver refreshed! new actor is[37:83:2111] Leader for TabletID 72057594037927937 is [37:83:2111] sender: [37:169:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:55:2057] recipient: [38:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:55:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:58:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:75:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:80:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:83:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:84:2057] recipient: [38:82:2112] Leader for TabletID 72057594037927937 is [38:85:2113] sender: [38:86:2057] recipient: [38:82:2112] !Reboot 72057594037927937 (actor [38:57:2097]) rebooted! !Reboot 72057594037927937 (actor [38:57:2097]) tablet resolver refreshed! 
new actor is[38:85:2113] Leader for TabletID 72057594037927937 is [38:85:2113] sender: [38:171:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:55:2057] recipient: [39:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:55:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:58:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:75:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:80:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:83:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:84:2057] recipient: [39:82:2112] Leader for TabletID 72057594037927937 is [39:85:2113] sender: [39:86:2057] recipient: [39:82:2112] !Reboot 72057594037927937 (actor [39:57:2097]) rebooted! !Reboot 72057594037927937 (actor [39:57:2097]) tablet resolver refreshed! new actor is[39:85:2113] Leader for TabletID 72057594037927937 is [39:85:2113] sender: [39:171:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:55:2057] recipient: [40:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:55:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:57:2097] sender: [40:58:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:57:2097] sender: [40:75:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:57:2097] sender: [40:81:2057] recipient: [40:36:2083] Leader for TabletID 72057594037927937 is [40:57:2097] sender: [40:84:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:57:2097] sender: [40:85:2057] recipient: [40:83:2112] Leader for TabletID 72057594037927937 is [40:86:2113] sender: [40:87:2057] recipient: [40:83:2112] !Reboot 72057594037927937 (actor [40:57:2097]) rebooted! !Reboot 72057594037927937 (actor [40:57:2097]) tablet resolver refreshed! 
new actor is[40:86:2113] Leader for TabletID 72057594037927937 is [40:86:2113] sender: [40:172:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:55:2057] recipient: [41:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:55:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:57:2097] sender: [41:58:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:57:2097] sender: [41:75:2057] recipient: [41:14:2061] |71.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] >> TSchemeShardViewTest::DropView [GOOD] >> TPersQueueTest::ReadFromSeveralPartitions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:34.767912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:34.767933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:34.767937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:34.767941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:34.767952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:34.767955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:34.767962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:34.767972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:34.768061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:34.768127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:34.776810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:34.776831Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:34.779219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 
2025-05-29T15:28:34.779329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:34.779363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:34.781345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:34.781506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:34.781616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:34.781655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:34.782085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:34.782124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:34.782326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:34.782333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:34.782350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:34.782356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:34.782360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:34.782387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:34.783608Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:34.796709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:34.796779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:34.796835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:34.796877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at 
schemeshard: 72057594046678944 2025-05-29T15:28:34.796885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:34.797624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:34.797642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:34.797680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:34.797688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:34.797692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:34.797697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:34.797987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:34.797994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:34.797997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:34.798279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:34.798287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:34.798292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:34.798297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:34.798756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:34.799328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:34.799365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 
State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:34.799512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:34.799534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:34.799539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:34.799595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:34.799601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:34.799625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:34.799635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:34.799985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:34.799991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:34.800023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
5:28:34.811032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-05-29T15:28:34.811448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:34.811457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:34.811491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:28:34.811513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:34.811518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-05-29T15:28:34.811523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-29T15:28:34.811608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:28:34.811616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-29T15:28:34.811629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:28:34.811634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:28:34.811640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:28:34.811643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:28:34.811647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-05-29T15:28:34.811653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:28:34.811658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:28:34.811662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:28:34.811673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:28:34.811679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-05-29T15:28:34.811683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: 
Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-05-29T15:28:34.811686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-05-29T15:28:34.811797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:28:34.811810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:28:34.811814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:28:34.811820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:28:34.811825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:34.811927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:28:34.811939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:28:34.811947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:28:34.811951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:28:34.811956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:28:34.811967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-05-29T15:28:34.812030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:28:34.812037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:28:34.812046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:34.812735Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:28:34.812838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:28:34.812853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 102 2025-05-29T15:28:34.812928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:28:34.812935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-05-29T15:28:34.812952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:28:34.812956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2025-05-29T15:28:34.812965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-05-29T15:28:34.812969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-05-29T15:28:34.813049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:28:34.813073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:28:34.813078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:332:2322] 2025-05-29T15:28:34.813108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:28:34.813120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-05-29T15:28:34.813128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:28:34.813132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:332:2322] 2025-05-29T15:28:34.813142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-29T15:28:34.813145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:332:2322] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 2025-05-29T15:28:34.813215Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:34.813239Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 33us result status StatusPathDoesNotExist 2025-05-29T15:28:34.813286Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:35.004387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:35.004414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:35.004419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:35.004422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:35.004432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:35.004435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:35.004442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:35.004456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:35.004535Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:35.004590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:35.013973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:35.013994Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:35.016104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:35.016197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:35.016228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:35.017519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:35.017651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:35.017744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:35.017778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:35.018302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:35.018354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:35.018561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:35.018569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:35.018587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:35.018592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:35.018597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:35.018624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:35.019800Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:35.033209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2025-05-29T15:28:35.033290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:35.033348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:35.033380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:35.033390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:35.034193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:35.034220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:35.034269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:35.034280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:35.034286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:35.034292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:35.034787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:35.034802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:35.034808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:35.035492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:35.035514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:35.035522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:35.035530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:35.036344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:35.036897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:35.036942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:35.037163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:35.037193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:35.037202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:35.037286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:35.037295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:35.037331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:35.037344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:35.037857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:35.037866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:35.037917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
2057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:35.049415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-05-29T15:28:35.049447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-05-29T15:28:35.049519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:35.049539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:35.049548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_view.cpp:43: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2025-05-29T15:28:35.049574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:0 128 -> 240 2025-05-29T15:28:35.049605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:35.049616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-05-29T15:28:35.050062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:35.050071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:35.050109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:28:35.050137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:35.050142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-05-29T15:28:35.050149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-29T15:28:35.050226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: 
TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:28:35.050234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-29T15:28:35.050247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:28:35.050252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:28:35.050261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:28:35.050265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:28:35.050270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-05-29T15:28:35.050276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:28:35.050281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:28:35.050286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:28:35.050298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:28:35.050305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-05-29T15:28:35.050310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-05-29T15:28:35.050314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-05-29T15:28:35.050425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:28:35.050437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:28:35.050443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:28:35.050449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:28:35.050454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:35.050584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:28:35.050596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:28:35.050602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:28:35.050608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:28:35.050613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:28:35.050628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-05-29T15:28:35.050888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:28:35.050897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:28:35.050908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:35.051295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:28:35.051650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:28:35.051681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-05-29T15:28:35.051733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:28:35.051741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-29T15:28:35.051818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:28:35.051835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:28:35.051841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:326:2316] TestWaitNotification: OK eventTxId 102 2025-05-29T15:28:35.051918Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:35.051946Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 40us result status StatusPathDoesNotExist 2025-05-29T15:28:35.052005Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |71.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |71.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |71.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:34.999309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:34.999329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:34.999333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:34.999337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:34.999348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:34.999351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:34.999357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:34.999368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:34.999470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:34.999530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:35.008412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:35.008432Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:35.010246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:35.010329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:35.010359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:35.012010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:35.012172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:35.012290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:35.012339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:35.012792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:35.012844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:35.013140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:35.013149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:35.013173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:35.013180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:35.013186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:35.013225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:35.014523Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:35.028463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:35.028548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:35.028604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:35.028641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:35.028650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:35.029360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:35.029381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:35.029421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:35.029428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:35.029432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:35.029436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:35.029720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:35.029728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:35.029732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:35.030011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:35.030019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:35.030023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:35.030038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 
1/1 2025-05-29T15:28:35.030465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:35.030832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:35.030869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:35.031033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:35.031054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:35.031059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:35.031110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:35.031114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:35.031148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:35.031160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:35.031661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:35.031674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:35.031730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:35.031738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-29T15:28:35.031823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:28:35.031832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-29T15:28:35.031846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:28:35.031852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:28:35.031858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:28:35.031861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:28:35.031866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-29T15:28:35.031874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:28:35.031879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1:0 2025-05-29T15:28:35.031884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1:0 2025-05-29T15:28:35.031899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:35.031906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-29T15:28:35.031911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-29T15:28:35.032279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:28:35.032302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:28:35.032308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-05-29T15:28:35.032313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-05-29T15:28:35.032319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:35.032335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-05-29T15:28:35.033069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-05-29T15:28:35.033148Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-05-29T15:28:35.033276Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [1:272:2262] Bootstrap 2025-05-29T15:28:35.034574Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [1:272:2262] Become StateWork (SchemeCache [1:277:2267]) 2025-05-29T15:28:35.035184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:35.035227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0 2025-05-29T15:28:35.035234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, viewDescription: Name: "" QueryText: "Some query" 2025-05-29T15:28:35.035254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:151, at schemeshard: 72057594046678944 2025-05-29T15:28:35.035382Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:272:2262] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:28:35.035995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:151" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:35.036023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:151, operation: CREATE VIEW, path: /MyRoot/ 2025-05-29T15:28:35.036098Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TopicService::OneConsumer_TheRangesDoNotOverlap >> TPersQueueTest::WriteExisting >> TPartitionWriterCacheActorTests::WriteReplyOrder >> TSchemeShardSplitBySizeTest::Make20MergeOperationsWithInflyLimit5 >> TPersQueueTest::ReadFromSeveralPartitions [FAIL] >> TPersQueueTest::ReadFromSeveralPartitionsMigrated |71.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TPartitionWriterCacheActorTests::WriteReplyOrder [GOOD] >> TPartitionWriterCacheActorTests::DropOldWriter >> TPartBtreeIndexIteration::FewNodes_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_History_Slices >> TopicService::OneConsumer_TheRangesDoNotOverlap [FAIL] >> TPartitionWriterCacheActorTests::DropOldWriter [GOOD] >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [FAIL] Test command err: 2025-05-29T15:28:31.073751Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:28:31.073796Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:28:31.073810Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001c23/r3tmp/tmpMg2Wuv/pdisk_1.dat 2025-05-29T15:28:31.177002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:28:31.177883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:28:31.178103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:28:31.178280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-05-29T15:28:31.178547Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-05-29T15:28:31.178556Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:411:2405] Proxy marker# C1 2025-05-29T15:28:31.189429Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:31.189462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:31.192877Z node 1 :HIVE DEBUG: hive_impl.cpp:2265: HIVE#72057594037968897 Merged config: { } 2025-05-29T15:28:31.192937Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532510718370 != 1748532510718374 2025-05-29T15:28:31.236432Z node 1 :HIVE DEBUG: hive_impl.cpp:141: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:336:2375] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-05-29T15:28:31.236495Z node 1 :HIVE DEBUG: tx__register_node.cpp:21: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute
2025-05-29T15:28:31.236563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:31.236571Z node 1 :HIVE DEBUG: hive_impl.cpp:361: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-05-29T15:28:31.236576Z node 1 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-29T15:28:31.236584Z node 1 :HIVE DEBUG: hive_impl.cpp:361: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-05-29T15:28:31.236588Z node 1 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-29T15:28:31.236605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:31.236710Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-05-29T15:28:31.236719Z node 1 :HIVE DEBUG: hive_impl.cpp:222: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-05-29T15:28:31.236725Z node 1 :HIVE DEBUG: hive_impl.cpp:225: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-05-29T15:28:31.236732Z node 1 :HIVE DEBUG: hive_impl.cpp:302: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-05-29T15:28:31.236772Z node 1 :HIVE DEBUG: hive_impl.cpp:798: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-05-29T15:28:31.247119Z node 1 :HIVE DEBUG: tx__register_node.cpp:88: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-05-29T15:28:31.247174Z node 1 :HIVE DEBUG: node_info.cpp:367: HIVE#72057594037968897 Node(1) Ping([1:336:2375]) 2025-05-29T15:28:31.247200Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:26: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-05-29T15:28:31.247377Z node 1 :HIVE DEBUG: hive_impl.cpp:727: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-05-29T15:28:31.247394Z node 1 :HIVE DEBUG: tx__sync_tablets.cpp:41: HIVE#72057594037968897 THive::TTxSyncTablets([1:336:2375])::Execute 2025-05-29T15:28:31.247402Z node 1 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-29T15:28:31.247418Z node 1 :HIVE DEBUG: tx__sync_tablets.cpp:130: HIVE#72057594037968897 THive::TTxSyncTablets([1:336:2375])::Complete 2025-05-29T15:28:31.247465Z node 1 :HIVE DEBUG: hive_impl.cpp:721: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 202797641728 } 2025-05-29T15:28:31.247473Z node 1 :HIVE DEBUG: tx__status.cpp:22: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-05-29T15:28:31.247482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:31.247527Z node 1 :HIVE DEBUG: hive_impl.cpp:2781: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-05-29T15:28:31.247537Z node 1 :HIVE DEBUG: hive_impl.cpp:361: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-05-29T15:28:31.247541Z node 1 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-29T15:28:31.247572Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-05-29T15:28:31.247578Z node 1 :HIVE DEBUG: hive_impl.cpp:222: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-05-29T15:28:31.247584Z node 1 :HIVE DEBUG: hive_impl.cpp:225: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 
2025-05-29T15:28:31.247590Z node 1 :HIVE DEBUG: hive_impl.cpp:302: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-05-29T15:28:31.257918Z node 1 :HIVE DEBUG: tx__status.cpp:65: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-05-29T15:28:31.257953Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:26: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-05-29T15:28:31.319238Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2025-05-29T15:28:31.319309Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-05-29T15:28:31.319408Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-05-29T15:28:31.319542Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:579: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-05-29T15:28:31.319556Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:411:2405] Proxy 2025-05-29T15:28:31.319867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-29T15:28:31.320248Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-05-29T15:28:31.320276Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:6] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-05-29T15:28:31.320282Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:6] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-05-29T15:28:31.320288Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:6] persistent tx 1 acknowledged 2025-05-29T15:28:31.320473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1:0 2025-05-29T15:28:31.320492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-05-29T15:28:31.320702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-05-29T15:28:31.321365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:426: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:28:31.321704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:28:31.321718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:28:31.321897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, database: 
/Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-05-29T15:28:31.322641Z node 1 :HIVE DEBUG: hive_impl.cpp:34: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-05-29T15:28:31.327646Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-29T15:28:31.327691Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:348: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-05-29T15:28:31.327766Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:440: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-05-29T15:28:31.327779Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:443: HIVE#7205759403 ... orm.cpp:33: PhysicalBuildTxsTransformer: ( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (KqpTableSinkSettings $1 '"false" '"upsert" '"0" '"false" '"false" '())) (let $3 (DqStage '() (lambda '() (block '( (let $4 (Just (Uint32 '1))) (return (ToFlow (AsList (AsStruct '('"key" $4) '('"value" $4))))) ))) '() '((DqSink '"0" (DataSink '"KqpTableSink" '"db") $2)))) (return '('('() '((KqpSinkEffect $3 '"0"))))) ) 2025-05-29T15:28:32.943038Z node 2 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 TRACE ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [KQP] kqp_transform.cpp:33: PhysicalBuildQueryTransformer: ( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (KqpTableSinkSettings $1 '"false" '"upsert" '"0" '"false" '"false" '())) (let $3 (DqStage '() (lambda '() (block '( (let $4 (Just (Uint32 '1))) (return (ToFlow (AsList (AsStruct '('"key" $4) '('"value" $4))))) ))) '() '((DqSink '"0" (DataSink '"KqpTableSink" '"db") $2)))) (return '('('() '((KqpSinkEffect $3 '"0"))))) ) 2025-05-29T15:28:32.943048Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 DEBUG ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [perf] type_ann_expr.cpp:48: Execution of [TypeAnnotationTransformer::DoTransform] took 3us 2025-05-29T15:28:32.943059Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 DEBUG ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [perf] yql_expr_constraint.cpp:3248: Execution of [ConstraintTransformer::DoTransform] took 3us 2025-05-29T15:28:32.943071Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 DEBUG ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [perf] yql_expr_csee.cpp:620: Execution of [UpdateCompletness] took 5us 2025-05-29T15:28:32.943098Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 DEBUG ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [perf] yql_expr_csee.cpp:633: Execution of [EliminateCommonSubExpressions] took 19us 
2025-05-29T15:28:32.943184Z node 2 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 TRACE ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [KQP] kqp_transform.cpp:33: PhysicalOptimizeTransformer: ( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (KqpTableSinkSettings $1 '"false" '"upsert" '"0" '"false" '"false" '())) (let $3 (DqPhyStage '() (lambda '() (block '( (let $4 (Just (Uint32 '1))) (return (Iterator (AsList (AsStruct '('"key" $4) '('"value" $4))))) ))) '('('"_logical_id" '180) '('"_id" '"7462a801-a9417aaf-f68b416b-b3e6fba0")) '((DqSink '"0" (DataSink '"KqpTableSink" '"db") $2)))) (return (KqpPhysicalQuery '((KqpPhysicalTx '($3) '() '() '('('"type" '"data") '('"with_effects")))) '() '('('"type" '"data_query")))) ) 2025-05-29T15:28:32.943232Z node 2 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 TRACE ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [KQP] kqp_transform.cpp:33: PhysicalBuildTxsTransformer: ( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (KqpTableSinkSettings $1 '"false" '"upsert" '"0" '"false" '"false" '())) (let $3 (DqPhyStage '() (lambda '() (block '( (let $4 (Just (Uint32 '1))) (return (Iterator (AsList (AsStruct '('"key" $4) '('"value" $4))))) ))) '('('"_logical_id" '180) '('"_id" '"7462a801-a9417aaf-f68b416b-b3e6fba0")) '((DqSink '"0" (DataSink '"KqpTableSink" '"db") $2)))) (return (KqpPhysicalQuery '((KqpPhysicalTx '($3) '() '() '('('"type" '"data") '('"with_effects")))) '() '('('"type" '"data_query")))) ) 2025-05-29T15:28:32.943277Z node 2 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 TRACE ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [KQP] kqp_transform.cpp:33: PhysicalBuildQueryTransformer: ( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (KqpTableSinkSettings $1 '"false" '"upsert" '"0" '"false" '"false" '())) (let $3 (DqPhyStage '() (lambda '() (block '( (let $4 (Just (Uint32 '1))) (return (Iterator (AsList (AsStruct '('"key" $4) '('"value" $4))))) ))) '('('"_logical_id" '180) '('"_id" '"7462a801-a9417aaf-f68b416b-b3e6fba0")) '((DqSink '"0" (DataSink '"KqpTableSink" '"db") $2)))) (return (KqpPhysicalQuery '((KqpPhysicalTx '($3) '() '() '('('"type" '"data") '('"with_effects")))) '() '('('"type" '"data_query")))) ) 2025-05-29T15:28:32.943474Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 DEBUG ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [perf] type_ann_expr.cpp:48: Execution of [TypeAnnotationTransformer::DoTransform] took 186us 2025-05-29T15:28:32.943539Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 INFO ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [core exec] yql_execution.cpp:466: Register async execution for node #117 2025-05-29T15:28:32.943555Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 INFO ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [core exec] yql_execution.cpp:87: Finish, output #119, status: Async 2025-05-29T15:28:32.943616Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: 
CompileActor 2025-05-29 15:28:32.943 INFO ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [core exec] yql_execution.cpp:133: Completed async execution for node #117 2025-05-29T15:28:32.943625Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 INFO ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [core exec] yql_execution.cpp:153: State is ExecutionRequired after apply async changes for node #117 2025-05-29T15:28:32.943634Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 INFO ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [core exec] yql_execution.cpp:59: Begin, root #119 2025-05-29T15:28:32.943643Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 INFO ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [core exec] yql_execution.cpp:72: Collect unused nodes for root #119, status: Ok 2025-05-29T15:28:32.943651Z node 2 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 TRACE ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [core exec] yql_execution.cpp:387: {0}, callable #119 2025-05-29T15:28:32.943657Z node 2 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 TRACE ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [core exec] yql_execution.cpp:387: {1}, callable #118 2025-05-29T15:28:32.943663Z node 2 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 TRACE ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [core exec] yql_execution.cpp:387: {2}, callable #117 2025-05-29T15:28:32.943693Z node 2 :KQP_YQL NOTICE: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 NOTE ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [common provider] yql_provider_gateway.cpp:21:
: Fatal: Execution, code: 1060 2025-05-29T15:28:32.943701Z node 2 :KQP_YQL NOTICE: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 NOTE ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [common provider] yql_provider_gateway.cpp:21:
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:32.943718Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 INFO ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [core exec] yql_execution.cpp:87: Finish, output #119, status: Error 2025-05-29T15:28:32.943732Z node 2 :KQP_YQL NOTICE: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 NOTE ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [common provider] yql_provider_gateway.cpp:21:
: Fatal: Execution, code: 1060 2025-05-29T15:28:32.943739Z node 2 :KQP_YQL NOTICE: log.cpp:67: TraceId: 01jweahhzf81mbrjnkmqc6k5bp, SessionId: CompileActor 2025-05-29 15:28:32.943 NOTE ydb-core-tx-datashard-ut_minstep(pid=4017975, tid=0x00007F378B74ADC0) [common provider] yql_provider_gateway.cpp:21:
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:32.943820Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:825:2669], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:32.944552Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=ODFmOWIwMmUtMjRhMGU3MDQtZjZjYzk0YjQtNjJlMGQ1YjI=, ActorId: [2:729:2611], ActorState: ExecuteState, TraceId: 01jweahhzf81mbrjnkmqc6k5bp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13A9D20C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C50589) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x262E4EB4) NKikimr::NTestSuiteTDataShardMinStepTest::TestDropTableCompletesQuickly(TBasicString> const&, Ydb::StatusIds_StatusCode, bool)+1336 (0x139933E8) NKikimr::NTestSuiteTDataShardMinStepTest::TTestCaseTestDropTableCompletesQuicklyRW::Execute_(NUnitTest::TTestContext&)+139 (0x1399E84B) NKikimr::NTestSuiteTDataShardMinStepTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13999257) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C5243E) NKikimr::NTestSuiteTDataShardMinStepTest::TCurrentTest::Execute()+433 (0x13998AB1) NUnitTest::TTestFactory::Execute()+803 (0x13C52BB3) NUnitTest::RunMain(int, char**)+3021 (0x13C6475D) ??+0 (0x7F378B85FD90) __libc_start_main+128 (0x7F378B85FE40) _start+41 (0x129F6029) >> TopicService::OneConsumer_TheRangesOverlap >> BuildStatsHistogram::Many_Serial [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [FAIL] Test command err: 2025-05-29T15:28:31.574103Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:28:31.574139Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:28:31.574150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001be3/r3tmp/tmpXTx6m8/pdisk_1.dat 2025-05-29T15:28:31.671965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:28:31.672884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:28:31.673090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:28:31.673253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-05-29T15:28:31.673513Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-05-29T15:28:31.673522Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:411:2405] Proxy marker# C1 2025-05-29T15:28:31.684517Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:31.684556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:31.687416Z node 1 :HIVE DEBUG: hive_impl.cpp:2265: HIVE#72057594037968897 Merged config: { } 2025-05-29T15:28:31.687467Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532511235355 != 1748532511235359 2025-05-29T15:28:31.729769Z node 1 :HIVE DEBUG: hive_impl.cpp:141: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:336:2375] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-05-29T15:28:31.729808Z node 1 :HIVE DEBUG: tx__register_node.cpp:21: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 
2025-05-29T15:28:31.729837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:31.729843Z node 1 :HIVE DEBUG: hive_impl.cpp:361: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-05-29T15:28:31.729846Z node 1 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-29T15:28:31.729850Z node 1 :HIVE DEBUG: hive_impl.cpp:361: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-05-29T15:28:31.729852Z node 1 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-29T15:28:31.729862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:31.729923Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-05-29T15:28:31.729929Z node 1 :HIVE DEBUG: hive_impl.cpp:222: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-05-29T15:28:31.729932Z node 1 :HIVE DEBUG: hive_impl.cpp:225: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-05-29T15:28:31.729937Z node 1 :HIVE DEBUG: hive_impl.cpp:302: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-05-29T15:28:31.729963Z node 1 :HIVE DEBUG: hive_impl.cpp:798: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-05-29T15:28:31.740281Z node 1 :HIVE DEBUG: tx__register_node.cpp:88: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-05-29T15:28:31.740327Z node 1 :HIVE DEBUG: node_info.cpp:367: HIVE#72057594037968897 Node(1) Ping([1:336:2375]) 2025-05-29T15:28:31.740353Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:26: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-05-29T15:28:31.740528Z node 1 :HIVE DEBUG: hive_impl.cpp:727: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-05-29T15:28:31.740543Z node 1 :HIVE DEBUG: tx__sync_tablets.cpp:41: HIVE#72057594037968897 THive::TTxSyncTablets([1:336:2375])::Execute 2025-05-29T15:28:31.740549Z node 1 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-29T15:28:31.740561Z node 1 :HIVE DEBUG: tx__sync_tablets.cpp:130: HIVE#72057594037968897 THive::TTxSyncTablets([1:336:2375])::Complete 2025-05-29T15:28:31.740597Z node 1 :HIVE DEBUG: hive_impl.cpp:721: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 202797641728 } 2025-05-29T15:28:31.740605Z node 1 :HIVE DEBUG: tx__status.cpp:22: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-05-29T15:28:31.740610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:31.740651Z node 1 :HIVE DEBUG: hive_impl.cpp:2781: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-05-29T15:28:31.740659Z node 1 :HIVE DEBUG: hive_impl.cpp:361: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-05-29T15:28:31.740661Z node 1 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-29T15:28:31.740682Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-05-29T15:28:31.740686Z node 1 :HIVE DEBUG: hive_impl.cpp:222: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-05-29T15:28:31.740689Z node 1 :HIVE DEBUG: hive_impl.cpp:225: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 
2025-05-29T15:28:31.740693Z node 1 :HIVE DEBUG: hive_impl.cpp:302: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-05-29T15:28:31.751013Z node 1 :HIVE DEBUG: tx__status.cpp:65: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-05-29T15:28:31.751044Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:26: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-05-29T15:28:31.812256Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2025-05-29T15:28:31.812319Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-05-29T15:28:31.812413Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-05-29T15:28:31.812524Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:579: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-05-29T15:28:31.812535Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:411:2405] Proxy 2025-05-29T15:28:31.812774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-29T15:28:31.813064Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-05-29T15:28:31.813084Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:6] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-05-29T15:28:31.813089Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:6] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-05-29T15:28:31.813093Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:6] persistent tx 1 acknowledged 2025-05-29T15:28:31.813251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1:0 2025-05-29T15:28:31.813264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-05-29T15:28:31.813428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-05-29T15:28:31.814007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:426: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:28:31.814325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:28:31.814338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:28:31.814473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, database: 
/Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-05-29T15:28:31.815026Z node 1 :HIVE DEBUG: hive_impl.cpp:34: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-05-29T15:28:31.819535Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-29T15:28:31.819573Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:348: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-05-29T15:28:31.819635Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:440: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-05-29T15:28:31.819643Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:443: HIVE#7205759403 ... cpp:33: PhysicalBuildTxsTransformer: ( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (KqpTableSinkSettings $1 '"false" '"upsert" '"0" '"false" '"false" '())) (let $3 (DqStage '() (lambda '() (block '( (let $4 (Just (Uint32 '1))) (return (ToFlow (AsList (AsStruct '('"key" $4) '('"value" $4))))) ))) '() '((DqSink '"0" (DataSink '"KqpTableSink" '"db") $2)))) (return '('('() '((KqpSinkEffect $3 '"0"))))) ) 2025-05-29T15:28:33.488951Z node 2 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.488 TRACE ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [KQP] kqp_transform.cpp:33: PhysicalBuildQueryTransformer: ( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (KqpTableSinkSettings $1 '"false" '"upsert" '"0" '"false" '"false" '())) (let $3 (DqStage '() (lambda '() (block '( (let $4 (Just (Uint32 '1))) (return (ToFlow (AsList (AsStruct '('"key" $4) '('"value" $4))))) ))) '() '((DqSink '"0" (DataSink '"KqpTableSink" '"db") $2)))) (return '('('() '((KqpSinkEffect $3 '"0"))))) ) 2025-05-29T15:28:33.488958Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.488 DEBUG ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [perf] type_ann_expr.cpp:48: Execution of [TypeAnnotationTransformer::DoTransform] took 2us 2025-05-29T15:28:33.488964Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.488 DEBUG ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [perf] yql_expr_constraint.cpp:3248: Execution of [ConstraintTransformer::DoTransform] took 2us 2025-05-29T15:28:33.488971Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.488 DEBUG ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [perf] yql_expr_csee.cpp:620: Execution of [UpdateCompletness] took 4us 2025-05-29T15:28:33.488988Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.488 DEBUG ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [perf] yql_expr_csee.cpp:633: Execution of [EliminateCommonSubExpressions] took 12us 2025-05-29T15:28:33.489051Z 
node 2 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 TRACE ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [KQP] kqp_transform.cpp:33: PhysicalOptimizeTransformer: ( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (KqpTableSinkSettings $1 '"false" '"upsert" '"0" '"false" '"false" '())) (let $3 (DqPhyStage '() (lambda '() (block '( (let $4 (Just (Uint32 '1))) (return (Iterator (AsList (AsStruct '('"key" $4) '('"value" $4))))) ))) '('('"_logical_id" '180) '('"_id" '"20d26363-20624e30-590d3407-fcb1cbbe")) '((DqSink '"0" (DataSink '"KqpTableSink" '"db") $2)))) (return (KqpPhysicalQuery '((KqpPhysicalTx '($3) '() '() '('('"type" '"data") '('"with_effects")))) '() '('('"type" '"data_query")))) ) 2025-05-29T15:28:33.489079Z node 2 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 TRACE ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [KQP] kqp_transform.cpp:33: PhysicalBuildTxsTransformer: ( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (KqpTableSinkSettings $1 '"false" '"upsert" '"0" '"false" '"false" '())) (let $3 (DqPhyStage '() (lambda '() (block '( (let $4 (Just (Uint32 '1))) (return (Iterator (AsList (AsStruct '('"key" $4) '('"value" $4))))) ))) '('('"_logical_id" '180) '('"_id" '"20d26363-20624e30-590d3407-fcb1cbbe")) '((DqSink '"0" (DataSink '"KqpTableSink" '"db") $2)))) (return (KqpPhysicalQuery '((KqpPhysicalTx '($3) '() '() '('('"type" '"data") '('"with_effects")))) '() '('('"type" '"data_query")))) ) 2025-05-29T15:28:33.489107Z node 2 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 TRACE ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [KQP] kqp_transform.cpp:33: PhysicalBuildQueryTransformer: ( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (KqpTableSinkSettings $1 '"false" '"upsert" '"0" '"false" '"false" '())) (let $3 (DqPhyStage '() (lambda '() (block '( (let $4 (Just (Uint32 '1))) (return (Iterator (AsList (AsStruct '('"key" $4) '('"value" $4))))) ))) '('('"_logical_id" '180) '('"_id" '"20d26363-20624e30-590d3407-fcb1cbbe")) '((DqSink '"0" (DataSink '"KqpTableSink" '"db") $2)))) (return (KqpPhysicalQuery '((KqpPhysicalTx '($3) '() '() '('('"type" '"data") '('"with_effects")))) '() '('('"type" '"data_query")))) ) 2025-05-29T15:28:33.489244Z node 2 :KQP_YQL DEBUG: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 DEBUG ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [perf] type_ann_expr.cpp:48: Execution of [TypeAnnotationTransformer::DoTransform] took 129us 2025-05-29T15:28:33.489286Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 INFO ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [core exec] yql_execution.cpp:466: Register async execution for node #117 2025-05-29T15:28:33.489298Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 INFO ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [core exec] yql_execution.cpp:87: Finish, output #119, status: Async 2025-05-29T15:28:33.489360Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 
INFO ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [core exec] yql_execution.cpp:133: Completed async execution for node #117 2025-05-29T15:28:33.489369Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 INFO ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [core exec] yql_execution.cpp:153: State is ExecutionRequired after apply async changes for node #117 2025-05-29T15:28:33.489378Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 INFO ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [core exec] yql_execution.cpp:59: Begin, root #119 2025-05-29T15:28:33.489387Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 INFO ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [core exec] yql_execution.cpp:72: Collect unused nodes for root #119, status: Ok 2025-05-29T15:28:33.489393Z node 2 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 TRACE ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [core exec] yql_execution.cpp:387: {0}, callable #119 2025-05-29T15:28:33.489397Z node 2 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 TRACE ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [core exec] yql_execution.cpp:387: {1}, callable #118 2025-05-29T15:28:33.489400Z node 2 :KQP_YQL TRACE: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 TRACE ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [core exec] yql_execution.cpp:387: {2}, callable #117 2025-05-29T15:28:33.489423Z node 2 :KQP_YQL NOTICE: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 NOTE ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [common provider] yql_provider_gateway.cpp:21:
: Fatal: Execution, code: 1060 2025-05-29T15:28:33.489428Z node 2 :KQP_YQL NOTICE: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 NOTE ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [common provider] yql_provider_gateway.cpp:21:
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:33.489438Z node 2 :KQP_YQL INFO: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 INFO ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [core exec] yql_execution.cpp:87: Finish, output #119, status: Error 2025-05-29T15:28:33.489448Z node 2 :KQP_YQL NOTICE: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 NOTE ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [common provider] yql_provider_gateway.cpp:21:
: Fatal: Execution, code: 1060 2025-05-29T15:28:33.489452Z node 2 :KQP_YQL NOTICE: log.cpp:67: TraceId: 01jweahjgp46n188p221y44d5d, SessionId: CompileActor 2025-05-29 15:28:33.489 NOTE ydb-core-tx-datashard-ut_minstep(pid=4019897, tid=0x00007F70723F8DC0) [common provider] yql_provider_gateway.cpp:21:
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:33.489512Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:825:2669], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:33.490138Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=YWE0M2UyZi0zMGUzMDg5LWRjMGJlNGJlLTcwMmQ3ZjY1, ActorId: [2:729:2611], ActorState: ExecuteState, TraceId: 01jweahjgp46n188p221y44d5d, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (INTERNAL_ERROR != SUCCESS) Response { QueryIssues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } TxMeta { } } YdbStatus: INTERNAL_ERROR ConsumedRu: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x13A9D20C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13C50589) NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+1300 (0x262E4EB4) NKikimr::NTestSuiteTDataShardMinStepTest::TestDropTablePlanComesNotTooEarly(TBasicString> const&, Ydb::StatusIds_StatusCode, bool)+1312 (0x1398EAC0) NKikimr::NTestSuiteTDataShardMinStepTest::TTestCaseTestDropTablePlanComesNotTooEarlyRW::Execute_(NUnitTest::TTestContext&)+139 (0x1399E4CB) NKikimr::NTestSuiteTDataShardMinStepTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13999257) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13C5243E) NKikimr::NTestSuiteTDataShardMinStepTest::TCurrentTest::Execute()+433 (0x13998AB1) NUnitTest::TTestFactory::Execute()+803 (0x13C52BB3) NUnitTest::RunMain(int, char**)+3021 (0x13C6475D) ??+0 (0x7F707250DD90) __libc_start_main+128 (0x7F707250DE40) _start+41 (0x129F6029) >> TPersQueueTest::WriteExisting [FAIL] >> TPersQueueTest::WriteExistingBigValue >> TGRpcStreamingTest::ClientDisconnects >> IntermediateDirsReboots::CreateTableWithIntermediateDirsForceDrop [GOOD] |71.2%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPersQueueTest::ReadFromSeveralPartitionsMigrated [FAIL] >> TPersQueueTest::SchemeshardRestart >> TGRpcStreamingTest::WriteAndFinishWorks >> TGRpcStreamingTest::ClientDisconnects [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] >> TopicService::OneConsumer_TheRangesOverlap [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateTableWithIntermediateDirsForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:28:25.280626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:25.280648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:25.280652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:25.280657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:25.280662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:25.280664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:25.280671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:25.280682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:25.280770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:25.280842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:25.290720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new 
config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:28:25.290759Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:25.290837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:28:25.293110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:25.293138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:25.293168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:25.295330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:25.295408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:25.295506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:25.295670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:25.296227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:25.296263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:25.296502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:25.296509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:25.296538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:25.296544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:25.296548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:25.296564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:28:25.297590Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:28:25.310597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:25.310679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:25.310832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:25.310878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:25.310886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:25.311695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:25.311728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:25.311790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:25.311799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:25.311804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:25.311808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:25.312198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:25.312207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:25.312211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:25.312463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:25.312474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:25.312479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:25.312487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:25.312983Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:25.313276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:25.313307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:25.313465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:25.313484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:25.313500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:25.313556Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
96] 2025-05-29T15:28:37.249064Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:37.249070Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:37.249075Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:207:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2025-05-29T15:28:37.249080Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:207:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2025-05-29T15:28:37.249084Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:207:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-05-29T15:28:37.249088Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:207:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:28:37.249229Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:37.249240Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:37.249244Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:28:37.249249Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:28:37.249254Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:28:37.249367Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:37.249381Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:37.249386Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:28:37.249391Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:28:37.249395Z node 49 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:28:37.249608Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:37.249621Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:37.249626Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:28:37.249631Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:28:37.249636Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:28:37.249795Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:37.249807Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:37.249812Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:28:37.249816Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:28:37.249820Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:28:37.249830Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2025-05-29T15:28:37.249835Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [49:305:2295] 2025-05-29T15:28:37.250060Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:28:37.250192Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 Leader for TabletID 72057594037968897 is [49:219:2217] sender: [49:343:2058] recipient: [49:15:2062] 
2025-05-29T15:28:37.250387Z node 49 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-05-29T15:28:37.250559Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:37.250619Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:28:37.250667Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:28:37.250673Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:28:37.250681Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:28:37.250687Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:28:37.250693Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:28:37.250698Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:28:37.250703Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:37.250929Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:37.251070Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:37.251172Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:37.251186Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:28:37.251192Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [49:306:2296] 2025-05-29T15:28:37.251516Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:28:37.251564Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2025-05-29T15:28:37.251644Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/x" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:37.251674Z node 49 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/x" took 40us result status StatusPathDoesNotExist 2025-05-29T15:28:37.251708Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/x\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/x" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TPartBtreeIndexIteration::FewNodes_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [FAIL] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:57:2097]) rebooted! !Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed! 
new actor is[2:82:2111] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! 
new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:86:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:88:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! new actor is[7:87:2114] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:173:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:85:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:88:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:89:2057] recipient: [8:87:2116] Leader for TabletID 72057594037927937 is [8:90:2117] sender: [8:91:2057] recipient: [8:87:2116] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! 
new actor is[8:90:2117] Leader for TabletID 72057594037927937 is [8:90:2117] sender: [8:176:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:85:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:88:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:89:2057] recipient: [9:87:2116] Leader for TabletID 72057594037927937 is [9:90:2117] sender: [9:91:2057] recipient: [9:87:2116] !Reboot 72057594037927937 (actor [9:57:2097]) rebooted! !Reboot 72057594037927937 (actor [9:57:2097]) tablet resolver refreshed! new actor is[9:90:2117] Leader for TabletID 72057594037927937 is [9:90:2117] sender: [9:176:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:86:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:89:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:90:2057] recipient: [10:88:2116] Leader for TabletID 72057594037927937 is [10:91:2117] sender: [10:92:2057] recipient: [10:88:2116] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! new actor is[10:91:2117] Leader for TabletID 72057594037927937 is [10:91:2117] sender: [10:177:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:89:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:92:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:93:2057] recipient: [11:91:2119] Leader for TabletID 72057594037927937 is [11:94:2120] sender: [11:95:2057] recipient: [11:91:2119] !Reboot 72057594037927937 (actor [11:57:2097]) rebooted! !Reboot 72057594037927937 (actor [11:57:2097]) tablet resolver refreshed! 
new actor is[11:94:2120] Leader for TabletID 72057594037927937 is [11:94:2120] sender: [11:180:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 29:79:2110] !Reboot 72057594037927937 (actor [29:57:2097]) rebooted! !Reboot 72057594037927937 (actor [29:57:2097]) tablet resolver refreshed! new actor is[29:82:2111] Leader for TabletID 72057594037927937 is [29:82:2111] sender: [29:168:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:55:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:55:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:58:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:75:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:78:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:81:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:82:2057] recipient: [30:80:2110] Leader for TabletID 72057594037927937 is [30:83:2111] sender: [30:84:2057] recipient: [30:80:2110] !Reboot 72057594037927937 (actor [30:57:2097]) rebooted! !Reboot 72057594037927937 (actor [30:57:2097]) tablet resolver refreshed! new actor is[30:83:2111] Leader for TabletID 72057594037927937 is [30:83:2111] sender: [30:169:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:55:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:55:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:58:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:75:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:81:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:84:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:85:2057] recipient: [31:83:2113] Leader for TabletID 72057594037927937 is [31:86:2114] sender: [31:87:2057] recipient: [31:83:2113] !Reboot 72057594037927937 (actor [31:57:2097]) rebooted! !Reboot 72057594037927937 (actor [31:57:2097]) tablet resolver refreshed! 
new actor is[31:86:2114] Leader for TabletID 72057594037927937 is [31:86:2114] sender: [31:172:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:55:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:55:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:58:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:75:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:81:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:84:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:85:2057] recipient: [32:83:2113] Leader for TabletID 72057594037927937 is [32:86:2114] sender: [32:87:2057] recipient: [32:83:2113] !Reboot 72057594037927937 (actor [32:57:2097]) rebooted! !Reboot 72057594037927937 (actor [32:57:2097]) tablet resolver refreshed! new actor is[32:86:2114] Leader for TabletID 72057594037927937 is [32:86:2114] sender: [32:172:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:55:2057] recipient: [33:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:55:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:58:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:75:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:82:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:85:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:86:2057] recipient: [33:84:2113] Leader for TabletID 72057594037927937 is [33:87:2114] sender: [33:88:2057] recipient: [33:84:2113] !Reboot 72057594037927937 (actor [33:57:2097]) rebooted! !Reboot 72057594037927937 (actor [33:57:2097]) tablet resolver refreshed! new actor is[33:87:2114] Leader for TabletID 72057594037927937 is [33:87:2114] sender: [33:173:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:55:2057] recipient: [34:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:55:2057] recipient: [34:52:2095] Leader for TabletID 72057594037927937 is [34:57:2097] sender: [34:58:2057] recipient: [34:52:2095] Leader for TabletID 72057594037927937 is [34:57:2097] sender: [34:75:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [34:57:2097] sender: [34:85:2057] recipient: [34:36:2083] Leader for TabletID 72057594037927937 is [34:57:2097] sender: [34:88:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:57:2097] sender: [34:89:2057] recipient: [34:87:2116] Leader for TabletID 72057594037927937 is [34:90:2117] sender: [34:91:2057] recipient: [34:87:2116] !Reboot 72057594037927937 (actor [34:57:2097]) rebooted! !Reboot 72057594037927937 (actor [34:57:2097]) tablet resolver refreshed! 
new actor is[34:90:2117] Leader for TabletID 72057594037927937 is [34:90:2117] sender: [34:176:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:55:2057] recipient: [35:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:55:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:58:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:75:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:85:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:88:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:89:2057] recipient: [35:87:2116] Leader for TabletID 72057594037927937 is [35:90:2117] sender: [35:91:2057] recipient: [35:87:2116] !Reboot 72057594037927937 (actor [35:57:2097]) rebooted! !Reboot 72057594037927937 (actor [35:57:2097]) tablet resolver refreshed! new actor is[35:90:2117] Leader for TabletID 72057594037927937 is [35:90:2117] sender: [35:176:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:55:2057] recipient: [36:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:55:2057] recipient: [36:52:2095] Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:58:2057] recipient: [36:52:2095] Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:75:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:86:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:89:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:90:2057] recipient: [36:88:2116] Leader for TabletID 72057594037927937 is [36:91:2117] sender: [36:92:2057] recipient: [36:88:2116] !Reboot 72057594037927937 (actor [36:57:2097]) rebooted! !Reboot 72057594037927937 (actor [36:57:2097]) tablet resolver refreshed! new actor is[36:91:2117] Leader for TabletID 72057594037927937 is [36:91:2117] sender: [36:177:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:55:2057] recipient: [37:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:55:2057] recipient: [37:51:2095] Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:58:2057] recipient: [37:51:2095] Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:75:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:89:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:92:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:93:2057] recipient: [37:91:2119] Leader for TabletID 72057594037927937 is [37:94:2120] sender: [37:95:2057] recipient: [37:91:2119] !Reboot 72057594037927937 (actor [37:57:2097]) rebooted! !Reboot 72057594037927937 (actor [37:57:2097]) tablet resolver refreshed! 
new actor is[37:94:2120] Leader for TabletID 72057594037927937 is [37:94:2120] sender: [37:180:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:55:2057] recipient: [38:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:55:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:58:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:75:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:57:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:89:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:91:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:93:2057] recipient: [38:92:2119] Leader for TabletID 72057594037927937 is [38:94:2120] sender: [38:95:2057] recipient: [38:92:2119] !Reboot 72057594037927937 (actor [38:57:2097]) rebooted! !Reboot 72057594037927937 (actor [38:57:2097]) tablet resolver refreshed! new actor is[38:94:2120] Leader for TabletID 72057594037927937 is [38:94:2120] sender: [38:180:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:55:2057] recipient: [39:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:55:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:58:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:75:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:90:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:93:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:94:2057] recipient: [39:92:2119] Leader for TabletID 72057594037927937 is [39:95:2120] sender: [39:96:2057] recipient: [39:92:2119] !Reboot 72057594037927937 (actor [39:57:2097]) rebooted! !Reboot 72057594037927937 (actor [39:57:2097]) tablet resolver refreshed! 
new actor is[39:95:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:55:2057] recipient: [40:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:55:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:57:2097] sender: [40:58:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:57:2097] sender: [40:75:2057] recipient: [40:14:2061] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario3 [GOOD] Test command err: c[def1] ---------------------------------------------------------------------------------------------------- (0) 2025-05-29T15:25:58.685843Z node 1 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:25:58.685894Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:25:58.686960Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:25:58.687113Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-05-29T15:25:58.687322Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:25:58.687609Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-05-29T15:25:58.687625Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:25:58.687817Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:29:2075] ControllerId# 72057594037932033 2025-05-29T15:25:58.687824Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:25:58.687858Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:25:58.687886Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:25:58.692839Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:25:58.692936Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:25:58.695549Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:25:58.695574Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:294: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:25:58.695952Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-05-29T15:25:58.695994Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-05-29T15:25:58.696026Z node 1 :BS_PROXY DEBUG: 
group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-05-29T15:25:58.696062Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-05-29T15:25:58.696105Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-05-29T15:25:58.696140Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:44:2087] targetNodeId# 1 Marker# DSP01 2025-05-29T15:25:58.696170Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:28:2074] Create Queue# [1:45:2088] targetNodeId# 1 Marker# DSP01 2025-05-29T15:25:58.696176Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:25:58.696196Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037932033] ::Bootstrap [1:29:2075] 2025-05-29T15:25:58.696203Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037932033] lookup [1:29:2075] 2025-05-29T15:25:58.696212Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:25:58.696251Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:29:2075] 2025-05-29T15:25:58.696265Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:25:58.696271Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:25:58.696281Z node 1 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:25:58.696472Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:25:58.696506Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:25:58.696514Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-05-29T15:25:58.703722Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-05-29T15:25:58.704510Z node 1 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-05-29T15:25:58.704590Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-05-29T15:25:58.704615Z node 1 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:25:58.704621Z node 1 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:25:58.704677Z node 1 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:50:2091] 2025-05-29T15:25:58.719955Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:29:2075] 2025-05-29T15:25:58.720362Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037936129] ::Bootstrap [1:33:2063] 2025-05-29T15:25:58.720372Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037936129] lookup [1:33:2063] 2025-05-29T15:25:58.720426Z node 1 :BS_NODE DEBUG: 
{NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:25:58.720459Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:25:58.720486Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-05-29T15:25:58.720493Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-05-29T15:25:58.720500Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-05-29T15:25:58.720508Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-05-29T15:25:58.720575Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:25:58.720585Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037927937] ::Bootstrap [1:50:2091] 2025-05-29T15:25:58.720589Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037927937] lookup [1:50:2091] 2025-05-29T15:25:58.720606Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-05-29T15:25:58.720613Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:25:58.720650Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-05-29T15:25:58.720665Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-05-29T15:25:58.720730Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:25:58.720768Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:25:58.720776Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037927937] queue send [1:50:2091] 2025-05-29T15:25:58.720785Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-05-29T15:25:58.720814Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:610: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 
CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:24343667:0] : 2}, {[1:2199047599219:0] : 8}, {[1:1099535971443:0] : 5}}}} 2025-05-29T15:25:58.720819Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:361: DropEntry tabletId: 72057594037932033 followers: 0 2025-05-29T15:25:58.720936Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:172: TClient[72057594037932033] forward result error, check reconnect [1:29:2075] 2025-05-29T15:25:58.720942Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:562: TClient[72057594037932033] schedule retry [1:29:2075] 2025-05-29T15:25:58.720949Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-05-29T15:25:58.720958Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:361} StateFunc Type# 2146435075 Sender# [1:47:2090] SessionId# [0:0:0] Cookie# 0 2025-05-29T15:25:58.720967Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.024424s 2025-05-29T15:25:58.721011Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:361} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-05-29T15:25:58.721753Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:25:58.726919Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:29:2075] 2025-05-29T15:25:58.727080Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-05-29T15:25:58.727093Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-05-29T15:25:58.72712 ... 
d838b6b61d96b877] restore Id# [72057594037927937:2:490:0:0:246:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-05-29T15:28:33.026460Z node 12 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [d838b6b61d96b877] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:490:0:0:246:1] Marker# BPG33 2025-05-29T15:28:33.026464Z node 12 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [d838b6b61d96b877] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:490:0:0:246:1] Marker# BPG32 2025-05-29T15:28:33.026488Z node 12 :BS_PROXY DEBUG: group_sessions.h:165: Send to queueActorId# [12:425:2088] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:490:0:0:246:1] FDS# 246 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-05-29T15:28:33.027093Z node 12 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [d838b6b61d96b877] received {EvVPutResult Status# OK ID# [72057594037927937:2:490:0:0:246:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 506 } Cost# 81937 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 507 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-05-29T15:28:33.027115Z node 12 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [d838b6b61d96b877] Result# TEvPutResult {Id# [72057594037927937:2:490:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-05-29T15:28:33.027121Z node 12 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [d838b6b61d96b877] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:490:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:28:33.027142Z node 12 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.112 sample PartId# [72057594037927937:2:490:0:0:246:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 12 } TEvVPutResult{ TimestampMs# 0.727 VDiskId# [0:1:0:0:0] NodeId# 12 Status# OK } ] } 2025-05-29T15:28:33.027210Z node 12 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594037927937:2:490:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-05-29T15:28:33.027275Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:491} commited cookie 1 for step 490 2025-05-29T15:28:33.027287Z node 12 :HIVE DEBUG: tx__reassign_groups.cpp:56: HIVE#72057594037927937 THive::TTxReassignGroups(72075186224037986)::Complete 2025-05-29T15:28:33.027298Z node 12 :HIVE DEBUG: tx__update_tablet_groups.cpp:332: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{125050062069920}(72075186224037986)::Complete SideEffects: {Notifications: 0x7FF0000F [12:5507:2604] NKikimr::NHive::TEvPrivate::TEvRestartCancelled} 2025-05-29T15:28:33.027374Z node 12 :HIVE DEBUG: storage_balancer.cpp:115: HIVE#72057594037927937 StorageBalancer received RestartCancelled for tablet (72075186224037986,0) 2025-05-29T15:28:33.027380Z node 12 :HIVE DEBUG: storage_balancer.cpp:92: HIVE#72057594037927937 StorageBalancer initiating reassign for tablet 72075186224037889 2025-05-29T15:28:33.027490Z node 12 :HIVE DEBUG: hive_impl.cpp:964: HIVE#72057594037927937 THive::TEvReassignTablet TabletID: 72075186224037889 Channels: 1 Channels: 2 Channels: 0 ReassignReason: 
HIVE_REASSIGN_REASON_BALANCE 2025-05-29T15:28:33.027499Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:491} Tx{1483, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-05-29T15:28:33.027504Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:491} Tx{1483, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:28:33.027508Z node 12 :HIVE DEBUG: tx__reassign_groups.cpp:30: HIVE#72057594037927937 THive::TTxReassignGroups(72075186224037889,[0,1,2])::Execute 2025-05-29T15:28:33.027557Z node 12 :HIVE DEBUG: hive_impl.cpp:1065: HIVE#72057594037927937 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037889 GroupParameters { StoragePoolSpecifier { Name: "def1" } } ReturnAllMatchingGroups: true 2025-05-29T15:28:33.027571Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:491} Tx{1483, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{992, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-05-29T15:28:33.027576Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:491} Tx{1483, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:28:33.027595Z node 12 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:155: TClient[72057594037932033] send [12:1309:2642] 2025-05-29T15:28:33.027599Z node 12 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:629: TClient[72057594037932033] push event to server [12:1309:2642] 2025-05-29T15:28:33.027605Z node 12 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [12:1243:2604] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.054) *****----------------------------------------------------------------------------------------------- (0.054) *****----------------------------------------------------------------------------------------------- (0.05) ******---------------------------------------------------------------------------------------------- (0.056) *****----------------------------------------------------------------------------------------------- (0.054) ******---------------------------------------------------------------------------------------------- (0.06) *****----------------------------------------------------------------------------------------------- (0.052) ******---------------------------------------------------------------------------------------------- (0.06) *****----------------------------------------------------------------------------------------------- (0.052) ******---------------------------------------------------------------------------------------------- (0.058) 2025-05-29T15:28:33.128648Z node 12 :HIVE DEBUG: hive_impl.cpp:433: HIVE#72057594037927937 THive::Handle TEvControllerSelectGroupsResult: success Status: OK MatchingGroups { Groups { GroupID: 2147483649 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2500000000 Occupancy: 0.05 } AllocatedSize: 2500000000 } Groups { GroupID: 2147483650 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2700000000 Occupancy: 0.054 } AllocatedSize: 2700000000 } Groups { GroupID: 2147483651 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2700000000 
Occupancy: 0.054 } AllocatedSize: 2700000000 } Groups { GroupID: 2147483652 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2500000000 Occupancy: 0.05 } AllocatedSize: 2500000000 } Groups { GroupID: 2147483653 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2800000000 Occupancy: 0.056 } AllocatedSize: 2800000000 } Groups { GroupID: 2147483654 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2700000000 Occupancy: 0.054 } AllocatedSize: 2700000000 } Groups { GroupID: 2147483655 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 3000000000 Occupancy: 0.06 } AllocatedSize: 3000000000 } Groups { GroupID: 2147483656 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2600000000 Occupancy: 0.052 } AllocatedSize: 2600000000 } Groups { GroupID: 2147483657 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 3000000000 Occupancy: 0.06 } AllocatedSize: 3000000000 } Groups { GroupID: 2147483658 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2600000000 Occupancy: 0.052 } AllocatedSize: 2600000000 } Groups { GroupID: 2147483659 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2900000000 Occupancy: 0.058 } AllocatedSize: 2900000000 } } 2025-05-29T15:28:33.128711Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} Tx{1484, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-05-29T15:28:33.128721Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} Tx{1484, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:28:33.128733Z node 12 :HIVE DEBUG: tx__update_tablet_groups.cpp:63: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{125050062069920}(72075186224037889,HIVE_REASSIGN_REASON_BALANCE,[]) 2025-05-29T15:28:33.128749Z node 12 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{125050062069920}: tablet 72075186224037889 channel 0 assigned to group 2147483649 2025-05-29T15:28:33.128752Z node 12 :HIVE DEBUG: tx__update_tablet_groups.cpp:171: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{125050062069920}: tablet 72075186224037889 skipped reassign of channel 0 2025-05-29T15:28:33.128756Z node 12 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{125050062069920}: tablet 72075186224037889 channel 1 assigned to group 2147483650 2025-05-29T15:28:33.128759Z node 12 :HIVE DEBUG: tx__update_tablet_groups.cpp:171: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{125050062069920}: tablet 72075186224037889 skipped reassign of channel 1 2025-05-29T15:28:33.128763Z node 12 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{125050062069920}: tablet 72075186224037889 channel 2 assigned to group 2147483658 2025-05-29T15:28:33.128779Z node 12 :HIVE DEBUG: tx__update_tablet_groups.cpp:171: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{125050062069920}: tablet 72075186224037889 skipped reassign of channel 2 2025-05-29T15:28:33.128785Z node 12 :HIVE WARN: 
tx__update_tablet_groups.cpp:272: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{125050062069920}: tablet 72075186224037889 wasn't changed 2025-05-29T15:28:33.128789Z node 12 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{125050062069920}: tablet 72075186224037889 skipped channel 0 2025-05-29T15:28:33.128811Z node 12 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{125050062069920}: tablet 72075186224037889 skipped channel 1 2025-05-29T15:28:33.128815Z node 12 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{125050062069920}: tablet 72075186224037889 skipped channel 2 2025-05-29T15:28:33.128833Z node 12 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{125050062069920}(72075186224037889)::Execute - TryToBoot was not successfull 2025-05-29T15:28:33.128846Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} Tx{1484, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{993, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-05-29T15:28:33.128851Z node 12 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} Tx{1484, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} >> Yq_1::Basic >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects [GOOD] Test command err: 2025-05-29T15:28:37.413908Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889978368517276:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:37.413932Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001900/r3tmp/tmpCFIYDB/pdisk_1.dat 2025-05-29T15:28:37.464946Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889978368517256:2079] 1748532517413733 != 1748532517413736 2025-05-29T15:28:37.466652Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:37.478419Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:227: [0x7183befe0380] stream accepted Name# Session ok# true peer# ipv6:[::1]:38270 2025-05-29T15:28:37.478582Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:301: [0x7183befe0380] facade attach Name# Session actor# [1:7509889978368517794:2250] peer# ipv6:[::1]:38270 2025-05-29T15:28:37.478618Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:268: [0x7183befe0380] stream done notification Name# Session ok# true peer# ipv6:[::1]:38270 2025-05-29T15:28:37.478664Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:230: Received TEvNotifiedWhenDone 2025-05-29T15:28:37.478772Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:547: [0x7183befe0380] stream finished Name# Session ok# false peer# unknown grpc status# (1) message# Request abandoned 2025-05-29T15:28:37.478784Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:580: [0x7183befe0380] deregistering request Name# Session peer# unknown (finish done) 2025-05-29T15:28:37.516242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-05-29T15:28:37.516267Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:37.517374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected >> TopicService::DifferentConsumers_TheRangesOverlap >> TPersQueueTest::SchemeshardRestart [FAIL] >> TPersQueueTest::SameOffset >> TPersQueueTest::WriteExistingBigValue [FAIL] >> TPersQueueTest::WriteEmptyData |71.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |71.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] Test command err: 2025-05-29T15:28:37.802186Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889978957895117:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:37.802202Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00190c/r3tmp/tmpklRjDk/pdisk_1.dat 2025-05-29T15:28:37.848681Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889978957895096:2079] 1748532517802075 != 1748532517802078 2025-05-29T15:28:37.850442Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:37.862079Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:227: [0x317b3edb9100] stream accepted Name# Session ok# true peer# ipv6:[::1]:54460 2025-05-29T15:28:37.862214Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:301: [0x317b3edb9100] facade attach Name# Session actor# [1:7509889978957895635:2251] peer# ipv6:[::1]:54460 2025-05-29T15:28:37.862230Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:401: [0x317b3edb9100] facade write Name# Session data# peer# ipv6:[::1]:54460 2025-05-29T15:28:37.862402Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:396: [0x317b3edb9100] facade write Name# Session data# peer# ipv6:[::1]:54460 grpc status# (0) message# 2025-05-29T15:28:37.862419Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:456: [0x317b3edb9100] write finished Name# Session ok# true peer# ipv6:[::1]:54460 2025-05-29T15:28:37.862448Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:347: Received TEvWriteFinished, success = 1 2025-05-29T15:28:37.862521Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:268: [0x317b3edb9100] stream done notification Name# Session ok# true peer# ipv6:[::1]:54460 2025-05-29T15:28:37.862574Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:456: [0x317b3edb9100] write finished Name# Session ok# true peer# ipv6:[::1]:54460 2025-05-29T15:28:37.862591Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:547: [0x317b3edb9100] stream finished Name# Session ok# true peer# ipv6:[::1]:54460 grpc status# (0) message# 
2025-05-29T15:28:37.862593Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:347: Received TEvWriteFinished, success = 1 2025-05-29T15:28:37.862606Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:580: [0x317b3edb9100] deregistering request Name# Session peer# ipv6:[::1]:54460 (finish done) 2025-05-29T15:28:37.927878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:37.927910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:37.928917Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected |71.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence >> Yq_1::CreateConnection_With_Existing_Name |71.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> BuildStatsHistogram::Many_Serial [GOOD] Test command err: adding part [0:0:1:0:0:0:0] data size (1.93MiB in total) adding group {0,0} PageId: 934 RowCount: 24000 DataSize: 1692763 GroupDataSize: 413676 ErasedRowCount: 0 LevelCount: 3 IndexSize: 49449 added slice [0, 24000) data size (1.61MiB - 0B) => 1.61MiB added small blobs data size => 1.73MiB added large blobs data size => 2.01MiB building histogram with row resolution 2400, data size resolution 206KiB slicing part [0:0:1:0:0:0:0]: { {rows: [0, 23999] keys: [{7, 10}, {80038, 26687}]} } slicing node Part: [0:0:1:0:0:0:0] PageId: 934 Level: 3 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2106439 BeginKey: {7, 10} EndKey: {80038, 26687} State: 0 => take adding node future events -1 Part: [0:0:1:0:0:0:0] PageId: 934 Level: 3 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2106439 BeginKey: {7, 10} EndKey: {80038, 26687} State: 0 iterating stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 2400 nextHistogramDataSize: 210643 closedRowCount: 0 closedDataSize: 0 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 2 currentKeyPointer: IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 934 Level: 3 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2106439 BeginKey: {7, 10} EndKey: {80038, 26687} State: 0 processing event IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 934 Level: 3 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2106439 BeginKey: {7, 10} EndKey: {80038, 26687} State: 0 loading node by row count triggerPart: [0:0:1:0:0:0:0] PageId: 934 Level: 3 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2106439 BeginKey: {7, 10} EndKey: {80038, 26687} State: 1 closedRowCount: 0 openedRowCount: 24000 nextHistogramRowCount: 2400 adding part [0:0:1:0:0:0:0] data size (1.93MiB in total) adding group {0,0} PageId: 934 RowCount: 24000 DataSize: 1692763 GroupDataSize: 413676 ErasedRowCount: 0 LevelCount: 3 IndexSize: 49449 added slice [0, 24000) data size (1.61MiB - 0B) => 1.61MiB added small blobs data size => 1.73MiB added large blobs data size => 2.01MiB building histogram with row resolution 2400, data size resolution 206KiB slicing part [0:0:1:0:0:0:0]: { {rows: [0, 23999] keys: [{7, 10}, {80038, 26687}]} } slicing node Part: [0:0:1:0:0:0:0] 
PageId: 934 Level: 3 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2106439 BeginKey: {7, 10} EndKey: {80038, 26687} State: 0 => take adding node future events -1 Part: [0:0:1:0:0:0:0] PageId: 934 Level: 3 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2106439 BeginKey: {7, 10} EndKey: {80038, 26687} State: 0 iterating stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 2400 nextHistogramDataSize: 210643 closedRowCount: 0 closedDataSize: 0 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 2 currentKeyPointer: IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 934 Level: 3 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2106439 BeginKey: {7, 10} EndKey: {80038, 26687} State: 0 processing event IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 934 Level: 3 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2106439 BeginKey: {7, 10} EndKey: {80038, 26687} State: 0 loading node by row count triggerPart: [0:0:1:0:0:0:0] PageId: 934 Level: 3 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2106439 BeginKey: {7, 10} EndKey: {80038, 26687} State: 1 closedRowCount: 0 openedRowCount: 24000 nextHistogramRowCount: 2400 adding event 0 IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 219 Level: 2 BeginRowId: 0 EndRowId: 2855 BeginDataSize: 0 EndDataSize: 252082 BeginKey: {7, 10} EndKey: {9445, 3156} State: 0 processing event IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 219 Level: 2 BeginRowId: 0 EndRowId: 2855 BeginDataSize: 0 EndDataSize: 252082 BeginKey: {7, 10} EndKey: {9445, 3156} State: 0 adding event 1 IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 219 Level: 2 BeginRowId: 0 EndRowId: 2855 BeginDataSize: 0 EndDataSize: 252082 BeginKey: {7, 10} EndKey: {9445, 3156} State: 1 adding event 1 IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 330 Level: 2 BeginRowId: 2855 EndRowId: 5712 BeginDataSize: 252082 EndDataSize: 503626 BeginKey: {9445, 3156} EndKey: {19015, 6346} State: 0 adding event 1 IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 330 Level: 2 BeginRowId: 2855 EndRowId: 5712 BeginDataSize: 252082 EndDataSize: 503626 BeginKey: {9445, 3156} EndKey: {19015, 6346} State: 0 adding event 1 IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 441 Level: 2 BeginRowId: 5712 EndRowId: 8565 BeginDataSize: 503626 EndDataSize: 754239 BeginKey: {19015, 6346} EndKey: {28576, 9533} State: 0 adding event 1 IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 441 Level: 2 BeginRowId: 5712 EndRowId: 8565 BeginDataSize: 503626 EndDataSize: 754239 BeginKey: {19015, 6346} EndKey: {28576, 9533} State: 0 adding event 1 IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 552 Level: 2 BeginRowId: 8565 EndRowId: 11434 BeginDataSize: 754239 EndDataSize: 1008444 BeginKey: {28576, 9533} EndKey: {38122, 12715} State: 0 adding event 1 IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 552 Level: 2 BeginRowId: 8565 EndRowId: 11434 BeginDataSize: 754239 EndDataSize: 1008444 BeginKey: {28576, 9533} EndKey: {38122, 12715} State: 0 adding event 1 IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 663 Level: 2 BeginRowId: 11434 EndRowId: 14280 BeginDataSize: 1008444 EndDataSize: 1257358 BeginKey: {38122, 12715} EndKey: {47692, 15905} State: 0 adding event 1 IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 663 Level: 2 BeginRowId: 11434 EndRowId: 14280 BeginDataSize: 1008444 EndDataSize: 1257358 BeginKey: {38122, 12715} EndKey: {47692, 15905} State: 0 adding event 1 IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 774 Level: 2 BeginRowId: 14280 EndRowId: 17140 BeginDataSize: 1257358 EndDataSize: 1508340 BeginKey: 
{47692, 15905} EndKey: {57265, 19096} State: 0 adding event 1 IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 774 Level: 2 BeginRowId: 14280 EndRowId: 17140 BeginDataSize: 1257358 EndDataSize: 1508340 BeginKey: {47692, 15905} EndKey: {57265, 19096} State: 0 adding event 1 IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 885 Level: 2 BeginRowId: 17140 EndRowId: 19992 BeginDataSize: 1508340 EndDataSize: 1755252 BeginKey: {57265, 19096} EndKey: {66697, 22240} State: 0 adding event 1 IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 885 Level: 2 BeginRowId: 17140 EndRowId: 19992 BeginDataSize: 1508340 EndDataSize: 1755252 BeginKey: {57265, 19096} EndKey: {66697, 22240} State: 0 adding event 1 IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 933 Level: 2 BeginRowId: 19992 EndRowId: 24000 BeginDataSize: 1755252 EndDataSize: 2106439 BeginKey: {66697, 22240} EndKey: {80038, 26687} State: 0 adding event 1 IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 933 Level: 2 BeginRowId: 19992 EndRowId: 24000 BeginDataSize: 1755252 EndDataSize: 2106439 BeginKey: {66697, 22240} EndKey: {80038, 26687} State: 0 checking stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 2400 nextHistogramDataSize: 210643 closedRowCount: 0 closedDataSize: 0 openedRowCount: 2855 openedDataSize: 252082 openedSortedByRowCount: 1 openedSortedByDataSize: 2 FutureEvents: 16 currentKeyPointer: IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 934 Level: 3 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2106439 BeginKey: {7, 10} EndKey: {80038, 26687} State: 3 iterating stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 2400 nextHistogramDataSize: 210643 closedRowCount: 0 closedDataSize: 0 openedRowCount: 2855 openedDataSize: 252082 openedSortedByRowCount: 1 openedSortedByDataSize: 2 FutureEvents: 16 currentKeyPointer: IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 219 Level: 2 BeginRowId: 0 EndRowId: 2855 BeginDataSize: 0 EndDataSize: 252082 BeginKey: {7, 10} EndKey: {9445, 3156} State: 1 processing event IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 219 Level: 2 BeginRowId: 0 EndRowId: 2855 BeginDataSize: 0 EndDataSize: 252082 BeginKey: {7, 10} EndKey: {9445, 3156} State: 1 checking stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 2400 nextHistogramDataSize: 210643 closedRowCount: 2855 closedDataSize: 252082 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 1 openedSortedByDataSize: 2 FutureEvents: 15 currentKeyPointer: IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 219 Level: 2 BeginRowId: 0 EndRowId: 2855 BeginDataSize: 0 EndDataSize: 252082 BeginKey: {7, 10} EndKey: {9445, 3156} State: 2 iterating stats.RowCountHistogram: 1 stats.DataSizeHistogram: 1 nextHistogramRowCount: 4800 nextHistogramDataSize: 421286 closedRowCount: 2855 closedDataSize: 252082 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 1 openedSortedByDataSize: 2 FutureEvents: 15 currentKeyPointer: IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 330 Level: 2 BeginRowId: 2855 EndRowId: 5712 BeginDataSize: 252082 EndDataSize: 503626 BeginKey: {9445, 3156} EndKey: {19015, 6346} State: 0 processing event IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 330 Level: 2 BeginRowId: 2855 EndRowId: 5712 BeginDataSize: 252082 EndDataSize: 503626 BeginKey: {9445, 3156} EndKey: {19015, 6346} State: 0 checking stats.RowCountHistogram: 1 stats.DataSizeHistogram: 1 nextHistogramRowCount: 4800 nextHistogramDataSize: 421286 closedRowCount: 2855 closedDataSize: 252082 openedRowCount: 2857 openedDataSize: 251544 openedSortedByRowCount: 2 
openedSortedByDataSize: 3 FutureEvents: 14 currentKeyPointer: IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 330 Level: 2 BeginRowId: 2855 EndRowId: 5712 BeginDataSize: 252082 EndDataSize: 503626 BeginKey: {9445, 3156} EndKey: {19015, 6346} State: 1 iterating stats.RowCountHistogram: 1 stats.DataSizeHistogram: 1 nextHistogramRowCount: 4800 nextHistogramDataSize: 421286 closedRowCount: 2855 closedDataSize: 252082 openedRowCount: 2857 openedDataSize: 251544 openedSortedByRowCount: 2 openedSortedByDataSize: 3 FutureEvents: 14 currentKeyPointer: IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 330 Level: 2 BeginRowId: 2855 EndRowId: 5712 BeginDataSize: 252082 EndDataSize: 503626 BeginKey: {9445, 3156} EndKey: {19015, 6346} State: 1 processing event IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 330 Level: 2 BeginRowId: 2855 EndRowId: 5712 BeginDataSize: 252082 EndDataSize: 503626 BeginKey: {9445, 3156} EndKey: {19015, 6346} State: 1 checking stats.RowCountHistogram: 1 stats.DataSizeHistogram: 1 nextHistogramRowCount: 4800 nextHistogramDataSize: 421286 closedRowCount: 5712 closedDataSize: 503626 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 2 openedSortedByDataSize: 3 FutureEvents: 13 currentKeyPointer: IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 330 Level: 2 BeginRowId: 2855 EndRowId: 5712 BeginDataSize: 252082 EndDataSize: 503626 BeginKey: {9445, 3156} EndKey: {19015, 6346} State: 2 iterating stats.RowCountHistogram: 2 stats.DataSizeHistogram: 2 nextHistogramRowCount: 7200 nextHistogramDataSize: 631929 closedRowCount: 5712 closedDataSize: 503626 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 2 openedSortedByDataSize: 3 FutureEvents: 13 currentKeyPointer: IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 441 Level: 2 BeginRowId: 5712 EndRowId: 8565 BeginDataSize: 503626 EndDataSize: 754239 BeginKey: {19015, 6346} EndKey: {28576, 9533} State: 0 processing event IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 441 Level: 2 BeginRowId: 5712 EndRowId: 8565 BeginDataSize: 503626 EndDataSize: 754239 BeginKey: {19015, 6346} EndKey: {28576, 9533} State: 0 loading node by row count triggerPart: [0:0:1:0:0:0:0] PageId: 330 Level: 2 BeginRowId: 2855 EndRowId: 5712 BeginDataSize: 252082 EndDataSize: 503626 BeginKey: {9445, 3156} EndKey: {19015, 6346} State: 2 closedRowCount: 5712 openedRowCount: 2853 nextHistogramRowCount: 7200 loading node by row count triggerPart: [0:0:1:0:0:0:0] PageId: 219 Level: 2 BeginRowId: 0 EndRowId: 2855 BeginDataSize: 0 EndDataSize: 252082 BeginKey: {7, 10} EndKey: {9445, 3156} State: 2 closedRowCount: 5712 openedRowCount: 2853 nextHistogramRowCount: 7200 loading node by row count tri ... 
65} State: 1 iterating stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84700 closedDataSize: 8822453 openedRowCount: 100 openedDataSize: 10550 openedSortedByRowCount: 848 openedSortedByDataSize: 848 FutureEvents: 305 currentKeyPointer: IsBegin: 0 Part: [0:0:848:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10550 BeginKey: {282451, 94158} EndKey: {282772, 94265} State: 1 processing event IsBegin: 0 Part: [0:0:848:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10550 BeginKey: {282451, 94158} EndKey: {282772, 94265} State: 1 checking stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84800 closedDataSize: 8833003 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 848 openedSortedByDataSize: 848 FutureEvents: 304 currentKeyPointer: IsBegin: 0 Part: [0:0:848:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10550 BeginKey: {282451, 94158} EndKey: {282772, 94265} State: 2 iterating stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84800 closedDataSize: 8833003 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 848 openedSortedByDataSize: 848 FutureEvents: 304 currentKeyPointer: IsBegin: 1 Part: [0:0:849:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10111 BeginKey: {282775, 94266} EndKey: {283117, 94380} State: 0 processing event IsBegin: 1 Part: [0:0:849:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10111 BeginKey: {282775, 94266} EndKey: {283117, 94380} State: 0 checking stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84800 closedDataSize: 8833003 openedRowCount: 100 openedDataSize: 10111 openedSortedByRowCount: 849 openedSortedByDataSize: 849 FutureEvents: 303 currentKeyPointer: IsBegin: 1 Part: [0:0:849:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10111 BeginKey: {282775, 94266} EndKey: {283117, 94380} State: 1 iterating stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84800 closedDataSize: 8833003 openedRowCount: 100 openedDataSize: 10111 openedSortedByRowCount: 849 openedSortedByDataSize: 849 FutureEvents: 303 currentKeyPointer: IsBegin: 0 Part: [0:0:849:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10111 BeginKey: {282775, 94266} EndKey: {283117, 94380} State: 1 processing event IsBegin: 0 Part: [0:0:849:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10111 BeginKey: {282775, 94266} EndKey: {283117, 94380} State: 1 checking stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84900 closedDataSize: 8843114 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 849 openedSortedByDataSize: 849 FutureEvents: 302 currentKeyPointer: IsBegin: 0 Part: [0:0:849:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10111 BeginKey: {282775, 94266} EndKey: {283117, 94380} State: 2 iterating stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 
nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84900 closedDataSize: 8843114 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 849 openedSortedByDataSize: 849 FutureEvents: 302 currentKeyPointer: IsBegin: 1 Part: [0:0:850:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10583 BeginKey: {283123, 94382} EndKey: {283444, 94489} State: 0 processing event IsBegin: 1 Part: [0:0:850:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10583 BeginKey: {283123, 94382} EndKey: {283444, 94489} State: 0 checking stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84900 closedDataSize: 8843114 openedRowCount: 100 openedDataSize: 10583 openedSortedByRowCount: 850 openedSortedByDataSize: 850 FutureEvents: 301 currentKeyPointer: IsBegin: 1 Part: [0:0:850:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10583 BeginKey: {283123, 94382} EndKey: {283444, 94489} State: 1 iterating stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84900 closedDataSize: 8843114 openedRowCount: 100 openedDataSize: 10583 openedSortedByRowCount: 850 openedSortedByDataSize: 850 FutureEvents: 301 currentKeyPointer: IsBegin: 0 Part: [0:0:850:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10583 BeginKey: {283123, 94382} EndKey: {283444, 94489} State: 1 processing event IsBegin: 0 Part: [0:0:850:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10583 BeginKey: {283123, 94382} EndKey: {283444, 94489} State: 1 checking stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 85000 closedDataSize: 8853697 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 850 openedSortedByDataSize: 850 FutureEvents: 300 currentKeyPointer: IsBegin: 0 Part: [0:0:850:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10583 BeginKey: {283123, 94382} EndKey: {283444, 94489} State: 2 iterating stats.RowCountHistogram: 8 stats.DataSizeHistogram: 9 nextHistogramRowCount: 90000 nextHistogramDataSize: 18446744073709551615 closedRowCount: 85000 closedDataSize: 8853697 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 850 openedSortedByDataSize: 850 FutureEvents: 300 currentKeyPointer: IsBegin: 1 Part: [0:0:851:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10456 BeginKey: {283447, 94490} EndKey: {283771, 94598} State: 0 processing event IsBegin: 1 Part: [0:0:851:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10456 BeginKey: {283447, 94490} EndKey: {283771, 94598} State: 0 checking stats.RowCountHistogram: 8 stats.DataSizeHistogram: 9 nextHistogramRowCount: 90000 nextHistogramDataSize: 18446744073709551615 closedRowCount: 85000 closedDataSize: 8853697 openedRowCount: 100 openedDataSize: 10456 openedSortedByRowCount: 851 openedSortedByDataSize: 851 FutureEvents: 299 currentKeyPointer: IsBegin: 1 Part: [0:0:851:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10456 BeginKey: {283447, 94490} EndKey: {283771, 94598} State: 1 iterating stats.RowCountHistogram: 8 stats.DataSizeHistogram: 9 nextHistogramRowCount: 90000 nextHistogramDataSize: 
18446744073709551615 closedRowCount: 85000 closedDataSize: 8853697 openedRowCount: 100 openedDataSize: 10456 openedSortedByRowCount: 851 openedSortedByDataSize: 851 FutureEvents: 299 currentKeyPointer: IsBegin: 0 Part: [0:0:851:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10456 BeginKey: {283447, 94490} EndKey: {283771, 94598} State: 1 processing event IsBegin: 0 Part: [0:0:851:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10456 BeginKey: {283447, 94490} EndKey: {283771, 94598} State: 1 checking stats.RowCountHistogram: 8 stats.DataSizeHistogram: 9 nextHistogramRowCount: 90000 nextHistogramDataSize: 18446744073709551615 closedRowCount: 85100 closedDataSize: 8864153 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 851 openedSortedByDataSize: 851 FutureEvents: 298 currentKeyPointer: IsBegin: 0 Part: [0:0:851:0:0:0:0] PageId: 148 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10456 BeginKey: {283447, 94490} EndKey: {283771, 94598} State: 2 finished stats.RowCountHistogram: 9 stats.DataSizeHistogram: 9 nextHistogramRowCount: 18446744073709551615 nextHistogramDataSize: 18446744073709551615 closedRowCount: 85100 closedDataSize: 8864153 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 851 openedSortedByDataSize: 851 FutureEvents: 298 Touched 0% bytes, 0 pages RowCountHistogram: 5% (actual 6%) key = (16984, 5669) value = 5100 (actual 6998 - -1% error) 10% (actual 9%) key = (50416, 16813) value = 15100 (actual 16798 - -1% error) 10% (actual 9%) key = (83701, 27908) value = 25100 (actual 26598 - -1% error) 10% (actual 9%) key = (116986, 39003) value = 35100 (actual 36398 - -1% error) 10% (actual 9%) key = (150319, 50114) value = 45100 (actual 46198 - -1% error) 10% (actual 9%) key = (183700, 61241) value = 55100 (actual 55998 - 0% error) 10% (actual 9%) key = (217081, 72368) value = 65100 (actual 65798 - 0% error) 10% (actual 9%) key = (250486, 83503) value = 75100 (actual 75598 - 0% error) 10% (actual 9%) key = (283771, 94598) value = 85100 (actual 85398 - 0% error) 14% (actual 14%) DataSizeHistogram: 5% (actual 6%) key = (16648, 5557) value = 524891 (actual 723287 - -1% error) 10% (actual 9%) key = (50086, 16703) value = 1569936 (actual 1747238 - -1% error) 9% (actual 9%) key = (83356, 27793) value = 2610698 (actual 2767306 - -1% error) 10% (actual 9%) key = (116647, 38890) value = 3652143 (actual 3787394 - -1% error) 9% (actual 9%) key = (149656, 49893) value = 4685435 (actual 4800597 - -1% error) 10% (actual 9%) key = (183040, 61021) value = 5728420 (actual 5822785 - 0% error) 10% (actual 9%) key = (216727, 72250) value = 6776444 (actual 6848929 - 0% error) 9% (actual 9%) key = (250144, 83389) value = 7813547 (actual 7865227 - 0% error) 9% (actual 9%) key = (283444, 94489) value = 8853697 (actual 8884838 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1000 pages RowCountHistogram: 10% (actual 11%) key = (33379, 11134) value = 10000 (actual 11800 - -1% error) 10% (actual 9%) key = (66721, 22248) value = 20000 (actual 21600 - -1% error) 10% (actual 9%) key = (100015, 33346) value = 30000 (actual 31400 - -1% error) 10% (actual 9%) key = (133258, 44427) value = 40000 (actual 41200 - -1% error) 10% (actual 9%) key = (166621, 55548) value = 50000 (actual 51000 - -1% error) 10% (actual 9%) key = (200041, 66688) value = 60000 (actual 60800 - 0% error) 10% (actual 9%) key = (233449, 77824) value = 70000 (actual 70600 - 0% error) 10% (actual 9%) key = (266824, 
88949) value = 80000 (actual 80400 - 0% error) 10% (actual 9%) key = (300073, 100032) value = 90000 (actual 90200 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 11%) key = (33187, NULL) value = 1041247 (actual 1229534 - -1% error) 10% (actual 9%) key = (66517, NULL) value = 2082456 (actual 2249844 - -1% error) 10% (actual 9%) key = (99709, NULL) value = 3123684 (actual 3270138 - -1% error) 10% (actual 9%) key = (132925, NULL) value = 4164886 (actual 4290603 - -1% error) 10% (actual 9%) key = (166246, NULL) value = 5206111 (actual 5311117 - -1% error) 10% (actual 9%) key = (199678, NULL) value = 6247321 (actual 6331068 - 0% error) 10% (actual 9%) key = (233290, NULL) value = 7288529 (actual 7350869 - 0% error) 10% (actual 9%) key = (266701, NULL) value = 8329759 (actual 8371441 - 0% error) 10% (actual 9%) key = (300052, NULL) value = 9371030 (actual 9392083 - 0% error) 9% (actual 9%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) >> TopicService::DifferentConsumers_TheRangesOverlap [FAIL] >> Yq_1::DescribeJob >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [FAIL] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> Yq_1::DeleteConnections >> Yq_1::CreateQuery_With_Idempotency >> TSchemeShardSplitBySizeTest::Make20MergeOperationsWithInflyLimit5 [GOOD] >> TopicService::UnknownConsumer >> TPersQueueTest::SameOffset [FAIL] >> TPersQueueTest::SchemeOperationsTest >> TPersQueueTest::WriteEmptyData [FAIL] >> TPersQueueTest::WriteNonExistingPartition >> Yq_1::Basic_Null >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::Make20MergeOperationsWithInflyLimit5 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:36.514406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:36.514428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:36.514432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:36.514436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:36.514447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:36.514450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:36.514456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:36.514467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:36.514552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:36.514605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:36.524183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:36.524205Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:36.526426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:36.526550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:36.526584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:36.528449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:36.528625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:36.528768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:36.528834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:36.529320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:36.529376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:36.529672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:36.529682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:36.529704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:36.529715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:36.529722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:36.529759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at 
schemeshard: 72057594046678944 2025-05-29T15:28:36.531104Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:36.550981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:36.551071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:36.551144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:36.551191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:36.551202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:36.552147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:36.552183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:36.552243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:36.552255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:36.552262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:36.552268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:36.552823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:36.552840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:36.552846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:36.553267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:36.553281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:36.553288Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:36.553304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:36.553767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:36.554494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:36.554528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:36.554678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:36.554695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:36.554701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:36.554767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:36.554774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:36.554800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:36.554808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:36.555268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:36.555279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:36.555330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
8Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:462: Notify src datashard 72075186233409585 on partitioning changed splitOp# 173 at tablet 72057594046678944 2025-05-29T15:28:39.104114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 23 PathOwnerId: 72057594046678944, cookie: 173 2025-05-29T15:28:39.104126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 23 PathOwnerId: 72057594046678944, cookie: 173 2025-05-29T15:28:39.104131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 173 2025-05-29T15:28:39.104140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 173, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 23 2025-05-29T15:28:39.104148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 25 2025-05-29T15:28:39.104168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 173, ready parts: 0/1, is published: true 2025-05-29T15:28:39.104736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 173:0 from tablet: 72057594046678944 to tablet: 72075186233409584 cookie: 72057594046678944:39 msg type: 269553158 2025-05-29T15:28:39.104763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 173:0 from tablet: 72057594046678944 to tablet: 72075186233409585 cookie: 72057594046678944:40 msg type: 269553158 2025-05-29T15:28:39.105447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 173 2025-05-29T15:28:39.106058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 173:0, at schemeshard: 72057594046678944, message: OperationCookie: 173 TabletId: 72075186233409584 2025-05-29T15:28:39.106073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 173:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409584, at schemeshard: 72057594046678944 2025-05-29T15:28:39.106421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 173:0, at schemeshard: 72057594046678944, message: OperationCookie: 173 TabletId: 72075186233409585 2025-05-29T15:28:39.106429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 173:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409585, at schemeshard: 72057594046678944 2025-05-29T15:28:39.106447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#173:0 progress is 1/1 2025-05-29T15:28:39.106452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 173 ready parts: 1/1 
2025-05-29T15:28:39.106457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#173:0 progress is 1/1 2025-05-29T15:28:39.106461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 173 ready parts: 1/1 2025-05-29T15:28:39.106467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 173, ready parts: 1/1, is published: true 2025-05-29T15:28:39.106478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1695:3205] message: TxId: 173 2025-05-29T15:28:39.106486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 173 ready parts: 1/1 2025-05-29T15:28:39.106492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 173:0 2025-05-29T15:28:39.106497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 173:0 2025-05-29T15:28:39.106541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 24 2025-05-29T15:28:39.107260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 173:0, at schemeshard: 72057594046678944 2025-05-29T15:28:39.107347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 173:0, at schemeshard: 72057594046678944 2025-05-29T15:28:39.107353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:260: Unable to activate 173:0 2025-05-29T15:28:39.107395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 173: got EvNotifyTxCompletionResult 2025-05-29T15:28:39.107401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 173: satisfy waiter [1:5205:6287] 2025-05-29T15:28:39.107519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 953 RawX2: 4294970006 } TabletId: 72075186233409584 State: 4 2025-05-29T15:28:39.107533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409584, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:28:39.107586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 955 RawX2: 4294970007 } TabletId: 72075186233409585 State: 4 2025-05-29T15:28:39.107592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409585, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:28:39.108182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:39 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:28:39.108271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:40 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:28:39.108327Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] 
TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 39 TxId_Deprecated: 39 TabletID: 72075186233409584 Forgetting tablet 72075186233409584 2025-05-29T15:28:39.108756Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 40 TxId_Deprecated: 40 TabletID: 72075186233409585 2025-05-29T15:28:39.108867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 39 ShardOwnerId: 72057594046678944 ShardLocalIdx: 39, at schemeshard: 72057594046678944 2025-05-29T15:28:39.108940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 23 Forgetting tablet 72075186233409585 2025-05-29T15:28:39.109709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 40 ShardOwnerId: 72057594046678944 ShardLocalIdx: 40, at schemeshard: 72057594046678944 2025-05-29T15:28:39.109757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 22 2025-05-29T15:28:39.110395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:39 2025-05-29T15:28:39.110408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:39 tabletId 72075186233409584 2025-05-29T15:28:39.110469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:40 2025-05-29T15:28:39.110475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:40 tabletId 72075186233409585 TestWaitNotification: OK eventTxId 173 2025-05-29T15:28:39.110787Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:39.110847Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 115us result status StatusSuccess 2025-05-29T15:28:39.110946Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 123 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 21 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 
IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 20 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 20 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] >> Yq_1::ModifyConnections >> TopicService::UnknownConsumer [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:57:2097]) rebooted! !Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed! new actor is[2:82:2111] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:86:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:88:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! new actor is[7:87:2114] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:173:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:85:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:88:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:89:2057] recipient: [8:87:2116] Leader for TabletID 72057594037927937 is [8:90:2117] sender: [8:91:2057] recipient: [8:87:2116] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! new actor is[8:90:2117] Leader for TabletID 72057594037927937 is [8:90:2117] sender: [8:176:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:85:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:88:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:89:2057] recipient: [9:87:2116] Leader for TabletID 72057594037927937 is [9:90:2117] sender: [9:91:2057] recipient: [9:87:2116] !Reboot 72057594037927937 (actor [9:57:2097]) rebooted! !Reboot 72057594037927937 (actor [9:57:2097]) tablet resolver refreshed! new actor is[9:90:2117] Leader for TabletID 72057594037927937 is [9:90:2117] sender: [9:176:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:86:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:89:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:90:2057] recipient: [10:88:2116] Leader for TabletID 72057594037927937 is [10:91:2117] sender: [10:92:2057] recipient: [10:88:2116] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! new actor is[10:91:2117] Leader for TabletID 72057594037927937 is [10:91:2117] sender: [10:177:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:89:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:92:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:93:2057] recipient: [11:91:2119] Leader for TabletID 72057594037927937 is [11:94:2120] sender: [11:95:2057] recipient: [11:91:2119] !Reboot 72057594037927937 (actor [11:57:2097]) rebooted! !Reboot 72057594037927937 (actor [11:57:2097]) tablet resolver refreshed! new actor is[11:94:2120] Leader for TabletID 72057594037927937 is [11:94:2120] sender: [11:180:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 
7 is [27:57:2097] sender: [27:90:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:93:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:57:2097] sender: [27:94:2057] recipient: [27:92:2119] Leader for TabletID 72057594037927937 is [27:95:2120] sender: [27:96:2057] recipient: [27:92:2119] !Reboot 72057594037927937 (actor [27:57:2097]) rebooted! !Reboot 72057594037927937 (actor [27:57:2097]) tablet resolver refreshed! new actor is[27:95:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:55:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:55:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:58:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:57:2097] sender: [28:75:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:55:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:55:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:58:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:57:2097] sender: [29:75:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:55:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:55:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:58:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:75:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:77:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:80:2057] recipient: [30:79:2110] Leader for TabletID 72057594037927937 is [30:57:2097] sender: [30:81:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:82:2111] sender: [30:83:2057] recipient: [30:79:2110] !Reboot 72057594037927937 (actor [30:57:2097]) rebooted! !Reboot 72057594037927937 (actor [30:57:2097]) tablet resolver refreshed! new actor is[30:82:2111] Leader for TabletID 72057594037927937 is [30:82:2111] sender: [30:168:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:55:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:55:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:58:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:75:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:57:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:77:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:80:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:57:2097] sender: [31:81:2057] recipient: [31:79:2110] Leader for TabletID 72057594037927937 is [31:82:2111] sender: [31:83:2057] recipient: [31:79:2110] !Reboot 72057594037927937 (actor [31:57:2097]) rebooted! !Reboot 72057594037927937 (actor [31:57:2097]) tablet resolver refreshed! 
new actor is[31:82:2111] Leader for TabletID 72057594037927937 is [31:82:2111] sender: [31:168:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:55:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:55:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:58:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:75:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:78:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:80:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:57:2097] sender: [32:82:2057] recipient: [32:81:2110] Leader for TabletID 72057594037927937 is [32:83:2111] sender: [32:84:2057] recipient: [32:81:2110] !Reboot 72057594037927937 (actor [32:57:2097]) rebooted! !Reboot 72057594037927937 (actor [32:57:2097]) tablet resolver refreshed! new actor is[32:83:2111] Leader for TabletID 72057594037927937 is [32:83:2111] sender: [32:169:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:55:2057] recipient: [33:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:55:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:58:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:75:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:81:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:83:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:85:2057] recipient: [33:84:2113] Leader for TabletID 72057594037927937 is [33:86:2114] sender: [33:87:2057] recipient: [33:84:2113] !Reboot 72057594037927937 (actor [33:57:2097]) rebooted! !Reboot 72057594037927937 (actor [33:57:2097]) tablet resolver refreshed! new actor is[33:86:2114] Leader for TabletID 72057594037927937 is [33:86:2114] sender: [33:172:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:55:2057] recipient: [34:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:55:2057] recipient: [34:52:2095] Leader for TabletID 72057594037927937 is [34:57:2097] sender: [34:58:2057] recipient: [34:52:2095] Leader for TabletID 72057594037927937 is [34:57:2097] sender: [34:75:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [34:57:2097] sender: [34:81:2057] recipient: [34:36:2083] Leader for TabletID 72057594037927937 is [34:57:2097] sender: [34:84:2057] recipient: [34:83:2113] Leader for TabletID 72057594037927937 is [34:57:2097] sender: [34:85:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:86:2114] sender: [34:87:2057] recipient: [34:83:2113] !Reboot 72057594037927937 (actor [34:57:2097]) rebooted! !Reboot 72057594037927937 (actor [34:57:2097]) tablet resolver refreshed! 
new actor is[34:86:2114] Leader for TabletID 72057594037927937 is [34:86:2114] sender: [34:172:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:55:2057] recipient: [35:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:55:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:58:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:75:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:82:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:85:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:86:2057] recipient: [35:84:2113] Leader for TabletID 72057594037927937 is [35:87:2114] sender: [35:88:2057] recipient: [35:84:2113] !Reboot 72057594037927937 (actor [35:57:2097]) rebooted! !Reboot 72057594037927937 (actor [35:57:2097]) tablet resolver refreshed! new actor is[35:87:2114] Leader for TabletID 72057594037927937 is [35:87:2114] sender: [35:173:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:55:2057] recipient: [36:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:55:2057] recipient: [36:52:2095] Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:58:2057] recipient: [36:52:2095] Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:75:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:85:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:88:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:89:2057] recipient: [36:87:2116] Leader for TabletID 72057594037927937 is [36:90:2117] sender: [36:91:2057] recipient: [36:87:2116] !Reboot 72057594037927937 (actor [36:57:2097]) rebooted! !Reboot 72057594037927937 (actor [36:57:2097]) tablet resolver refreshed! new actor is[36:90:2117] Leader for TabletID 72057594037927937 is [36:90:2117] sender: [36:176:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:55:2057] recipient: [37:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:55:2057] recipient: [37:51:2095] Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:58:2057] recipient: [37:51:2095] Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:75:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:85:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:88:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:89:2057] recipient: [37:87:2116] Leader for TabletID 72057594037927937 is [37:90:2117] sender: [37:91:2057] recipient: [37:87:2116] !Reboot 72057594037927937 (actor [37:57:2097]) rebooted! !Reboot 72057594037927937 (actor [37:57:2097]) tablet resolver refreshed! 
new actor is[37:90:2117]
Leader for TabletID 72057594037927937 is [37:90:2117] sender: [37:176:2057] recipient: [37:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:55:2057] recipient: [38:52:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:55:2057] recipient: [38:52:2095]
Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:58:2057] recipient: [38:52:2095]
Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:75:2057] recipient: [38:14:2061]
!Reboot 72057594037927937 (actor [38:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:86:2057] recipient: [38:36:2083]
Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:88:2057] recipient: [38:14:2061]
Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:90:2057] recipient: [38:89:2116]
Leader for TabletID 72057594037927937 is [38:91:2117] sender: [38:92:2057] recipient: [38:89:2116]
!Reboot 72057594037927937 (actor [38:57:2097]) rebooted!
!Reboot 72057594037927937 (actor [38:57:2097]) tablet resolver refreshed! new actor is[38:91:2117]
Leader for TabletID 72057594037927937 is [38:91:2117] sender: [38:177:2057] recipient: [38:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:55:2057] recipient: [39:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:55:2057] recipient: [39:50:2095]
Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:58:2057] recipient: [39:50:2095]
Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:75:2057] recipient: [39:14:2061]
>> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [FAIL]
>> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError
>> Yq_1::Basic [FAIL]
>> Yq_1::Basic_EmptyList
>> TConsistentOpsWithReboots::CreateIndexedTableAndForceDropSimultaneously [GOOD]
>> TopicService::UnknownTopic
>> TPersQueueTest::SchemeOperationsTest [FAIL]
>> TPersQueueTest::SchemeOperationFirstClassCitizen
>> TPersQueueTest::WriteNonExistingPartition [FAIL]
>> TPersQueueTest::WriteNonExistingTopic
>> Yq_1::ListConnections
|71.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index
|71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index
|71.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index
|71.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query
|71.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query
|71.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CreateIndexedTableAndForceDropSimultaneously [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient:
[1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:28:24.771942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:24.771974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:24.771979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:24.771985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:24.771992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:24.771997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:24.772007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:24.772023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:24.772147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:24.772244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:24.789056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:28:24.789085Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:24.789203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:28:24.796582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:24.796629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:24.796672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:24.799805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:24.799908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:24.800002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:24.800152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:24.800652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:24.800685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:24.800906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:24.800913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:24.800939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:24.800945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:24.800949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:24.800962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:28:24.802287Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:28:24.823728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:24.823825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:24.823907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:24.823957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:24.823969Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:24.826049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:24.826085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:24.826152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:24.826164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:24.826170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:24.826176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:24.826717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:24.826730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:24.826735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:24.827109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:24.827119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:24.827125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:24.827133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:24.827756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:24.828164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:24.828205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: 
advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:24.828402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:24.828427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:24.828811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:24.828892Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... Z node 63 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:40.227616Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:40.227619Z node 63 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:28:40.227622Z node 63 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:28:40.227625Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:28:40.227727Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:40.227750Z node 63 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:40.227755Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:40.227757Z node 63 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:28:40.227760Z node 63 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-29T15:28:40.227765Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:28:40.227772Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2025-05-29T15:28:40.227777Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [63:309:2299] 2025-05-29T15:28:40.227802Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:28:40.228575Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:28:40.228610Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:40.228632Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:28:40.228663Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 2 candidates, at schemeshard: 72057594046678944 2025-05-29T15:28:40.228704Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:40.228715Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:28:40.228721Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-29T15:28:40.228740Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:28:40.228747Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:28:40.228753Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:28:40.228758Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:28:40.228763Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:28:40.228768Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:28:40.228774Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:28:40.228779Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: 
TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-29T15:28:40.228784Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:40.229327Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:40.229353Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:40.229365Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:40.229377Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:40.229391Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:28:40.229400Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [63:310:2300] 2025-05-29T15:28:40.229730Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 5 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted wait until 72075186233409554 is deleted wait until 72075186233409555 is deleted 2025-05-29T15:28:40.229825Z node 63 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:28:40.229838Z node 63 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-05-29T15:28:40.229847Z node 63 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2025-05-29T15:28:40.229854Z node 63 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2025-05-29T15:28:40.229863Z node 63 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2025-05-29T15:28:40.229872Z node 63 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2025-05-29T15:28:40.229880Z node 63 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 2025-05-29T15:28:40.229889Z node 63 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409553 2025-05-29T15:28:40.229896Z node 63 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409554 2025-05-29T15:28:40.229906Z node 63 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409555 
Deleted tabletId 72075186233409546
Deleted tabletId 72075186233409547
Deleted tabletId 72075186233409548
Deleted tabletId 72075186233409549
Deleted tabletId 72075186233409550
Deleted tabletId 72075186233409551
Deleted tabletId 72075186233409552
Deleted tabletId 72075186233409553
Deleted tabletId 72075186233409554
Deleted tabletId 72075186233409555
2025-05-29T15:28:40.230028Z node 63 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:28:40.230085Z node 63 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 70us result status StatusSuccess
2025-05-29T15:28:40.230177Z node 63 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TChargeBTreeIndex::OneNode_Groups_History [GOOD]
>> TChargeBTreeIndex::FewNodes
>> Yq_1::CreateConnection_With_Existing_Name [FAIL]
>> Yq_1::CreateConnections_With_Idempotency
>> Yq_1::DescribeJob [FAIL]
>> Yq_1::DescribeQuery
>> PrivateApi::PingTask
>> Yq_1::CreateQuery_With_Idempotency [FAIL]
>> Yq_1::CreateQuery_Without_Connection
>> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady [GOOD]
>> IndexBuildTest::RejectsCancel
>> TopicService::UnknownTopic [FAIL]
>> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [FAIL]
>> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError
>> Yq_1::DeleteConnections [FAIL]
>> Yq_1::Create_And_Modify_The_Same_Connection
>> IntermediateDirsReboots::CreateKesusWithIntermediateDirs [GOOD]
>> TopicService::UseDoubleSlashInTopicPath
>> KqpWrite::Insert
>> TPersQueueTest::SchemeOperationFirstClassCitizen [FAIL]
>> TPersQueueTest::SchemeOperationsCheckPropValues
>> TPersQueueTest::WriteNonExistingTopic [FAIL]
>> TPersQueueTest::WriteAfterAlter
>> Yq_1::ModifyConnections [FAIL]
>> Yq_1::ModifyQuery
------- [TM] {default-linux-x86_64, relwithdebinfo}
ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateKesusWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:28:26.599918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:26.599938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:26.599941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:26.599945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:26.599949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:26.599952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:26.599958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:26.599969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:26.600069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:26.600135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:26.609846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:28:26.609868Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:26.609955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 
is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:28:26.612070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:26.612094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:26.612123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:26.614191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:26.614273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:26.614363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:26.614560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:26.615327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:26.615366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:26.615633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:26.615641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:26.615668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:26.615674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:26.615679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:26.615695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:28:26.616815Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:28:26.629933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:26.630023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:26.630083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:26.630119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:26.630127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:26.630846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:26.630869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:26.630923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:26.630930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:26.630934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:26.630938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:26.631255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:26.631266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:26.631272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:26.631535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:26.631543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:26.631547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:26.631552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:26.631968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:26.632242Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:26.632274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:26.632447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:26.632465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:26.632482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:26.632533Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 5-29T15:28:41.727776Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-05-29T15:28:41.727778Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 6 2025-05-29T15:28:41.727780Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 6 2025-05-29T15:28:41.727782Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2025-05-29T15:28:41.727784Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 3 2025-05-29T15:28:41.728134Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:41.728147Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:41.728151Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:28:41.728154Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-05-29T15:28:41.728158Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove 
publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:28:41.728303Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:41.728311Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:41.728313Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:28:41.728316Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 6 2025-05-29T15:28:41.728318Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:28:41.728372Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:41.728379Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:41.728383Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:28:41.728386Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2025-05-29T15:28:41.728388Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:28:41.728550Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:41.728557Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:41.728560Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:28:41.728562Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-05-29T15:28:41.728564Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for 
pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:28:41.728592Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:41.728598Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:28:41.728602Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:28:41.728605Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-05-29T15:28:41.728609Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-29T15:28:41.728616Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2025-05-29T15:28:41.728621Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [61:371:2350] 2025-05-29T15:28:41.729070Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:41.729115Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:41.729150Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:41.729161Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:41.729300Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:41.729311Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:28:41.729315Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [61:372:2351] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:28:41.729406Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:41.729436Z node 61 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 39us result status 
StatusSuccess
2025-05-29T15:28:41.729490Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeKesus CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 KesusVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } Kesus { Name: "z" PathId: 6 KesusTabletId: 72075186233409546 Config { } Version: 2 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:28:41.729527Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:28:41.729540Z node 61 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 14us result status StatusPathDoesNotExist
2025-05-29T15:28:41.729552Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> Yq_1::Basic_EmptyList [FAIL]
>> Yq_1::Basic_EmptyDict
>> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices [GOOD]
>> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky
>> TopicService::UseDoubleSlashInTopicPath [FAIL]
>> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [FAIL]
>> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError
>> Yq_1::ListConnections [FAIL]
>> Yq_1::ListConnectionsOnEmptyConnectionsTable
>> TChargeBTreeIndex::FewNodes [GOOD]
>> TChargeBTreeIndex::FewNodes_Groups
>> PrivateApi::PingTask [GOOD]
>> PrivateApi::GetTask
>> TPersQueueTest::SchemeOperationsCheckPropValues [FAIL]
>> TPersQueueTest::ReadRuleServiceType
>> TPersQueueTest::WriteAfterAlter [FAIL]
>> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed
>> KqpInplaceUpdate::SingleRowStr-UseSink
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry [GOOD]
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables
>> TopicService::RelativePath [FAIL]
>> Yq_1::CreateQuery_Without_Connection [FAIL]
>> TDSProxyGetTest::TestBlock42WipedErrorWithTwoBlobs [GOOD]
>> TDSProxyPatchTest::NaiveErrorOnPut_Erasure4Plus2Block
>> Yq_1::Create_And_Modify_The_Same_Connection [FAIL]
>> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [FAIL]
>> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD]
>> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit
>> TDSProxyPatchTest::NaiveErrorOnPut_Erasure4Plus2Block [GOOD]
>> TDSProxyPutTest::TestBlock42PutAllOk
>> TDSProxyPutTest::TestBlock42PutAllOk [GOOD]
>> TDsProxyQuorumTracker::CheckFailModelErasure3Plus1Block
>> Yq_1::Basic_EmptyDict [FAIL]
>> TDsProxyQuorumTracker::CheckFailModelErasure3Plus1Block [GOOD]
>> TopicService::AccessRights
>> Yq_1::ModifyQuery [FAIL]
>> TPersQueueTest::ReadRuleServiceType [FAIL]
>> TPersQueueTest::ReadRuleServiceTypeLimit
>> Yq_1::Basic_Null [FAIL]
>> Yq_1::Basic_TaggedLiteral
|71.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus1Block [GOOD]
>> Yq_1::CreateConnections_With_Idempotency [FAIL]
>> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed [FAIL]
>> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed
>> IndexBuildTest::RejectsCancel [GOOD]
>> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD]
>> PrivateApi::GetTask [GOOD]
>> PrivateApi::Nodes
>> TopicService::AccessRights [FAIL]
>> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [FAIL]
>> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsCancel [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:28:14.178195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:28:14.178213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:28:14.178217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:28:14.178221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:28:14.178230Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:14.178233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:14.178239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:14.178248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:14.178318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:14.178359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:14.186552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:14.186570Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:14.188520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:14.188614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:14.188651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:14.190296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:14.190464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:14.190554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:14.190599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:14.191056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:14.191098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:14.191348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:14.191358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:14.191380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:14.191388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:14.191394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:14.191426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.192657Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:14.205566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:14.205622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.205670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:14.205713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:14.205723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.206260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:14.206278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:14.206317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.206324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:14.206328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:14.206332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:14.206668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.206676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:14.206679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:14.206955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:28:14.206964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.206967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:14.206971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:14.207394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:14.207721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:14.207752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:14.207875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:14.207891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:14.207896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:14.207928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:14.207932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:14.207953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:14.207960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:14.208283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:14.208288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-05-29T15:28:14.208315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T1 ... lockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:28:44.531116Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:25: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-05-29T15:28:44.531417Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2025-05-29T15:28:44.531426Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1172:3023], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:28:44.531430Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-05-29T15:28:44.531447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:28:44.531453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1266:3106] TestWaitNotification: OK eventTxId 102 2025-05-29T15:28:44.531709Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__cancel.cpp:18: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: DoExecute TxId: 105 DatabaseName: "/MyRoot" IndexBuildId: 102 2025-05-29T15:28:44.531727Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index_tx_base.h:91: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: Reply TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } BUILDINDEX RESPONSE CANCEL: NKikimrIndexBuilder.TEvCancelResponse TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } 2025-05-29T15:28:44.531852Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-05-29T15:28:44.531900Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:93: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE 
Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { seconds: 30 } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { seconds: 30 } } 2025-05-29T15:28:44.532034Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:44.532067Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 35us result status StatusSuccess 2025-05-29T15:28:44.532138Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 13280 RowCount: 101 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 10 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 10 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 2237 Memory: 823440 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13280 DataSize: 13280 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2025-05-29T15:28:44.532276Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:28:44.532298Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 23us result status StatusSuccess 2025-05-29T15:28:44.532384Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/index1" PathDescription { Self { Name: "index1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13280 DataSize: 13280 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 
CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BsControllerConfig::MergeIntersectingBoxes [GOOD] >> BsControllerConfig::MoveGroups >> TopicService::ThereAreGapsInTheOffsetRanges >> TPersQueueTest::ReadRuleServiceTypeLimit [FAIL] >> TPersQueueTest::ReadRuleDisallowDefaultServiceType >> KqpEffects::InsertAbort_Params_Success >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed [FAIL] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed >> TopicService::ThereAreGapsInTheOffsetRanges [FAIL] >> KqpImmediateEffects::UpsertExistingKey >> BsControllerConfig::ExtendByCreatingSeparateBox [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool |71.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |71.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |71.3%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [FAIL] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit >> Yq_1::DescribeQuery [FAIL] |71.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |71.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |71.3%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut >> TopicService::OnePartitionAndNoGapsInTheOffsets >> TPersQueueTest::ReadRuleDisallowDefaultServiceType [FAIL] >> TPersQueueTest::ReadRuleServiceTypeMigration >> KqpWrite::CastValuesOptional >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] >> TPartGroupBtreeIndexIter::NoNodes [GOOD] >> TPartGroupBtreeIndexIter::OneNode [GOOD] >> TPartGroupBtreeIndexIter::FewNodes [GOOD] >> TPartMulti::Basics [GOOD] >> TPartMulti::BasicsReverse [GOOD] >> TPartSlice::SimpleMerge [GOOD] >> TPartSlice::ComplexMerge [GOOD] >> TPartSlice::LongTailMerge [GOOD] >> TPartSlice::CutSingle [GOOD] >> TPartSlice::CutMulti [GOOD] >> 
TPartSlice::LookupBasics [GOOD] >> TPartSlice::LookupFull [GOOD] >> TPartSlice::EqualByRowId [GOOD] >> TPartSlice::ParallelCompactions [GOOD] >> TDSProxyGetTest::TestBlock42GetIntervalsWipedError [GOOD] >> TDSProxyPatchTest::SecuredOk_Erasure4Plus2Block >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed [FAIL] >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed >> TopicService::OnePartitionAndNoGapsInTheOffsets [FAIL] >> TDSProxyPatchTest::SecuredOk_Erasure4Plus2Block [GOOD] >> TDSProxyPatchTest::NaiveErrorOnGetItem_ErasureMirror3dc >> IndexBuildTest::CancellationNotEnoughRetries [GOOD] >> IndexBuildTest::CancellationNoTable >> TDSProxyPatchTest::NaiveErrorOnGetItem_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_3_0_0_VdiskErrors ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::Create_And_Modify_The_Same_Connection [FAIL] Test command err: 2025-05-29T15:28:39.328355Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889988469192298:2272];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:39.328378Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0529 15:28:39.355782472 4051418 dns_resolver_ares.cc:452] no server name supplied in dns URI E0529 15:28:39.355831104 4051418 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0008ea/r3tmp/tmpdh0S67/pdisk_1.dat 2025-05-29T15:28:39.717311Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7509889988469192741:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:28:39.717372Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 9072, node 1 2025-05-29T15:28:39.725439Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:28:39.725439Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:28:39.744659Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:39.744926Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:39.744936Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:39.744940Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:39.745005Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30204 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:39.993506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:40.109924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:40.109948Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:40.111410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:40.361106Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/bindings". Create session OK 2025-05-29T15:28:40.361124Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/bindings" 2025-05-29T15:28:40.361125Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/bindings" 2025-05-29T15:28:40.361356Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/connections". 
Create session OK 2025-05-29T15:28:40.361365Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/connections" 2025-05-29T15:28:40.361366Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/connections" 2025-05-29T15:28:40.361483Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/compute_databases". Create session OK 2025-05-29T15:28:40.361489Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/compute_databases" 2025-05-29T15:28:40.361490Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/compute_databases" 2025-05-29T15:28:40.361544Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/tenants". Create session OK 2025-05-29T15:28:40.361552Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/tenants" 2025-05-29T15:28:40.361553Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/tenants" 2025-05-29T15:28:40.361596Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/nodes". Create session OK 2025-05-29T15:28:40.361605Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/nodes" 2025-05-29T15:28:40.361606Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/nodes" 2025-05-29T15:28:40.361715Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/idempotency_keys". Create session OK 2025-05-29T15:28:40.361717Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/idempotency_keys" 2025-05-29T15:28:40.361719Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/idempotency_keys" 2025-05-29T15:28:40.361771Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/queries". Create session OK 2025-05-29T15:28:40.361773Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/queries" 2025-05-29T15:28:40.361774Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/queries" 2025-05-29T15:28:40.362052Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/jobs". Create session OK 2025-05-29T15:28:40.362062Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/jobs" 2025-05-29T15:28:40.362063Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/jobs" 2025-05-29T15:28:40.362157Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/pending_small". Create session OK 2025-05-29T15:28:40.362164Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/pending_small" 2025-05-29T15:28:40.362165Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/pending_small" 2025-05-29T15:28:40.362392Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/tenant_acks". Create session OK 2025-05-29T15:28:40.362402Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/tenant_acks" 2025-05-29T15:28:40.362403Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/tenant_acks" 2025-05-29T15:28:40.362514Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/mappings". 
Create session OK 2025-05-29T15:28:40.362522Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/mappings" 2025-05-29T15:28:40.362523Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/mappings" 2025-05-29T15:28:40.362641Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/result_sets". Create session OK 2025-05-29T15:28:40.362649Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/result_sets" 2025-05-29T15:28:40.362650Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/result_sets" 2025-05-29T15:28:40.362804Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/quotas". Create session OK 2025-05-29T15:28:40.362820Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/quotas" 2025-05-29T15:28:40.362822Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/quotas" 2025-05-29T15:28:40.364266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.364613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.364825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.365012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.365215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.365252Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889992764160604:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:40.365257Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889992764160586:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:40.365264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889992764160608:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:40.365267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889992764160607:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:40.365274Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permis ... id=ZjFjM2Q5MjMtZmQ2ZjY4OGEtY2RmYTljNjUtN2M0YmNiMWI=. TraceId : 01jweahw643nptj4ghdna4npb9. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-05-29T15:28:43.205778Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1332: TxId: 281474976715720, task: 1, CA Id [4:7509890005970489479:2706]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-05-29T15:28:43.205779Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1428: TxId: 281474976715720, task: 1, CA Id [4:7509890005970489479:2706]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-05-29T15:28:43.205916Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:959: TxId: 281474976715720, task: 1, CA Id [4:7509890005970489479:2706]. Recv TEvReadResult from ShardID=72075186224037888, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= LockId: 281474976715720 DataShard: 72075186224037888 Generation: 1 Counter: 19 SchemeShard: 72057594046644480 PathId: 3, BrokenTxLocks= 2025-05-29T15:28:43.205919Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1051: TxId: 281474976715720, task: 1, CA Id [4:7509890005970489479:2706]. Taken 1 locks 2025-05-29T15:28:43.205920Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1065: TxId: 281474976715720, task: 1, CA Id [4:7509890005970489479:2706]. new data for read #0 seqno = 1 finished = 1 2025-05-29T15:28:43.205923Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [4:7509890005970489479:2706], TxId: 281474976715720, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjFjM2Q5MjMtZmQ2ZjY4OGEtY2RmYTljNjUtN2M0YmNiMWI=. TraceId : 01jweahw643nptj4ghdna4npb9. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2025-05-29T15:28:43.205925Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [4:7509890005970489479:2706], TxId: 281474976715720, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjFjM2Q5MjMtZmQ2ZjY4OGEtY2RmYTljNjUtN2M0YmNiMWI=. TraceId : 01jweahw643nptj4ghdna4npb9. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-05-29T15:28:43.205927Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1332: TxId: 281474976715720, task: 1, CA Id [4:7509890005970489479:2706]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-05-29T15:28:43.205929Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1228: TxId: 281474976715720, task: 1, CA Id [4:7509890005970489479:2706]. enter pack cells method shardId: 72075186224037888 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-05-29T15:28:43.205931Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1309: TxId: 281474976715720, task: 1, CA Id [4:7509890005970489479:2706]. exit pack cells method shardId: 72075186224037888 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-05-29T15:28:43.205932Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1366: TxId: 281474976715720, task: 1, CA Id [4:7509890005970489479:2706]. returned 0 rows; processed 0 rows 2025-05-29T15:28:43.205941Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1403: TxId: 281474976715720, task: 1, CA Id [4:7509890005970489479:2706]. 
dropping batch for read #0 2025-05-29T15:28:43.205942Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:459: TxId: 281474976715720, task: 1, CA Id [4:7509890005970489479:2706]. effective maxinflight 1024 sorted 0 2025-05-29T15:28:43.205943Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:481: TxId: 281474976715720, task: 1, CA Id [4:7509890005970489479:2706]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-05-29T15:28:43.205945Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1428: TxId: 281474976715720, task: 1, CA Id [4:7509890005970489479:2706]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-05-29T15:28:43.205954Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [4:7509890005970489479:2706], TxId: 281474976715720, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjFjM2Q5MjMtZmQ2ZjY4OGEtY2RmYTljNjUtN2M0YmNiMWI=. TraceId : 01jweahw643nptj4ghdna4npb9. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-05-29T15:28:43.205961Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [4:7509890005970489479:2706], TxId: 281474976715720, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjFjM2Q5MjMtZmQ2ZjY4OGEtY2RmYTljNjUtN2M0YmNiMWI=. TraceId : 01jweahw643nptj4ghdna4npb9. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-05-29T15:28:43.205963Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:675: TxId: 281474976715720, task: 1. Tasks execution finished 2025-05-29T15:28:43.205964Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [4:7509890005970489479:2706], TxId: 281474976715720, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjFjM2Q5MjMtZmQ2ZjY4OGEtY2RmYTljNjUtN2M0YmNiMWI=. TraceId : 01jweahw643nptj4ghdna4npb9. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-05-29T15:28:43.205980Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715720, task: 1. pass away 2025-05-29T15:28:43.205991Z node 4 :KQP_COMPUTE DEBUG: log.cpp:784: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715720;task_id=1;success=1;message={
<main>: Error: COMPUTE_STATE_FINISHED }; 2025-05-29T15:28:43.211212Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [4:7509890005970489437:2695], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:43.211878Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=4&id=ODhmNDI5MDItMWQxNzk0ZWYtZTgwZGY3ZTYtMWMzNGI5YjA=, ActorId: [4:7509890005970489397:2683], ActorState: ExecuteState, TraceId: 01jweahw5z5gdw101brpkfb31b, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweahw5wdt5sfvmrzbpt8gn1 2025-05-29T15:28:43.212086Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976715721. Ctx: { TraceId: 01jweahw5z5gdw101brpkfb31b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ODhmNDI5MDItMWQxNzk0ZWYtZTgwZGY3ZTYtMWMzNGI5YjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:28:43.212640Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.213826Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976715722. Ctx: { TraceId: 01jweahw6d32feyfh9xtdkngs6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NDhhNWJhMTMtNmU3YzlkZWEtZDA5MjYyMTYtMWVjZmYzMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:28:43.213886Z node 4 :KQP_COMPUTE DEBUG: log.cpp:784: fline=kqp_compute_actor_factory.cpp:148;event=channel_info;ch_size=8388608;ch_count=2;ch_limit=8388608;inputs=1;input_channels_count=0; 2025-05-29T15:28:43.213928Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:134: SelfId: [4:7509890005970489502:2698], TxId: 281474976715722, task: 1. Ctx: { TraceId : 01jweahw6d32feyfh9xtdkngs6. SessionId : ydb://session/3?node_id=4&id=NDhhNWJhMTMtNmU3YzlkZWEtZDA5MjYyMTYtMWVjZmYzMTQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Start compute actor [4:7509890005970489502:2698], task: 1 2025-05-29T15:28:43.213932Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:141: SelfId: [4:7509890005970489502:2698], TxId: 281474976715722, task: 1. Ctx: { TraceId : 01jweahw6d32feyfh9xtdkngs6. SessionId : ydb://session/3?node_id=4&id=NDhhNWJhMTMtNmU3YzlkZWEtZDA5MjYyMTYtMWVjZmYzMTQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Set execution timeout 299.999391s 2025-05-29T15:28:43.214023Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1365: SelfId: [4:7509890005970489502:2698], TxId: 281474976715722, task: 1. Ctx: { TraceId : 01jweahw6d32feyfh9xtdkngs6. SessionId : ydb://session/3?node_id=4&id=NDhhNWJhMTMtNmU3YzlkZWEtZDA5MjYyMTYtMWVjZmYzMTQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
Create source for input 0 { Source { Type: "KqpReadRangesSource" Settings { type_url: "type.googleapis.com/NKikimrTxDataShard.TKqpReadRangesSourceSettings" value: "\n$\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\016Root/yq/quotas\030\001*\0000\001\0326\0224\003\000\005\000\000\000cloud\n\000\000\000mock_cloud\027\000\000\000yq.analyticsQuery.count\"\032\010\005\022\020limit_updated_at\0302(\0000\000\"\026\010\004\022\014metric_limit\030\004(\0000\000\"\026\010\006\022\014metric_usage\030\004(\0000\000\"\032\010\007\022\020usage_updated_at\0302(\0000\000(\0000\000@\201 @\201 @\201 H\001R\022\010\367\341\243\345\3612\020\377\377\377\377\377\377\377\377\377\001X\200\200\204\200\200\200\204\200\001`\000h\312\247\200\200\200\200@p\004z\000z\000z\000\240\001\000\270\001\000" } WatermarksMode: WATERMARKS_MODE_DISABLED } } 2025-05-29T15:28:43.214062Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [4:7509890005970489502:2698], TxId: 281474976715722, task: 1. Ctx: { TraceId : 01jweahw6d32feyfh9xtdkngs6. SessionId : ydb://session/3?node_id=4&id=NDhhNWJhMTMtNmU3YzlkZWEtZDA5MjYyMTYtMWVjZmYzMTQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-05-29T15:28:43.214088Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1074: SelfId: [4:7509890005970489502:2698], TxId: 281474976715722, task: 1. Ctx: { TraceId : 01jweahw6d32feyfh9xtdkngs6. SessionId : ydb://session/3?node_id=4&id=NDhhNWJhMTMtNmU3YzlkZWEtZDA5MjYyMTYtMWVjZmYzMTQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 SrcEndpoint { ActorId { RawX1: 7509890005970489502 RawX2: 4503616807242378 } } DstEndpoint { ActorId { RawX1: 7509890005970489498 RawX2: 4503616807242378 } } InMemory: true } assertion failed at ydb/services/fq/ut_integration/fq_ut.cpp:463, virtual void NTestSuiteYq_1::TTestCaseCreate_And_Modify_The_Same_Connection::Execute_(NUnitTest::TTestContext &): (result.GetStatus() == EStatus::SUCCESS)
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x139D25FC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B861F9) NTestSuiteYq_1::TTestCaseCreate_And_Modify_The_Same_Connection::Execute_(NUnitTest::TTestContext&)+2156 (0x138A35FC) NTestSuiteYq_1::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x138BC937) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B880AE) NTestSuiteYq_1::TCurrentTest::Execute()+415 (0x138BC24F) NUnitTest::TTestFactory::Execute()+803 (0x13B88823) NUnitTest::RunMain(int, char**)+3021 (0x13B9A3CD) ??+0 (0x7FB6A2164D90) __libc_start_main+128 (0x7FB6A2164E40) _start+41 (0x129E1029) >> TConsistentOpsWithReboots::CreateNotNullColumnTableWithReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TPartSlice::ParallelCompactions [GOOD] Test command err: ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 755b + FlatIndex{4} Label{3 rev 3, 172b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b {1, aaa} | 1 3 88b {1, b} | 2 6 86b {2, NULL} | 3 9 86b {2, ccx} | 3 11 86b {2, cxz} + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 208b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > {1, b} | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > {2, NULL} | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > {2, ccx} | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 777b + FlatIndex{4} Label{3 rev 3, 179b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b {1, aaa} | 1 3 88b {1, baaaa} | 2 6 86b {2, aaa} | 3 9 86b {2, ccx} | 3 11 86b {2, cxz} + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 223b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > {1, baaaa} | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > {2, aaa} | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > {2, ccx} | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, ab} | 2 2 42b {1, ac} | 3 3 42b {1, b} | 4 4 42b {1, bb} | 5 5 42b {2, NULL} | 6 6 42b {2, ab} | 7 7 42b {2, ac} | 8 8 42b {2, b} | 9 9 42b {2, bb} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, ab} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, ac} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, b} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bb} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, NULL} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, ab} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, ac} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, b} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bb} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 
7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= SLICES ======= { [0, 2), [2, 4), [4, 5), [5, 6), [6, 7), [7, 9), [9, 9] } ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, ab} | 2 2 42b {1, ac} | 3 3 42b {1, b} | 4 4 42b {1, bb} | 5 5 42b {2, NULL} | 6 6 42b {2, ab} | 7 7 42b {2, ac} | 8 8 42b {2, b} | 9 9 42b {2, bb} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, ab} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, ac} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, b} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bb} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, NULL} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, ab} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, ac} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, b} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bb} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccccd} | 1 1 41b {ccccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 
Part{[1:2:3:0:0:0:0] eph 0, 83b 2r} data 320b + FlatIndex{2} Label{3 rev 3, 109b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 43b {ccccccd} | 1 1 43b {ccccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 83 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 83 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccccd} | 1 1 40b {cccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 82b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 42b {cccccd} | 1 1 42b {cccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 82 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 82 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 79b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 39b {ccccd} | 1 1 39b {ccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 79 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 79 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccd} | 1 1 41b {ccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 78b 2r} data 304b + FlatIndex{2} Label{3 rev 3, 101b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 38b {cccd} | 1 1 38b {cccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 78 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 78 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 75b 2r} data 292b + FlatIndex{2} Label{3 rev 3, 95b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 75 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 75 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 77b 2r} data 296b + FlatIndex{2} Label{3 rev 3, 97b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 77 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 77 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{2} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 
1 35b {d} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{2} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 10 ... owOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 
122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4441b + FlatIndex{26} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 7 12 122b {1, 8} | 8 14 122b {2, NULL} | 9 16 122b {2, 4} | 11 18 122b {2, 7} | 12 20 122b {2, 10} | 13 22 122b {3, 3} | 15 24 122b {3, 6} | 16 26 122b {3, 8} | 17 28 122b {4, NULL} | 19 30 122b {4, 4} | 20 32 122b {4, 7} | 21 34 122b {4, 10} | 24 36 122b {5, 3} | 25 38 122b {5, 6} | 25 39 122b {5, 7} + BTreeIndex{PageId: 29 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 360 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 722 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 7 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 8 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 9 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 1454 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 11 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 12 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 13 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 1820 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 15 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 16 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 17 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 19 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | | | > {4, 7} | | | 
PageId: 20 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 21 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 24 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 25 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{7} Label{74 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{8} Label{84 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{9} Label{94 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{11} Label{114 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{12} Label{124 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{13} Label{134 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{15} Label{154 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{16} Label{164 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{17} Label{174 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 
Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28}
| ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29}
+ Rows{19} Label{194 rev 1, 122b}, [30, +2)row
| ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30}
| ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31}
+ Rows{20} Label{204 rev 1, 122b}, [32, +2)row
| ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32}
| ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33}
+ Rows{21} Label{214 rev 1, 122b}, [34, +2)row
| ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34}
| ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35}
+ Rows{24} Label{244 rev 1, 122b}, [36, +2)row
| ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36}
| ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37}
+ Rows{25} Label{254 rev 1, 122b}, [38, +2)row
| ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38}
| ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39}
Slices{ [0, 39] }
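The part dumps above pair a FlatIndex (one record per data page) with a BTreeIndex whose entries carry a cumulative RowCount and DataSize per child plus a separator key. A minimal sketch of how such cumulative counts locate a page by absolute row id, under an assumed, simplified node layout (TChildRef, TIndexNode and LocateRow are illustrative names, not the real NTable types):

```cpp
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

// Hypothetical mirror of the index records printed above: each child
// entry carries the cumulative RowCount/DataSize up to the end of that
// child, which is what the BTreeIndex dump shows per PageId.
struct TChildRef {
    uint32_t PageId;
    uint64_t RowCount;   // cumulative rows through this child
    uint64_t DataSize;   // cumulative bytes through this child
};

struct TIndexNode {
    std::vector<TChildRef> Children;
    std::vector<TIndexNode> Inner;   // empty at the lowest level kept here
};

// Pick the first child whose cumulative RowCount exceeds the target row,
// so a row is located without touching any data page.
uint32_t LocateRow(const TIndexNode& node, uint64_t rowId) {
    std::size_t i = 0;
    while (i + 1 < node.Children.size() && node.Children[i].RowCount <= rowId)
        ++i;
    if (node.Inner.empty())
        return node.Children[i].PageId;
    return LocateRow(node.Inner[i], rowId);
}

int main() {
    // Two top levels of the 40-row part above: 18 rows / 1088 bytes on the
    // left branch (PageId 23), 40 rows / 2430 bytes total (PageId 28).
    TIndexNode root{
        {{23, 18, 1088}, {28, 40, 2430}},
        {TIndexNode{{{6, 6, 360}, {10, 12, 722}, {14, 18, 1088}}, {}},
         TIndexNode{{{18, 24, 1454}, {22, 30, 1820}, {27, 40, 2430}}, {}}}};
    std::cout << "row 25 falls under index page " << LocateRow(root, 25) << "\n";
    return 0;
}
```

Row 25 resolves to PageId 22 here, matching the RowCount 24 to 30 bracket printed for that page in the dump.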
>> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [FAIL]
>> TPersQueueTest::AllEqual [GOOD]
>> TPersQueueTest::BadSids
>> Yq_1::Basic_TaggedLiteral [FAIL]
>> IndexBuildTest::CancellationNoTable [GOOD]
>> TPersQueueTest::ReadRuleServiceTypeMigration [FAIL]
>> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault
>> TopicService::MultiplePartitionsAndNoGapsInTheOffsets
>> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_3_0_0_VdiskErrors [GOOD]
|71.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login
>> TKeyValueTest::TestCleanUpDataWithMockDisk [GOOD]
>> KqpWrite::CastValues
|71.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login
|71.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_3_0_0_VdiskErrors [GOOD]
Test command err:
2025-05-29T15:28:47.571905Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [7e4afa7ea38a37be] bootstrap ActorId# [3:82:2128] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13
2025-05-29T15:28:47.571958Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33
2025-05-29T15:28:47.571963Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32
2025-05-29T15:28:47.571967Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33
2025-05-29T15:28:47.571969Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32
2025-05-29T15:28:47.571972Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33
2025-05-29T15:28:47.571975Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32
2025-05-29T15:28:47.574481Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01
2025-05-29T15:28:47.574512Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33
2025-05-29T15:28:47.574518Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32
2025-05-29T15:28:47.574557Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01
2025-05-29T15:28:47.574570Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01
2025-05-29T15:28:47.574594Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01
2025-05-29T15:28:47.574599Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33
2025-05-29T15:28:47.574602Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32
2025-05-29T15:28:47.574620Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01
2025-05-29T15:28:47.574625Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33
2025-05-29T15:28:47.574630Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32
2025-05-29T15:28:47.574632Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33
2025-05-29T15:28:47.574635Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32
2025-05-29T15:28:47.574664Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:2:0] Marker# BPP01
2025-05-29T15:28:47.574671Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:2:0] Marker# BPP01
2025-05-29T15:28:47.574684Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12
2025-05-29T15:28:47.574690Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21
2025-05-29T15:28:47.574726Z node 3 :BS_PROXY_PUT NOTICE: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.248 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.248 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:2:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.249 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 2.757 VDiskId# [0:1:0:1:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 2.784 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 2.814 VDiskId# [0:1:1:1:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 2.826 VDiskId# [0:1:2:1:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 2.851 VDiskId# [0:1:0:2:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 2.859 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 2.877 VDiskId# [0:1:0:0:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 2.895 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:2:0] NodeId# 3 } TEvVPut{ TimestampMs# 2.895 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:2:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 2.92 VDiskId# [0:1:1:2:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 2.927 VDiskId# [0:1:2:2:0] NodeId# 3 Status# OK } ] }
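In the mirror-3dc put traced above, the proxy first sends one part per ring; every ERROR reply re-routes that part to another VDisk, and once a ring is exhausted the part is mirrored into rings that already answered OK, which is why three errors in ring 0 still end in Status# OK. A toy model of that retry flow, using illustrative names only (PutPart and TVPut are not the real dsproxy interfaces, whose logic lives in dsproxy_strategy_base.cpp):

```cpp
#include <array>
#include <cstdio>
#include <functional>

// Toy model of the retry visible in the log: a put first tries one disk
// per fail realm ("ring"); every ERROR reply moves that part to the next
// disk of the same realm, and once a realm is exhausted the part is
// mirrored into a realm that can still answer OK.
constexpr int Realms = 3;        // mirror-3dc: three data centers
constexpr int DisksPerRealm = 3; // three fail domains tried per realm here

using TVPut = std::function<bool(int realm, int disk)>; // true == OK reply

bool PutPart(int part, const TVPut& send, std::array<int, Realms>& okPerRealm) {
    int realm = part % Realms; // initial placement: one part per realm
    for (int disk = 0; disk < DisksPerRealm; ++disk) {
        if (send(realm, disk)) { ++okPerRealm[realm]; return true; }
        std::printf("part %d: ERROR from realm %d disk %d, retrying\n",
                    part, realm, disk);
    }
    // Realm exhausted (the 3_0_0 case): fall back to the other realms.
    for (int r = 0; r < Realms; ++r) {
        if (r == realm) continue;
        for (int disk = 0; disk < DisksPerRealm; ++disk)
            if (send(r, disk)) { ++okPerRealm[r]; return true; }
    }
    return false;
}

int main() {
    // Realm 0 always fails, echoing TestMirror3dcPutStatusOkWith_3_0_0_VdiskErrors.
    TVPut send = [](int realm, int) { return realm != 0; };
    std::array<int, Realms> ok{};
    bool all = true;
    for (int part = 0; part < 3; ++part)
        all = PutPart(part, send, ok) && all;
    std::printf("TEvPutResult Status# %s\n", all ? "OK" : "ERROR");
    return 0;
}
```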
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CreateNotNullColumnTableWithReboots [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:28:25.331546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:28:25.331570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:28:25.331576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:28:25.331582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:28:25.331588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:28:25.331592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:28:25.331602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:28:25.331615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:28:25.331714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:28:25.331785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:28:25.346650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true }
2025-05-29T15:28:25.346673Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:28:25.346785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062]
2025-05-29T15:28:25.349865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:28:25.349897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:28:25.349934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:28:25.352893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:28:25.352978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:28:25.353085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard:
72057594046678944 2025-05-29T15:28:25.353298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:25.354026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:25.354067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:25.354322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:25.354332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:25.354366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:25.354374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:25.354380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:25.354400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:28:25.355773Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:28:25.376708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:25.376790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:25.376851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:25.376899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:25.376916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:25.377643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:25.377669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:25.377720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:25.377730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:25.377736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:25.377742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:25.378118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:25.378130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:25.378136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:25.378460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:25.378469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:25.378475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:25.378483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:25.379168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:25.379579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:25.379617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:25.379815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:25.379840Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:25.379858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:25.379915Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 262 } } CommitVersion { Step: 5000004 TxId: 1003 } FAKE_COORDINATOR: Erasing txId 1003 2025-05-29T15:28:47.418882Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 358 RawX2: 377957124392 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-05-29T15:28:47.418890Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-05-29T15:28:47.418906Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 358 RawX2: 377957124392 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-05-29T15:28:47.418913Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:28:47.418922Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 358 RawX2: 377957124392 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-05-29T15:28:47.418938Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:47.418943Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:28:47.418949Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:28:47.418956Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:0 129 -> 240 2025-05-29T15:28:47.419772Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:47.419791Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:28:47.419864Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:28:47.419890Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:28:47.419959Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:28:47.419967Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-05-29T15:28:47.419982Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:28:47.419989Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:28:47.420015Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:28:47.420019Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:28:47.420025Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-05-29T15:28:47.420031Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:28:47.420038Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:28:47.420044Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:28:47.420073Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1002 2025-05-29T15:28:47.420855Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-05-29T15:28:47.420875Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 TestWaitNotification wait txId: 1003 2025-05-29T15:28:47.420892Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:28:47.420896Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:28:47.420978Z node 88 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-05-29T15:28:47.421012Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-05-29T15:28:47.421018Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [88:431:2403] 2025-05-29T15:28:47.421034Z node 88 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:28:47.421051Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:28:47.421056Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [88:431:2403] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2025-05-29T15:28:47.421143Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:47.421195Z node 88 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 71us result status StatusSuccess 2025-05-29T15:28:47.421301Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "TestNotNullTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:47.421371Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/TestNotNullTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:47.421406Z node 88 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB/TestNotNullTable" took 37us result status StatusSuccess 2025-05-29T15:28:47.421499Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/TestNotNullTable" PathDescription { Self { Name: "TestNotNullTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TestNotNullTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: true IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
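The schemeshard trace above walks each sub-operation through the numeric states it logs ("Change state for txid ... 2 -> 3", "3 -> 128", "129 -> 240"). A compact sketch of that progression as a forward-only chain; the enum names here are guesses from the surrounding messages, not schemeshard's actual definitions:

```cpp
#include <cstdint>
#include <cstdio>
#include <map>

// Rough sketch of the sub-operation progression the reboot test logs.
// The numeric values match the states printed by "Change state for txid";
// the names are inferred from nearby messages and are illustrative only.
enum class EState : uint8_t {
    CreateParts    = 2,   // TCreateParts ProgressState
    ConfigureParts = 3,   // NSubDomainState::TConfigureParts
    Propose        = 128, // TPropose, waits for the coordinator plan step
    WaitParts      = 129, // TProposedWaitParts, waits for TEvSchemaChanged
    Done           = 240, // TDone, operation part completed
};

EState Next(EState s) {
    static const std::map<EState, EState> chain{
        {EState::CreateParts, EState::ConfigureParts},
        {EState::ConfigureParts, EState::Propose},
        {EState::Propose, EState::WaitParts},
        {EState::WaitParts, EState::Done},
    };
    auto it = chain.find(s);
    return it == chain.end() ? s : it->second;
}

int main() {
    // Replays the transitions seen in the log for one operation part.
    for (EState s = EState::CreateParts; s != EState::Done; s = Next(s))
        std::printf("Change state %d -> %d\n", int(s), int(Next(s)));
    return 0;
}
```

A reboot at any point restarts from persisted state, which is why the same transitions reappear verbatim across the test's retries.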
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateQuery_Without_Connection [FAIL]
Test command err:
2025-05-29T15:28:39.310406Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889988305397032:2075];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:28:39.310436Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
E0529 15:28:39.344286601 4051388 dns_resolver_ares.cc:452] no server name supplied in dns URI
E0529 15:28:39.344351195 4051388 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:///
2025-05-29T15:28:39.345593Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:9923: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:9923 } ] 2025-05-29T15:28:39.347190Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:364: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:9923: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:9923 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0008f6/r3tmp/tmphXbeY2/pdisk_1.dat 2025-05-29T15:28:39.675335Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:28:39.675470Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7509889988305397540:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 9923, node 1 2025-05-29T15:28:39.699071Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:39.699082Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:39.699085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:39.699150Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:39.702113Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:27009 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:39.993542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:40.054503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:40.054526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:40.055896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:40.357817Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/idempotency_keys". Create session OK 2025-05-29T15:28:40.357835Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/idempotency_keys" 2025-05-29T15:28:40.357838Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/idempotency_keys" 2025-05-29T15:28:40.359492Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/tenants". Create session OK 2025-05-29T15:28:40.359502Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/tenants" 2025-05-29T15:28:40.359504Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/tenants" 2025-05-29T15:28:40.359511Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/queries". 
Create session OK 2025-05-29T15:28:40.359514Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/queries" 2025-05-29T15:28:40.359516Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/queries" 2025-05-29T15:28:40.359658Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/bindings". Create session OK 2025-05-29T15:28:40.359664Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/result_sets". Create session OK 2025-05-29T15:28:40.359666Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/bindings" 2025-05-29T15:28:40.359666Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/result_sets" 2025-05-29T15:28:40.359667Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/bindings" 2025-05-29T15:28:40.359667Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/result_sets" 2025-05-29T15:28:40.359738Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/tenant_acks". Create session OK 2025-05-29T15:28:40.359745Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/tenant_acks" 2025-05-29T15:28:40.359746Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/tenant_acks" 2025-05-29T15:28:40.359749Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/jobs". Create session OK 2025-05-29T15:28:40.359750Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/jobs" 2025-05-29T15:28:40.359751Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/jobs" 2025-05-29T15:28:40.359795Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/nodes". Create session OK 2025-05-29T15:28:40.359802Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/nodes" 2025-05-29T15:28:40.359803Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/nodes" 2025-05-29T15:28:40.359810Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/pending_small". Create session OK 2025-05-29T15:28:40.359811Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/pending_small" 2025-05-29T15:28:40.359813Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/pending_small" 2025-05-29T15:28:40.359869Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/compute_databases". Create session OK 2025-05-29T15:28:40.359876Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/compute_databases" 2025-05-29T15:28:40.359877Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/compute_databases" 2025-05-29T15:28:40.359880Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/quotas". Create session OK 2025-05-29T15:28:40.359881Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/quotas" 2025-05-29T15:28:40.359883Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/quotas" 2025-05-29T15:28:40.359999Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/connections". 
Create session OK 2025-05-29T15:28:40.360006Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/connections" 2025-05-29T15:28:40.360007Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/connections" 2025-05-29T15:28:40.360141Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/mappings". Create session OK 2025-05-29T15:28:40.360148Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/mappings" 2025-05-29T15:28:40.360150Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/mappings" 2025-05-29T15:28:40.361070Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889992600365446:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:40.361090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889992600365467:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:40.361097Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:40.361446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.361649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.361780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.361894Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889992600365537:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:40.361899Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889992600365546:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have ac ... shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-05-29T15:28:43.136539Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1309: TxId: 281474976715724, task: 1, CA Id [1:7509890002408947421:2730]. exit pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-05-29T15:28:43.136541Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1366: TxId: 281474976715724, task: 1, CA Id [1:7509890002408947421:2730]. returned 0 rows; processed 0 rows 2025-05-29T15:28:43.136551Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1403: TxId: 281474976715724, task: 1, CA Id [1:7509890002408947421:2730]. dropping batch for read #0 2025-05-29T15:28:43.136553Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:459: TxId: 281474976715724, task: 1, CA Id [1:7509890002408947421:2730]. effective maxinflight 1024 sorted 0 2025-05-29T15:28:43.136554Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:481: TxId: 281474976715724, task: 1, CA Id [1:7509890002408947421:2730]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-05-29T15:28:43.136557Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1428: TxId: 281474976715724, task: 1, CA Id [1:7509890002408947421:2730]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-05-29T15:28:43.136572Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [1:7509890002408947421:2730], TxId: 281474976715724, task: 1. Ctx: { TraceId : 01jweahw3zabnxn2kw3v22zx2b. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDVkNTBkNGEtMjZmM2IxNTMtMzdlNDEyMGEtYjcxZTgzYg==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-05-29T15:28:43.136585Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [1:7509890002408947421:2730], TxId: 281474976715724, task: 1. Ctx: { TraceId : 01jweahw3zabnxn2kw3v22zx2b. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDVkNTBkNGEtMjZmM2IxNTMtMzdlNDEyMGEtYjcxZTgzYg==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-05-29T15:28:43.136588Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:675: TxId: 281474976715724, task: 1. Tasks execution finished 2025-05-29T15:28:43.136590Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [1:7509890002408947421:2730], TxId: 281474976715724, task: 1. Ctx: { TraceId : 01jweahw3zabnxn2kw3v22zx2b. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDVkNTBkNGEtMjZmM2IxNTMtMzdlNDEyMGEtYjcxZTgzYg==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-05-29T15:28:43.136616Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715724, task: 1. pass away 2025-05-29T15:28:43.136647Z node 1 :KQP_COMPUTE DEBUG: log.cpp:784: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715724;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-29T15:28:43.139150Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890002408947390:2723], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:43.139697Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDgxMjlhYjAtOGRjYWZhNzktYTYxZTljYTYtNjU2Zjc1MTI=, ActorId: [1:7509890002408947339:2710], ActorState: ExecuteState, TraceId: 01jweahw3f5mhbbf4jxsma86m9, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweahw3d913t8xy2210a0n40 2025-05-29T15:28:43.139814Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976715725. Ctx: { TraceId: 01jweahw3f5mhbbf4jxsma86m9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxMjlhYjAtOGRjYWZhNzktYTYxZTljYTYtNjU2Zjc1MTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:28:43.140272Z node 1 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.143065Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976715726. Ctx: { TraceId: 01jweahw460b7vmmk8d6atgn3v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxZTYzYmUtM2JhYjA2NTktOWM3YjE2MzktNDE2YjUwODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:28:43.143137Z node 1 :KQP_COMPUTE DEBUG: log.cpp:784: fline=kqp_compute_actor_factory.cpp:148;event=channel_info;ch_size=8388608;ch_count=2;ch_limit=8388608;inputs=1;input_channels_count=0; 2025-05-29T15:28:43.143192Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:134: SelfId: [1:7509890002408947454:2739], TxId: 281474976715726, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NmMxZTYzYmUtM2JhYjA2NTktOWM3YjE2MzktNDE2YjUwODE=. TraceId : 01jweahw460b7vmmk8d6atgn3v. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Start compute actor [1:7509890002408947454:2739], task: 1 2025-05-29T15:28:43.143202Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:141: SelfId: [1:7509890002408947454:2739], TxId: 281474976715726, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NmMxZTYzYmUtM2JhYjA2NTktOWM3YjE2MzktNDE2YjUwODE=. TraceId : 01jweahw460b7vmmk8d6atgn3v. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Set execution timeout 299.998957s 2025-05-29T15:28:43.143343Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1365: SelfId: [1:7509890002408947454:2739], TxId: 281474976715726, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NmMxZTYzYmUtM2JhYjA2NTktOWM3YjE2MzktNDE2YjUwODE=. TraceId : 01jweahw460b7vmmk8d6atgn3v. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
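
Triage note: both Yq_1 failures reported in this section share the root cause visible here — query compilation aborts with INTERNAL_ERROR on an out-of-range access at yql/essentials/ast/yql_expr.h:1874, and the tests then observe BAD_REQUEST where they expect SUCCESS. The earlier "Resource pool default not found" messages are WARN-level and retried, so they are unlikely to be the cause. Below is a minimal sketch of the kind of bounds guard that emits this message (illustrative shape only, not the actual yql_expr.h code):

    #include <util/generic/yexception.h>
    #include <vector>

    // Hypothetical stand-in for an expression node: child access by index
    // is guarded, and a bad index throws "index out of range", which the
    // query compiler then surfaces as the Fatal issue with code 1.
    struct TNodeSketch {
        std::vector<TNodeSketch*> Children_;

        TNodeSketch& Child(size_t index) {
            Y_ENSURE(index < Children_.size(), "index out of range");
            return *Children_[index];
        }
    };
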
Create source for input 0 { Source { Type: "KqpReadRangesSource" Settings { type_url: "type.googleapis.com/NKikimrTxDataShard.TKqpReadRangesSourceSettings" value: "\n$\n\014\010\200\202\224\204\200\200\200\200\001\020\007\022\016Root/yq/quotas\030\001*\0000\001\0322\0220\003\000\005\000\000\000cloud\n\000\000\000mock_cloud\023\000\000\000yq.cpuPercent.count\"\032\010\005\022\020limit_updated_at\0302(\0000\000\"\026\010\004\022\014metric_limit\030\004(\0000\000\"\026\010\006\022\014metric_usage\030\004(\0000\000\"\032\010\007\022\020usage_updated_at\0302(\0000\000(\0000\000@\201 @\201 @\201 H\001R\022\010\261\341\243\345\3612\020\377\377\377\377\377\377\377\377\377\001X\205\200\204\200\200\200\204\200\001`\000h\316\247\200\200\200\200@p\001z\000z\000z\000\240\001\000\270\001\000" } WatermarksMode: WATERMARKS_MODE_DISABLED } } 2025-05-29T15:28:43.143378Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [1:7509890002408947454:2739], TxId: 281474976715726, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NmMxZTYzYmUtM2JhYjA2NTktOWM3YjE2MzktNDE2YjUwODE=. TraceId : 01jweahw460b7vmmk8d6atgn3v. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-05-29T15:28:43.143411Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1074: SelfId: [1:7509890002408947454:2739], TxId: 281474976715726, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NmMxZTYzYmUtM2JhYjA2NTktOWM3YjE2MzktNDE2YjUwODE=. TraceId : 01jweahw460b7vmmk8d6atgn3v. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 SrcEndpoint { ActorId { RawX1: 7509890002408947454 RawX2: 4503603922340531 } } DstEndpoint { ActorId { RawX1: 7509890002408947450 RawX2: 4503603922340531 } } InMemory: true } 2025-05-29T15:28:43.143436Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:445: TxId: 281474976715726, task: 1, CA Id [1:7509890002408947454:2739]. Shards State: TShardState{ TabletId: 72075186224037893, Last Key , Ranges: [], Points: [# 0: (String : cloud, String : mock_cloud, String : yq.cpuPercent.count)], RetryAttempt: 0, ResolveAttempt: 0 } 2025-05-29T15:28:43.143443Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:459: TxId: 281474976715726, task: 1, CA Id [1:7509890002408947454:2739]. effective maxinflight 1024 sorted 0 2025-05-29T15:28:43.143445Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:463: TxId: 281474976715726, task: 1, CA Id [1:7509890002408947454:2739]. BEFORE: 1.0 2025-05-29T15:28:43.143459Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:885: TxId: 281474976715726, task: 1, CA Id [1:7509890002408947454:2739]. Send EvRead to shardId: 72075186224037893, tablePath: Root/yq/quotas, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=18446744073709551615,step=1748532523185), lockTxId = 281474976715726, lockNodeId = 1 2025-05-29T15:28:43.143473Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:477: TxId: 281474976715726, task: 1, CA Id [1:7509890002408947454:2739]. AFTER: 0.1 2025-05-29T15:28:43.143478Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:481: TxId: 281474976715726, task: 1, CA Id [1:7509890002408947454:2739]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2025-05-29T15:28:43.143504Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [1:7509890002408947454:2739], TxId: 281474976715726, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=1&id=NmMxZTYzYmUtM2JhYjA2NTktOWM3YjE2MzktNDE2YjUwODE=. TraceId : 01jweahw460b7vmmk8d6atgn3v. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-05-29T15:28:43.143510Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1332: TxId: 281474976715726, task: 1, CA Id [1:7509890002408947454:2739]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-05-29T15:28:43.143514Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1428: TxId: 281474976715726, task: 1, CA Id [1:7509890002408947454:2739]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 [FAIL] Yq_1::CreateQuery_Without_Connection -> assertion failed at ydb/services/fq/ut_integration/fq_ut.cpp:57, TString (anonymous namespace)::CreateNewHistoryAndWaitFinish(const TString &, NYdb::NFq::TClient &, const TString &, const FederatedQuery::QueryMeta::ComputeStatus &): (result.GetStatus() == EStatus::SUCCESS) failed: (BAD_REQUEST != SUCCESS)
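
The "(BAD_REQUEST != SUCCESS)" text is the standard failure output of the unittest framework's value-equality check: it prints "(lhs == rhs) failed" followed by the two values, and the "with diff: (BAD_REQ|S)U(|CC)ES(T|S)" line further below is simply BAD_REQUEST diffed character-by-character against SUCCESS. The two ": Fatal:" issue lines that follow are the compile errors attached to this failure. A self-contained sketch of the check's shape (stand-in string values; the real helper at fq_ut.cpp:57 compares NYdb::EStatus values):

    #include <library/cpp/testing/unittest/registar.h>

    Y_UNIT_TEST_SUITE(StatusCheckSketch) {
        Y_UNIT_TEST(ShapeOfTheFailure) {
            // Stand-ins for the statuses compared by the real test.
            TString actual = "BAD_REQUEST";
            TString expected = "SUCCESS";
            // On mismatch this prints "(actual == expected) failed:
            // (BAD_REQUEST != SUCCESS), with diff: ..." -- the same
            // shape as the report above.
            UNIT_ASSERT_VALUES_EQUAL(actual, expected);
        }
    }
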
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (BAD_REQ|S)U(|CC)ES(T|S) TBackTrace::Capture()+28 (0x139D25FC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B861F9) ??+0 (0x1388D3A4) NTestSuiteYq_1::TTestCaseCreateQuery_Without_Connection::Execute_(NUnitTest::TTestContext&)+986 (0x138AA5EA) NTestSuiteYq_1::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x138BC937) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B880AE) NTestSuiteYq_1::TCurrentTest::Execute()+415 (0x138BC24F) NUnitTest::TTestFactory::Execute()+803 (0x13B88823) NUnitTest::RunMain(int, char**)+3021 (0x13B9A3CD) ??+0 (0x7F40D2EB5D90) __libc_start_main+128 (0x7F40D2EB5E40) _start+41 (0x129E1029) (yexception) library/cpp/testing/unittest/utmain.cpp:562: Forked test failed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::ModifyQuery [FAIL] Test command err: 2025-05-29T15:28:40.033918Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889991095249919:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:40.033941Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0529 15:28:40.067604469 4054680 dns_resolver_ares.cc:452] no server name supplied in dns URI E0529 15:28:40.067654937 4054680 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-05-29T15:28:40.402981Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7509889991095250234:2308], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:28:40.403014Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0008cd/r3tmp/tmpxJcLWZ/pdisk_1.dat 2025-05-29T15:28:40.452701Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61750, node 1 2025-05-29T15:28:40.458250Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:28:40.458264Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:28:40.459545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:40.459561Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:40.459564Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:40.459650Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15843 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:40.729976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:40.801015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:40.801050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:40.802664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:41.095744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:28:41.098276Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/nodes". 
Create session OK 2025-05-29T15:28:41.098282Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/nodes" 2025-05-29T15:28:41.098285Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/nodes" 2025-05-29T15:28:41.098911Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/mappings". Create session OK 2025-05-29T15:28:41.098917Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/mappings" 2025-05-29T15:28:41.098920Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/mappings" 2025-05-29T15:28:41.102891Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/idempotency_keys". Create session OK 2025-05-29T15:28:41.102908Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/idempotency_keys" 2025-05-29T15:28:41.102910Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/idempotency_keys" 2025-05-29T15:28:41.102926Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/queries". Create session OK 2025-05-29T15:28:41.102932Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/queries" 2025-05-29T15:28:41.102934Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/queries" 2025-05-29T15:28:41.103960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:28:41.104244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:28:41.104423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:28:41.105152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:41.106763Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/result_sets". Create session OK 2025-05-29T15:28:41.106767Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/result_sets" 2025-05-29T15:28:41.106769Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/result_sets" 2025-05-29T15:28:41.106978Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/quotas". Create session OK 2025-05-29T15:28:41.106981Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/quotas" 2025-05-29T15:28:41.106982Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/quotas" 2025-05-29T15:28:41.107402Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/tenant_acks". 
Create session OK 2025-05-29T15:28:41.107405Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/tenant_acks" 2025-05-29T15:28:41.107406Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/tenant_acks" 2025-05-29T15:28:41.110763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:41.111017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:41.111043Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/bindings". Create session OK 2025-05-29T15:28:41.111049Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/bindings" 2025-05-29T15:28:41.111050Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/bindings" 2025-05-29T15:28:41.111172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:41.111211Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/tenants". Create session OK 2025-05-29T15:28:41.111213Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/tenants" 2025-05-29T15:28:41.111215Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/tenants" 2025-05-29T15:28:41.111314Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created directory "Root/yq" 2025-05-29T15:28:41.111318Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create directory "Root/yq": 2025-05-29T15:28:41.111326Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/jobs". Create session OK 2025-05-29T15:28:41.111328Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/jobs" 2025-05-29T15:28:41.111329Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/jobs" 2025-05-29T15:28:41.111446Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/pending_small". Create session OK 2025-05-29T15:28:41.111448Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/pending_small" 2025-05-29T15:28:41.111449Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/pending_small" 2025-05-29T15:28:41.111550Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/compute_databases". Create session OK 2025-05-29T15:28:41.111552Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/compute_databases" 2025-05-29T15:28:41.111552Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/compute_databases" 2025-05-29T15:28:41.111891Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/connections". 
Create session OK 2025-05-29T15:28:41.111895Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/connections" 2025-05-29T15:28:41.111897Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/connections" 2025-05-29T15:28:41.112605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:41.114515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:41.115085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itsel ... FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.772952Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.772961Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.772964Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.772975Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.772979Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.772984Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.772989Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.773003Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.773008Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.773011Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.773024Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.773028Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.773031Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.773038Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.773049Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.773054Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.773059Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.773071Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.773077Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.773082Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.773087Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.773095Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:43.773102Z node 4 
:FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: [this identical "SyncQuota finished with error:" message repeats on node 4 for well over a hundred consecutive timestamps through 2025-05-29T15:28:43.773584Z; repetitions condensed] assertion failed at ydb/services/fq/ut_integration/fq_ut.cpp:57, TString (anonymous namespace)::CreateNewHistoryAndWaitFinish(const TString &, NYdb::NFq::TClient &, const TString &, const FederatedQuery::QueryMeta::ComputeStatus &): (result.GetStatus() == EStatus::SUCCESS) failed: (BAD_REQUEST != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (BAD_REQ|S)U(|CC)ES(T|S) TBackTrace::Capture()+28 (0x139D25FC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B861F9) ??+0 (0x1388D3A4) NTestSuiteYq_1::TTestCaseModifyQuery::Execute_(NUnitTest::TTestContext&)+1074 (0x138AD3F2) NTestSuiteYq_1::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x138BC937) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B880AE) NTestSuiteYq_1::TCurrentTest::Execute()+415 (0x138BC24F) NUnitTest::TTestFactory::Execute()+803 (0x13B88823) NUnitTest::RunMain(int, char**)+3021 (0x13B9A3CD) ??+0 (0x7FCDD4AF7D90) __libc_start_main+128 (0x7FCDD4AF7E40) _start+41 (0x129E1029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCleanUpDataWithMockDisk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:57:2097]) rebooted! !Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed! new actor is[2:82:2111] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! 
new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! 
new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvForceTabletDataCleanup ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:172:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:86:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:88:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:90:2057] recipient: [8:89:2117] Leader for TabletID 72057594037927937 is [8:91:2118] sender: [8:92:2057] recipient: [8:89:2117] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! new actor is[8:91:2118] Leader for TabletID 72057594037927937 is [8:91:2118] sender: [8:177:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:90:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:93:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:94:2057] recipient: [9:92:2121] Leader for TabletID 72057594037927937 is [9:95:2122] sender: [9:96:2057] recipient: [9:92:2121] !Reboot 72057594037927937 (actor [9:57:2097]) rebooted! !Reboot 72057594037927937 (actor [9:57:2097]) tablet resolver refreshed! 
new actor is[9:95:2122] Leader for TabletID 72057594037927937 is [9:95:2122] sender: [9:181:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:90:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:93:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:94:2057] recipient: [10:92:2121] Leader for TabletID 72057594037927937 is [10:95:2122] sender: [10:96:2057] recipient: [10:92:2121] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! new actor is[10:95:2122] Leader for TabletID 72057594037927937 is [10:95:2122] sender: [10:181:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:92:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:95:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:96:2057] recipient: [11:94:2123] Leader for TabletID 72057594037927937 is [11:97:2124] sender: [11:98:2057] recipient: [11:94:2123] !Reboot 72057594037927937 (actor [11:57:2097]) rebooted! !Reboot 72057594037927937 (actor [11:57:2097]) tablet resolver refreshed! new actor is[11:97:2124] Leader for TabletID 72057594037927937 is [11:97:2124] sender: [11:183:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:1 ... 94037927937 is [35:100:2125] sender: [35:186:2057] recipient: [35:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:57:2057] recipient: [36:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:57:2057] recipient: [36:54:2097] Leader for TabletID 72057594037927937 is [36:59:2099] sender: [36:60:2057] recipient: [36:54:2097] Leader for TabletID 72057594037927937 is [36:59:2099] sender: [36:77:2057] recipient: [36:17:2064] !Reboot 72057594037927937 (actor [36:59:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [36:59:2099] sender: [36:96:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:59:2099] sender: [36:99:2057] recipient: [36:17:2064] Leader for TabletID 72057594037927937 is [36:59:2099] sender: [36:100:2057] recipient: [36:98:2124] Leader for TabletID 72057594037927937 is [36:101:2125] sender: [36:102:2057] recipient: [36:98:2124] !Reboot 72057594037927937 (actor [36:59:2099]) rebooted! !Reboot 72057594037927937 (actor [36:59:2099]) tablet resolver refreshed! new actor is[36:101:2125] Leader for TabletID 72057594037927937 is [36:101:2125] sender: [36:187:2057] recipient: [36:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:57:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:57:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:59:2099] sender: [37:60:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:59:2099] sender: [37:77:2057] recipient: [37:17:2064] !Reboot 72057594037927937 (actor [37:59:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:59:2099] sender: [37:99:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:59:2099] sender: [37:102:2057] recipient: [37:17:2064] Leader for TabletID 72057594037927937 is [37:59:2099] sender: [37:103:2057] recipient: [37:101:2127] Leader for TabletID 72057594037927937 is [37:104:2128] sender: [37:105:2057] recipient: [37:101:2127] !Reboot 72057594037927937 (actor [37:59:2099]) rebooted! !Reboot 72057594037927937 (actor [37:59:2099]) tablet resolver refreshed! new actor is[37:104:2128] Leader for TabletID 72057594037927937 is [37:104:2128] sender: [37:190:2057] recipient: [37:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:57:2057] recipient: [38:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:57:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:59:2099] sender: [38:60:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:59:2099] sender: [38:77:2057] recipient: [38:17:2064] !Reboot 72057594037927937 (actor [38:59:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [38:59:2099] sender: [38:99:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:59:2099] sender: [38:102:2057] recipient: [38:17:2064] Leader for TabletID 72057594037927937 is [38:59:2099] sender: [38:103:2057] recipient: [38:101:2127] Leader for TabletID 72057594037927937 is [38:104:2128] sender: [38:105:2057] recipient: [38:101:2127] !Reboot 72057594037927937 (actor [38:59:2099]) rebooted! !Reboot 72057594037927937 (actor [38:59:2099]) tablet resolver refreshed! new actor is[38:104:2128] Leader for TabletID 72057594037927937 is [38:104:2128] sender: [38:190:2057] recipient: [38:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:57:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:57:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:59:2099] sender: [39:60:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:59:2099] sender: [39:77:2057] recipient: [39:17:2064] !Reboot 72057594037927937 (actor [39:59:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [39:59:2099] sender: [39:100:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:59:2099] sender: [39:103:2057] recipient: [39:17:2064] Leader for TabletID 72057594037927937 is [39:59:2099] sender: [39:104:2057] recipient: [39:102:2127] Leader for TabletID 72057594037927937 is [39:105:2128] sender: [39:106:2057] recipient: [39:102:2127] !Reboot 72057594037927937 (actor [39:59:2099]) rebooted! !Reboot 72057594037927937 (actor [39:59:2099]) tablet resolver refreshed! new actor is[39:105:2128] Leader for TabletID 72057594037927937 is [39:105:2128] sender: [39:191:2057] recipient: [39:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:57:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:57:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:59:2099] sender: [40:60:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:59:2099] sender: [40:77:2057] recipient: [40:17:2064] !Reboot 72057594037927937 (actor [40:59:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [40:59:2099] sender: [40:101:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:59:2099] sender: [40:103:2057] recipient: [40:17:2064] Leader for TabletID 72057594037927937 is [40:59:2099] sender: [40:105:2057] recipient: [40:104:2128] Leader for TabletID 72057594037927937 is [40:106:2129] sender: [40:107:2057] recipient: [40:104:2128] !Reboot 72057594037927937 (actor [40:59:2099]) rebooted! !Reboot 72057594037927937 (actor [40:59:2099]) tablet resolver refreshed! new actor is[40:106:2129] Leader for TabletID 72057594037927937 is [40:106:2129] sender: [40:126:2057] recipient: [40:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:57:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:57:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:59:2099] sender: [41:60:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:59:2099] sender: [41:77:2057] recipient: [41:17:2064] !Reboot 72057594037927937 (actor [41:59:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [41:59:2099] sender: [41:102:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:59:2099] sender: [41:105:2057] recipient: [41:17:2064] Leader for TabletID 72057594037927937 is [41:59:2099] sender: [41:106:2057] recipient: [41:104:2129] Leader for TabletID 72057594037927937 is [41:107:2130] sender: [41:108:2057] recipient: [41:104:2129] !Reboot 72057594037927937 (actor [41:59:2099]) rebooted! !Reboot 72057594037927937 (actor [41:59:2099]) tablet resolver refreshed! new actor is[41:107:2130] Leader for TabletID 72057594037927937 is [41:107:2130] sender: [41:127:2057] recipient: [41:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:57:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:57:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:59:2099] sender: [42:60:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:59:2099] sender: [42:77:2057] recipient: [42:17:2064] !Reboot 72057594037927937 (actor [42:59:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [42:59:2099] sender: [42:105:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:59:2099] sender: [42:108:2057] recipient: [42:17:2064] Leader for TabletID 72057594037927937 is [42:59:2099] sender: [42:109:2057] recipient: [42:107:2132] Leader for TabletID 72057594037927937 is [42:110:2133] sender: [42:111:2057] recipient: [42:107:2132] !Reboot 72057594037927937 (actor [42:59:2099]) rebooted! !Reboot 72057594037927937 (actor [42:59:2099]) tablet resolver refreshed! new actor is[42:110:2133] Leader for TabletID 72057594037927937 is [42:110:2133] sender: [42:196:2057] recipient: [42:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:57:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:57:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:59:2099] sender: [43:60:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:59:2099] sender: [43:77:2057] recipient: [43:17:2064] !Reboot 72057594037927937 (actor [43:59:2099]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [43:59:2099] sender: [43:105:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:59:2099] sender: [43:108:2057] recipient: [43:17:2064] Leader for TabletID 72057594037927937 is [43:59:2099] sender: [43:109:2057] recipient: [43:107:2132] Leader for TabletID 72057594037927937 is [43:110:2133] sender: [43:111:2057] recipient: [43:107:2132] !Reboot 72057594037927937 (actor [43:59:2099]) rebooted! !Reboot 72057594037927937 (actor [43:59:2099]) tablet resolver refreshed! new actor is[43:110:2133] Leader for TabletID 72057594037927937 is [43:110:2133] sender: [43:196:2057] recipient: [43:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:57:2057] recipient: [44:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:57:2057] recipient: [44:54:2097] Leader for TabletID 72057594037927937 is [44:59:2099] sender: [44:60:2057] recipient: [44:54:2097] Leader for TabletID 72057594037927937 is [44:59:2099] sender: [44:77:2057] recipient: [44:17:2064] !Reboot 72057594037927937 (actor [44:59:2099]) on event NKikimr::TEvKeyValue::TEvForceTabletDataCleanup ! Leader for TabletID 72057594037927937 is [44:59:2099] sender: [44:105:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:59:2099] sender: [44:108:2057] recipient: [44:17:2064] Leader for TabletID 72057594037927937 is [44:59:2099] sender: [44:109:2057] recipient: [44:107:2132] Leader for TabletID 72057594037927937 is [44:110:2133] sender: [44:111:2057] recipient: [44:107:2132] !Reboot 72057594037927937 (actor [44:59:2099]) rebooted! !Reboot 72057594037927937 (actor [44:59:2099]) tablet resolver refreshed! new actor is[44:110:2133] Leader for TabletID 72057594037927937 is [44:110:2133] sender: [44:196:2057] recipient: [44:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:57:2057] recipient: [45:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:57:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:59:2099] sender: [45:60:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:59:2099] sender: [45:77:2057] recipient: [45:17:2064] !Reboot 72057594037927937 (actor [45:59:2099]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! 
Leader for TabletID 72057594037927937 is [45:59:2099] sender: [45:110:2057] recipient: [45:39:2086]
Leader for TabletID 72057594037927937 is [45:59:2099] sender: [45:113:2057] recipient: [45:17:2064]
Leader for TabletID 72057594037927937 is [45:59:2099] sender: [45:114:2057] recipient: [45:112:2136]
Leader for TabletID 72057594037927937 is [45:115:2137] sender: [45:116:2057] recipient: [45:112:2136]
!Reboot 72057594037927937 (actor [45:59:2099]) rebooted!
!Reboot 72057594037927937 (actor [45:59:2099]) tablet resolver refreshed! new actor is[45:115:2137]
Leader for TabletID 72057594037927937 is [45:115:2137] sender: [45:201:2057] recipient: [45:17:2064]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:57:2057] recipient: [46:54:2097]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:57:2057] recipient: [46:54:2097]
Leader for TabletID 72057594037927937 is [46:59:2099] sender: [46:60:2057] recipient: [46:54:2097]
Leader for TabletID 72057594037927937 is [46:59:2099] sender: [46:77:2057] recipient: [46:17:2064]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_EmptyDict [FAIL]
Test command err:
2025-05-29T15:28:38.258927Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889980940485692:2074];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:28:38.259199Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
E0529 15:28:38.306024491 4047117 dns_resolver_ares.cc:452] no server name supplied in dns URI
E0529 15:28:38.306096710 4047117 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:///
2025-05-29T15:28:38.308388Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ { : Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12124: Failed to connect to remote host: Connection refused } { : Error: Grpc error response on endpoint localhost:12124 } ]
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0009e4/r3tmp/tmpv9wHXc/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 12124, node 1
2025-05-29T15:28:38.679684Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:28:38.679701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:28:38.679704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:28:38.679727Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:28:38.679762Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:24724
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response:
2025-05-29T15:28:38.694431Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ciyv/0009e4/r3tmp/spilling-tmp-runner/node_1_17e8edc9-8cbe3ff4-c6f82a39-67a2b60c, actor: [1:7509889980940486403:2317]
2025-05-29T15:28:38.694588Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ciyv/0009e4/r3tmp/spilling-tmp-runner
Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:28:38.935856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:28:39.088530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:28:39.088556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:28:39.090600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:28:39.311666Z–15:28:39.313270Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/<name>". Create session OK / Call create table "Root/yq/<name>" (schema.cpp:293, :113, :354) for each of: nodes, jobs, tenant_acks, idempotency_keys, pending_small, queries, mappings, bindings, result_sets, tenants, compute_databases, quotas, connections [three DEBUG lines per table elided]
2025-05-29T15:28:39.311946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480
2025-05-29T15:28:39.313634Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created directory "Root/yq"
2025-05-29T15:28:39.313655Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create directory "Root/yq":
2025-05-29T15:28:39.317310Z–15:28:39.319074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0 … 281474976715668:0, at schemeshard: 72057594046644480 [one WARN line per create-table operation elided]
2025-05-29T15:28:39.319218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__ope ... QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error:
2025-05-29T15:28:43.779208Z … 2025-05-29T15:28:43.779912Z node 7 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: [the same ERROR line repeats with successive timestamps; repetitions elided]
assertion failed at ydb/services/fq/ut_integration/fq_ut.cpp:57, TString (anonymous namespace)::CreateNewHistoryAndWaitFinish(const TString &, NYdb::NFq::TClient &, const TString &, const FederatedQuery::QueryMeta::ComputeStatus &): (result.GetStatus() == EStatus::SUCCESS) failed: (BAD_REQUEST != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
, with diff: (BAD_REQ|S)U(|CC)ES(T|S)
TBackTrace::Capture()+28 (0x139D25FC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B861F9)
??+0 (0x1388D3A4)
NTestSuiteYq_1::TTestCaseBasic_EmptyDict::Execute_(NUnitTest::TTestContext&)+933 (0x13891DA5)
NTestSuiteYq_1::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x138BC937)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B880AE)
NTestSuiteYq_1::TCurrentTest::Execute()+415 (0x138BC24F)
NUnitTest::TTestFactory::Execute()+803 (0x13B88823)
NUnitTest::RunMain(int, char**)+3021 (0x13B9A3CD)
??+0 (0x7F5E76501D90)
__libc_start_main+128 (0x7F5E76501E40)
_start+41 (0x129E1029)
>> TopicService::MultiplePartitionsAndNoGapsInTheOffsets [FAIL]
>> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed [FAIL]
>> TTopicYqlTest::CreateAndAlterTopicYql
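Note for triage: the "(result.GetStatus() == EStatus::SUCCESS) failed: (BAD_REQUEST != SUCCESS)" message together with the character diff above is the standard output of the value-equality assertions in library/cpp/testing/unittest, whose frames (NUnitTest::...) dominate the backtrace. A minimal sketch of a check that produces this shape of failure follows; the suite name and the DoCreate() helper are hypothetical stand-ins, not the actual fq_ut.cpp code, which calls the Federated Query service.

// Minimal sketch, assuming library/cpp/testing/unittest (the framework whose
// frames appear in the backtrace above). DoCreate() is a hypothetical stand-in
// for the real test helper.
#include <library/cpp/testing/unittest/registar.h>
#include <util/generic/string.h>

static TString DoCreate() {
    return "BAD_REQUEST";  // simulate the status the failing run observed
}

Y_UNIT_TEST_SUITE(ExampleSuite) {
    Y_UNIT_TEST(StatusMustBeSuccess) {
        // On mismatch the framework prints both values plus a character diff,
        // e.g. (BAD_REQ|S)U(|CC)ES(T|S), matching the log output above.
        UNIT_ASSERT_VALUES_EQUAL(DoCreate(), "SUCCESS");
    }
}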
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateConnections_With_Idempotency [FAIL]
Test command err:
2025-05-29T15:28:39.031945Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889987672227907:2211];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:28:39.032979Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
E0529 15:28:39.083158578 4050244 dns_resolver_ares.cc:452] no server name supplied in dns URI
E0529 15:28:39.083214452 4050244 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:///
2025-05-29T15:28:39.087232Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ { : Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26543: Failed to connect to remote host: Connection refused } { : Error: Grpc error response on endpoint localhost:26543 } ]
2025-05-29T15:28:39.381093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-05-29T15:28:39.385011Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7509889987672228092:2308], Scheduled retry for error: { : Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0009cc/r3tmp/tmpB5MrVU/pdisk_1.dat
2025-05-29T15:28:39.460344Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7509889987672228092:2308], Scheduled retry for error: { : Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-05-29T15:28:39.475173Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 26543, node 1
2025-05-29T15:28:39.486909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:28:39.486923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:28:39.486926Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:28:39.487001Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:26850
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" ... [identical root PathDescription to the previous test, elided] ... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:28:39.716691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:28:39.749428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:28:39.749459Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:28:39.751027Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
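Note for orientation: the repeated "Scheduled retry for error: Retry LookupError ..." entries above show the cleanup actor rescheduling itself until the .metadata tables become resolvable. The general shape of such a loop, written as plain sequential code purely for illustration (the service itself uses actor messages and scheduling, not a blocking loop), is roughly:

// Illustrative retry-with-backoff sketch in plain C++. This is not the
// actor-based implementation in the KQP workload service; it only shows the
// general pattern behind the "Scheduled retry" log lines.
#include <chrono>
#include <functional>
#include <thread>

bool RetryWithBackoff(const std::function<bool()>& attempt,
                      int maxAttempts = 5,
                      std::chrono::milliseconds delay = std::chrono::milliseconds(100)) {
    for (int i = 0; i < maxAttempts; ++i) {
        if (attempt()) {
            return true;   // e.g. the table lookup finally succeeded
        }
        std::this_thread::sleep_for(delay);  // wait before the next attempt
        delay *= 2;                          // exponential backoff
    }
    return false;  // exhausted retries; the caller surfaces the error
}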
2025-05-29T15:28:40.085843Z–15:28:40.086804Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/<name>". Create session OK / Call create table "Root/yq/<name>" (schema.cpp:293, :113, :354) for each of: tenants, tenant_acks, result_sets, compute_databases, quotas, connections, mappings, bindings, pending_small, nodes, jobs, queries, idempotency_keys [three DEBUG lines per table elided]
2025-05-29T15:28:40.087675Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created directory "Root/yq"
2025-05-29T15:28:40.087684Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create directory "Root/yq":
2025-05-29T15:28:40.088192Z–15:28:40.089192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0 … 281474976715667:0, at schemeshard: 72057594046644480 [one WARN line per create-table operation elided]
2025-05-29T15:28:40.089347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, subop ...
E ERROR: quota_manager.cpp:648: SyncQuota finished with error:
2025-05-29T15:28:43.882180Z … 2025-05-29T15:28:43.882729Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: [the same ERROR line repeats with successive timestamps; repetitions elided]
assertion failed at ydb/services/fq/ut_integration/fq_ut.cpp:536, virtual void NTestSuiteYq_1::TTestCaseCreateConnections_With_Idempotency::Execute_(NUnitTest::TTestContext &): (result.GetStatus() == EStatus::SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
TBackTrace::Capture()+28 (0x139D25FC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B861F9)
NTestSuiteYq_1::TTestCaseCreateConnections_With_Idempotency::Execute_(NUnitTest::TTestContext&)+2304 (0x138A60D0)
NTestSuiteYq_1::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x138BC937)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B880AE)
NTestSuiteYq_1::TCurrentTest::Execute()+415 (0x138BC24F)
NUnitTest::TTestFactory::Execute()+803 (0x13B88823)
NUnitTest::RunMain(int, char**)+3021 (0x13B9A3CD)
??+0 (0x7FDA9BA63D90)
__libc_start_main+128 (0x7FDA9BA63E40)
_start+41 (0x129E1029)
>> KqpImmediateEffects::MultipleEffectsWithIndex
>> KqpEffects::InsertAbort_Literal_Duplicates+UseSink
>> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault [FAIL]
>> TPersQueueTest::ReadWithoutConsumerFederation
>> TTopicYqlTest::DropTopicYql
>> TPersQueueTest::BadSids [FAIL]
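Note for triage: each of the three fq ut_integration logs in this section opens with the same pair of grpc-core errors ("no server name supplied in dns URI" / "the target uri is not valid: dns:///"). That signature is what gRPC emits when a channel is created with an empty target string, which the default resolver prefix normalizes to the invalid URI dns:/// (my reading of the message; the tests proceed once the real endpoint is up). A minimal reproduction sketch, assuming the gRPC C++ API:

// Minimal sketch, assuming grpc++. Creating a channel with an empty target
// resolves as "dns:///" and produces errors like the ones in the log above.
#include <grpcpp/grpcpp.h>
#include <iostream>

int main() {
    auto channel = grpc::CreateChannel("", grpc::InsecureChannelCredentials());
    // Channel creation is lazy; asking for the state with try_to_connect=true
    // forces resolution, which is where the "dns:///" failure surfaces.
    std::cout << "state: " << channel->GetState(/*try_to_connect=*/true) << "\n";
    return 0;
}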
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD]
Test command err:
2025-05-29T15:28:40.812222Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889992539790374:2208];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:28:40.812259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
E0529 15:28:40.857210658 4057752 dns_resolver_ares.cc:452] no server name supplied in dns URI
E0529 15:28:40.857267212 4057752 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:///
2025-05-29T15:28:40.859620Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ { : Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:14921: Failed to connect to remote host: Connection refused } { : Error: Grpc error response on endpoint localhost:14921 } ]
2025-05-29T15:28:41.289931Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-05-29T15:28:41.291170Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7509889996834757852:2308], Scheduled retry for error: { : Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0008c7/r3tmp/tmp4hLKzY/pdisk_1.dat
2025-05-29T15:28:41.362103Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7509889996834757852:2308], Scheduled retry for error: { : Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
TServer::EnableGrpc on GrpcPort 14921, node 1
TClient is connected to server localhost:7105
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" ... [identical root PathDescription to the previous tests, elided] ... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:28:41.484956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:28:41.519373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:28:41.519404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:28:41.521015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:28:41.752805Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:28:41.753230Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:28:41.753237Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:28:41.753240Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:28:41.753291Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
Create session OK 2025-05-29T15:28:41.862862Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/pending_small" 2025-05-29T15:28:41.862865Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/pending_small" 2025-05-29T15:28:41.862958Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/queries". Create session OK 2025-05-29T15:28:41.862968Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/queries" 2025-05-29T15:28:41.862970Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/queries" 2025-05-29T15:28:41.863341Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/compute_databases". Create session OK 2025-05-29T15:28:41.863350Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/compute_databases" 2025-05-29T15:28:41.863351Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/compute_databases" 2025-05-29T15:28:41.863414Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/quotas". Create session OK 2025-05-29T15:28:41.863419Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/quotas" 2025-05-29T15:28:41.863421Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/quotas" 2025-05-29T15:28:41.863650Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/tenants". Create session OK 2025-05-29T15:28:41.863658Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/tenants" 2025-05-29T15:28:41.863659Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/tenants" 2025-05-29T15:28:41.863674Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/jobs". Create session OK 2025-05-29T15:28:41.863677Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/jobs" 2025-05-29T15:28:41.863678Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/jobs" 2025-05-29T15:28:41.863776Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/mappings". Create session OK 2025-05-29T15:28:41.863783Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/mappings" 2025-05-29T15:28:41.863784Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/mappings" 2025-05-29T15:28:41.863795Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/idempotency_keys". Create session OK 2025-05-29T15:28:41.863797Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/idempotency_keys" 2025-05-29T15:28:41.863798Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/idempotency_keys" 2025-05-29T15:28:41.863970Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/nodes". Create session OK 2025-05-29T15:28:41.863978Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/nodes" 2025-05-29T15:28:41.863979Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/nodes" 2025-05-29T15:28:41.864000Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/result_sets". 
Create session OK 2025-05-29T15:28:41.864001Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/result_sets" 2025-05-29T15:28:41.864002Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/result_sets" 2025-05-29T15:28:41.864065Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/connections". Create session OK 2025-05-29T15:28:41.864072Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/connections" 2025-05-29T15:28:41.864073Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/connections" 2025-05-29T15:28:41.865000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:41.865240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:28:41.865448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:28:41.865615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:28:41.865883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:41.866036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:41.866152Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889996834758691:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:41.866171Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889996834758701:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:41.866183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 202 ... pp:648: SyncQuota finished with error: 2025-05-29T15:28:44.298191Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error:
[the record "node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error:" repeats verbatim, differing only in timestamp, for over a hundred consecutive entries from 2025-05-29T15:28:44.298201Z through 2025-05-29T15:28:44.298990Z; the run is collapsed here]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancellationNoTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:13.949382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:13.949401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:13.949405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:13.949408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:13.949418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:13.949421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:13.949427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:13.949437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:13.949512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:13.949565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:13.958820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:13.958839Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:13.960671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:13.960746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:13.960778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:13.962477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:13.962644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:13.962719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:13.962795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:13.963265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:13.963307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:13.963512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:13.963519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:13.963540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:13.963548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:13.963554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:13.963590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:13.964877Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:13.979148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:13.979216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:13.979269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:13.979307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:13.979316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:13.980006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:13.980026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:13.980071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:13.980078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:13.980082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:13.980086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:13.980475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:13.980489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:13.980494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:13.980822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:13.980836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:13.980840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:13.980845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 
1/1 2025-05-29T15:28:13.981400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:13.981748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:13.981777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:13.981957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:13.981991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:13.981999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:13.982048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:13.982056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:13.982084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:13.982095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:13.982487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:13.982494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:13.982524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
ommon.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:47.630034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:47.630039Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:47.630428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:47.630440Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:47.630446Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:47.630848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:47.630865Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:47.630871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:47.630878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:47.630907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:47.631362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:47.631409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:47.631611Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:47.631637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 127 RawX2: 8589936743 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:47.631645Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:47.631700Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 
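The numeric values in the "Change state for txid" entries above can be read off the ProgressState handlers the log names at each transition: TCreateParts at 2, TConfigureParts at 3, TPropose at 128, with TDone handling 240 just below. As a reading aid only, here is that mapping as a sketch; the enum and its name are hypothetical, not the schemeshard's actual state type:

// Reading aid for the "Change state for txid 1:0 A -> B" entries above.
// The values come straight from this log; the enum itself is illustrative.
enum EAlterSubDomainOpState {
    CreateParts    = 2,   // TCreateParts ProgressState ("no shards to create, do next state")
    ConfigureParts = 3,   // NSubDomainState::TConfigureParts ProgressState
    Propose        = 128, // NSubDomainState::TPropose sends the plan step to the coordinator
    Done           = 240, // TDone ProgressState; the operation is reported 1/1 and removed
};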
2025-05-29T15:28:47.631710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:47.631736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:47.631748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:47.632273Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:47.632283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:47.632324Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:47.632330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:207:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-29T15:28:47.632389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:47.632398Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-29T15:28:47.632410Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:28:47.632414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:28:47.632420Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:28:47.632423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:28:47.632428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-29T15:28:47.632436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:28:47.632441Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1:0 2025-05-29T15:28:47.632445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1:0 2025-05-29T15:28:47.632458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:47.632463Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-29T15:28:47.632468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1, [OwnerId: 
72057594046678944, LocalPathId: 1], 3 2025-05-29T15:28:47.632589Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:28:47.632601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:28:47.632606Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-05-29T15:28:47.632611Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-05-29T15:28:47.632615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:47.632627Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-05-29T15:28:47.633258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-05-29T15:28:47.633369Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:47.633465Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [2:270:2260] Bootstrap 2025-05-29T15:28:47.634685Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [2:270:2260] Become StateWork (SchemeCache [2:275:2265]) 2025-05-29T15:28:47.634819Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__create.cpp:23: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 101 DatabaseName: "/MyRoot" Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { settings { } } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } 2025-05-29T15:28:47.634872Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index_tx_base.h:91: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: Reply TxId: 101 Status: BAD_REQUEST Issues { message: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp:70" severity: 1 } SchemeStatus: 2 2025-05-29T15:28:47.634943Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [2:270:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:28:47.635412Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 101 Status: BAD_REQUEST Issues { message: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp:70" severity: 1 } 
SchemeStatus: 2 TestWaitNotification wait txId: 101 2025-05-29T15:28:47.635479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:28:47.635487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-05-29T15:28:47.635540Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:28:47.635558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:28:47.635563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:282:2272] TestWaitNotification: OK eventTxId 101 2025-05-29T15:28:47.635626Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__list.cpp:23: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" PageSize: 100 PageToken: "" 2025-05-29T15:28:47.635640Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:93: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: Reply Status: SUCCESS NextPageToken: "0" BUILDINDEX RESPONSE LIST: NKikimrIndexBuilder.TEvListResponse Status: SUCCESS NextPageToken: "0" >> TSequence::CreateSequenceParallel >> KqpImmediateEffects::ConflictingKeyR1WRR2 >> KqpInplaceUpdate::SingleRowSimple-UseSink |71.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TSequence::CreateSequence |71.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> KqpQuery::QueryClientTimeout >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential >> TTopicYqlTest::CreateAndAlterTopicYql [FAIL] >> TTopicYqlTest::AlterAutopartitioning >> TSequence::CreateSequence [GOOD] >> TSequence::CreateDropRecreate >> TSequence::CreateSequenceSequential [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence >> KqpStats::StreamLookupStats+StreamLookupJoin >> TPersQueueTest::ReadWithoutConsumerFederation [FAIL] >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen >> TTopicYqlTest::DropTopicYql [FAIL] >> TTopicYqlTest::CreateTopicYqlBackCompatibility >> KqpAnalyze::AnalyzeTable+ColumnStore >> TSequence::CreateDropRecreate [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed >> KqpEffects::InsertAbort_Select_Conflict+UseSink >> KqpQuery::RewriteIfPresentToMap >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed >> TChargeBTreeIndex::FewNodes_Groups [GOOD] >> TChargeBTreeIndex::FewNodes_History ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::DescribeQuery [FAIL] Test command err: 2025-05-29T15:28:39.226218Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889988837715083:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:39.226238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0529 15:28:39.258133724 4050743 dns_resolver_ares.cc:452] no server name 
supplied in dns URI E0529 15:28:39.258181979 4050743 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-05-29T15:28:39.259592Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:364: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8056: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8056 2025-05-29T15:28:39.260199Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8056: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8056 } ] 2025-05-29T15:28:39.581356Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:28:39.581626Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7509889988837715406:2308], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000918/r3tmp/tmpifcaPC/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8056, node 1 2025-05-29T15:28:39.642868Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:39.643352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:39.643361Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:39.643363Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:39.643414Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23148 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:39.860836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:39.966371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:39.966391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:39.969228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:40.262025Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/quotas". Create session OK 2025-05-29T15:28:40.262052Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/quotas" 2025-05-29T15:28:40.262053Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/quotas" 2025-05-29T15:28:40.262056Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/compute_databases". Create session OK 2025-05-29T15:28:40.262059Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/compute_databases" 2025-05-29T15:28:40.262060Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/compute_databases" 2025-05-29T15:28:40.262231Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/queries". 
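The TRANSPORT_UNAVAILABLE and "Connection refused" errors at 15:28:39.25x above are a startup race: the node manager and control-plane storage start calling localhost:8056 before "TServer::EnableGrpc on GrpcPort 8056" brings the endpoint up, and they simply retry until it is serving. A minimal sketch of that wait-until-ready pattern with the plain gRPC C++ API (illustrative only; the harness's own retry code is not shown in this log):

#include <grpcpp/grpcpp.h>

#include <chrono>
#include <string>

// Illustrative sketch: block until a freshly started local gRPC endpoint
// accepts connections, which is effectively what the retrying components
// above achieve through repeated TRANSPORT_UNAVAILABLE failures.
bool WaitForEndpoint(const std::string& target, std::chrono::seconds timeout) {
    auto channel = grpc::CreateChannel(target, grpc::InsecureChannelCredentials());
    // WaitForConnected drives the channel toward READY and returns false
    // if the deadline expires first (e.g. the server never came up).
    return channel->WaitForConnected(std::chrono::system_clock::now() + timeout);
}

// Usage: WaitForEndpoint("localhost:8056", std::chrono::seconds(15));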
Create session OK 2025-05-29T15:28:40.262239Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/queries" 2025-05-29T15:28:40.262240Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/queries" 2025-05-29T15:28:40.262246Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/jobs". Create session OK 2025-05-29T15:28:40.262247Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/jobs" 2025-05-29T15:28:40.262248Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/jobs" 2025-05-29T15:28:40.262317Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/connections". Create session OK 2025-05-29T15:28:40.262326Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/connections" 2025-05-29T15:28:40.262327Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/connections" 2025-05-29T15:28:40.262330Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/tenant_acks". Create session OK 2025-05-29T15:28:40.262331Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/tenant_acks" 2025-05-29T15:28:40.262332Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/tenant_acks" 2025-05-29T15:28:40.262392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:28:40.262394Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/nodes". Create session OK 2025-05-29T15:28:40.262395Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/nodes" 2025-05-29T15:28:40.262395Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/nodes" 2025-05-29T15:28:40.262415Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/idempotency_keys". Create session OK 2025-05-29T15:28:40.262416Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/idempotency_keys" 2025-05-29T15:28:40.262417Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/idempotency_keys" 2025-05-29T15:28:40.262514Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/tenants". Create session OK 2025-05-29T15:28:40.262519Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/tenants" 2025-05-29T15:28:40.262520Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/tenants" 2025-05-29T15:28:40.262540Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/mappings". Create session OK 2025-05-29T15:28:40.262550Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/mappings" 2025-05-29T15:28:40.262551Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/mappings" 2025-05-29T15:28:40.263218Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/pending_small". 
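Once the endpoint is up, the control-plane storage provisions its backing tables under Root/yq, one "Create table X. Create session OK" record plus two "Call create table" records per table, as the surrounding entries show. A sketch of that bootstrap shape against the public C++ SDK; this is not the actual control-plane code: the header path and the single-column schema are assumptions, and only the table names are taken from this log.

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>  // header path is an assumption

void BootstrapYqTables(NYdb::NTable::TTableClient& client) {
    // Table names as they appear in the log above.
    const TVector<TString> tables = {
        "quotas", "compute_databases", "queries", "jobs", "connections",
        "tenant_acks", "nodes", "idempotency_keys", "tenants", "mappings",
        "pending_small", "result_sets", "bindings",
    };
    for (const auto& name : tables) {
        // RetryOperationSync re-runs the lambda on retryable statuses,
        // mirroring the retry loop visible in the log while the endpoint
        // was still coming up.
        client.RetryOperationSync([&](NYdb::NTable::TSession session) {
            auto desc = NYdb::NTable::TTableBuilder()
                .AddNullableColumn("id", NYdb::EPrimitiveType::String)
                .SetPrimaryKeyColumn("id")  // real schemas differ per table
                .Build();
            return session.CreateTable("Root/yq/" + name,
                                       std::move(desc)).GetValueSync();
        });
    }
}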
Create session OK 2025-05-29T15:28:40.263228Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/pending_small" 2025-05-29T15:28:40.263230Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/pending_small" 2025-05-29T15:28:40.263290Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/result_sets". Create session OK 2025-05-29T15:28:40.263299Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/result_sets" 2025-05-29T15:28:40.263300Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/result_sets" 2025-05-29T15:28:40.263392Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/bindings". Create session OK 2025-05-29T15:28:40.263398Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/bindings" 2025-05-29T15:28:40.263399Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/bindings" 2025-05-29T15:28:40.263481Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created directory "Root/yq" 2025-05-29T15:28:40.263489Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create directory "Root/yq": 2025-05-29T15:28:40.264353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.264639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.264841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.264986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.265165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.265314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.265548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.265708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but pro ... 
_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:45.672921Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error:
[the record "node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error:" repeats verbatim, differing only in timestamp, for over a hundred consecutive entries from 2025-05-29T15:28:45.672926Z through 2025-05-29T15:28:45.673731Z; the run is collapsed here]
assertion failed at ydb/services/fq/ut_integration/fq_ut.cpp:57, TString
(anonymous namespace)::CreateNewHistoryAndWaitFinish(const TString &, NYdb::NFq::TClient &, const TString &, const FederatedQuery::QueryMeta::ComputeStatus &): (result.GetStatus() == EStatus::SUCCESS) failed: (BAD_REQUEST != SUCCESS)
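An aside on the assertion just shown: the test compares the status returned by the FQ client against EStatus::SUCCESS, and on mismatch the unittest framework prints both enum names plus a character-level diff (the "(BAD_REQ|S)U(|CC)ES(T|S)" fragment a few lines below). A minimal sketch of that check follows; the helper name is hypothetical and the SDK include path is an assumption, so treat this as an illustration rather than the actual test body.

```cpp
// Hypothetical sketch of the status check behind this failure; not the
// actual body of CreateNewHistoryAndWaitFinish. NYdb::TStatus and
// NYdb::EStatus are the SDK types visible in the log; the header path
// below is an assumption.
#include <library/cpp/testing/unittest/registar.h>
#include <ydb-cpp-sdk/client/types/status/status.h>  // assumed location

void ExpectSuccess(const NYdb::TStatus& result) {
    // On mismatch this raises a unittest failure that prints the enum
    // names and a character diff, similar in shape to what is seen here.
    UNIT_ASSERT_VALUES_EQUAL_C(result.GetStatus(), NYdb::EStatus::SUCCESS,
                               result.GetIssues().ToString());
}
```

The ": Fatal:" issue lines that follow are the issues carried by that failed TStatus.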
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (BAD_REQ|S)U(|CC)ES(T|S) TBackTrace::Capture()+28 (0x139D25FC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B861F9) ??+0 (0x1388D3A4) NTestSuiteYq_1::TTestCaseDescribeQuery::Execute_(NUnitTest::TTestContext&)+1017 (0x138B36B9) NTestSuiteYq_1::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x138BC937) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B880AE) NTestSuiteYq_1::TCurrentTest::Execute()+415 (0x138BC24F) NUnitTest::TTestFactory::Execute()+803 (0x13B88823) NUnitTest::RunMain(int, char**)+3021 (0x13B9A3CD) ??+0 (0x7F37626F7D90) __libc_start_main+128 (0x7F37626F7E40) _start+41 (0x129E1029) >> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD] >> TSequence::CopyTableWithSequence >> KqpImmediateEffects::WriteThenReadWithCommit >> BackupPathTest::RecursiveDirectoryPlusExplicitTable >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CreateSequencesWithIndexedTable >> TTopicYqlTest::AlterAutopartitioning [FAIL] >> TTopicYqlTest::BadRequests >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup >> BsControllerConfig::DeleteStoragePool [GOOD] >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [FAIL] >> TSequence::CopyTableWithSequence [GOOD] >> TSequence::AlterSequence >> TTopicYqlTest::CreateTopicYqlBackCompatibility [FAIL] >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> TKeyValueTest::TestConcatToLongKey [GOOD] |71.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |71.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |71.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValuesOptional Test command err: Trying to start YDB, gRPC: 29219, MsgBus: 14560 2025-05-29T15:28:41.994390Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889996221755279:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:41.994411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ec0/r3tmp/tmpVaVK1U/pdisk_1.dat 2025-05-29T15:28:42.068578Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:42.068697Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509889996221755249:2079] 1748532521994166 != 1748532521994169 TServer::EnableGrpc on GrpcPort 29219, node 1 2025-05-29T15:28:42.081665Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:42.081692Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:42.081693Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:42.081739Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:42.096819Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:42.096862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:42.097983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14560 TClient is connected to server localhost:14560 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:42.150717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:42.159546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:42.181056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:42.241315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:42.251902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:42.408045Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890000516724195:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:42.408098Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:42.444700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:42.451751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:42.465341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:42.479605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:42.493518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:42.507588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:42.521747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:42.537513Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890000516724848:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:42.537540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890000516724853:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:42.537545Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:42.538262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:28:42.541493Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890000516724855:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:28:42.596567Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890000516724906:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:42.712559Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890000516724915:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:42.712688Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODgyZTY3OTgtZjIwNmVkODktYjExYjdmNGMtYjY0ODcxYWU=, ActorId: [1:7509890000516724177:2401], ActorState: ExecuteState, TraceId: 01jweahvh90d1b9xb4vtxn653p, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:28:42.713302Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
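Note the wording "assertion failed in non-unittest thread": AssertSuccessResult runs inside a worker thread here (CreateSampleTables is dispatched through NThreading::Async onto a thread pool, per the backtrace below), and the unittest framework can only throw its failure exception on the thread that owns the test. From any other thread, RaiseError() fails its UnittestThread requirement and panics the whole binary, which is why this surfaces as VERIFY failed rather than an ordinary test failure. A minimal, self-contained sketch of the trap, using only the libraries named in the backtrace; the suite and test names are illustrative:

```cpp
// Sketch reproducing the "requirement UnittestThread failed" panic:
// a unittest assertion fired from a worker thread aborts the process
// instead of failing the test.
#include <library/cpp/testing/unittest/registar.h>
#include <library/cpp/threading/future/async.h>
#include <util/thread/pool.h>

Y_UNIT_TEST_SUITE(WorkerThreadAssertSketch) {
    Y_UNIT_TEST(PanicsInsteadOfFailing) {
        TThreadPool pool;
        pool.Start(1);
        auto future = NThreading::Async([] {
            // Off the unittest thread RaiseError() cannot throw the
            // framework's failure exception, so it calls Panic().
            UNIT_ASSERT(false);
        }, pool);
        future.GetValueSync();  // the process aborts before this returns
    }
}
```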
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FCFE81D5AC2 14. ??:0: ?? @ 0x7FCFE826784F Trying to start YDB, gRPC: 11635, MsgBus: 8987 2025-05-29T15:28:46.872060Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890017553739536:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:46.872115Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ec0/r3tmp/tmpIVjet7/pdisk_1.dat 2025-05-29T15:28:46.962924Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11635, node 1 2025-05-29T15:28:46.971419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:46.971449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:46.972711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:46.980047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:46.980061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:46.980063Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:46.980120Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8987 TClient is connected to server localhost:8987 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:47.065062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:47.067861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:28:47.079072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:47.148551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:47.217959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:47.233092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:47.303625Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890021848708281:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:47.303660Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:47.360732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:47.370003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:47.426761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:47.435983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:47.491853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:47.505777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:47.520259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:47.536059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890021848708936:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:47.536086Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:47.536148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890021848708941:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:47.537034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:28:47.539588Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890021848708943:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:28:47.612413Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890021848708994:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:47.688072Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890021848709010:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:47.688213Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTI0NDJjMGQtN2JiMDkxYWMtZGI2MDY2ZDgtYzE3NGMxM2Q=, ActorId: [1:7509890021848708278:2401], ActorState: ExecuteState, TraceId: 01jweaj0dfbcmnk47ec6xwaybn, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:28:47.688854Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FC9D258CAC2 14. ??:0: ?? @ 0x7FC9D261E84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::DeleteStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:215:2066] recipient: [1:193:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:215:2066] recipient: [1:193:2076] Leader for TabletID 72057594037932033 is [1:223:2078] sender: [1:224:2066] recipient: [1:193:2076] 2025-05-29T15:28:29.749360Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:28:29.750039Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:28:29.750109Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:28:29.750193Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:28:29.750431Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:28:29.750473Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:28:29.750477Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:28:29.750507Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:28:29.751270Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:28:29.751296Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:28:29.751323Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:28:29.751337Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:28:29.751346Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:28:29.751353Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:223:2078] sender: [1:247:2066] recipient: [1:20:2067] 2025-05-29T15:28:29.761650Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:28:29.761688Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:28:29.771889Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:28:29.771913Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:28:29.771922Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:28:29.771930Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:28:29.771950Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:28:29.771956Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:28:29.771961Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:28:29.771969Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:28:29.782262Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:28:29.782315Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:28:29.792609Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:28:29.792666Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:28:29.792855Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:28:29.792861Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:28:29.792900Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:28:29.792905Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:28:29.794466Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:215:2066] recipient: [11:186:2076] IGNORE 
Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:215:2066] recipient: [11:186:2076] Leader for TabletID 72057594037932033 is [11:224:2078] sender: [11:226:2066] recipient: [11:186:2076] 2025-05-29T15:28:31.760762Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:28:31.761011Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:28:31.761073Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:28:31.761244Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:28:31.761383Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:28:31.761429Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:28:31.761436Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:28:31.761479Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:28:31.762469Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:28:31.762497Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:28:31.762527Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:28:31.762547Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:28:31.762562Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:28:31.762572Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:224:2078] sender: [11:247:2066] recipient: [11:20:2067] 2025-05-29T15:28:31.772980Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:28:31.773038Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:28:31.783357Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:28:31.783409Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:28:31.783426Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:28:31.783439Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:28:31.783469Z node 11 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:28:31.783477Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:28:31.783484Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:28:31.783496Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:28:31.793807Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:28:31.793857Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:28:31.804215Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:28:31.804279Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:28:31.804457Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:28:31.804467Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:28:31.804511Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:28:31.804520Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:28:31.804699Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:3015:2106] recipient: [21:2914:2116] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:3015:2106] recipient: [21:2914:2116] Leader for TabletID 72057594037932033 is [21:3062:2118] sender: [21:3065:2106] recipient: [21:2914:2116] 2025-05-29T15:28:34.269817Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:28:34.270061Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:28:34.270105Z n ... 
ev/disk3 2025-05-29T15:28:43.042239Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 96:1000 Path# /dev/disk1 2025-05-29T15:28:43.042244Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 96:1001 Path# /dev/disk2 2025-05-29T15:28:43.042249Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 96:1002 Path# /dev/disk3 2025-05-29T15:28:43.042255Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 97:1000 Path# /dev/disk1 2025-05-29T15:28:43.042260Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 97:1001 Path# /dev/disk2 2025-05-29T15:28:43.042265Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 97:1002 Path# /dev/disk3 2025-05-29T15:28:43.042270Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 98:1000 Path# /dev/disk1 2025-05-29T15:28:43.042275Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 98:1001 Path# /dev/disk2 2025-05-29T15:28:43.042280Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 98:1002 Path# /dev/disk3 2025-05-29T15:28:43.042285Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 99:1000 Path# /dev/disk1 2025-05-29T15:28:43.042290Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 99:1001 Path# /dev/disk2 2025-05-29T15:28:43.042294Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 99:1002 Path# /dev/disk3 2025-05-29T15:28:43.042302Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 100:1000 Path# /dev/disk1 2025-05-29T15:28:43.042307Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 100:1001 Path# /dev/disk2 2025-05-29T15:28:43.042312Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 100:1002 Path# /dev/disk3 2025-05-29T15:28:43.042321Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 101:1000 Path# /dev/disk1 2025-05-29T15:28:43.042329Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 101:1001 Path# /dev/disk2 2025-05-29T15:28:43.042334Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 101:1002 Path# /dev/disk3 2025-05-29T15:28:43.042339Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 102:1000 Path# /dev/disk1 2025-05-29T15:28:43.042344Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 102:1001 Path# /dev/disk2 2025-05-29T15:28:43.042349Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 102:1002 Path# /dev/disk3 2025-05-29T15:28:43.042356Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 103:1000 Path# /dev/disk1 2025-05-29T15:28:43.042361Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 103:1001 Path# /dev/disk2 2025-05-29T15:28:43.042366Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 103:1002 Path# /dev/disk3 2025-05-29T15:28:43.042371Z 
node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 104:1000 Path# /dev/disk1 2025-05-29T15:28:43.042376Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 104:1001 Path# /dev/disk2 2025-05-29T15:28:43.042381Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 104:1002 Path# /dev/disk3 2025-05-29T15:28:43.042386Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 105:1000 Path# /dev/disk1 2025-05-29T15:28:43.042391Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 105:1001 Path# /dev/disk2 2025-05-29T15:28:43.042397Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 105:1002 Path# /dev/disk3 2025-05-29T15:28:43.042402Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 106:1000 Path# /dev/disk1 2025-05-29T15:28:43.042407Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 106:1001 Path# /dev/disk2 2025-05-29T15:28:43.042412Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 106:1002 Path# /dev/disk3 2025-05-29T15:28:43.042417Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 107:1000 Path# /dev/disk1 2025-05-29T15:28:43.042422Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 107:1001 Path# /dev/disk2 2025-05-29T15:28:43.042428Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 107:1002 Path# /dev/disk3 2025-05-29T15:28:43.042432Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 108:1000 Path# /dev/disk1 2025-05-29T15:28:43.042438Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 108:1001 Path# /dev/disk2 2025-05-29T15:28:43.042443Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 108:1002 Path# /dev/disk3 2025-05-29T15:28:43.042448Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 109:1000 Path# /dev/disk1 2025-05-29T15:28:43.042453Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 109:1001 Path# /dev/disk2 2025-05-29T15:28:43.042459Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 109:1002 Path# /dev/disk3 2025-05-29T15:28:43.042463Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 110:1000 Path# /dev/disk1 2025-05-29T15:28:43.043061Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 110:1001 Path# /dev/disk2 2025-05-29T15:28:43.043071Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 110:1002 Path# /dev/disk3 2025-05-29T15:28:43.043075Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 111:1000 Path# /dev/disk1 2025-05-29T15:28:43.043078Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 111:1001 Path# /dev/disk2 2025-05-29T15:28:43.043083Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 111:1002 Path# /dev/disk3 2025-05-29T15:28:43.043088Z node 71 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 112:1000 Path# /dev/disk1 2025-05-29T15:28:43.043094Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 112:1001 Path# /dev/disk2 2025-05-29T15:28:43.043099Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 112:1002 Path# /dev/disk3 2025-05-29T15:28:43.043104Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 113:1000 Path# /dev/disk1 2025-05-29T15:28:43.043110Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 113:1001 Path# /dev/disk2 2025-05-29T15:28:43.043116Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 113:1002 Path# /dev/disk3 2025-05-29T15:28:43.043121Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 114:1000 Path# /dev/disk1 2025-05-29T15:28:43.043126Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 114:1001 Path# /dev/disk2 2025-05-29T15:28:43.043132Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 114:1002 Path# /dev/disk3 2025-05-29T15:28:43.043138Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 115:1000 Path# /dev/disk1 2025-05-29T15:28:43.043143Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 115:1001 Path# /dev/disk2 2025-05-29T15:28:43.043148Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 115:1002 Path# /dev/disk3 2025-05-29T15:28:43.043154Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 116:1000 Path# /dev/disk1 2025-05-29T15:28:43.043158Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 116:1001 Path# /dev/disk2 2025-05-29T15:28:43.043161Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 116:1002 Path# /dev/disk3 2025-05-29T15:28:43.043164Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 117:1000 Path# /dev/disk1 2025-05-29T15:28:43.043167Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 117:1001 Path# /dev/disk2 2025-05-29T15:28:43.043170Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 117:1002 Path# /dev/disk3 2025-05-29T15:28:43.043173Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 118:1000 Path# /dev/disk1 2025-05-29T15:28:43.043177Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 118:1001 Path# /dev/disk2 2025-05-29T15:28:43.043180Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 118:1002 Path# /dev/disk3 2025-05-29T15:28:43.043183Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 119:1000 Path# /dev/disk1 2025-05-29T15:28:43.043186Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 119:1001 Path# /dev/disk2 2025-05-29T15:28:43.043191Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 119:1002 Path# /dev/disk3 2025-05-29T15:28:43.043195Z node 71 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 120:1000 Path# /dev/disk1 2025-05-29T15:28:43.043198Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 120:1001 Path# /dev/disk2 2025-05-29T15:28:43.043201Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 120:1002 Path# /dev/disk3 2025-05-29T15:28:43.055065Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool 1" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: ROT } } } } } 2025-05-29T15:28:43.083932Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "storage pool 2" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: SSD } } } } Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 2 ItemConfigGeneration: 1 } } } 2025-05-29T15:28:43.105160Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 1 ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestConcatToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:57:2097]) rebooted! !Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed! 
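An aside on the BsControllerConfig requests shown a little earlier in this block: DefineStoragePool, DeleteStoragePool, and QueryBaseConfig are logged as plain protobuf text, so an equivalent request can be rebuilt offline with TextFormat. A hedged sketch follows, assuming the message is NKikimrBlobStorage::TConfigRequest generated from ydb/core/protos (the header path is a guess from the names in this log); the TKeyValueTest reboot trace resumes immediately after this block.

```cpp
// Hedged sketch: rebuilding the DeleteStoragePool request shown in this
// log from protobuf text format. The message type and header path are
// assumptions inferred from the logged field names.
#include <google/protobuf/text_format.h>
#include <ydb/core/protos/blobstorage_config.pb.h>  // assumed path

bool ParseDeletePoolRequest(NKikimrBlobStorage::TConfigRequest* req) {
    static const char kText[] =
        "Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 1 "
        "ItemConfigGeneration: 1 } } "
        "Command { QueryBaseConfig { } }";
    return google::protobuf::TextFormat::ParseFromString(kText, req);
}
```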
new actor is[2:82:2111] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:84:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! 
new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:86:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:88:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! new actor is[7:87:2114] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:173:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:85:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:88:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:89:2057] recipient: [8:87:2116] Leader for TabletID 72057594037927937 is [8:90:2117] sender: [8:91:2057] recipient: [8:87:2116] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! 
new actor is[8:90:2117] Leader for TabletID 72057594037927937 is [8:90:2117] sender: [8:176:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:85:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:89:2057] recipient: [9:88:2116] Leader for TabletID 72057594037927937 is [9:90:2117] sender: [9:91:2057] recipient: [9:88:2116] !Reboot 72057594037927937 (actor [9:57:2097]) rebooted! !Reboot 72057594037927937 (actor [9:57:2097]) tablet resolver refreshed! new actor is[9:90:2117] Leader for TabletID 72057594037927937 is [9:90:2117] sender: [9:176:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:86:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:89:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:90:2057] recipient: [10:88:2116] Leader for TabletID 72057594037927937 is [10:91:2117] sender: [10:92:2057] recipient: [10:88:2116] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! new actor is[10:91:2117] Leader for TabletID 72057594037927937 is [10:91:2117] sender: [10:177:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:88:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:91:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:92:2057] recipient: [11:90:2118] Leader for TabletID 72057594037927937 is [11:93:2119] sender: [11:94:2057] recipient: [11:90:2118] !Reboot 72057594037927937 (actor [11:57:2097]) rebooted! !Reboot 72057594037927937 (actor [11:57:2097]) tablet resolver refreshed! 
new actor is[11:93:2119] Leader for TabletID 72057594037927937 is [11:93:2119] sender: [11:179:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7 is [33:57:2097] sender: [33:93:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:95:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:57:2097] sender: [33:97:2057] recipient: [33:96:2121] Leader for TabletID 72057594037927937 is [33:98:2122] sender: [33:99:2057] recipient: [33:96:2121] !Reboot 72057594037927937 (actor [33:57:2097]) rebooted! !Reboot 72057594037927937 (actor [33:57:2097]) tablet resolver refreshed! new actor is[33:98:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:55:2057] recipient: [34:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:55:2057] recipient: [34:52:2095] Leader for TabletID 72057594037927937 is [34:57:2097] sender: [34:58:2057] recipient: [34:52:2095] Leader for TabletID 72057594037927937 is [34:57:2097] sender: [34:75:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:55:2057] recipient: [35:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:55:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:58:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:57:2097] sender: [35:75:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:55:2057] recipient: [36:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:55:2057] recipient: [36:52:2095] Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:58:2057] recipient: [36:52:2095] Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:75:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:77:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:79:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:57:2097] sender: [36:81:2057] recipient: [36:80:2110] Leader for TabletID 72057594037927937 is [36:82:2111] sender: [36:83:2057] recipient: [36:80:2110] !Reboot 72057594037927937 (actor [36:57:2097]) rebooted! !Reboot 72057594037927937 (actor [36:57:2097]) tablet resolver refreshed! new actor is[36:82:2111] Leader for TabletID 72057594037927937 is [36:82:2111] sender: [36:168:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:55:2057] recipient: [37:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:55:2057] recipient: [37:51:2095] Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:58:2057] recipient: [37:51:2095] Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:75:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:57:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:77:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:80:2057] recipient: [37:79:2110] Leader for TabletID 72057594037927937 is [37:57:2097] sender: [37:81:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:82:2111] sender: [37:83:2057] recipient: [37:79:2110] !Reboot 72057594037927937 (actor [37:57:2097]) rebooted! !Reboot 72057594037927937 (actor [37:57:2097]) tablet resolver refreshed! new actor is[37:82:2111] Leader for TabletID 72057594037927937 is [37:82:2111] sender: [37:168:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:55:2057] recipient: [38:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:55:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:58:2057] recipient: [38:52:2095] Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:75:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:78:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:81:2057] recipient: [38:80:2110] Leader for TabletID 72057594037927937 is [38:57:2097] sender: [38:82:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:83:2111] sender: [38:84:2057] recipient: [38:80:2110] !Reboot 72057594037927937 (actor [38:57:2097]) rebooted! !Reboot 72057594037927937 (actor [38:57:2097]) tablet resolver refreshed! new actor is[38:83:2111] Leader for TabletID 72057594037927937 is [38:83:2111] sender: [38:169:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:55:2057] recipient: [39:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:55:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:58:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:75:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:81:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:84:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:57:2097] sender: [39:85:2057] recipient: [39:83:2113] Leader for TabletID 72057594037927937 is [39:86:2114] sender: [39:87:2057] recipient: [39:83:2113] !Reboot 72057594037927937 (actor [39:57:2097]) rebooted! !Reboot 72057594037927937 (actor [39:57:2097]) tablet resolver refreshed! new actor is[39:86:2114] Leader for TabletID 72057594037927937 is [39:86:2114] sender: [39:172:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:55:2057] recipient: [40:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:55:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:57:2097] sender: [40:58:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:57:2097] sender: [40:75:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [40:57:2097] sender: [40:81:2057] recipient: [40:36:2083] Leader for TabletID 72057594037927937 is [40:57:2097] sender: [40:84:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:57:2097] sender: [40:85:2057] recipient: [40:83:2113] Leader for TabletID 72057594037927937 is [40:86:2114] sender: [40:87:2057] recipient: [40:83:2113] !Reboot 72057594037927937 (actor [40:57:2097]) rebooted! !Reboot 72057594037927937 (actor [40:57:2097]) tablet resolver refreshed! new actor is[40:86:2114] Leader for TabletID 72057594037927937 is [40:86:2114] sender: [40:172:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:55:2057] recipient: [41:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:55:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:57:2097] sender: [41:58:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:57:2097] sender: [41:75:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:57:2097] sender: [41:82:2057] recipient: [41:36:2083] Leader for TabletID 72057594037927937 is [41:57:2097] sender: [41:84:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [41:57:2097] sender: [41:86:2057] recipient: [41:85:2113] Leader for TabletID 72057594037927937 is [41:87:2114] sender: [41:88:2057] recipient: [41:85:2113] !Reboot 72057594037927937 (actor [41:57:2097]) rebooted! !Reboot 72057594037927937 (actor [41:57:2097]) tablet resolver refreshed! new actor is[41:87:2114] Leader for TabletID 72057594037927937 is [41:87:2114] sender: [41:173:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:55:2057] recipient: [42:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:55:2057] recipient: [42:51:2095] Leader for TabletID 72057594037927937 is [42:57:2097] sender: [42:58:2057] recipient: [42:51:2095] Leader for TabletID 72057594037927937 is [42:57:2097] sender: [42:75:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:57:2097] sender: [42:85:2057] recipient: [42:36:2083] Leader for TabletID 72057594037927937 is [42:57:2097] sender: [42:88:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [42:57:2097] sender: [42:89:2057] recipient: [42:87:2116] Leader for TabletID 72057594037927937 is [42:90:2117] sender: [42:91:2057] recipient: [42:87:2116] !Reboot 72057594037927937 (actor [42:57:2097]) rebooted! !Reboot 72057594037927937 (actor [42:57:2097]) tablet resolver refreshed! new actor is[42:90:2117] Leader for TabletID 72057594037927937 is [42:90:2117] sender: [42:176:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:55:2057] recipient: [43:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:55:2057] recipient: [43:51:2095] Leader for TabletID 72057594037927937 is [43:57:2097] sender: [43:58:2057] recipient: [43:51:2095] Leader for TabletID 72057594037927937 is [43:57:2097] sender: [43:75:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [43:57:2097] sender: [43:85:2057] recipient: [43:36:2083] Leader for TabletID 72057594037927937 is [43:57:2097] sender: [43:88:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [43:57:2097] sender: [43:89:2057] recipient: [43:87:2116] Leader for TabletID 72057594037927937 is [43:90:2117] sender: [43:91:2057] recipient: [43:87:2116] !Reboot 72057594037927937 (actor [43:57:2097]) rebooted! !Reboot 72057594037927937 (actor [43:57:2097]) tablet resolver refreshed! new actor is[43:90:2117] Leader for TabletID 72057594037927937 is [43:90:2117] sender: [43:176:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:55:2057] recipient: [44:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:55:2057] recipient: [44:51:2095] Leader for TabletID 72057594037927937 is [44:57:2097] sender: [44:58:2057] recipient: [44:51:2095] Leader for TabletID 72057594037927937 is [44:57:2097] sender: [44:75:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:57:2097] sender: [44:86:2057] recipient: [44:36:2083] Leader for TabletID 72057594037927937 is [44:57:2097] sender: [44:89:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [44:57:2097] sender: [44:90:2057] recipient: [44:88:2116] Leader for TabletID 72057594037927937 is [44:91:2117] sender: [44:92:2057] recipient: [44:88:2116] !Reboot 72057594037927937 (actor [44:57:2097]) rebooted! !Reboot 72057594037927937 (actor [44:57:2097]) tablet resolver refreshed! new actor is[44:91:2117] Leader for TabletID 72057594037927937 is [44:91:2117] sender: [44:177:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:55:2057] recipient: [45:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:55:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:57:2097] sender: [45:58:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:57:2097] sender: [45:75:2057] recipient: [45:14:2061] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_TaggedLiteral [FAIL] Test command err: 2025-05-29T15:28:39.681545Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889987232514294:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:39.681563Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0529 15:28:39.732163507 4053034 dns_resolver_ares.cc:452] no server name supplied in dns URI E0529 15:28:39.732213022 4053034 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-05-29T15:28:39.732876Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:15242: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15242 } ] 2025-05-29T15:28:39.988020Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7509889987232514616:2308], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:28:39.988057Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0008de/r3tmp/tmpygXJem/pdisk_1.dat 2025-05-29T15:28:40.053009Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7509889987232514616:2308], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 15242, node 1 TClient is connected to server localhost:3317 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:40.103554Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:40.103973Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:40.103984Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:40.103988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:40.104053Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:40.385022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:40.395161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:28:40.493745Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:40.493787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:40.495289Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:40.738531Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/compute_databases". Create session OK 2025-05-29T15:28:40.738543Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/compute_databases" 2025-05-29T15:28:40.738545Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/compute_databases" 2025-05-29T15:28:40.738978Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/queries". Create session OK 2025-05-29T15:28:40.738996Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/queries" 2025-05-29T15:28:40.738997Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/queries" 2025-05-29T15:28:40.739042Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/jobs". 
Create session OK 2025-05-29T15:28:40.739049Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/jobs" 2025-05-29T15:28:40.739050Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/jobs" 2025-05-29T15:28:40.739163Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/mappings". Create session OK 2025-05-29T15:28:40.739169Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/mappings" 2025-05-29T15:28:40.739170Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/mappings" 2025-05-29T15:28:40.739198Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/bindings". Create session OK 2025-05-29T15:28:40.739206Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/bindings" 2025-05-29T15:28:40.739207Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/bindings" 2025-05-29T15:28:40.739221Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/connections". Create session OK 2025-05-29T15:28:40.739223Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/connections" 2025-05-29T15:28:40.739224Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/connections" 2025-05-29T15:28:40.739284Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/pending_small". Create session OK 2025-05-29T15:28:40.739285Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/idempotency_keys". Create session OK 2025-05-29T15:28:40.739287Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/idempotency_keys" 2025-05-29T15:28:40.739288Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/idempotency_keys" 2025-05-29T15:28:40.739292Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/pending_small" 2025-05-29T15:28:40.739292Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/pending_small" 2025-05-29T15:28:40.739473Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/result_sets". Create session OK 2025-05-29T15:28:40.739481Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/result_sets" 2025-05-29T15:28:40.739482Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/result_sets" 2025-05-29T15:28:40.739513Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/nodes". Create session OK 2025-05-29T15:28:40.739519Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/nodes" 2025-05-29T15:28:40.739519Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/nodes" 2025-05-29T15:28:40.739574Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/tenant_acks". Create session OK 2025-05-29T15:28:40.739577Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/tenant_acks" 2025-05-29T15:28:40.739578Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/tenant_acks" 2025-05-29T15:28:40.739589Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/quotas". 
Create session OK 2025-05-29T15:28:40.739591Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/quotas" 2025-05-29T15:28:40.739592Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/quotas" 2025-05-29T15:28:40.739698Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/tenants". Create session OK 2025-05-29T15:28:40.739704Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/tenants" 2025-05-29T15:28:40.739705Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/tenants" 2025-05-29T15:28:40.741695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.741941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.742175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.742491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.742677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:40.742689Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889991527482782:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:40.742701Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:40.742765Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889991527482791:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:40.742947Z node 1 :FLAT_TX_S ... A_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101633Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101637Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101642Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101655Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101659Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101662Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101674Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101678Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101682Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101693Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101697Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101700Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101706Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101721Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101725Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101728Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101732Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101744Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101752Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101755Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101759Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101765Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101776Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101780Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101784Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101866Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101872Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101893Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101903Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:28:47.101907Z node 4 
:FQ_QUOTA_SERVICE ERROR:
quota_manager.cpp:648: SyncQuota finished with error: assertion failed at ydb/services/fq/ut_integration/fq_ut.cpp:57, TString (anonymous namespace)::CreateNewHistoryAndWaitFinish(const TString &, NYdb::NFq::TClient &, const TString &, const FederatedQuery::QueryMeta::ComputeStatus &): (result.GetStatus() == EStatus::SUCCESS) failed: (BAD_REQUEST != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (BAD_REQ|S)U(|CC)ES(T|S) TBackTrace::Capture()+28 (0x139D25FC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B861F9) ??+0 (0x1388D3A4) NTestSuiteYq_1::TTestCaseBasic_TaggedLiteral::Execute_(NUnitTest::TTestContext&)+967 (0x13892BA7) NTestSuiteYq_1::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x138BC937) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B880AE) NTestSuiteYq_1::TCurrentTest::Execute()+415 (0x138BC24F) NUnitTest::TTestFactory::Execute()+803 (0x13B88823) NUnitTest::RunMain(int, char**)+3021 (0x13B9A3CD) ??+0 (0x7F71AE25AD90) __libc_start_main+128 (0x7F71AE25AE40) _start+41 (0x129E1029) >> BackupRestoreS3::RestoreTablePartitioningSettings >> BackupPathTest::RecursiveDirectoryPlusExplicitTable [GOOD] >> TSequence::AlterSequence [GOOD] >> TSequence::AlterTableSetDefaultFromSequence >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] >> TTopicYqlTest::BadRequests [FAIL] >> TDSProxyGetTest::TestBlock42GetIntervalsWipedAllOk [GOOD] >> TDSProxyPatchTest::SecuredErrorOnPut_ErasureNone >> TSchemeShardLoginTest::UserLogin >> BackupPathTest::ParallelBackupWholeDatabase >> TWebLoginService::AuditLogEmptySIDsLoginSuccess >> TDSProxyPatchTest::SecuredErrorOnPut_ErasureNone [GOOD] >> TDSProxyPatchTest::NaiveErrorOnGetItem_Erasure4Plus2Block ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:49.337507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:49.337533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:49.337539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:49.337544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:49.337550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:49.337553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:49.337562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:49.337575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:49.337684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:49.337764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:49.349348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:49.349369Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:49.351679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:49.351817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:49.351860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:49.353667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:49.353847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:49.353949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:49.354006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:49.354487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:49.354526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:49.354758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:49.354772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:49.354795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:49.354804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:49.354811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:49.354847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:49.356060Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:49.372931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { 
Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:49.373015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:49.373079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:49.373120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:49.373130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:49.374155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:49.374200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:49.374285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:49.374308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:49.374316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:49.374322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:49.375035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:49.375053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:49.375061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:49.375506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:49.375518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:49.375525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:49.375533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:49.376312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:49.376795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:49.376838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:49.376996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:49.377017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:49.377032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:49.377083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:49.377090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:49.377128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:49.377142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:49.377604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:49.377615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:49.377666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:2 progress is 3/4 2025-05-29T15:28:52.243047Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-05-29T15:28:52.243051Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-05-29T15:28:52.243161Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:52.243167Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 102:0 2025-05-29T15:28:52.243177Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [7:342:2319] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-05-29T15:28:52.243226Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [7:129:2153], Recipient [7:129:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:28:52.243231Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:28:52.243236Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:28:52.243240Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:52.243272Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:28:52.243287Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:28:52.243291Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 4/4 2025-05-29T15:28:52.243294Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-05-29T15:28:52.243299Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 4/4 2025-05-29T15:28:52.243302Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-05-29T15:28:52.243306Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-05-29T15:28:52.243315Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:418:2375] message: TxId: 102 2025-05-29T15:28:52.243320Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-05-29T15:28:52.243329Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:28:52.243333Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:28:52.243350Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:28:52.243355Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:1 2025-05-29T15:28:52.243358Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:1 2025-05-29T15:28:52.243364Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:28:52.243367Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:2 2025-05-29T15:28:52.243370Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:2 2025-05-29T15:28:52.243377Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:28:52.243382Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:3 2025-05-29T15:28:52.243385Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:3 2025-05-29T15:28:52.243393Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:28:52.243501Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435084, Sender [7:129:2153], Recipient [7:129:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-05-29T15:28:52.243507Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5049: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-05-29T15:28:52.243513Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:28:52.243518Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:28:52.243527Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:28:52.243826Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:28:52.243833Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:52.243849Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:28:52.243853Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:52.243863Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:28:52.243867Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:52.243872Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:28:52.243876Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:52.243881Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:28:52.243884Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:52.244334Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:28:52.244344Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:52.244359Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:52.244368Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:52.244382Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [7:418:2375] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2025-05-29T15:28:52.244407Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:28:52.244413Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:524:2474] 2025-05-29T15:28:52.244431Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:28:52.244466Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [7:526:2476], Recipient [7:129:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:28:52.244471Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:28:52.244475Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-05-29T15:28:52.244559Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [7:599:2549], Recipient [7:129:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-05-29T15:28:52.244565Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:28:52.244578Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:52.244615Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 32us result status StatusPathDoesNotExist 2025-05-29T15:28:52.244648Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 102, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table" PathId: 2 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TWebLoginService::AuditLogEmptySIDsLoginSuccess [GOOD] >> TWebLoginService::AuditLogLdapLoginBadPassword >> TSequence::AlterTableSetDefaultFromSequence [GOOD] >> TDSProxyPatchTest::NaiveErrorOnGetItem_Erasure4Plus2Block [GOOD] >> TSchemeShardLoginTest::BanUnbanUser >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_2_1_0_VdiskErrors >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValues Test command err: Trying to start YDB, gRPC: 10164, MsgBus: 11157 2025-05-29T15:28:43.395199Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890002512324223:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:43.395225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ea9/r3tmp/tmpOQhqx5/pdisk_1.dat 2025-05-29T15:28:43.465292Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10164, node 1 2025-05-29T15:28:43.486931Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:43.486946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:43.486947Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:43.486989Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:43.496888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:43.496928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:43.497830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11157 TClient is connected to server localhost:11157 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:43.540918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:43.543340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:43.546288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:28:43.566322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:43.585003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:43.594894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:43.742622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890002512325820:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:43.742655Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:43.782595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:43.788936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:43.794841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:43.849600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:43.859099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:43.872993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:43.886348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:43.902733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890002512326474:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:43.902766Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890002512326479:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:43.902770Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:43.903530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:28:43.906578Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890002512326481:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:28:43.993156Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890002512326532:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:44.078951Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890002512326548:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:44.081297Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTc3MzkxMmUtYmI0ODMxMTItYTA2YWExZTgtZWEyYmVhZGU=, ActorId: [1:7509890002512325817:2401], ActorState: ExecuteState, TraceId: 01jweahwvy3q84c1rhrn2x3ymp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:28:44.082382Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F3956F8AAC2 14. ??:0: ?? @ 0x7F395701C84F Trying to start YDB, gRPC: 8799, MsgBus: 13527 2025-05-29T15:28:48.099548Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890027517316001:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:48.099783Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ea9/r3tmp/tmp5ws7Oe/pdisk_1.dat 2025-05-29T15:28:48.152667Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8799, node 1 2025-05-29T15:28:48.168456Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:48.168472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:48.168474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:48.168528Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13527 2025-05-29T15:28:48.200994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:48.201023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:48.202266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13527 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:48.229228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:48.239654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:48.300623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:48.318245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:48.328234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:48.489430Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890027517317589:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:48.489464Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:48.535856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:48.543873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:48.555397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:48.569715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:48.583274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:48.591087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:48.605028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:48.620943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890027517318241:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:48.620973Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:48.620991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890027517318246:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:48.621877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:28:48.624505Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890027517318248:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:28:48.692370Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890027517318299:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:48.783927Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890027517318315:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:48.784075Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjhiYTBhYmMtNzY1OWQxYmItNzZiY2UxZTgtMWQ1NjkxOWM=, ActorId: [1:7509890027517317571:2401], ActorState: ExecuteState, TraceId: 01jweaj1fcbdrvtrjvn84qa700, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:28:48.784834Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F4FF96E3AC2 14. ??:0: ?? @ 0x7F4FF977584F >> TSchemeShardLoginTest::UserLogin [GOOD] >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_2_1_0_VdiskErrors [GOOD] >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginBadUser >> BackupPathTest::ParallelBackupWholeDatabase [FAIL] >> KqpImmediateEffects::MultiShardUpsertAfterRead >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword [GOOD] >> TWebLoginService::AuditLogAdminLoginSuccess ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:49.715759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:49.715789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:49.715795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:49.715800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:49.715806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:49.715810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:49.715818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:49.715831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:49.715947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:49.716029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:49.727859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:49.727884Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:49.730233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:49.730335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:49.730375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:49.734824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:49.735035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:49.735177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:49.735255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:49.735907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:49.735954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:49.736223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:49.736235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:49.736256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:49.736264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:49.736271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:49.736306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:49.737503Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:49.753716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:49.753789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:49.753844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:49.753881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:49.753888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:49.754690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:49.754713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:49.754778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:49.754796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:49.754802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:49.754807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:49.755237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:49.755249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:49.755254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:49.755579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:49.755591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:49.755596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:49.755603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 
1/1 2025-05-29T15:28:49.756210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:49.756587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:49.756623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:49.756802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:49.756826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:49.756841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:49.756902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:49.756909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:49.756941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:49.756953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:49.757466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:49.757476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:49.757525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
CHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 391 } } CommitVersion { Step: 5000014 TxId: 114 } 2025-05-29T15:28:53.057918Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:28:53.058229Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [7:1050:2986], Recipient [7:136:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:28:53.058240Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:28:53.058245Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:28:53.058277Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269551620, Sender [7:992:2936], Recipient [7:136:2157]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 992 RawX2: 30064774008 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-05-29T15:28:53.058283Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4909: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-05-29T15:28:53.058293Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 992 RawX2: 30064774008 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-05-29T15:28:53.058300Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-05-29T15:28:53.058318Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 992 RawX2: 30064774008 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-05-29T15:28:53.058325Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:28:53.058333Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 992 RawX2: 30064774008 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-05-29T15:28:53.058345Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:53.058349Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.058354Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-05-29T15:28:53.058360Z node 7 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 114:0 129 -> 240 2025-05-29T15:28:53.058386Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:28:53.058504Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:53.058528Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-05-29T15:28:53.058532Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:53.059226Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-05-29T15:28:53.059239Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:53.059498Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.059506Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:53.059545Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.059549Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:53.059554Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 114:0 2025-05-29T15:28:53.059577Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [7:992:2936] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2025-05-29T15:28:53.059652Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [7:136:2157], Recipient [7:136:2157]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:28:53.059659Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:28:53.059666Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.059684Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 114:0 ProgressState 2025-05-29T15:28:53.059697Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:28:53.059702Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#114:0 progress is 1/1 2025-05-29T15:28:53.059708Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-05-29T15:28:53.059714Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#114:0 progress is 1/1 
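Aside: the numeric state transitions scattered through this trace (2 -> 3, 3 -> 128, 128 -> 240, 129 -> 240) line up with the named stages logged next to them. A minimal sketch of that mapping, inferred only from the records above; the real enum in ydb/core/tx/schemeshard may use different names and certainly has more states:

// Inferred from this trace alone; the names are guesses based on the handlers
// that log each transition ("TCreateParts", "TConfigureParts", "TPropose",
// "ProposedWaitParts", "TDone"). Not the actual YDB enum.
enum class ETxPartState {
    CreateParts       = 2,   // "TCreateParts ... ProgressState", then "Change state ... 2 -> 3"
    ConfigureParts    = 3,   // "TConfigureParts ... ProgressState", then "3 -> 128"
    Propose           = 128, // "TPropose HandleReply TEvOperationPlan", then "128 -> 240"
    ProposedWaitParts = 129, // "txState.State: ProposedWaitParts", then "129 -> 240" once TEvSchemaChanged arrives
    Done              = 240, // "TDone opId# ... ProgressState", part reported as 1/1
};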
2025-05-29T15:28:53.059717Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-05-29T15:28:53.059723Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2025-05-29T15:28:53.059735Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:396:2363] message: TxId: 114 2025-05-29T15:28:53.059743Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-05-29T15:28:53.059749Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 114:0 2025-05-29T15:28:53.059755Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 114:0 2025-05-29T15:28:53.059785Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-29T15:28:53.060653Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:53.060675Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [7:396:2363] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2025-05-29T15:28:53.060725Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-05-29T15:28:53.060732Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1018:2954] 2025-05-29T15:28:53.060787Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [7:1020:2956], Recipient [7:136:2157]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:28:53.060794Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:28:53.060799Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2025-05-29T15:28:53.061041Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [7:1059:2995], Recipient [7:136:2157]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2025-05-29T15:28:53.061047Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:28:53.061853Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:53.061912Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:508: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.062003Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:130: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2025-05-29T15:28:53.062062Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:28:53.063320Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:53.063364Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2025-05-29T15:28:53.063371Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::BadSids [FAIL] Test command err: 2025-05-29T15:28:36.340769Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:28:36.340796Z node 1 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info 2025-05-29T15:28:36.587324Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:1099: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-05-29T15:28:36.587352Z node 2 :PERSQUEUE INFO: pq_impl.cpp:800: [PQ: 72057594037927937] doesn't have tx writes info === Server->StartServer(false); 2025-05-29T15:28:36.861994Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509889972288136583:2142];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:36.862029Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:28:36.864847Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509889975529321581:2076];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:36.864880Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ec4/r3tmp/tmp1ct2S0/pdisk_1.dat 2025-05-29T15:28:36.903623Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:28:36.912573Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:28:36.937148Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3679, node 3 2025-05-29T15:28:36.948178Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/ciyv/001ec4/r3tmp/yandexLcgaqe.tmp 2025-05-29T15:28:36.948204Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001ec4/r3tmp/yandexLcgaqe.tmp 2025-05-29T15:28:36.948283Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001ec4/r3tmp/yandexLcgaqe.tmp 2025-05-29T15:28:36.948325Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:36.952503Z INFO: TTestServer started on Port 5685 GrpcPort 3679 2025-05-29T15:28:36.962147Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:36.962182Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:5685 PQClient connected to localhost:3679 === TenantModeEnabled() = 0 === Init PQ - start server on port 3679 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:28:36.963656Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
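For context, the "WaitRootIsUp" lines above come from the test client polling the scheme root until Ls reports success. A hedged sketch of that loop follows; the types and the helper below are stand-ins rather than the actual test_client.cpp code, and only the Ls-until-SUCCESS shape is taken from the log:

#include <chrono>
#include <thread>

// Assumed stand-ins for the test client types seen in the log; the real
// TClient lives in YDB's test harness and is not reproduced here.
struct TLsResponse { int StatusCode = 0; };     // 1 == SUCCESS in the Ls response above
struct TClient {
    // Stub: the real client issues a scheme Ls RPC ("TClient::Ls request: Root").
    TLsResponse Ls(const char*) { return TLsResponse{1}; }
};

constexpr int SUCCESS = 1;

bool WaitRootIsUp(TClient& client, const char* path = "Root", int attempts = 100) {
    for (int i = 0; i < attempts; ++i) {
        if (client.Ls(path).StatusCode == SUCCESS) {
            return true;                        // "WaitRootIsUp 'Root' success."
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }
    return false;
}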
2025-05-29T15:28:37.003245Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:37.003270Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:37.004160Z node 3 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-05-29T15:28:37.004378Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:37.012216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:28:37.012274Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:28:37.012321Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-05-29T15:28:37.012379Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:28:37.012396Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:28:37.013052Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-05-29T15:28:37.013079Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-05-29T15:28:37.013126Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:28:37.013141Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-05-29T15:28:37.013144Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-05-29T15:28:37.013148Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976720657:0 2 -> 3 waiting... 
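The records that follow walk through the propose/plan handshake: schemeshard proposes the transaction to the coordinator tablet, the coordinator assigns a plan step, and the TEvOperationPlan reply drives the operation part from state 128 to 240. A toy model of that exchange, heavily simplified by assumption; only the tablet id, txId, and step id are taken from the log:

#include <cstdint>
#include <iostream>

// Toy handshake: not the real actor messages, just the observable sequence.
struct TProposeTransaction { uint64_t TxId; uint64_t CoordinatorId; };
struct TOperationPlan     { uint64_t TxId; uint64_t StepId; };

TOperationPlan CoordinatorAssignStep(const TProposeTransaction& propose) {
    static uint64_t nextStep = 1748532517060ULL;  // plan steps increase monotonically
    return TOperationPlan{propose.TxId, nextStep++};
}

int main() {
    // "DoPropose send propose to coordinator: 72057594046316545"
    TProposeTransaction propose{281474976720657ULL, 72057594046316545ULL};
    // "TTxOperationPlanStep Execute, stepId: 1748532517060"
    TOperationPlan plan = CoordinatorAssignStep(propose);
    // "HandleReply TEvOperationPlan ... Change state ... 128 -> 240"
    std::cout << "txId " << plan.TxId << " planned at step " << plan.StepId << "\n";
}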
2025-05-29T15:28:37.013592Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-05-29T15:28:37.013604Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-05-29T15:28:37.013608Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-05-29T15:28:37.013628Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:28:37.013638Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:28:37.013642Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976720657:0 3 -> 128 2025-05-29T15:28:37.013989Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:28:37.013999Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:28:37.014002Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-05-29T15:28:37.014006Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-05-29T15:28:37.014600Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:37.014929Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-05-29T15:28:37.014971Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:28:37.015419Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 1748532517060, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-29T15:28:37.015451Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1748532517060 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-05-29T15:28:37.015461Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-05-29T15:28:37.015525Z 
node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976720657:0 128 -> 240 2025-05-29T15:28:37.015535Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-05-29T15:28:37.015566Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-29T15:28:37.015578Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-05-29T15:28:37.015961Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp ... 7.904909Z node 21 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:47.908326Z INFO: TTestServer started on Port 19942 GrpcPort 16686 TClient is connected to server localhost:19942 PQClient connected to localhost:16686 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:47.957278Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:47.957312Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:47.961922Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:47.962296Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:28:47.982856Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:28:48.261334Z node 22 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7509890027774182793:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:48.261357Z node 22 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7509890027774182769:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:48.261378Z node 22 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:48.262479Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-05-29T15:28:48.266427Z node 22 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [22:7509890027774182797:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-05-29T15:28:48.278250Z node 21 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [21:7509890024055140215:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:48.278674Z node 21 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=21&id=YjhiOTg1ODAtZTJiNDY0ODItZGI3MTBhY2UtYTRiODg1YTY=, ActorId: [21:7509890024055140190:2338], ActorState: ExecuteState, TraceId: 01jweaj14jbzdah9g7djryz5yk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:48.278817Z node 21 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:48.279873Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:48.320465Z node 22 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [22:7509890027774182866:2153] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:48.324748Z node 22 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [22:7509890027774182881:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:48.324829Z node 22 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=22&id=NGQwM2E4NDUtOTg4NDgxMjctOWQ4MGNmZDItODA1NGY2ZA==, ActorId: [22:7509890027774182766:2305], ActorState: ExecuteState, TraceId: 01jweaj1456d1xx0d1tdctcs0k, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:48.324968Z node 22 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:48.341019Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:48.360249Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:28:48.387191Z node 21 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [21:7509890024055140556:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:48.387296Z node 21 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=21&id=NGMzMGU2NTMtNTA2ZDFlOWQtNGYwZjM4NjgtODQxYjVhMjI=, ActorId: [21:7509890024055140553:2374], ActorState: ExecuteState, TraceId: 01jweaj17sbth6q7d8rjykfmgp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13B9173C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13D44119) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139C81C4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139F6148) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x13995592) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139C2D47) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x138D5468) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x138D474B) NPersQueue::TTestServer::TTestServer(bool)+134 (0x138D0986) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TTestCaseBadSids::Execute_(NUnitTest::TTestContext&)+37 (0x138DC4B5) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139BF897) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13D45FCE) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute()+436 (0x139BF1B4) NUnitTest::TTestFactory::Execute()+803 (0x13D46743) NUnitTest::RunMain(int, char**)+3021 (0x13D582DD) ??+0 (0x7F4AD2ABFD90) __libc_start_main+128 (0x7F4AD2ABFE40) _start+41 (0x129C9029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_2_1_0_VdiskErrors [GOOD] Test command err: 2025-05-29T15:28:53.109553Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [7e4afa7ea38a37be] bootstrap ActorId# [3:82:2128] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-05-29T15:28:53.109630Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:28:53.109639Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:28:53.109645Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:28:53.109650Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:28:53.109656Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:28:53.109661Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# 
[72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:28:53.114228Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2025-05-29T15:28:53.114286Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:28:53.114295Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:28:53.114370Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2025-05-29T15:28:53.114378Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:28:53.114383Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:28:53.114406Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2025-05-29T15:28:53.114464Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2025-05-29T15:28:53.114473Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:28:53.114477Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:28:53.114496Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:2:0] Marker# BPP01 2025-05-29T15:28:53.114526Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2025-05-29T15:28:53.114546Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-05-29T15:28:53.114556Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-05-29T15:28:53.114613Z node 3 :BS_PROXY_PUT NOTICE: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# 
Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.383 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.384 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:2:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.384 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.957 VDiskId# [0:1:0:1:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 5.013 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 5.068 VDiskId# [0:1:1:1:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 5.083 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 5.105 VDiskId# [0:1:2:1:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 5.163 VDiskId# [0:1:0:2:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 5.178 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 5.196 VDiskId# [0:1:1:2:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 5.225 VDiskId# [0:1:0:0:0] NodeId# 3 Status# OK } ] } >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] >> TWebLoginService::AuditLogLdapLoginBadBind >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true >> TSchemeShardLoginTest::BanUnbanUser [GOOD] >> TSchemeShardLoginTest::BanUserWithWaiting >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false >> KqpEffects::InsertAbort_Literal_Conflict+UseSink >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] >> BackupRestoreS3::RestoreTablePartitioningSettings [GOOD] >> BackupRestoreS3::RestoreIndexTablePartitioningSettings >> BackupRestore::RestoreTablePartitioningSettings >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false >> TWebLoginService::AuditLogAdminLoginSuccess [GOOD] >> TWebLoginService::AuditLogCreateModifyUser >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true |71.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |71.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |71.5%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut >> KqpImmediateEffects::ConflictingKeyRW1RR2 >> KqpInplaceUpdate::SingleRowStr+UseSink >> TWebLoginService::AuditLogCreateModifyUser [GOOD] >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] 
recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:110:2141] 2025-05-29T15:28:52.964656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:52.964684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:52.964690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:52.964695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:52.964701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:52.964706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:52.964716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:52.964731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:52.964831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:52.964897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:52.981694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:52.981724Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:52.985909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:52.985979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:52.986018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:52.987846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:52.987894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:52.987995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:52.988057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:52.988627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, 
at schemeshard: 72057594046678944 2025-05-29T15:28:52.988675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:52.988968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:52.988981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:52.988992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:52.989000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:52.989006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:52.989043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:52.990415Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:241:2058] recipient: [1:15:2062] 2025-05-29T15:28:53.012456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:53.012541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.012613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:53.012671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:53.012682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.013706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:53.013756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:53.013826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.013838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 
72057594046678944 2025-05-29T15:28:53.013845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:53.013852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:53.014350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.014363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:53.014370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:53.014728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.014761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.014769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:53.014777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:53.015490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:53.015929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:53.015973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:53.016172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:53.016198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:53.016206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:53.016268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:53.016276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:53.016316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:53.016328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:53.016777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:53.016787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:53.016836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.718627Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:53.718652Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:53.718684Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.718692Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:53.718695Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:53.718698Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:53.718969Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.718978Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:53.718981Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:53.719202Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.719210Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.719214Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:53.719218Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:53.719241Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:53.719429Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:53.719457Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:53.719570Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:53.719583Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871340 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:53.719587Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:53.719628Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:53.719632Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:53.719657Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:53.719665Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:53.719987Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:53.719997Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:53.720022Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:53.720025Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:207:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:53.720081Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.720086Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-29T15:28:53.720095Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:28:53.720098Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:28:53.720101Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:28:53.720103Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:28:53.720106Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-29T15:28:53.720109Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:28:53.720113Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1:0 2025-05-29T15:28:53.720115Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1:0 2025-05-29T15:28:53.720125Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:53.720129Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-29T15:28:53.720132Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-29T15:28:53.720220Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:28:53.720231Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:28:53.720236Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-05-29T15:28:53.720240Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-05-29T15:28:53.720243Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:53.720253Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-05-29T15:28:53.720796Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 
1 2025-05-29T15:28:53.720866Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:53.721280Z node 4 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [4:270:2260] Bootstrap 2025-05-29T15:28:53.722701Z node 4 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [4:270:2260] Become StateWork (SchemeCache [4:278:2268]) 2025-05-29T15:28:53.722766Z node 4 :HTTP WARN: login_page.cpp:102: 127.0.0.1:0 POST /login 2025-05-29T15:28:53.722839Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:14214, port: 14214 2025-05-29T15:28:53.722866Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-05-29T15:28:53.739888Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:14214. Invalid credentials 2025-05-29T15:28:53.740114Z node 4 :HTTP ERROR: login_page.cpp:209: Login fail for user1@ldap: Could not login via LDAP 2025-05-29T15:28:53.740231Z node 4 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [4:270:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:28:53.741047Z node 4 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2025-05-29T15:28:53.718647Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-05-29T15:28:53.740045Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:14214. Invalid credentials, login_user=user1@ldap, sanitized_token={none} AUDIT LOG checked line: 2025-05-29T15:28:53.740045Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:14214. 
Invalid credentials, login_user=user1@ldap, sanitized_token={none} >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogCreateModifyUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:52.976147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:52.976180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:52.976186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:52.976192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:52.976200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:52.976204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:52.976213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:52.976227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:52.976349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:52.976429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:52.990519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:52.990547Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:52.993218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:52.993353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:52.993404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2025-05-29T15:28:52.994848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:52.995000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:52.995142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:52.995217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:52.995688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:52.995735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:52.996043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:52.996055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:52.996081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:52.996089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:52.996096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:52.996135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:52.997568Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:53.024649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:53.024742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.024826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:53.024887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:53.024899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
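The ut_login tests in this block assert on the AUDIT LOG buffers printed above and below, which use a comma-separated key=value form (component=schemeshard, tx_id=..., operation=CREATE USER, login_user=user1, ...). When triaging such failures by hand, a throwaway parser along these lines can help. This is a sketch assuming only the format visible in this log, not a YDB utility; it expects the portion of the line after the leading timestamp, and it glues non-key segments back onto the previous value because fields such as reason=... may themselves contain ", ":

    #include <cstddef>
    #include <iostream>
    #include <map>
    #include <string>

    // Split "k1=v1, k2=v2, ..." into pairs. A segment without a lowercase
    // "key=" prefix is treated as a continuation of the previous value.
    std::map<std::string, std::string> ParseAuditLine(const std::string& line) {
        std::map<std::string, std::string> out;
        std::string lastKey;
        std::size_t pos = 0;
        while (pos <= line.size()) {
            const std::size_t next = line.find(", ", pos);
            const std::string seg =
                line.substr(pos, next == std::string::npos ? std::string::npos : next - pos);
            const std::size_t eq = seg.find('=');
            const bool isKey = eq != std::string::npos &&
                seg.substr(0, eq).find_first_not_of("abcdefghijklmnopqrstuvwxyz_")
                    == std::string::npos;
            if (isKey) {
                lastKey = seg.substr(0, eq);
                out[lastKey] = seg.substr(eq + 1);
            } else if (!lastKey.empty()) {
                out[lastKey] += ", " + seg; // continuation of the previous value
            }
            if (next == std::string::npos) break;
            pos = next + 2;
        }
        return out;
    }

    int main() {
        // Usage on a shortened sample modeled on the audit entries in this log.
        const auto fields = ParseAuditLine(
            "component=schemeshard, tx_id=101, operation=CREATE USER, login_user=user1");
        std::cout << fields.at("operation") << "\n"; // prints: CREATE USER
        return 0;
    }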
2025-05-29T15:28:53.025821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:53.025853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:53.025922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.025934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:53.025941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:53.025947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:53.026385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.026398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:53.026405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:53.026764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.026781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.026788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:53.026796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:53.027538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:53.027952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:53.027999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:53.028205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2025-05-29T15:28:53.028232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:53.028240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:53.028306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:53.028314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:53.028355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:53.028369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:53.028776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:53.028786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:53.028832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
peration: MODIFY USER, path: /MyRoot 2025-05-29T15:28:54.234951Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:54.234957Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:54.234989Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:54.234994Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:209:2210], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-05-29T15:28:54.235096Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:54.235106Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:54.235112Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:28:54.235118Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-05-29T15:28:54.235124Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:54.235141Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-05-29T15:28:54.235516Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 AUDIT LOG buffer(6): 2025-05-29T15:28:54.211292Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-05-29T15:28:54.220564Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2025-05-29T15:28:54.226523Z: component=schemeshard, tx_id=102, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2025-05-29T15:28:54.229763Z: component=schemeshard, tx_id=103, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, 
login_user_change=[blocking] 2025-05-29T15:28:54.232114Z: component=schemeshard, tx_id=104, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[unblocking] 2025-05-29T15:28:54.234332Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] AUDIT LOG checked line: 2025-05-29T15:28:54.234332Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2025-05-29T15:28:54.236439Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user1" Password: "password1" CanLogin: false } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:54.239284Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:54.239324Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#106:0 progress is 1/1 2025-05-29T15:28:54.239330Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-05-29T15:28:54.239337Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#106:0 progress is 1/1 2025-05-29T15:28:54.239341Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-05-29T15:28:54.239354Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:54.239367Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-05-29T15:28:54.239373Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-05-29T15:28:54.239378Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 106:0 2025-05-29T15:28:54.239385Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-05-29T15:28:54.239390Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-05-29T15:28:54.240179Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:54.240207Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: 
/MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2025-05-29T15:28:54.240250Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:54.240258Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:54.240297Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:54.240302Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:209:2210], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-05-29T15:28:54.240412Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-05-29T15:28:54.240424Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-05-29T15:28:54.240430Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-05-29T15:28:54.240436Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:28:54.240442Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:54.240464Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-05-29T15:28:54.240877Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 AUDIT LOG buffer(7): 2025-05-29T15:28:54.211292Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-05-29T15:28:54.220564Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2025-05-29T15:28:54.226523Z: component=schemeshard, tx_id=102, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2025-05-29T15:28:54.229763Z: component=schemeshard, tx_id=103, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, 
login_user_level=admin, login_user=user1, login_user_change=[blocking] 2025-05-29T15:28:54.232114Z: component=schemeshard, tx_id=104, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[unblocking] 2025-05-29T15:28:54.234332Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2025-05-29T15:28:54.239253Z: component=schemeshard, tx_id=106, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password, blocking] AUDIT LOG checked line: 2025-05-29T15:28:54.239253Z: component=schemeshard, tx_id=106, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password, blocking] |71.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQuery::QueryClientTimeoutPrecompiled >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true >> KqpStats::StreamLookupStats-StreamLookupJoin ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::CreateTopicYqlBackCompatibility [FAIL] Test command err: 2025-05-29T15:28:36.055113Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889972603318624:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:36.055138Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:28:36.058806Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889975467239965:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:36.058830Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:28:36.098706Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00202d/r3tmp/tmpyvSXO8/pdisk_1.dat 2025-05-29T15:28:36.107484Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:28:36.138012Z node 1 :IMPORT 
WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25149, node 1 2025-05-29T15:28:36.149142Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/00202d/r3tmp/yandexm5JbB7.tmp 2025-05-29T15:28:36.149157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/00202d/r3tmp/yandexm5JbB7.tmp 2025-05-29T15:28:36.149226Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/00202d/r3tmp/yandexm5JbB7.tmp 2025-05-29T15:28:36.149274Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:36.153899Z INFO: TTestServer started on Port 8508 GrpcPort 25149 2025-05-29T15:28:36.155558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:36.155589Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:36.156949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8508 PQClient connected to localhost:25149 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:36.182774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:28:36.202048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:36.205450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:36.205469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:36.206333Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:28:36.206669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 
2025-05-29T15:28:36.451336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889972603319681:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:36.451372Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:36.451372Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889972603319686:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:36.452167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480
2025-05-29T15:28:36.452434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889972603319728:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:36.452448Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:36.456043Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889972603319697:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking }
2025-05-29T15:28:36.484944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-05-29T15:28:36.534592Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889972603319839:2807] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:28:36.544989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-05-29T15:28:36.545185Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889972603319857:2355], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:36.545280Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTdmMzQwNzMtZDJhOTE2N2ItNzgxMDM4MDAtZWVlNTM3ZjI=, ActorId: [1:7509889972603319664:2338], ActorState: ExecuteState, TraceId: 01jweahnk15g5dty3k52wpkmsv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:36.545721Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:36.564948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:28:36.592608Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889972603320125:2382], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:28:36.592714Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzE3NGJiNWEtMjZiZGE2ZWMtZWNjNTMxMGItMzFmNTE4OTQ=, ActorId: [1:7509889972603320122:2380], ActorState: ExecuteState, TraceId: 01jweahnq21zy5fzypvnr836xe, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13B9173C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13D44119) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139C81C4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139F6148) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x13995592) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap Disconnected 2025-05-29T15:28:50.355555Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:50.355839Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:50.355861Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:50.356766Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:28:50.358171Z node 25 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 26 Cookie 26 waiting... 2025-05-29T15:28:50.358212Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:50.358439Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:28:50.370583Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:28:50.437882Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:50.621315Z node 25 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [25:7509890035406051006:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:50.621334Z node 25 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [25:7509890035406051027:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:50.621355Z node 25 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:50.622214Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480
2025-05-29T15:28:50.625067Z node 25 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [25:7509890035406051072:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:50.625139Z node 25 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:50.628036Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:28:50.631705Z node 25 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [25:7509890035406051043:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking }
2025-05-29T15:28:50.674206Z node 26 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [26:7509890033639417099:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:50.674305Z node 26 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=26&id=NTk1MmE4MzItMTAwMDNkMDEtZTdkY2VmZTktMzBjNjA3NjY=, ActorId: [26:7509890033639417045:2309], ActorState: ExecuteState, TraceId: 01jweaj3fb9t2g1m3m8f2de96f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:50.674466Z node 26 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:50.685776Z node 25 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [25:7509890035406051217:2779] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:50.690610Z node 25 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [25:7509890035406051236:2354], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:50.691137Z node 25 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=25&id=YWE2NDRkMjItNWVjNWNiZDItOWRiMDBhYmEtM2E0ZGIyODA=, ActorId: [25:7509890035406051002:2333], ActorState: ExecuteState, TraceId: 01jweaj3dwefd4mkbyc327039h, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:50.691258Z node 25 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:50.697888Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:50.777946Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:28:50.818399Z node 25 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [25:7509890035406051520:2381], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:28:50.818723Z node 25 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=25&id=N2FmN2Y5OWQtZTdlNTY2ZDgtYTI3NzkxNmQtMmFmNDZmY2M=, ActorId: [25:7509890035406051517:2379], ActorState: ExecuteState, TraceId: 01jweaj3kk5wdchj9awaye9nza, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13B9173C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13D44119) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139C81C4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139F6148) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x13995592) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139C2D47) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x138D5468) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x138D474B) NKikimr::NPersQueueTests::NTestSuiteTTopicYqlTest::TTestCaseCreateTopicYqlBackCompatibility::Execute_(NUnitTest::TTestContext&)+359 (0x13A3C817) NKikimr::NPersQueueTests::NTestSuiteTTopicYqlTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13A51177) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13D45FCE) NKikimr::NPersQueueTests::NTestSuiteTTopicYqlTest::TCurrentTest::Execute()+436 (0x13A50B34) NUnitTest::TTestFactory::Execute()+803 (0x13D46743) NUnitTest::RunMain(int, char**)+3021 (0x13D582DD) ??+0 (0x7FA4BC336D90) __libc_start_main+128 (0x7FA4BC336E40) _start+41 (0x129C9029) |71.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |71.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant >> TChargeBTreeIndex::FewNodes_History [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] >> BackupRestore::RestoreTablePartitioningSettings [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true [GOOD] >> TChargeBTreeIndex::FewNodes_Sticky >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false >> BackupRestore::RestoreIndexTablePartitioningSettings >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup >> KqpEffects::InsertAbort_Select_Conflict-UseSink >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false >> TSchemeShardLoginTest::TestExternalLogin >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup [GOOD] >> TSchemeShardLoginTest::TestExternalLogin [GOOD] >> TSchemeShardLoginTest::FailedLoginWithInvalidUser >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true >> KqpQuery::RowsLimit |71.5%| [LD] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |71.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |71.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |71.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [FAIL] Test command err: 2025-05-29T15:28:35.369269Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889968493907159:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:35.369519Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:28:35.371805Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889971363408102:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:35.371825Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:28:35.396914Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002039/r3tmp/tmpKThA1G/pdisk_1.dat 2025-05-29T15:28:35.406402Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:28:35.424894Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5301, node 1 2025-05-29T15:28:35.440469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/002039/r3tmp/yandexScMDk4.tmp 2025-05-29T15:28:35.440489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/002039/r3tmp/yandexScMDk4.tmp 2025-05-29T15:28:35.440570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/002039/r3tmp/yandexScMDk4.tmp 2025-05-29T15:28:35.440625Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:35.446814Z INFO: TTestServer started on Port 16576 GrpcPort 5301 TClient is connected to server localhost:16576 PQClient connected to localhost:5301 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:28:35.469612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:35.469679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:35.471232Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:35.499619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:35.499650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:35.501218Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:28:35.501543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:35.503053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:28:35.515377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:28:35.683148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889968493908161:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:35.683189Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:35.683236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889968493908173:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:35.683963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480
2025-05-29T15:28:35.683980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889968493908204:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:35.683987Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:35.687038Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509889968493908175:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking }
2025-05-29T15:28:35.711067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:28:35.753994Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509889968493908363:2806] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:28:35.764743Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889968493908382:2354], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:35.764831Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmRkNjA5MmUtMTc0NjFiNWMtZGEyYzliZjItZTI5OTA4Nzg=, ActorId: [1:7509889968493908143:2334], ActorState: ExecuteState, TraceId: 01jweahmv04b4hyhhzdfmzmrpt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:35.765316Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:35.769632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:35.788229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:28:35.816499Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889968493908666:2381], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:28:35.816609Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWRmODQ4NGItYjY3YTIzYWQtMWQzNGQ5MWMtMmI3ZmYyOGM=, ActorId: [1:7509889968493908663:2379], ActorState: ExecuteState, TraceId: 01jweahmyt1x41qnhfpj3f2zxx, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
2025-05-29T15:28:35.878136Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 19 }
assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 [[a ... 7594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:50.325335Z node 26 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:50.325369Z node 26 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:50.327302Z node 26 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:50.329222Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:50.334199Z node 26 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:50.334227Z node 26 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:50.335489Z node 26 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 27 Cookie 27 waiting... 2025-05-29T15:28:50.336269Z node 26 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:50.341840Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:28:50.608597Z node 26 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [26:7509890035885100168:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:50.608620Z node 26 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [26:7509890035885100180:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:50.608628Z node 26 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:50.609483Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480
2025-05-29T15:28:50.611566Z node 26 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [26:7509890035885100215:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:50.611613Z node 26 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:50.617906Z node 26 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [26:7509890035885100183:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking }
2025-05-29T15:28:50.618294Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:28:50.637414Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:28:50.663938Z node 27 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [27:7509890034335130648:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:50.664338Z node 27 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=27&id=ODMzZDFkNzYtODE3MWVjN2MtMmUwYTVkOTktNjQ0NGUwNDY=, ActorId: [27:7509890034335130623:2311], ActorState: ExecuteState, TraceId: 01jweaj3f15f9s027vazn5pett, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:50.664489Z node 27 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:50.687187Z node 26 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [26:7509890035885100457:2859] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:50.691637Z node 26 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [26:7509890035885100475:2358], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:50.692288Z node 26 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=26&id=NWQzYTFiN2YtYjVkOTAwODktOTY4N2ZmMDktMTUwMzZhZDE=, ActorId: [26:7509890035885100151:2334], ActorState: ExecuteState, TraceId: 01jweaj3df2y93hsg1cskrs13g, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:50.692412Z node 26 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:50.710414Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:28:50.753860Z node 26 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [26:7509890035885100668:2382], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:50.753983Z node 26 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=26&id=ZjE0YjdhNjAtZWNmYjEyNjYtMjkyMDYzZDMtNTllOWQ1MGI=, ActorId: [26:7509890035885100664:2380], ActorState: ExecuteState, TraceId: 01jweaj3hjene2wz7r0yv0ttwv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13B9173C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13D44119) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139C81C4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139F6148) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x13995592) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139C2D47) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x138D5468) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x138D474B) NPersQueue::TTestServer::TTestServer(bool)+134 (0x138D0986) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TTestCaseReadWithoutConsumerFirstClassCitizen::Execute_(NUnitTest::TTestContext&)+44 (0x139AD4DC) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139BF897) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13D45FCE) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute()+436 (0x139BF1B4) NUnitTest::TTestFactory::Execute()+803 (0x13D46743) NUnitTest::RunMain(int, char**)+3021 (0x13D582DD) ??+0 (0x7F9F1203DD90) __libc_start_main+128 (0x7F9F1203DE40) _start+41 (0x129C9029) >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] >> BackupRestoreS3::RestoreIndexTablePartitioningSettings [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD] >> TWebLoginService::AuditLogLoginSuccess >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCount >> TReplicationWithRebootsTests::CreateDropRecreate [GOOD] >> BackupRestoreS3::RestoreIndexTableReadReplicasSettings >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] |71.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:54.449974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:54.450002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:54.450008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:54.450012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:54.450019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:54.450024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:54.450032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:54.450046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:54.450176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:54.450245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:54.472727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:54.472747Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:54.479878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:54.480009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:54.480051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:54.481797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:54.481932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:54.482064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2025-05-29T15:28:54.482171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:54.482720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:54.482782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:54.483075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:54.483088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:54.483111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:54.483123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:54.483130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:54.483168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.486095Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:54.504276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:54.504335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.504389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:54.504430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:54.504438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.505075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:54.505098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: 
//MyRoot 2025-05-29T15:28:54.505144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.505152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:54.505156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:54.505160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:54.505610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.505626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:54.505632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:54.506058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.506083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.506093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:54.506100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:54.506687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:54.510938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:54.510985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:54.511138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:54.511171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:54.511178Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:54.511254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:54.511265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:54.511301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:54.511311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:54.511819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:54.511828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:54.511857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... ESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:55.506037Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-29T15:28:55.506048Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:28:55.506052Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:28:55.506057Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:28:55.506060Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:28:55.506064Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-29T15:28:55.506085Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:28:55.506090Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1:0 2025-05-29T15:28:55.506095Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1:0 2025-05-29T15:28:55.506106Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:55.506111Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-29T15:28:55.506116Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication 
details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-29T15:28:55.506188Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:28:55.506198Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:28:55.506203Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-05-29T15:28:55.506208Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-05-29T15:28:55.506212Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:55.506228Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-05-29T15:28:55.506665Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-05-29T15:28:55.506771Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:55.506859Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [5:271:2261] Bootstrap 2025-05-29T15:28:55.508217Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [5:271:2261] Become StateWork (SchemeCache [5:276:2266]) 2025-05-29T15:28:55.508291Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:55.508323Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 41us result status StatusSuccess 2025-05-29T15:28:55.508399Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:55.508480Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [5:271:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:28:55.508743Z node 5 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944 2025-05-29T15:28:55.508841Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:43: TTxLogin Execute at schemeshard: 72057594046678944 2025-05-29T15:28:55.508846Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:46: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-05-29T15:28:55.561897Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:85: TTxLogin Complete, result: Error: "Cannot find user: user1", at schemeshard: 72057594046678944 2025-05-29T15:28:55.561950Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:55.561959Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:55.562015Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:55.562021Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:208:2209], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-05-29T15:28:55.562177Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 0 2025-05-29T15:28:55.562280Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:55.562312Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 38us result status StatusSuccess 2025-05-29T15:28:55.562417Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAyuRemEAkVa6bdgbpAzqT\n4VUhhu9b7TDuntEu+flW0qn/QQMPF2LM3Q4BqPKUk+ztXUVUBxz1scrD2RywApc8\nZOWVAg+1/9VgCodD/fyYout6KX4zUC36ZDPmXH2FZYs8rYoUkOzkB5QSat9NeKEv\nkz1MUUbd5HkAqe7B/g5U+AZ29tnvWmD+uOUQpW2WBr3htb+/mAbWC0kGrDLPygzg\ntFStqfh0tEaChV/6Due/2pcbNDkbArWjb2KgD3Kin2E+SWCjTgrTA5xf7dvfLKz6\nq/7C41k3qPyUaB/7J4otlVEEUpaGBGU8/FPmMYgOoVocLmODmumj5e1Dx2Q/jz2T\nbwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1748618935560 } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> THiveTest::TestCheckSubHiveMigrationWithReboots [GOOD] >> THiveTest::PipeAlivenessOfDeadTablet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:54.263721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:54.263758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:54.263775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:54.263782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:54.263789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:54.263795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:54.263805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:54.263822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:54.263973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:54.264068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:54.283254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:54.283285Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:54.290621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:54.290729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:54.290777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:54.292504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:54.292741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:54.292862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:54.292912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:54.293386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:54.293424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:54.293638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:54.293646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-05-29T15:28:54.293662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:54.293668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:54.293673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:54.293707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.295024Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:54.318178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:54.318243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.318301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:54.318344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:54.318353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.319845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:54.319876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:54.319931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.319940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:54.319945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:54.319951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:54.320403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.320414Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:54.320418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:54.320747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.320756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.320762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:54.320767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:54.321266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:54.321565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:54.321598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:54.321737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:54.321754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:54.321758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:54.321802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:54.321809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:54.321838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:54.321849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:54.322156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:54.322162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:54.322194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 105, response: Status: StatusSuccess TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:55.481675Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /MyRoot/Dir1/DirSub1, set owner:user2 2025-05-29T15:28:55.481697Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:55.481702Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:28:55.481716Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:28:55.481729Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:55.481733Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:208:2209], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-05-29T15:28:55.481737Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:208:2209], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-05-29T15:28:55.481810Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:55.481818Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:55.481822Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:28:55.481825Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:28:55.481829Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 3] was 1 2025-05-29T15:28:55.481880Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:55.481887Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:55.481890Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:28:55.481896Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-05-29T15:28:55.481899Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-29T15:28:55.481906Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-05-29T15:28:55.482307Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:28:55.482358Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2025-05-29T15:28:55.483059Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "user1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:55.483148Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:55.483161Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#106:0 progress is 1/1 2025-05-29T15:28:55.483165Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-05-29T15:28:55.483169Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#106:0 progress is 1/1 2025-05-29T15:28:55.483172Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-05-29T15:28:55.483179Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:55.483186Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-05-29T15:28:55.483190Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 
2025-05-29T15:28:55.483194Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 106:0 2025-05-29T15:28:55.483198Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-05-29T15:28:55.483201Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-05-29T15:28:55.483519Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:55.483535Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE USER, path: /MyRoot 2025-05-29T15:28:55.483558Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:55.483565Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:55.483587Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:55.483591Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:208:2209], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-05-29T15:28:55.483677Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-05-29T15:28:55.483686Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-05-29T15:28:55.483689Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-05-29T15:28:55.483693Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-05-29T15:28:55.483697Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:55.483725Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-05-29T15:28:55.483972Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-05-29T15:28:55.484043Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1/DirSub1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:55.484063Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Dir1/DirSub1" took 27us result status StatusSuccess 2025-05-29T15:28:55.484126Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1/DirSub1" PathDescription { Self { Name: "DirSub1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "user2" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:55.484179Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:43: TTxLogin Execute at schemeshard: 72057594046678944 2025-05-29T15:28:55.484194Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:85: TTxLogin Complete, result: Error: "Cannot find user: user1", at schemeshard: 72057594046678944 >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] >> TWebLoginService::AuditLogLoginSuccess [GOOD] >> TWebLoginService::AuditLogLoginBadPassword >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables [FAIL] >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::WriteThenReadWithCommit Test command err: Trying to start YDB, gRPC: 11401, MsgBus: 3654 2025-05-29T15:28:46.356528Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890019165567355:2185];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:46.356618Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e8d/r3tmp/tmpywbXeD/pdisk_1.dat 2025-05-29T15:28:46.427765Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11401, node 1 2025-05-29T15:28:46.438468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2025-05-29T15:28:46.438478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:46.438479Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:46.438514Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3654 2025-05-29T15:28:46.456861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:46.456890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:46.457953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3654 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:46.502416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:46.511482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:46.575855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:46.595140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:46.608786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:46.727228Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890019165568828:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:46.727247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:46.773857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:46.781766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:46.843092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:46.864618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:46.888541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:46.911072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:46.927720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:46.945874Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890019165569484:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:46.945904Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:46.945953Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890019165569489:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:46.946769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:28:46.952015Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890019165569491:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:28:47.008758Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890023460536838:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:47.137536Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890023460536847:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:47.137664Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NGI0MTI5YTQtOWYwMjJhYjgtM2I1ZGVmYzAtYTM1YmViZGY=, ActorId: [1:7509890019165568810:2401], ActorState: ExecuteState, TraceId: 01jweahzv1f8yymttsxpns9967, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:28:47.139483Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F85418A9AC2 14. ??:0: ?? @ 0x7F854193B84F Trying to start YDB, gRPC: 10354, MsgBus: 63096 2025-05-29T15:28:50.938237Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890032896743024:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:50.938257Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e8d/r3tmp/tmpuR8sfM/pdisk_1.dat 2025-05-29T15:28:51.003121Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10354, node 1 2025-05-29T15:28:51.026346Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:51.026358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:51.026360Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:51.026402Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:51.040944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:51.040982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:51.042072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63096 TClient is connected to server localhost:63096 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:51.107974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:51.110307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:28:51.120638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:51.145848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:28:51.172844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.192146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:51.351458Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890037191711915:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:51.351491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:51.405642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.417035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.425490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.440280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.456787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.468938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.529667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.553564Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890037191712570:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:51.553589Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:51.553756Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890037191712575:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:51.554695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:28:51.557857Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890037191712577:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:28:51.629844Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890037191712628:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:51.730017Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890037191712644:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:51.730825Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODAwYTYzMzgtMzI2ZTMyZGUtNWY4ZWE1MWItOTQyNTU4MWY=, ActorId: [1:7509890037191711888:2400], ActorState: ExecuteState, TraceId: 01jweaj4b0f5da75xg1m8g0tvm, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:28:51.737448Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F681C40AAC2 14. ??:0: ?? @ 0x7F681C49C84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:54.824496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:54.824525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:54.824531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:54.824537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:54.824543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:54.824548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:54.824558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:54.824572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:54.824683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:54.824754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:54.839879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:54.839905Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:54.842563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:54.842682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:54.842725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:54.844220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:54.844359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:54.844479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:54.844536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:54.844987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:54.845023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:54.845301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:54.845311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:54.845330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:54.845340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:54.845346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:54.845384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.846770Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:54.870295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:54.870365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at 
schemeshard: 72057594046678944 2025-05-29T15:28:54.870428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:54.870479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:54.870491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.871270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:54.871300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:54.871359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.871369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:54.871376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:54.871382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:54.871782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.871793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:54.871800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:54.872119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.872129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.872136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:54.872144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:54.872917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:54.873309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 
1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:54.873347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:54.873546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:54.873572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:54.873579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:54.873640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:54.873648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:54.873685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:54.873697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:54.874114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:54.874124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:54.874169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
7: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:28:55.874318Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:55.874324Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:55.874327Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:28:55.874329Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-05-29T15:28:55.874332Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:55.874338Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-05-29T15:28:55.874937Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:28:55.875011Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 2025-05-29T15:28:55.875104Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:55.875131Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 34us result status StatusSuccess 2025-05-29T15:28:55.875204Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 2 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 
AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-05-29T15:28:55.875790Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveGroup { Group: "group1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:55.875828Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5218: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 1] name: MyRoot type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:55.875833Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5234: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:55.875839Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5218: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:55.875842Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5234: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:28:55.875880Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:55.875891Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#106:0 progress is 1/1 2025-05-29T15:28:55.875895Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-05-29T15:28:55.875899Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#106:0 progress is 1/1 2025-05-29T15:28:55.875904Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-05-29T15:28:55.875910Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:55.875917Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-05-29T15:28:55.875920Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-05-29T15:28:55.875924Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 106:0 2025-05-29T15:28:55.875927Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-05-29T15:28:55.875929Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-05-29T15:28:55.876422Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at 
schemeshard: 72057594046678944 2025-05-29T15:28:55.876449Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2025-05-29T15:28:55.876482Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:55.876487Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:55.876511Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:55.876515Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:208:2209], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-05-29T15:28:55.876594Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-05-29T15:28:55.876604Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-05-29T15:28:55.876609Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-05-29T15:28:55.876613Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:28:55.876618Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:55.876636Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-05-29T15:28:55.877040Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-05-29T15:28:55.877136Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:55.877161Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 32us result status StatusSuccess 2025-05-29T15:28:55.877241Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TWebLoginService::AuditLogLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginSuccess >> YdbSdkSessionsPool::StressTestAsync/0 [GOOD] >> YdbSdkSessionsPool::StressTestAsync/1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:54.802568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:54.802595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:54.802602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:54.802607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:54.802613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:54.802617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:54.802627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:54.802641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 
600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:54.802786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:54.802870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:54.815775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:54.815803Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:54.818945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:54.819118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:54.819167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:54.821168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:54.821315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:54.821486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:54.821561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:54.822121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:54.822165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:54.822457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:54.822466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:54.822505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:54.822514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:54.822520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:54.822566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.823864Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:54.845998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: 
ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:54.846106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.846186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:54.846249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:54.846261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.847167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:54.847197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:54.847259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.847270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:54.847277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:54.847283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:54.847719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.847730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:54.847735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:54.848035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.848043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:54.848049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:54.848056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:54.848819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation 
DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:54.849226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:54.849268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:54.849465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:54.849489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:54.849500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:54.849568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:54.849575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:54.849622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:54.849637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:54.850126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:54.850136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:54.850188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
7: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-29T15:28:56.078003Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:56.078012Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-05-29T15:28:56.078015Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-05-29T15:28:56.078019Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-05-29T15:28:56.078023Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:56.078029Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-05-29T15:28:56.078559Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-05-29T15:28:56.078616Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 2025-05-29T15:28:56.078684Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:56.078707Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 28us result status StatusSuccess 2025-05-29T15:28:56.078780Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 
AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-05-29T15:28:56.079315Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveGroup { Group: "group1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:56.079351Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5218: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 1] name: MyRoot type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:56.079356Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5234: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:56.079362Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5218: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:56.079366Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5234: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:28:56.079404Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:56.079420Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#106:0 progress is 1/1 2025-05-29T15:28:56.079424Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-05-29T15:28:56.079429Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#106:0 progress is 1/1 2025-05-29T15:28:56.079433Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-05-29T15:28:56.079442Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:56.079450Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-05-29T15:28:56.079455Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-05-29T15:28:56.079460Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 106:0 2025-05-29T15:28:56.079465Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-05-29T15:28:56.079473Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-05-29T15:28:56.079905Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at 
schemeshard: 72057594046678944 2025-05-29T15:28:56.079924Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2025-05-29T15:28:56.079954Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:56.079960Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:56.079983Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:56.079988Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:208:2209], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-05-29T15:28:56.080068Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-05-29T15:28:56.080078Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-05-29T15:28:56.080083Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-05-29T15:28:56.080087Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:28:56.080092Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:28:56.080108Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-05-29T15:28:56.080483Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-05-29T15:28:56.080567Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:56.080590Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 29us result status StatusSuccess 2025-05-29T15:28:56.080642Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] >> TWebLoginService::AuditLogLogout >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] >> THiveTest::TestBootProgress ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::CreateDropRecreate [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:28:20.759317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 
60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:20.759337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:20.759340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:20.759345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:20.759356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:20.759359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:20.759365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:20.759374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:20.759450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:20.759511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:20.771616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:28:20.771637Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:20.771733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:28:20.774117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:20.774150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:20.774180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:20.777035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:20.777116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:20.777234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:20.777442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:20.778081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2025-05-29T15:28:20.778133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:20.778388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:20.778400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:20.778437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:20.778445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:20.778451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:20.778469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:28:20.779899Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:28:20.798340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:20.798428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:20.798485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:20.798527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:20.798534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:20.799403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:20.799434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:20.799509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:20.799519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:20.799525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:20.799530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:20.800034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:20.800050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:20.800055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:20.800426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:20.800438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:20.800445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:20.800452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:20.801075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:20.801474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:20.801518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:20.801730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:20.801754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:20.801761Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:20.801826Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 29T15:28:55.725015Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:28:55.725020Z node 130 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:28:55.725024Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:28:55.725029Z node 130 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:28:55.725033Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:28:55.725038Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-05-29T15:28:55.725043Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:28:55.725048Z node 130 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:28:55.725053Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:28:55.725084Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:28:55.725091Z node 130 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2025-05-29T15:28:55.725098Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-29T15:28:55.725102Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 2 2025-05-29T15:28:55.725264Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 274137603, Sender [130:206:2207], Recipient [130:126:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 11 } 2025-05-29T15:28:55.725271Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:28:55.725284Z node 130 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:28:55.725294Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:28:55.725299Z node 130 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication 
in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:28:55.725303Z node 130 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-29T15:28:55.725308Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:28:55.725321Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:28:55.725515Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 274137603, Sender [130:206:2207], Recipient [130:126:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Version: 2 } 2025-05-29T15:28:55.725523Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:28:55.725533Z node 130 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:28:55.725544Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:28:55.725548Z node 130 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:28:55.725553Z node 130 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2025-05-29T15:28:55.725557Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:28:55.725569Z node 130 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-05-29T15:28:55.725577Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:28:55.725956Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:55.726292Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:28:55.726301Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:28:55.726317Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:28:55.726321Z node 130 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestWaitNotification wait txId: 1004 2025-05-29T15:28:55.726382Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:28:55.726390Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:28:55.726453Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [130:569:2506], Recipient [130:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:28:55.726459Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:28:55.726464Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:28:55.726488Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [130:420:2374], Recipient [130:126:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1004 2025-05-29T15:28:55.726494Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:28:55.726505Z node 130 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:28:55.726524Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:28:55.726528Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [130:567:2504] 2025-05-29T15:28:55.726550Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [130:569:2506], Recipient [130:126:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:28:55.726554Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:28:55.726558Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1004 2025-05-29T15:28:55.726609Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [130:570:2507], Recipient [130:126:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-05-29T15:28:55.726613Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:28:55.726625Z node 130 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:55.726668Z node 130 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 39us result status StatusSuccess 2025-05-29T15:28:55.726772Z node 130 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication" PathDescription { Self { Name: "Replication" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 ControllerId: 72075186233409547 State { StandBy { } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BackupRestore::RestoreIndexTablePartitioningSettings [GOOD] >> BackupRestore::RestoreIndexTableReadReplicasSettings ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::BadRequests [FAIL] Test command err: 2025-05-29T15:28:36.179632Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889972622825350:2277];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:36.179693Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:28:36.181469Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509889974218920296:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:36.181511Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ec8/r3tmp/tmp7F1V7p/pdisk_1.dat 2025-05-29T15:28:36.220342Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:28:36.231184Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:28:36.267622Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1833, node 1 2025-05-29T15:28:36.278510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:36.278541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-05-29T15:28:36.279677Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001ec8/r3tmp/yandexOWARoU.tmp 2025-05-29T15:28:36.279687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001ec8/r3tmp/yandexOWARoU.tmp 2025-05-29T15:28:36.279764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001ec8/r3tmp/yandexOWARoU.tmp 2025-05-29T15:28:36.279816Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:36.279858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:36.285607Z INFO: TTestServer started on Port 12772 GrpcPort 1833 TClient is connected to server localhost:12772 PQClient connected to localhost:1833 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:36.315670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:28:36.327245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:36.335617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:36.335643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:36.336625Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:28:36.336888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-29T15:28:36.506211Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889974218920594:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:36.506228Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509889974218920605:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:36.506233Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:36.507379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-05-29T15:28:36.510809Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509889974218920608:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-05-29T15:28:36.586830Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509889974218920636:2127] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:36.620251Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889972622826266:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:36.620603Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWFjYjgyY2EtNWU3YTQ2ZDUtOTlhNTI1YjUtMjA3MGMwODc=, ActorId: [1:7509889972622826226:2338], ActorState: ExecuteState, TraceId: 01jweahnr0b6afedkya2eryjpe, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:36.620853Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509889974218920651:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:36.620904Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:36.620971Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=OWVlYTdjNjQtOTg4NzAxYTctMjdlYmU4MjMtODA5NWQzMWU=, ActorId: [2:7509889974218920592:2305], ActorState: ExecuteState, TraceId: 01jweahnms04e1n7h23p8yeayk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:36.621135Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:36.626243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:36.686254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:36.705116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:28:36.733652Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509889972622826597:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:36.733772Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDlhNDBkMTYtOTc5N2IwOGUtODNlZjBkZmMtZjYyYmFmYjM=, ActorId: [1:7509889972622826594:2374], ActorState: ExecuteState, TraceId: 01jweahnvf38769t6arrf2v3py, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13B9173C) NUnitTest::NPrivate::RaiseError( ... PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:51.367145Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:51.367172Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:51.368949Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:51.375792Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.375934Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:51.375951Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-29T15:28:51.377079Z node 25 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 26 Cookie 26 2025-05-29T15:28:51.377348Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:28:51.392437Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:28:51.483195Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:51.490839Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-05-29T15:28:51.714050Z node 26 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [26:7509890039547037685:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:51.714111Z node 26 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:51.714133Z node 26 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [26:7509890039547037712:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:51.715558Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-05-29T15:28:51.732882Z node 26 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [26:7509890039547037714:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-05-29T15:28:51.748766Z node 25 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [25:7509890040271450949:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:28:51.749056Z node 25 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=25&id=MTZhNzcwODItMzcyZWJkNzMtZjhiZmEwMWYtZjE2NzMwMGM=, ActorId: [25:7509890040271450908:2334], ActorState: ExecuteState, TraceId: 01jweaj4gy524yq2t315nq0gnz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:28:51.749276Z node 25 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:28:51.753878Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.779518Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.813486Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.833842Z node 26 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [26:7509890039547037783:2197] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:28:51.874580Z node 26 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { 
item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 23 } 2025-05-29T15:28:51.871763Z node 25 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [25:7509890040271451334:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:51.872273Z node 25 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=25&id=NGVhMWQxMjQtZmY3ZWUwMWQtZTBiZGQ3MzYtZTE1NDBkNTk=, ActorId: [25:7509890040271451331:2374], ActorState: ExecuteState, TraceId: 01jweaj4mj5afsdwxsbz6ae3cq, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13B9173C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13D44119) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139C81C4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139F6148) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x13995592) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139C2D47) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x138D5468) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x138D474B) NPersQueue::TTestServer::TTestServer(bool)+134 (0x138D0986) NKikimr::NPersQueueTests::NTestSuiteTTopicYqlTest::TTestCaseBadRequests::Execute_(NUnitTest::TTestContext&)+33 (0x13A4E141) NKikimr::NPersQueueTests::NTestSuiteTTopicYqlTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13A51177) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13D45FCE) NKikimr::NPersQueueTests::NTestSuiteTTopicYqlTest::TCurrentTest::Execute()+436 (0x13A50B34) NUnitTest::TTestFactory::Execute()+803 (0x13D46743) NUnitTest::RunMain(int, char**)+3021 (0x13D582DD) ??+0 (0x7F6D65A4CD90) __libc_start_main+128 (0x7F6D65A4CE40) _start+41 (0x129C9029) >> TWebLoginService::AuditLogLogout [GOOD] >> BackupRestoreS3::RestoreIndexTableReadReplicasSettings [GOOD] >> BackupRestoreS3::RestoreTableSplitBoundaries >> LocalTableWriter::ConsistentWrite >> THiveTest::TestBootProgress [GOOD] >> PrivateApi::Nodes [FAIL] |71.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |71.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |71.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |71.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ApplyInCorrectOrder |71.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest |71.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLogout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:110:2141] 2025-05-29T15:28:55.826713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:55.826766Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:55.826772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:55.826777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:55.826783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:55.826788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:55.826796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:55.826809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:55.826917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:55.826989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:55.842168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:55.842192Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:55.846112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:55.846210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:55.846249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:55.848584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:55.848636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:55.848760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:55.848816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:55.849310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:55.849344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:55.849608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:55.849619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-05-29T15:28:55.849627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:55.849634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:55.849640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:55.849687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:55.851025Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:241:2058] recipient: [1:15:2062] 2025-05-29T15:28:55.872285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:55.872338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:55.872389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:55.872439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:55.872449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:55.873001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:55.873037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:55.873087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:55.873097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:55.873103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:55.873108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:55.873462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:28:55.873470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:55.873476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:55.873790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:55.873798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:55.873803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:55.873810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:55.874534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:55.875016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:55.875068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:55.875264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:55.875296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:55.875305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:55.875386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:55.875394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:55.875425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:55.875436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 
1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:55.876004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:55.876014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:55.876046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:56.820414Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:56.820455Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:28:56.820462Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:28:56.820469Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:28:56.820473Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:28:56.820488Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:56.820500Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-05-29T15:28:56.820506Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:28:56.820512Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:28:56.820519Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-05-29T15:28:56.820524Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-05-29T15:28:56.820732Z node 4 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [4:270:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:28:56.821636Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:56.821666Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-05-29T15:28:56.821716Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:56.821723Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:56.821767Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:56.821773Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:207:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-05-29T15:28:56.821961Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:28:56.821976Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:28:56.821985Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:28:56.821991Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-05-29T15:28:56.821997Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:56.822020Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-05-29T15:28:56.822133Z node 4 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:28:56.822467Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-05-29T15:28:56.822555Z node 4 :HTTP WARN: login_page.cpp:102: 127.0.0.1:0 POST /login 2025-05-29T15:28:56.822948Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:43: TTxLogin Execute at schemeshard: 72057594046678944 2025-05-29T15:28:56.822959Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:46: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-05-29T15:28:56.891201Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:85: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ4NTc1NzM2LCJpYXQiOjE3NDg1MzI1MzYsInN1YiI6InVzZXIxIn0.Xh5zVnypwLYfn_vmpYE-abLKZMSva81IX9pzU6YTEOgzjknb6bslmAFxMvSowSQ1_s8Kwr8nhjCpDVbruk8AILqpxuUPpKvo4c_BwZx5jxnmhQ3BWtVTIgDPf_ooBK2YgIsgVFcypAhqyqRLvuyf2YoYT6_JCxXiG6BWU8QDF2k5Unh7kjTvuhESeoe_bYX8h7dsr0h_64-f-dRi-prauSe6gfySKjvbPdxDC_AHTjV3-zeg4JPzk-P7Tp80eoYfBVmbilgwd39r5itQArzuIeo41MHkRvasdA4b0KvZzuRd9NBGIeUrCYeWzDcz4UwhG-w2n_3jTAubANZHat2kGQ" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ4NTc1NzM2LCJpYXQiOjE3NDg1MzI1MzYsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-05-29T15:28:56.891247Z node 4 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:56.891256Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:56.891318Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:56.891324Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:207:2208], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-05-29T15:28:56.891620Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2025-05-29T15:28:56.891764Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:56.891807Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 47us result status StatusSuccess 2025-05-29T15:28:56.891900Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv674N9LEafz7jcDnh6Hu\nR2BSk4F4QW99uWERt1LtaoXPuwbR85r8fSgx+EnSEuQ6dKjNqNxBEmNzJM6KI8k1\nzW08IltukVtdd9qzrvZmoEmgDoqMmVdqrlk77sPHyAfmXdhfGgnibwA2neh0HlPJ\n9nkOR9RMsa4rSqfQ2LCXJP4oTbNuzVfsNRCrW0n6fPhKptHWsomOy5a72NF9Xy+L\nCpjqjhHjiajJeyPUAyEdzpkbkFZrAxZJnx04IJ5bG0vtOYLzf4mdGI9wT1LTGxom\nKY93e9DtjCc8bQE6VoqGUdl/Mxs5C/YjNnq0rW4q+wiLOEMGjcMWK2Piu4BWHidN\n+wIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1748618936884 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2025-05-29T15:28:56.891970Z node 4 :HTTP WARN: login_page.cpp:248: 127.0.0.1:0 POST /logout
2025-05-29T15:28:56.891978Z node 4 :HTTP ERROR: login_page.cpp:326: Logout: No ydb_session_id cookie
2025-05-29T15:28:56.892044Z node 4 :HTTP WARN: login_page.cpp:248: 127.0.0.1:0 POST /logout
2025-05-29T15:28:56.892417Z node 4 :TICKET_PARSER ERROR: ticket_parser_impl.h:963: Ticket **** (589A015B): Token is not in correct format
2025-05-29T15:28:56.892433Z node 4 :HTTP ERROR: login_page.cpp:326: Logout: Token is not in correct format
2025-05-29T15:28:56.892477Z node 4 :HTTP WARN: login_page.cpp:248: 127.0.0.1:0 POST /logout
AUDIT LOG buffer(4):
2025-05-29T15:28:56.812063Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted
2025-05-29T15:28:56.820375Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1
2025-05-29T15:28:56.891451Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ4NTc1NzM2LCJpYXQiOjE3NDg1MzI1MzYsInN1YiI6InVzZXIxIn0.**, login_user_level=admin
2025-05-29T15:28:56.892695Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ4NTc1NzM2LCJpYXQiOjE3NDg1MzI1MzYsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS
AUDIT LOG checked line:
2025-05-29T15:28:56.892695Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ4NTc1NzM2LCJpYXQiOjE3NDg1MzI1MzYsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable [FAIL]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD]
Test command err:
2025-05-29T15:24:14.908760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:24:14.908792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:24:14.908798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:24:14.908804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:24:14.908821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing
config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:14.908826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:14.908844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:14.908871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:14.909008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:14.909109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:14.913754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:14.913784Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:14.915954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:14.915997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:14.916017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2025-05-29T15:24:14.916854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:14.916985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:14.917122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:14.917258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:24:14.917955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:14.918006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:14.918334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:24:14.918350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:14.918385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:14.918397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-05-29T15:24:14.918404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: 
TTxServerlessStorageBilling.Complete 2025-05-29T15:24:14.918432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2025-05-29T15:24:14.959977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2025-05-29T15:24:14.960089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:14.960176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2025-05-29T15:24:14.960242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2025-05-29T15:24:14.960255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:14.962482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:14.962535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2025-05-29T15:24:14.962603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:14.962617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046578944 2025-05-29T15:24:14.962623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:14.962629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:14.963257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:14.963271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046578944 2025-05-29T15:24:14.963278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:14.963599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:14.963608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:14.963627Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:14.963635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:14.964314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:14.964698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:14.964756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:24:14.964972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:14.964979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-05-29T15:24:14.964985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:15.208768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:15.208828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2025-05-29T15:24:15.208843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:15.208934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:15.208947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:15.208984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2025-05-29T15:24:15.209014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:24:15.211976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:24:15.212009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 
2025-05-29T15:24:15.212070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:15.212076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:261:2249], at schemeshard: 72057594046578944, txId: 1, path id: 1 202 ... e 108 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6632: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-05-29T15:28:20.593298Z node 108 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:56: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-05-29T15:28:20.593319Z node 108 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-05-29T15:28:20.870420Z node 108 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2025-05-29T15:28:20.870578Z node 108 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2025-05-29T15:28:20.872832Z node 108 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-05-29T15:28:20.872933Z node 108 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3514: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-05-29T15:28:26.376590Z node 108 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2025-05-29T15:28:26.376823Z node 108 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2025-05-29T15:28:26.380665Z node 108 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-05-29T15:28:26.380796Z node 108 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3514: Couldn't update storage pool 
/dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-05-29T15:28:31.969598Z node 108 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2025-05-29T15:28:31.969818Z node 108 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2025-05-29T15:28:31.972848Z node 108 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-05-29T15:28:31.972947Z node 108 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3514: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-05-29T15:28:37.526589Z node 108 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2025-05-29T15:28:37.526777Z node 108 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2025-05-29T15:28:37.528625Z node 108 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-05-29T15:28:37.528673Z node 108 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3514: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-05-29T15:28:43.020041Z node 108 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: 
"/dc-1/users/tenant-1:hdd-2" } } } 2025-05-29T15:28:43.020253Z node 108 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2025-05-29T15:28:43.023450Z node 108 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-05-29T15:28:43.023624Z node 108 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3514: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-05-29T15:28:48.811065Z node 108 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2025-05-29T15:28:48.811235Z node 108 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2025-05-29T15:28:48.813261Z node 108 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-05-29T15:28:48.813328Z node 108 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3514: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-05-29T15:28:53.695990Z node 108 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6632: Handle: TEvRunConditionalErase, at schemeshard: 72057594046578944 2025-05-29T15:28:53.696052Z node 108 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:56: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046578944 2025-05-29T15:28:53.696084Z node 108 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046578944 2025-05-29T15:28:54.355030Z node 108 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6632: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-05-29T15:28:54.355098Z node 108 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__conditional_erase.cpp:56: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546
2025-05-29T15:28:54.355124Z node 108 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546
2025-05-29T15:28:54.637293Z node 108 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } }
2025-05-29T15:28:54.637519Z node 108 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } }
2025-05-29T15:28:54.641213Z node 108 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-05-29T15:28:54.641359Z node 108 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3514: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
>> LocalTableWriter::ConsistentWrite [GOOD]
>> LocalTableWriter::WriteTable
>> LocalTableWriter::SupportedTypes
>> LocalTableWriter::WaitTxIds
>> LocalTableWriter::ApplyInCorrectOrder [GOOD]
>> TChargeBTreeIndex::FewNodes_Sticky [GOOD]
>> TChargeBTreeIndex::FewNodes_Groups_History
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite [GOOD]
Test command err:
2025-05-29T15:28:57.200996Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890063475604589:2071];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:28:57.201021Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d64/r3tmp/tmpEhcEVh/pdisk_1.dat
2025-05-29T15:28:57.258385Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TClient is connected to server localhost:10410
TServer::EnableGrpc on GrpcPort 18362, node 1
2025-05-29T15:28:57.282776Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:28:57.282790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:28:57.282792Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:28:57.282827Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
2025-05-29T15:28:57.302308Z
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:57.302340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:10410 2025-05-29T15:28:57.306189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:57.333746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:57.336911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532537437 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-05-29T15:28:57.399027Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handshake: worker# [1:7509890063475605165:2284] 2025-05-29T15:28:57.399129Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:312: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:28:57.399224Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:387: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-05-29T15:28:57.399239Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:417: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Send handshake: worker# [1:7509890063475605165:2284] 2025-05-29T15:28:57.399333Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:431: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-05-29T15:28:57.400224Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:490: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-05-29T15:28:57.400271Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:556: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 },{ Order: 2 BodySize: 48 },{ Order: 3 BodySize: 48 }] } 2025-05-29T15:28:57.400326Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890063475605260:2344] 
Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-05-29T15:28:57.400336Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:57.400351Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890063475605260:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 2 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 3 Group: 0 Step: 3 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-05-29T15:28:57.401775Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890063475605260:2344] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-05-29T15:28:57.401793Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:57.401801Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:570: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2025-05-29T15:28:57.401900Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:431: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-05-29T15:28:57.401959Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:431: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-05-29T15:28:57.402019Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:490: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } VersionTxIds { Version { Step: 30 TxId: 0 } TxId: 3 } 2025-05-29T15:28:57.402038Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:556: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 5 BodySize: 49 },{ Order: 6 BodySize: 49 },{ Order: 7 BodySize: 49 },{ Order: 8 BodySize: 49 }] } 2025-05-29T15:28:57.402065Z node 1 :REPLICATION_SERVICE DEBUG: 
base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890063475605260:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 6 Group: 0 Step: 12 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 7 Group: 0 Step: 21 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 8 Group: 0 Step: 22 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-05-29T15:28:57.403008Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890063475605260:2344] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-05-29T15:28:57.403026Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:57.403033Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:570: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [5,6,7,8] } 2025-05-29T15:28:57.403104Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:431: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-05-29T15:28:57.403132Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:556: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 9 BodySize: 49 },{ Order: 10 BodySize: 49 }] } 2025-05-29T15:28:57.403153Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890063475605260:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 9 Group: 0 Step: 13 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 10 Group: 0 Step: 23 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-05-29T15:28:57.404012Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890063475605260:2344] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-05-29T15:28:57.404030Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:57.404036Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:570: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [9,10] } 2025-05-29T15:28:57.404103Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:431: [LocalTableWriter][OwnerId: 
72057594046644480, LocalPathId: 2][1:7509890063475605257:2344] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] }
>> TSchemeShardLoginTest::BanUserWithWaiting [GOOD]
>> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters
>> BackupRestore::RestoreIndexTableReadReplicasSettings [GOOD]
>> BackupRestore::RestoreTableSplitBoundaries
>> LocalTableWriter::WriteTable [GOOD]
>> LocalTableWriter::DataAlongWithHeartbeat
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD]
>> BackupRestoreS3::TestAllPrimitiveTypes-UINT8
>> TSchemeShardSplitBySample::HistogramNoResultWhenMedianKeyIsAtBoundary [GOOD]
>> TSchemeShardSplitBySample::Mixed [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ApplyInCorrectOrder [GOOD]
Test command err: 2025-05-29T15:28:57.474380Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890063426677788:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:57.474405Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d43/r3tmp/tmppQbVJE/pdisk_1.dat 2025-05-29T15:28:57.531122Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:10752 TServer::EnableGrpc on GrpcPort 24231, node 1 2025-05-29T15:28:57.560443Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:57.560460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:57.560462Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:57.560528Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:57.575682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:57.575715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:57.576830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10752 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:57.606195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:57.609275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532537710 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-05-29T15:28:57.671825Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063426678452:2344] Handshake: worker# [1:7509890063426678360:2284] 2025-05-29T15:28:57.671892Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:312: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063426678452:2344] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:28:57.671925Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:387: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063426678452:2344] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-05-29T15:28:57.671933Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:417: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063426678452:2344] Send handshake: worker# [1:7509890063426678360:2284] 2025-05-29T15:28:57.672017Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:431: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063426678452:2344] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-05-29T15:28:57.672592Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:490: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063426678452:2344] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-05-29T15:28:57.672610Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:556: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063426678452:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-05-29T15:28:57.672641Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890063426678455:2344] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-05-29T15:28:57.672647Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063426678452:2344] Handle 
NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:57.672655Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890063426678455:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-05-29T15:28:57.673815Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890063426678455:2344] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-05-29T15:28:57.673827Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063426678452:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:57.673833Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:570: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063426678452:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-05-29T15:28:57.673883Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:431: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063426678452:2344] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-05-29T15:28:57.673935Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:490: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063426678452:2344] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2025-05-29T15:28:57.673946Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:556: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063426678452:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 },{ Order: 3 BodySize: 48 }] } 2025-05-29T15:28:57.673955Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890063426678455:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 3 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-05-29T15:28:57.674790Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890063426678455:2344] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-05-29T15:28:57.674798Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063426678452:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:57.674801Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:570: [LocalTableWriter][OwnerId: 
72057594046644480, LocalPathId: 2][1:7509890063426678452:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3] }
>> LocalTableWriter::SupportedTypes [GOOD]
>> BackupRestoreS3::RestoreTableSplitBoundaries [GOOD]
>> BackupRestoreS3::RestoreIndexTableSplitBoundaries
>> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters [GOOD]
>> TSchemeShardLoginTest::ChangeAccountLockoutParameters
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::MultiShardUpsertAfterRead
Test command err: Trying to start YDB, gRPC: 25692, MsgBus: 64017 2025-05-29T15:28:48.604394Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890024001266494:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:48.604417Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e89/r3tmp/tmpmYVZAC/pdisk_1.dat 2025-05-29T15:28:48.675827Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25692, node 1 2025-05-29T15:28:48.697078Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:48.697098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:48.697101Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:48.697151Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:48.705483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:48.705532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:48.706587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64017 TClient is connected to server localhost:64017 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
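The LocalTableWriter traces above all follow one pipeline: a Handshake with the worker, TEvData carrying raw records, TEvTxIdResult when record steps must be mapped to transaction ids, TEvRequestRecords and TEvRecords toward the TablePartitionWriter, and, once the datashard answers TEvApplyReplicationChangesResult with STATUS_OK, TEvRemoveRecords listing the acknowledged orders. Below is a minimal STL-only sketch of that bookkeeping; the type and method names are hypothetical stand-ins, not the actual NKikimr classes.

// Minimal sketch (STL only) of the bookkeeping visible in the traces above:
// records arrive with an Order and BodySize, are handed to a partition
// writer, and are removed from the local store only after the datashard
// acknowledges them with STATUS_OK. Hypothetical names throughout.
#include <cstdint>
#include <iostream>
#include <map>
#include <vector>

struct TRecord {
    uint64_t Order = 0;     // monotonically increasing, as in "Records [{ Order: 1 ... }]"
    uint32_t BodySize = 0;  // "BodySize: 48" in TEvRequestRecords
};

class TLocalWriterModel {
public:
    // TEvData / TEvRequestRecords: remember what was handed to the partition writer.
    void OnRequestRecords(const std::vector<TRecord>& records) {
        for (const auto& r : records) {
            InFlight[r.Order] = r.BodySize;
        }
    }

    // TEvApplyReplicationChangesResult Status: STATUS_OK -> TEvRemoveRecords.
    std::vector<uint64_t> OnApplied() {
        std::vector<uint64_t> removed;
        removed.reserve(InFlight.size());
        for (const auto& kv : InFlight) {
            removed.push_back(kv.first);  // e.g. "Records [1,2,3]"
        }
        InFlight.clear();
        return removed;
    }

private:
    std::map<uint64_t, uint32_t> InFlight;  // Order -> BodySize, kept sorted by Order
};

int main() {
    TLocalWriterModel writer;
    writer.OnRequestRecords({{1, 48}, {2, 48}, {3, 48}});
    for (uint64_t order : writer.OnApplied()) {
        std::cout << "remove record " << order << '\n';  // mirrors TEvRemoveRecords
    }
}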
2025-05-29T15:28:48.761494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:48.771335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:48.838280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:48.862837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:48.875819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:49.027705Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890028296235379:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:49.027744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:49.078385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.086638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.095472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.108358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.114856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.122547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.136363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.154417Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890028296236032:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:49.154462Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:49.154488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890028296236037:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:49.155295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:28:49.164059Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890028296236039:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:28:49.242010Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890028296236090:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:49.350609Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890028296236106:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:49.350719Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2M5Zjg5MzQtOTFiZDM1NDEtOGNmNDEwY2UtZTM3YmYyZTY=, ActorId: [1:7509890028296235376:2401], ActorState: ExecuteState, TraceId: 01jweaj2027w44ct9yrs5stvne, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:28:49.351404Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95
1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C
8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519
10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C
13. ??:0: ?? @ 0x7F1F9FF79AC2
14. ??:0: ?? @ 0x7F1FA000B84F
Trying to start YDB, gRPC: 28481, MsgBus: 32043
2025-05-29T15:28:53.520016Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890049336473962:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:53.520042Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e89/r3tmp/tmpFTWxHh/pdisk_1.dat 2025-05-29T15:28:53.569111Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28481, node 1 2025-05-29T15:28:53.597681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:53.597700Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:53.597703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:53.597759Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:32043 2025-05-29T15:28:53.621275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:53.621314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:53.622371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32043 WaitRootIsUp 'Root'...
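Both failed runs of this test have the same shape: the setup query compiles with INTERNAL_ERROR (": Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range"), AssertSuccessResult at kqp_ut_common.h:375 sees result.IsSuccess() == false, and because this happens on a non-unittest thread the harness escalates to VERIFY failed and aborts with the stack trace shown. A sketch of that assert-on-status pattern follows; TStatus here is a simplified stand-in for the real NYdb::TStatus, not the actual harness code.

// Sketch of the assertion pattern behind "VERIFY failed ... AssertSuccessResult":
// a status object carries a success flag plus issue text, and the helper aborts
// the process when a setup query fails. TStatus is a stand-in type.
#include <cstdlib>
#include <iostream>
#include <string>

struct TStatus {
    bool Success = false;
    std::string Issues;  // e.g. ": Fatal: Execution, code: 1060"
    bool IsSuccess() const { return Success; }
};

void AssertSuccessResult(const TStatus& result) {
    if (!result.IsSuccess()) {
        // The real harness raises a unittest error and, from a non-unittest
        // thread, escalates to VERIFY failed plus a stack trace; this sketch
        // just prints the issues and aborts.
        std::cerr << "assertion failed: (result.IsSuccess())\n" << result.Issues << '\n';
        std::abort();
    }
}

int main() {
    AssertSuccessResult(TStatus{true, {}});                     // setup query ok
    AssertSuccessResult(TStatus{false, ": Fatal: Execution"});  // aborts, as in the log
}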
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:53.654600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:53.663368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:53.687682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:53.712998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:53.725716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:53.902808Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890049336475549:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:53.902839Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:53.960621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:53.971866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:53.982102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:53.996458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:54.053077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:54.071487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:54.084433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:54.119585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890053631443501:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:54.119616Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:54.119731Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890053631443506:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:54.120694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:28:54.124498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:28:54.124571Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890053631443508:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:28:54.228027Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890053631443560:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:54.357413Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890053631443569:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:54.358832Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjQzNjM4YjgtY2FlODkzMy02Mjc0NzgzYi03YTUxNGU0Mg==, ActorId: [1:7509890049336475531:2401], ActorState: ExecuteState, TraceId: 01jweaj6v6c6hwebr6nhazsfb0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:28:54.359676Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95
1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C
8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519
10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C
13. ??:0: ?? @ 0x7F3BEE8EBAC2
14. ??:0: ?? @ 0x7F3BEE97D84F
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir
>> KqpEffects::InsertAbort_Literal_Conflict-UseSink
>> LocalTableWriter::DecimalKeys
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD]
Test command err: 2025-05-29T15:28:57.743583Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890063624021457:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:57.743751Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d1f/r3tmp/tmpuVsBaq/pdisk_1.dat 2025-05-29T15:28:57.816331Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890063624021420:2079] 1748532537743019 != 1748532537743022 2025-05-29T15:28:57.820624Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:25867 TServer::EnableGrpc on GrpcPort 11799, node 1 2025-05-29T15:28:57.845356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:57.845389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:57.846569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:57.850583Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:57.850594Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:57.850597Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:57.850665Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25867 WaitRootIsUp 'Root'...
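The stream in this section interleaves per-test status markers (">> Suite::Test [GOOD]", or no bracketed status while a test is still queued or running), progress entries ("|71.7%| [TM] ..."), and "-------" blocks carrying each noisy or failed test's stderr. A hypothetical STL-only filter that recovers just the status markers from such a stream is sketched below; it is not part of the ya toolchain, and it deliberately reports only markers that already carry a bracketed status.

// Hypothetical helper for triaging interleaved ya make output like this
// section: reads the log on stdin and prints "Suite::Test -> STATUS" for
// every ">> Suite::Test [STATUS]" marker it finds. STL only.
#include <iostream>
#include <string>

int main() {
    std::string line;
    while (std::getline(std::cin, line)) {
        size_t pos = 0;
        while ((pos = line.find(">> ", pos)) != std::string::npos) {
            const size_t start = pos + 3;
            const size_t next = line.find(">> ", start);  // next marker, if any
            const size_t br = line.find(" [", start);     // " [GOOD]", " [FAIL]", ...
            if (br == std::string::npos || (next != std::string::npos && next < br)) {
                pos = start;  // marker without a status (test still running): skip
                continue;
            }
            const size_t close = line.find(']', br);
            if (close == std::string::npos) { pos = start; continue; }
            std::cout << line.substr(start, br - start) << " -> "
                      << line.substr(br + 2, close - br - 2) << '\n';
            pos = close + 1;
        }
    }
}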
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:57.889165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:57.892802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532537997 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-05-29T15:28:57.960585Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063624022129:2344] Handshake: worker# [1:7509890063624022037:2284] 2025-05-29T15:28:57.960685Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:312: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063624022129:2344] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:28:57.960729Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:387: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063624022129:2344] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-05-29T15:28:57.960737Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:417: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063624022129:2344] Send handshake: worker# [1:7509890063624022037:2284] 2025-05-29T15:28:57.960912Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:431: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063624022129:2344] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 36b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 36b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-05-29T15:28:57.960942Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:556: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063624022129:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 },{ Order: 2 BodySize: 36 },{ Order: 3 BodySize: 36 }] } 2025-05-29T15:28:57.960981Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890063624022132:2344] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-05-29T15:28:57.960987Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063624022129:2344] Handle 
NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:57.961004Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890063624022132:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-05-29T15:28:57.961511Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890063624022132:2344] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-05-29T15:28:57.961519Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063624022129:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:57.961525Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:570: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890063624022129:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] }
|71.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySample::Mixed [GOOD]
|71.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest
>> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD]
>> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge [GOOD]
>> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts
>> LocalTableWriter::DataAlongWithHeartbeat [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD]
Test command err: 2025-05-29T15:28:57.960012Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890065418337571:2081];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:57.960263Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000d12/r3tmp/tmp9MdObz/pdisk_1.dat 2025-05-29T15:28:58.043612Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:58.058514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:58.058548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:58.060067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13054 TServer::EnableGrpc on GrpcPort 4187, node 1 2025-05-29T15:28:58.081994Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:58.082015Z node 1 :NET_CLASSIFIER WARN:
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:58.082018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:58.082100Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13054 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:58.132124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:58.135591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:28:58.136783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532538235 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "int32_value" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "ui... 
(TRUNCATED) 2025-05-29T15:28:58.206444Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069713305508:2344] Handshake: worker# [1:7509890069713305416:2284] 2025-05-29T15:28:58.206553Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:312: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069713305508:2344] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:28:58.206609Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:387: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069713305508:2344] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-05-29T15:28:58.206617Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:417: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069713305508:2344] Send handshake: worker# [1:7509890069713305416:2284] 2025-05-29T15:28:58.206850Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:431: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069713305508:2344] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 45b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 44b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 66b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 71b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 
72b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 12 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 13 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 14 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 58b Offset: 15 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 16 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 17 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 18 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 76b Offset: 19 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 20 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 21 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 61b Offset: 22 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 23 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 24 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 46b Offset: 25 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 47b Offset: 26 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 50b Offset: 27 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 28 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 29 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 30 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 64b Offset: 31 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-05-29T15:28:58.207043Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:556: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069713305508:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 45 },{ Order: 2 BodySize: 45 },{ Order: 3 BodySize: 45 },{ Order: 4 BodySize: 45 },{ Order: 5 BodySize: 41 },{ Order: 6 BodySize: 41 },{ Order: 7 BodySize: 45 },{ Order: 8 BodySize: 44 },{ Order: 9 BodySize: 66 },{ Order: 10 BodySize: 71 },{ Order: 11 BodySize: 72 },{ Order: 12 BodySize: 49 },{ Order: 13 BodySize: 48 },{ Order: 14 BodySize: 51 },{ Order: 15 BodySize: 58 },{ Order: 16 BodySize: 51 },{ Order: 17 BodySize: 54 },{ Order: 18 BodySize: 57 },{ Order: 19 BodySize: 76 },{ Order: 20 BodySize: 45 },{ Order: 21 BodySize: 54 },{ Order: 22 BodySize: 61 },{ Order: 23 BodySize: 51 },{ Order: 24 BodySize: 45 },{ Order: 25 BodySize: 46 },{ Order: 26 BodySize: 47 },{ Order: 27 BodySize: 50 },{ Order: 28 BodySize: 49 },{ Order: 29 BodySize: 72 },{ Order: 30 BodySize: 57 },{ Order: 31 BodySize: 64 }] } 2025-05-29T15:28:58.207137Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890069713305511:2344] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-05-29T15:28:58.207149Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069713305508:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:58.207195Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890069713305511:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 4 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 5 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 6 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 7 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 8 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 44b },{ Order: 9 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 66b },{ Order: 10 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 11 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 12 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 13 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 14 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 15 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 58b },{ Order: 16 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 17 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 18 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 19 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 76b },{ Order: 20 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 21 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 22 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 61b },{ Order: 23 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 24 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 25 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 46b },{ Order: 26 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 47b },{ Order: 27 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 50b },{ Order: 28 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 29 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 30 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 31 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 64b }] } 2025-05-29T15:28:58.215754Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890069713305511:2344] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-05-29T15:28:58.215789Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069713305508:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:58.215808Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:570: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069713305508:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:53.409968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:53.409992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:53.409997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:53.410000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:53.410005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:53.410008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:53.410015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:53.410026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:53.410123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:53.410178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:53.419833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:53.419856Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:53.422579Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:53.422692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:53.422720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:53.424539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:53.424721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:53.424846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:53.424929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:53.425566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:53.425617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:53.425911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:53.425925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:53.425947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:53.425959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:53.425966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:53.426004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.427326Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:53.445440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:53.445501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.445565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:53.445622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:53.445632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.446213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:53.446234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:53.446276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.446284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:53.446289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:53.446293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:53.446583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.446591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:53.446595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:53.446885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.446893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.446900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:53.446907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:53.447400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:53.447738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:53.447768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:53.447905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:53.447922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:53.447928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:53.447979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:53.447985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:53.448010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:53.448019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:53.448381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:53.448387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:53.448418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
operation: CREATE USER, path: /MyRoot 2025-05-29T15:28:54.526828Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:54.526834Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:54.526862Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:54.526867Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:208:2209], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-05-29T15:28:54.526951Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:28:54.526962Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:28:54.526968Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:28:54.526973Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-05-29T15:28:54.526978Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:54.526995Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-05-29T15:28:54.527357Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-05-29T15:28:54.527506Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:43: TTxLogin Execute at schemeshard: 72057594046678944 2025-05-29T15:28:54.527514Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:46: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-05-29T15:28:54.638174Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:85: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-05-29T15:28:54.638221Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:54.638230Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:54.638287Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-05-29T15:28:54.638294Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:208:2209], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-05-29T15:28:54.638416Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2025-05-29T15:28:54.638494Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:43: TTxLogin Execute at schemeshard: 72057594046678944 2025-05-29T15:28:54.641205Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:85: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-05-29T15:28:54.641308Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:43: TTxLogin Execute at schemeshard: 72057594046678944 2025-05-29T15:28:54.643968Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:85: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-05-29T15:28:54.644065Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:43: TTxLogin Execute at schemeshard: 72057594046678944 2025-05-29T15:28:54.646578Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:85: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-05-29T15:28:54.646669Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:43: TTxLogin Execute at schemeshard: 72057594046678944 2025-05-29T15:28:54.646686Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:85: TTxLogin Complete, result: Error: "User user1 login denied: too many failed password attempts", at schemeshard: 72057594046678944 2025-05-29T15:28:54.646722Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:43: TTxLogin Execute at schemeshard: 72057594046678944 2025-05-29T15:28:54.646731Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:85: TTxLogin Complete, result: Error: "User user1 login denied: too many failed password attempts", at schemeshard: 72057594046678944 2025-05-29T15:28:54.646867Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:54.646923Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 65us result status StatusSuccess 2025-05-29T15:28:54.647036Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 
72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz0YMYPTvbhcLfANnfkN5\nNPiiaiJPy+M9KGf9XdTKYstFLOTT3t4YnecVcu+tQ59YxtxADXonlfmn1eYbHSk5\nwWfwrSNamfjyhynZsVxwt2IcXo1mFKb/9A5/z/89imgC0N02f6cQAKzywpCpVFaV\nEMdHbcVlPvRMc1Uy8bmoVxFUxmPD9+PzBu9/wrHsZCLwDa9CU5xlYCYBC7Oc+yRr\nYKlzsmuEkBeFy9rZJJGKBlQgqEAwhO6rJA7w4YrXB4h85HF3t9m5hSwleN7BzzBP\nrfjKLL/QUBAH0xFvTef9UumXo/OK6PPVMHSZNwkD6JqduDhR8RFwMMkLX9jNOuEK\nNwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1748618934635 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:58.647333Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:43: TTxLogin Execute at schemeshard: 72057594046678944 2025-05-29T15:28:58.649794Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:85: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-05-29T15:28:58.649912Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:43: TTxLogin Execute at schemeshard: 72057594046678944 2025-05-29T15:28:58.653504Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:85: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ4NTc1NzM4LCJpYXQiOjE3NDg1MzI1MzgsInN1YiI6InVzZXIxIn0.WoSfV102xTjP4fJj9x0ZTiQyUrizskA17B0hc7utXkRsY6_BXatVT8aZPadUdL7xdO-DnmEZTzsmG7b9JV_pZVZedbq7KkTHZAnZAmVjc2i5vd_fvZTEGZalOzEna5SvXAvlE8vFobl-ivqCWNPHdq1s5QmCDTjKkqxE5Yrdh64ciYM-iCk8qj6sKoTqz-Wc4obbl_khLoMBfE7qx56GeuywiuIPIl6Ia_fgKy955vjwLU2RFZq2gjpKI-MPYI0EL9g9oz_0afbYGFYB0apzDskRoYSp0O6khSPh9u9Ml6vtt_IttMYJRb8n67HNmpD2B6ZOSqe63TLHrwIDNTh6hA" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ4NTc1NzM4LCJpYXQiOjE3NDg1MzI1MzgsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-05-29T15:28:58.653603Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:28:58.653648Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 45us result status StatusSuccess 2025-05-29T15:28:58.653725Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz0YMYPTvbhcLfANnfkN5\nNPiiaiJPy+M9KGf9XdTKYstFLOTT3t4YnecVcu+tQ59YxtxADXonlfmn1eYbHSk5\nwWfwrSNamfjyhynZsVxwt2IcXo1mFKb/9A5/z/89imgC0N02f6cQAKzywpCpVFaV\nEMdHbcVlPvRMc1Uy8bmoVxFUxmPD9+PzBu9/wrHsZCLwDa9CU5xlYCYBC7Oc+yRr\nYKlzsmuEkBeFy9rZJJGKBlQgqEAwhO6rJA7w4YrXB4h85HF3t9m5hSwleN7BzzBP\nrfjKLL/QUBAH0xFvTef9UumXo/OK6PPVMHSZNwkD6JqduDhR8RFwMMkLX9jNOuEK\nNwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1748618934635 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowStr+UseSink Test command err: Trying to start YDB, gRPC: 10854, MsgBus: 1509 2025-05-29T15:28:49.366332Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890029530888033:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:49.366360Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e6a/r3tmp/tmp0iITlS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10854, node 1 2025-05-29T15:28:49.427436Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:49.433341Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:49.433355Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:49.433357Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:49.433407Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1509 2025-05-29T15:28:49.467478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:49.467510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:49.468468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1509 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:49.496562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:49.506703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:49.571636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:49.594536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:49.607670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:49.826541Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890029530889619:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:49.826573Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:49.885860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.895577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.908796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.972010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.984501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:50.001169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:50.016548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:50.034790Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890033825857569:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:50.034822Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:50.034890Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890033825857574:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:50.035919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:28:50.039875Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890033825857576:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:28:50.120666Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890033825857627:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:50.272958Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890033825857636:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:50.273069Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2U4YWEzNDgtNmQwNTQzNDAtNjkzYzk5YTktOGJmYjdiNDA=, ActorId: [1:7509890029530889601:2401], ActorState: ExecuteState, TraceId: 01jweaj2vj8bfg9ywf42v5njs2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:28:50.273790Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F4179659AC2 14. ??:0: ?? @ 0x7F41796EB84F Trying to start YDB, gRPC: 17927, MsgBus: 25401 2025-05-29T15:28:54.449710Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890050642600738:2211];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:54.449781Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e6a/r3tmp/tmp6vCX9Q/pdisk_1.dat 2025-05-29T15:28:54.506833Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17927, node 1 2025-05-29T15:28:54.530436Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:54.530448Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:54.530450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:54.530490Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:54.549525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:54.549566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:54.550566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25401 TClient is connected to server localhost:25401 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:54.595480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:54.597872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:28:54.601756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:54.671711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:54.696389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:54.708354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:54.874199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890050642602176:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:54.874239Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:54.938303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:54.949886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:54.959609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:54.981290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:55.000240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:55.021891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:55.031525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:55.047613Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890054937570123:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:55.047637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890054937570128:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:55.047640Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:55.048579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:28:55.058084Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890054937570130:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:28:55.156585Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890054937570181:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:55.279822Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890054937570197:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:55.280494Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzE0ODY0ZTAtMjkyNmY0N2UtZWFhNjFjZi03NDAwYzY5MA==, ActorId: [1:7509890050642602158:2401], ActorState: ExecuteState, TraceId: 01jweaj7r7dg3jsfh6hftcby5x, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:28:55.281762Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F03AEF46AC2 14. ??:0: ?? @ 0x7F03AEFD884F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1RR2 Test command err: Trying to start YDB, gRPC: 5583, MsgBus: 19644 2025-05-29T15:28:49.311972Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890032248517728:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:49.311998Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e78/r3tmp/tmpIYrc9s/pdisk_1.dat 2025-05-29T15:28:49.371315Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5583, node 1 2025-05-29T15:28:49.390088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:49.390104Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:49.390107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:49.390153Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19644 2025-05-29T15:28:49.413148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:49.413175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:49.414286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19644 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:49.452523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:49.461153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:49.524067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:49.548703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:49.558157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:49.664888Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890032248519320:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:49.664908Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:49.722917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.732446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.742391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.755334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.767187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.782637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.798790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:49.816957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890032248519973:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:49.816979Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890032248519978:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:49.816986Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:49.817946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:28:49.827156Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890032248519980:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:28:49.894134Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890032248520031:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:50.003740Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890032248520047:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:50.003884Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmQ3YzdhNzItMTdmNzIyYzMtNWQ1MmViYy1mNmM3Yjc1Nw==, ActorId: [1:7509890032248519317:2401], ActorState: ExecuteState, TraceId: 01jweaj2mq1y8kfae207vqyj6y, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:28:50.005759Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FD7D1B91AC2 14. ??:0: ?? @ 0x7FD7D1C2384F Trying to start YDB, gRPC: 16819, MsgBus: 12375 2025-05-29T15:28:54.362911Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890052188408143:2265];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:54.363001Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e78/r3tmp/tmpCFMuD3/pdisk_1.dat 2025-05-29T15:28:54.440895Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16819, node 1 2025-05-29T15:28:54.462239Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:54.462258Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:54.462262Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:54.462317Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12375 2025-05-29T15:28:54.499463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:54.499496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:54.500431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12375 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:54.544547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:28:54.557805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:28:54.575696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:54.594674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:54.606791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:54.820405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890052188409541:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:54.820435Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:54.881288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:54.893793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:54.911496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:54.925506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:54.935890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:54.946486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:54.957060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:54.983811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890052188410195:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:54.983847Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:54.983957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890052188410200:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:54.984969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:28:54.988372Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890052188410202:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:28:55.057072Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890056483377549:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:55.149273Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890056483377565:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:55.149400Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjRlMTQ2NGYtYzIxZGE3MmItZjVmZDQ2ZDUtNTQ4NjVmOTg=, ActorId: [1:7509890052188409514:2400], ActorState: ExecuteState, TraceId: 01jweaj7p705mt5h3gban705j8, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:28:55.150219Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F594D321AC2 14. ??:0: ?? @ 0x7F594D3B384F >> LocalTableWriter::DecimalKeys [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] Test command err: 2025-05-29T15:28:58.368243Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890067135441314:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:58.368274Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000aad/r3tmp/tmpjmFsEz/pdisk_1.dat 2025-05-29T15:28:58.423380Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:28270 TServer::EnableGrpc on GrpcPort 25703, node 1 2025-05-29T15:28:58.447122Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:58.447134Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:58.447136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:58.447170Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28270 2025-05-29T15:28:58.469868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:58.469901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:58.471032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:58.501325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:58.504121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532538606 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-05-29T15:28:58.565615Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890067135441987:2344] Handshake: worker# [1:7509890067135441988:2345] 2025-05-29T15:28:58.565703Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:312: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890067135441987:2344] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:28:58.565800Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:387: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890067135441987:2344] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-05-29T15:28:58.565817Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:417: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890067135441987:2344] Send handshake: worker# [1:7509890067135441988:2345] 2025-05-29T15:28:58.565930Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:431: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890067135441987:2344] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-05-29T15:28:58.566548Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:490: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890067135441987:2344] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-05-29T15:28:58.566570Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:556: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890067135441987:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-05-29T15:28:58.566605Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890067135441991:2344] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-05-29T15:28:58.566612Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: 
[LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890067135441987:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:58.566620Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890067135441991:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-05-29T15:28:58.568103Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890067135441991:2344] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-05-29T15:28:58.568117Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890067135441987:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:58.568124Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:570: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890067135441987:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } >> KqpQuery::QueryExplain >> TExtSubDomainTest::GenericCases >> LocalTableWriter::WaitTxIds [GOOD] >> BackupRestore::RestoreTableSplitBoundaries [GOOD] >> BackupRestore::ImportDataShouldHandleErrors ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DecimalKeys [GOOD] Test command err: 2025-05-29T15:28:58.701351Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890070405242493:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:58.701368Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000aa7/r3tmp/tmp8Ws5bo/pdisk_1.dat 2025-05-29T15:28:58.772139Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:23176 TServer::EnableGrpc on GrpcPort 2439, node 1 2025-05-29T15:28:58.802718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:58.802773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:58.803097Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:58.803107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:58.803110Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:58.803165Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:58.803848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23176 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:58.858726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:58.866814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532538970 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Decimal(1,0)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 1 DecimalScale: 0 } IsBuildInProgress: false } Columns { Name: "value" Type: "Decimal(35,10)" TypeId: 4865 I... 
(TRUNCATED) 2025-05-29T15:28:58.931111Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890070405243160:2344] Handshake: worker# [1:7509890070405243068:2284] 2025-05-29T15:28:58.931243Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:312: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890070405243160:2344] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:28:58.931325Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:387: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890070405243160:2344] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Decimal(1,0) : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-05-29T15:28:58.931344Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:417: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890070405243160:2344] Send handshake: worker# [1:7509890070405243068:2284] 2025-05-29T15:28:58.931456Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:431: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890070405243160:2344] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 57b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-05-29T15:28:58.931499Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:556: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890070405243160:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 57 },{ Order: 2 BodySize: 57 },{ Order: 3 BodySize: 57 }] } 2025-05-29T15:28:58.931557Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890070405243163:2344] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-05-29T15:28:58.931570Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890070405243160:2344] Handle 
NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:58.931587Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890070405243163:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b }] } 2025-05-29T15:28:58.932627Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890070405243163:2344] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-05-29T15:28:58.932646Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890070405243160:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:58.932655Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:570: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890070405243160:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream >> KqpStats::SysViewClientLost >> BackupRestoreS3::TestAllPrimitiveTypes-UINT8 [FAIL] >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true >> BackupRestoreS3::TestAllPrimitiveTypes-UINT64 >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WaitTxIds [GOOD] Test command err: 2025-05-29T15:28:58.037518Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890069833184443:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:58.038109Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000abb/r3tmp/tmpLySzzD/pdisk_1.dat 2025-05-29T15:28:58.101961Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890069833184280:2079] 1748532538036562 != 1748532538036565 2025-05-29T15:28:58.103495Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:16048 TServer::EnableGrpc on GrpcPort 22862, node 1 2025-05-29T15:28:58.143931Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:58.143945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: 
will try to initialize from file: (empty maybe) 2025-05-29T15:28:58.143947Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:58.143994Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16048 2025-05-29T15:28:58.167232Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:58.167266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:58.169638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:58.201472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:58.205237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:28:58.207057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532538305 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-05-29T15:28:58.271657Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069833184987:2344] Handshake: worker# [1:7509890069833184988:2345] 2025-05-29T15:28:58.271751Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:312: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069833184987:2344] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:28:58.271809Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:387: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069833184987:2344] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-05-29T15:28:58.271818Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:417: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069833184987:2344] Send handshake: worker# [1:7509890069833184988:2345] 2025-05-29T15:28:58.272005Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:431: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069833184987:2344] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-05-29T15:28:58.272880Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:490: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069833184987:2344] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-05-29T15:28:58.272908Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:556: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069833184987:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-05-29T15:28:58.272956Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890069833184991:2344] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-05-29T15:28:58.272962Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: 
[LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069833184987:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:58.272975Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890069833184991:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-05-29T15:28:58.274047Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890069833184991:2344] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-05-29T15:28:58.274060Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069833184987:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:58.274066Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:570: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069833184987:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-05-29T15:28:59.272291Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:490: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069833184987:2344] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2025-05-29T15:28:59.272349Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:556: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069833184987:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 }] } 2025-05-29T15:28:59.272384Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890069833184991:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-05-29T15:28:59.273501Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7509890069833184991:2344] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-05-29T15:28:59.273522Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:587: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069833184987:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:28:59.273530Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:570: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7509890069833184987:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::DeclareAndDrop >> BackupRestoreS3::RestoreIndexTableSplitBoundaries [GOOD] >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true >> TSchemeShardLoginTest::ResetFailedAttemptCount [GOOD] >> 
TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence [FAIL] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView >> KqpScheme::SchemaVersionMissmatchWithRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict-UseSink Test command err: Trying to start YDB, gRPC: 1940, MsgBus: 30914 2025-05-29T15:28:45.773237Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890011933771190:2273];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:45.773343Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e9e/r3tmp/tmpgVumc1/pdisk_1.dat 2025-05-29T15:28:45.839367Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1940, node 1 2025-05-29T15:28:45.858980Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:45.858993Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:45.858995Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:45.859044Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:45.873725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:45.873760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:45.874789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30914 TClient is connected to server localhost:30914 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:45.937762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:28:45.940887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:28:45.942957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:28:45.967911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:28:45.989941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:46.002071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:46.242146Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890016228739861:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:46.242175Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:46.307528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:46.323243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:46.335335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:46.344262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:46.400657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:46.414130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:46.428188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:46.451423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890016228740515:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:46.451492Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:46.451578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890016228740520:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:46.452605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:28:46.456064Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890016228740522:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:28:46.537281Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890016228740573:3398] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:46.620519Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890016228740589:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:46.620650Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTlkMDliNjMtNTVmNGU2ZmItYmVmMzUyMTUtMWYwOTM0MDU=, ActorId: [1:7509890016228739858:2401], ActorState: ExecuteState, TraceId: 01jweahzbja7t511z65mw68gvx, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:28:46.621467Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F42D926BAC2 14. ??:0: ?? @ 0x7F42D92FD84F Trying to start YDB, gRPC: 14100, MsgBus: 13924 2025-05-29T15:28:50.488528Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890033083303261:2233];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:50.489262Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e9e/r3tmp/tmpa8Mf09/pdisk_1.dat 2025-05-29T15:28:50.562052Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:50.562180Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890033083303059:2079] 1748532530487187 != 1748532530487190 TServer::EnableGrpc on GrpcPort 14100, node 1 2025-05-29T15:28:50.579451 ... als/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F0D66AB2AC2 14. ??:0: ?? @ 0x7F0D66B4484F Trying to start YDB, gRPC: 9985, MsgBus: 6754 2025-05-29T15:28:55.449587Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890057545448807:2274];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:55.449606Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e9e/r3tmp/tmpvqNlGF/pdisk_1.dat 2025-05-29T15:28:55.522291Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9985, node 1 2025-05-29T15:28:55.541278Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:55.541293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:55.541295Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:55.541337Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:55.549013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:55.549048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:55.550163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6754 TClient is connected to server localhost:6754 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:55.616162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:55.621414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting...
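
For reference on the VERIFY failures above: the trace itself names the helper that aborts the run, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus&) at ydb/core/kqp/ut/common/kqp_ut_common.h:375, failing on (result.IsSuccess()). Below is a minimal C++ sketch of such a helper, reconstructed only from the signature and condition visible in the trace; the real body, the exact headers, and the failure-message formatting are assumptions.

#include <library/cpp/testing/unittest/registar.h> // UNIT_ASSERT_C (Arcadia unittest framework)
// plus the YDB C++ SDK header that declares NYdb::TStatus (exact path assumed)

namespace NKikimr::NKqp {

// Sketch only: the signature and the checked condition come from the stack
// trace above; attaching the issue list as the assertion message is an
// assumption, consistent with the ": Fatal: ..." issue lines that the log
// prints immediately before each VERIFY.
inline void AssertSuccessResult(const NYdb::TStatus& result) {
    UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
}

} // namespace NKikimr::NKqp

Note that per frames 4-8 the assertion fires from CreateSampleTables running on a util thread pool via library/cpp/threading futures, not on the unittest thread; RaiseError() therefore cannot fail the test in place ("requirement UnittestThread failed") and escalates through Panic/InternalPanicImpl, aborting the whole test binary.
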
2025-05-29T15:28:55.643339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:55.672559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:55.688548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:55.867941Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890057545450182:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:55.867979Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:55.924362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:55.934048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:55.956620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:55.970705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:55.984378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:55.999493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:56.056398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:56.073013Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890061840418133:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:56.073036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:56.073152Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890061840418138:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:56.073974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:28:56.079710Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890061840418140:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:28:56.143684Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890061840418191:3398] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:56.260632Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890061840418207:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:56.262215Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2Y0YjJlMmYtNWNkZGQxYjYtMjkyNDEyNmYtNzJhZTNlMTg=, ActorId: [1:7509890057545450164:2401], ActorState: ExecuteState, TraceId: 01jweaj8r820gpvp0mvdpgrdhs, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:28:56.263004Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FB4D6107AC2 14. ??:0: ?? @ 0x7FB4D619984F |71.8%| [TA] $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScheme::CreateTableWithDefaultSettings >> KqpScheme::DisableExternalDataSourcesOnServerless >> KqpScheme::CreateTableWithWrongPartitionAtKeys |71.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load >> KqpQuery::RowsLimitServiceOverride |71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |71.8%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |71.8%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load >> KqpOlapScheme::InvalidColumnInTieringRule >> TExtSubDomainTest::GenericCases [GOOD] >> BackupRestore::ImportDataShouldHandleErrors [FAIL] >> BackupRestore::BackupUuid >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> TExtSubDomainTest::DeclareAndDrop [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-UINT64 [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-UTF8 >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] >> BackupRestore::TestAllPrimitiveTypes-UINT8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::GenericCases [GOOD] Test command err: 2025-05-29T15:28:59.538533Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890072617480895:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:59.538618Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/ciyv/002488/r3tmp/tmpOeTuyl/pdisk_1.dat 2025-05-29T15:28:59.619797Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:59.637959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:59.637984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:9846 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-05-29T15:28:59.643602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:59.647072Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509890072617480977:2115] Handle TEvNavigate describe path dc-1 2025-05-29T15:28:59.648191Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509890072617481432:2426] HANDLE EvNavigateScheme dc-1 2025-05-29T15:28:59.648226Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7509890072617481003:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:28:59.648234Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:7509890072617481003:2128], path# /dc-1, domainOwnerId# 72057594046644480 2025-05-29T15:28:59.648276Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:7509890072617481433:2427][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:28:59.648556Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890072617480657:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890072617481437:2427] 2025-05-29T15:28:59.648577Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890072617480657:2050] Subscribe: subscriber# [1:7509890072617481437:2427], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:28:59.648582Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890072617480660:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890072617481438:2427] 2025-05-29T15:28:59.648588Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890072617480663:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890072617481439:2427] 2025-05-29T15:28:59.648592Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890072617480663:2056] Subscribe: subscriber# [1:7509890072617481439:2427], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:28:59.648600Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890072617481437:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890072617480657:2050] 2025-05-29T15:28:59.648604Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890072617481439:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] 
Version: 2 }: sender# [1:7509890072617480663:2056] 2025-05-29T15:28:59.648608Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890072617481433:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890072617481434:2427] 2025-05-29T15:28:59.648612Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890072617481433:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890072617481436:2427] 2025-05-29T15:28:59.648614Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890072617480660:2053] Subscribe: subscriber# [1:7509890072617481438:2427], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:28:59.648619Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:7509890072617481433:2427][/dc-1] Set up state: owner# [1:7509890072617481003:2128], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:28:59.648651Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890072617480657:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890072617481437:2427] 2025-05-29T15:28:59.648653Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890072617481438:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890072617480660:2053] 2025-05-29T15:28:59.648657Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890072617481437:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890072617481434:2427], cookie# 1 2025-05-29T15:28:59.648659Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890072617481438:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890072617481435:2427], cookie# 1 2025-05-29T15:28:59.648661Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890072617481439:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890072617481436:2427], cookie# 1 2025-05-29T15:28:59.648664Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890072617481433:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890072617481435:2427] 2025-05-29T15:28:59.648670Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:7509890072617481433:2427][/dc-1] Path was already updated: owner# [1:7509890072617481003:2128], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:28:59.648671Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890072617480657:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7509890072617481437:2427], cookie# 1 2025-05-29T15:28:59.648676Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890072617480663:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890072617481439:2427] 2025-05-29T15:28:59.648680Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890072617480663:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890072617481439:2427], cookie# 1 2025-05-29T15:28:59.648684Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890072617480660:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890072617481438:2427] 2025-05-29T15:28:59.648687Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890072617480660:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890072617481438:2427], cookie# 1 2025-05-29T15:28:59.648691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890072617481437:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890072617480657:2050], cookie# 1 2025-05-29T15:28:59.648693Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890072617481439:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890072617480663:2056], cookie# 1 2025-05-29T15:28:59.648695Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890072617481438:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890072617480660:2053], cookie# 1 2025-05-29T15:28:59.648700Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890072617481433:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890072617481434:2427], cookie# 1 2025-05-29T15:28:59.648703Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:7509890072617481433:2427][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-05-29T15:28:59.648706Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890072617481433:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890072617481436:2427], cookie# 1 2025-05-29T15:28:59.648708Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:7509890072617481433:2427][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-05-29T15:28:59.648711Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890072617481433:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890072617481435:2427], cookie# 1 2025-05-29T15:28:59.648712Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:7509890072617481433:2427][/dc-1] Unexpected sync response: sender# [1:7509890072617481435:2427], cookie# 1 2025-05-29T15:28:59.661491Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:7509890072617481003:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLi ... 075186224037892 SchemeShard: 72075186224037888 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:29:00.234610Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509890072617480977:2115] Handle TEvProposeTransaction 2025-05-29T15:29:00.234620Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509890072617480977:2115] TxId# 281474976715668 ProcessProposeTransaction 2025-05-29T15:29:00.234627Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:273: actor# [1:7509890072617480977:2115] Cookie# 0 userReqId# "" txid# 281474976715668 SEND to# [1:7509890076912449454:2955] DataReq marker# P0 2025-05-29T15:29:00.234641Z node 1 :TX_PROXY TRACE: datareq.cpp:492: StateWaitInit, received event# 269811712, Sender [1:7509890072617480977:2115], Recipient [1:7509890076912449454:2955]: NKikimr::TEvTxProxyReq::TEvMakeRequest 2025-05-29T15:29:00.234645Z node 1 :TX_PROXY TRACE: datareq.cpp:494: StateWaitInit, processing event TEvTxProxyReq::TEvMakeRequest 2025-05-29T15:29:00.234652Z node 1 :TX_PROXY DEBUG: datareq.cpp:1330: Actor# [1:7509890076912449454:2955] Cookie# 0 txid# 281474976715668 HANDLE TDataReq marker# P1 2025-05-29T15:29:00.234715Z node 1 :TX_PROXY DEBUG: datareq.cpp:1245: Actor [1:7509890076912449454:2955] txid 281474976715668 disallow followers cause of operation 2 read target mode 0 2025-05-29T15:29:00.234721Z node 1 :TX_PROXY DEBUG: datareq.cpp:1245: Actor [1:7509890076912449454:2955] txid 281474976715668 disallow followers cause of operation 2 read target mode 0 2025-05-29T15:29:00.234726Z node 1 :TX_PROXY DEBUG: datareq.cpp:1453: Actor# [1:7509890076912449454:2955] txid# 281474976715668 SEND to# [1:7509890072617481003:2128] TSchemeCache with 2 scheme entries. 
DataReq marker# P2 2025-05-29T15:29:00.234758Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2760: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [1:7509890072617481003:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72075186224037888, LocalPathId: 6] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo Point: (Uint64 : 42) },{ TableId: [OwnerId: 72075186224037888, LocalPathId: 5] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo Point: (Uint64 : 42) }] } 2025-05-29T15:29:00.234768Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2063: FillEntry for TResolve: self# [1:7509890072617481003:2128], cacheItem# { Subscriber: { Subscriber: [1:7509890076912449409:2941] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1748532540250 PathId: [OwnerId: 72075186224037888, LocalPathId: 6] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72075186224037888, LocalPathId: 6] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:29:00.234780Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2063: FillEntry for TResolve: self# [1:7509890072617481003:2128], cacheItem# { Subscriber: { Subscriber: [1:7509890076912449338:2879] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1748532540150 PathId: [OwnerId: 72075186224037888, LocalPathId: 5] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72075186224037888, LocalPathId: 5] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:29:00.234827Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:7509890076912449456:2957], recipient# [1:7509890076912449454:2955], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72075186224037888, LocalPathId: 6] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Point: (Uint64 : 42) },{ TableId: [OwnerId: 72075186224037888, LocalPathId: 5] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Point: (Uint64 : 42) }] } 2025-05-29T15:29:00.234843Z node 1 :TX_PROXY TRACE: datareq.cpp:499: StateWaitResolve, received 
event# 269746178, Sender [1:7509890076912449456:2957], Recipient [1:7509890076912449454:2955]: NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult 2025-05-29T15:29:00.234846Z node 1 :TX_PROXY TRACE: datareq.cpp:503: StateWaitResolve, processing event TEvTxProxySchemeCache::TEvResolveKeySetResult 2025-05-29T15:29:00.234849Z node 1 :TX_PROXY DEBUG: datareq.cpp:1620: Actor# [1:7509890076912449454:2955] txid# 281474976715668 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-05-29T15:29:00.234972Z node 1 :TX_PROXY DEBUG: datareq.cpp:1115: Actor# [1:7509890076912449454:2955] txid# 281474976715668 SEND TEvProposeTransaction to datashard 72075186224037893 with 327 bytes program affected shards 2 followers disallowed marker# P4 2025-05-29T15:29:00.235004Z node 1 :TX_PROXY DEBUG: datareq.cpp:1115: Actor# [1:7509890076912449454:2955] txid# 281474976715668 SEND TEvProposeTransaction to datashard 72075186224037895 with 327 bytes program affected shards 2 followers disallowed marker# P4 2025-05-29T15:29:00.237961Z node 1 :TX_PROXY TRACE: datareq.cpp:531: StateWaitPrepare, received event# 269550080, Sender [2:7509890078716620627:2322], Recipient [1:7509890076912449454:2955] 2025-05-29T15:29:00.237970Z node 1 :TX_PROXY TRACE: datareq.cpp:535: StateWaitPrepare, processing event TEvDataShard::TEvProposeTransactionResult 2025-05-29T15:29:00.237987Z node 1 :TX_PROXY DEBUG: datareq.cpp:1873: Actor# [1:7509890076912449454:2955] txid# 281474976715668 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037893 read size 0 out readset size 0 marker# P6 2025-05-29T15:29:00.237993Z node 1 :TX_PROXY TRACE: datareq.cpp:531: StateWaitPrepare, received event# 269550080, Sender [2:7509890078716620847:2337], Recipient [1:7509890076912449454:2955] 2025-05-29T15:29:00.237995Z node 1 :TX_PROXY TRACE: datareq.cpp:535: StateWaitPrepare, processing event TEvDataShard::TEvProposeTransactionResult 2025-05-29T15:29:00.238001Z node 1 :TX_PROXY DEBUG: datareq.cpp:1873: Actor# [1:7509890076912449454:2955] txid# 281474976715668 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037895 read size 0 out readset size 0 marker# P6 2025-05-29T15:29:00.238006Z node 1 :TX_PROXY DEBUG: datareq.cpp:2921: Actor# [1:7509890076912449454:2955] txid# 281474976715668 SEND EvProposeTransaction to# 72075186224037889 Coordinator marker# P7 2025-05-29T15:29:00.238247Z node 1 :TX_PROXY TRACE: datareq.cpp:563: StateWaitPlan, received event# 269091328, Sender [2:7509890074421653063:2297], Recipient [1:7509890076912449454:2955] 2025-05-29T15:29:00.238256Z node 1 :TX_PROXY TRACE: datareq.cpp:567: StateWaitPlan, processing event TEvTxProxy::TEvProposeTransactionStatus 2025-05-29T15:29:00.238281Z node 1 :TX_PROXY DEBUG: datareq.cpp:2111: Actor# [1:7509890076912449454:2955] txid# 281474976715668 HANDLE TEvProposeTransactionStatus TDataReq marker# P11 Status# 16 2025-05-29T15:29:00.251870Z node 1 :TX_PROXY TRACE: datareq.cpp:563: StateWaitPlan, received event# 269091328, Sender [2:7509890074421653063:2297], Recipient [1:7509890076912449454:2955] 2025-05-29T15:29:00.251882Z node 1 :TX_PROXY TRACE: datareq.cpp:567: StateWaitPlan, processing event TEvTxProxy::TEvProposeTransactionStatus 2025-05-29T15:29:00.251891Z node 1 :TX_PROXY DEBUG: datareq.cpp:2135: Actor# [1:7509890076912449454:2955] txid# 281474976715668 HANDLE TEvProposeTransactionStatus TDataReq marker# P10 Status# 17 2025-05-29T15:29:00.254837Z node 1 
:TX_PROXY TRACE: datareq.cpp:563: StateWaitPlan, received event# 269550080, Sender [2:7509890078716620627:2322], Recipient [1:7509890076912449454:2955] 2025-05-29T15:29:00.254846Z node 1 :TX_PROXY TRACE: datareq.cpp:568: StateWaitPlan, processing event TEvDataShard::TEvProposeTransactionResult 2025-05-29T15:29:00.254858Z node 1 :TX_PROXY DEBUG: datareq.cpp:2286: Actor# [1:7509890076912449454:2955] txid# 281474976715668 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037893 marker# P12 2025-05-29T15:29:00.254868Z node 1 :TX_PROXY TRACE: datareq.cpp:563: StateWaitPlan, received event# 269550080, Sender [2:7509890078716620847:2337], Recipient [1:7509890076912449454:2955] 2025-05-29T15:29:00.254868Z node 1 :TX_PROXY TRACE: datareq.cpp:568: StateWaitPlan, processing event TEvDataShard::TEvProposeTransactionResult 2025-05-29T15:29:00.254872Z node 1 :TX_PROXY DEBUG: datareq.cpp:2286: Actor# [1:7509890076912449454:2955] txid# 281474976715668 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037895 marker# P12 2025-05-29T15:29:00.254968Z node 1 :TX_PROXY DEBUG: datareq.cpp:2691: Actor# [1:7509890076912449454:2955] txid# 281474976715668 MergeResult ExecComplete TDataReq marker# P17 2025-05-29T15:29:00.254992Z node 1 :TX_PROXY INFO: datareq.cpp:834: Actor# [1:7509890076912449454:2955] txid# 281474976715668 RESPONSE Status# ExecComplete prepare time: 0.003354s execute time: 0.016985s total time: 0.020339s marker# P13 2025-05-29T15:29:00.267886Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [1:7509890072617480657:2050] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7509890074421652998:2106] 2025-05-29T15:29:00.267905Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:662: [1:7509890072617480657:2050] Unsubscribe: subscriber# [2:7509890074421652998:2106], path# /dc-1/USER_0 2025-05-29T15:29:00.267913Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [1:7509890072617480660:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7509890074421652999:2106] 2025-05-29T15:29:00.267917Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:662: [1:7509890072617480660:2053] Unsubscribe: subscriber# [2:7509890074421652999:2106], path# /dc-1/USER_0 2025-05-29T15:29:00.267922Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [1:7509890072617480663:2056] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7509890074421653000:2106] 2025-05-29T15:29:00.267927Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:662: [1:7509890072617480663:2056] Unsubscribe: subscriber# [2:7509890074421653000:2106], path# /dc-1/USER_0 2025-05-29T15:29:00.268066Z node 1 :HIVE WARN: tx__status.cpp:57: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 2 2025-05-29T15:29:00.268360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-29T15:29:00.268948Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 
72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:55.062595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:55.062624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:55.062631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:55.062637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:55.062643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:55.062648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:55.062658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:55.062673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:55.062804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:55.062880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:55.077244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:55.077271Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:55.080109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:55.080248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:55.080299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:55.082046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:55.082236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:55.082373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:55.082442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:55.082949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2025-05-29T15:28:55.082991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:55.083249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:55.083261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:55.083281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:55.083289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:55.083295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:55.083329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:55.084744Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:55.107141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:55.107215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:55.107281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:55.107333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:55.107344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:55.108133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:55.108164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:55.108223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:55.108234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-05-29T15:28:55.108241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:55.108247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:55.108674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:55.108689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:55.108695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:55.109051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:55.109063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:55.109069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:55.109077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:55.109800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:55.110258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:55.110300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:55.110487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:55.110515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:55.110525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:55.110582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:55.110589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:55.110623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:55.110636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:55.111139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:55.111150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:55.111194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 00.503172Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-05-29T15:29:00.503178Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:29:00.503196Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-05-29T15:29:00.503539Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-05-29T15:29:00.503791Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [5:312:2298] sender: [5:407:2058] recipient: [5:103:2137] Leader for TabletID 72057594046678944 is [5:312:2298] sender: [5:410:2058] recipient: [5:15:2062] Leader for TabletID 72057594046678944 is [5:312:2298] sender: [5:411:2058] recipient: [5:409:2379] Leader for TabletID 72057594046678944 is [5:412:2380] sender: [5:413:2058] recipient: [5:409:2379] 2025-05-29T15:29:00.517519Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:29:00.517548Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:00.517554Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:29:00.517560Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:29:00.517566Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
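
The schemeshard records above trace a single suboperation (ESchemeOpAlterSubDomain, opId 1:0) through its whole state machine: TCreateParts finds no shards to create, then "Change state for txid 1:0 2 -> 3"; NSubDomainState::TConfigureParts moves "3 -> 128"; TPropose hands the transaction to the coordinator (plan step 5000001 under FAKE_COORDINATOR); and TEvOperationPlan completes it with "128 -> 240" followed by scheme-board publication and "Erasing txId 1". A hypothetical summary of the numeric states, inferred solely from the messages in this trace (the real schemeshard enum and its names are not shown in the log):

// C++ sketch; labels are inferred from this log, and the meaning of 240 is an assumption.
enum class ETxStateSeen {
    CreateParts    = 2,   // "TCreateParts opId# 1:0 ProgressState", then "Change state ... 2 -> 3"
    ConfigureParts = 3,   // "NSubDomainState::TConfigureParts ... ProgressState", then "3 -> 128"
    Propose        = 128, // "NSubDomainState::TPropose ProgressState", propose sent to coordinator
    Done           = 240, // reached on "HandleReply TEvOperationPlan", then publish-to-scheme-board
};
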
2025-05-29T15:29:00.517570Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:29:00.517578Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:00.517591Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:29:00.517675Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:29:00.517739Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:29:00.521983Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:29:00.522505Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:29:00.522556Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:29:00.522584Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:29:00.522590Z node 5 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:00.522628Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:29:00.522753Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:00.522774Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1457: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.522784Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1483: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.522843Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1785: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.522855Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_data_erasure_manager.cpp:452: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-05-29T15:29:00.522884Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2033: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.522896Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.522906Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2151: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.522923Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2237: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.522932Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2303: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.522950Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:2453: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.522983Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2832: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.522996Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2911: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.523041Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3409: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.523049Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3445: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.523068Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3659: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.523078Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3804: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.523089Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3821: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.523125Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3981: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.523136Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3997: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.523155Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4282: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.523183Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4587: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.523192Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4645: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.523208Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4740: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.523214Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4767: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.523220Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4794: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:00.525017Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:29:00.525326Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:00.525339Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:00.525404Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:29:00.525413Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:00.525418Z node 5 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:29:00.525524Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:412:2380] sender: [5:470:2058] recipient: [5:15:2062] 2025-05-29T15:29:00.573941Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:43: TTxLogin Execute at schemeshard: 72057594046678944 2025-05-29T15:29:00.573961Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:46: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-05-29T15:29:00.642488Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:85: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ4NTc1NzQwLCJpYXQiOjE3NDg1MzI1NDAsInN1YiI6InVzZXIxIn0.d5r6ZnTBbKa2ZmxjeBbrEtWFNlumGugYf4IJBxsDLV26oIAaZZq0Au3UbqFImwUA8K681ntbJxahhPT334HnAXOox8SwLUXg0IFCvOR3ini7xc4S8ZHkLDNpye4eLcAzVRg8zNSMesTxVPgxQcwl4c2YYa1BP7Nz1jlxmPlUTdgAVf-WnOQu03MBXrv1dRakT3ViDsuyvZCsVgLeOO5BVrmyYWrwvh5a_zEoZyVI0T4tsgpOVgI8SyZTRtB67CSu6Bmeg8LVHENFUirTuMCE-zutkS7jFVorxEI4PQYCdk95pQNF9I9bwNyEtCu4YiYuCWw3ZpbXeGm0RSpkwADxbw" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ4NTc1NzQwLCJpYXQiOjE3NDg1MzI1NDAsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-05-29T15:29:00.642540Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:00.642548Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:00.642604Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:00.642611Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:461:2418], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-05-29T15:29:00.642772Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-05-29T15:28:59.683051Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890072431662966:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:59.683094Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002479/r3tmp/tmpM5bodL/pdisk_1.dat 2025-05-29T15:28:59.852112Z node 1 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890072431662945:2079] 1748532539682920 != 1748532539682923 2025-05-29T15:28:59.869203Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:59.870217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:59.870235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:59.877026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6231 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-05-29T15:28:59.907012Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509890072431663187:2092] Handle TEvNavigate describe path dc-1 2025-05-29T15:28:59.908807Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509890072431663697:2432] HANDLE EvNavigateScheme dc-1 2025-05-29T15:28:59.908838Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7509890072431663232:2116], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:28:59.908845Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:7509890072431663232:2116], path# /dc-1, domainOwnerId# 72057594046644480 2025-05-29T15:28:59.908885Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:7509890072431663698:2433][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:28:59.909294Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890072431662915:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890072431663702:2433] 2025-05-29T15:28:59.909307Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890072431662915:2049] Subscribe: subscriber# [1:7509890072431663702:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:28:59.909318Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890072431662918:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890072431663703:2433] 2025-05-29T15:28:59.909322Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890072431662918:2052] Subscribe: subscriber# [1:7509890072431663703:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:28:59.909327Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890072431662921:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890072431663704:2433] 2025-05-29T15:28:59.909331Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890072431662921:2055] Subscribe: subscriber# [1:7509890072431663704:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:28:59.909341Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890072431663702:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 
PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890072431662915:2049] 2025-05-29T15:28:59.909345Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890072431663703:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890072431662918:2052] 2025-05-29T15:28:59.909349Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890072431663704:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890072431662921:2055] 2025-05-29T15:28:59.909355Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890072431663698:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890072431663699:2433] 2025-05-29T15:28:59.909361Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890072431663698:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890072431663700:2433] 2025-05-29T15:28:59.909370Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:7509890072431663698:2433][/dc-1] Set up state: owner# [1:7509890072431663232:2116], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:28:59.909405Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890072431663698:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890072431663701:2433] 2025-05-29T15:28:59.909411Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:7509890072431663698:2433][/dc-1] Path was already updated: owner# [1:7509890072431663232:2116], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:28:59.909417Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890072431663702:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890072431663699:2433], cookie# 1 2025-05-29T15:28:59.909420Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890072431663703:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890072431663700:2433], cookie# 1 2025-05-29T15:28:59.909423Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890072431663704:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890072431663701:2433], cookie# 1 2025-05-29T15:28:59.909428Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890072431662915:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890072431663702:2433] 2025-05-29T15:28:59.909433Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: 
[1:7509890072431662915:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890072431663702:2433], cookie# 1 2025-05-29T15:28:59.909436Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890072431662918:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890072431663703:2433] 2025-05-29T15:28:59.909438Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890072431662918:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890072431663703:2433], cookie# 1 2025-05-29T15:28:59.909441Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890072431662921:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890072431663704:2433] 2025-05-29T15:28:59.909444Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890072431662921:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890072431663704:2433], cookie# 1 2025-05-29T15:28:59.909782Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890072431663702:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890072431662915:2049], cookie# 1 2025-05-29T15:28:59.909793Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890072431663703:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890072431662918:2052], cookie# 1 2025-05-29T15:28:59.909796Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890072431663704:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890072431662921:2055], cookie# 1 2025-05-29T15:28:59.909803Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890072431663698:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890072431663699:2433], cookie# 1 2025-05-29T15:28:59.909809Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:7509890072431663698:2433][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-05-29T15:28:59.909812Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890072431663698:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890072431663700:2433], cookie# 1 2025-05-29T15:28:59.909816Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:7509890072431663698:2433][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-05-29T15:28:59.909819Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890072431663698:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890072431663701:2433], cookie# 1 2025-05-29T15:28:59.909822Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:7509890072431663698:2433][/dc-1] Unexpected sync response: sender# [1:7509890072431663701:2433], cookie# 1 2025-05-29T15:28:59.918149Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:7509890072431663232:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 720 ... RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:29:00.513146Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:7509890076726631339:2657] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:29:00.513167Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:7509890076726631339:2657] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-05-29T15:29:00.513381Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:7509890076726631339:2657] Handle TEvDescribeSchemeResult Forward to# [1:7509890076726631338:2656] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532539992 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532539992 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1748532540006 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046... (TRUNCATED) 2025-05-29T15:29:00.684055Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7509890072431663232:2116], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:00.684094Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:7509890072431663232:2116], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-05-29T15:29:00.684166Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:7509890076726631343:2660][/dc-1/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:29:00.684241Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890072431662918:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7509890076726631348:2660] 2025-05-29T15:29:00.684241Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890072431662915:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7509890076726631347:2660] 2025-05-29T15:29:00.684247Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [1:7509890072431662915:2049] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-05-29T15:29:00.684250Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [1:7509890072431662918:2052] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-05-29T15:29:00.684271Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890072431662918:2052] Subscribe: subscriber# [1:7509890076726631348:2660], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:00.684272Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890072431662915:2049] Subscribe: subscriber# [1:7509890076726631347:2660], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:00.684283Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890072431662921:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7509890076726631349:2660] 2025-05-29T15:29:00.684285Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [1:7509890072431662921:2055] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-05-29T15:29:00.684289Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890072431662921:2055] Subscribe: subscriber# 
[1:7509890076726631349:2660], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:00.684289Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890076726631348:2660][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7509890072431662918:2052] 2025-05-29T15:29:00.684298Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890072431662918:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7509890076726631348:2660] 2025-05-29T15:29:00.684305Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890076726631347:2660][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7509890072431662915:2049] 2025-05-29T15:29:00.684310Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890072431662915:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7509890076726631347:2660] 2025-05-29T15:29:00.684312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890076726631349:2660][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7509890072431662921:2055] 2025-05-29T15:29:00.684315Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890072431662921:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7509890076726631349:2660] 2025-05-29T15:29:00.684320Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890076726631343:2660][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7509890076726631345:2660] 2025-05-29T15:29:00.684332Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890076726631343:2660][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7509890076726631344:2660] 2025-05-29T15:29:00.684348Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:7509890076726631343:2660][/dc-1/.metadata/initialization/migrations] Set up state: owner# [1:7509890072431663232:2116], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:00.684359Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890076726631343:2660][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7509890076726631346:2660] 2025-05-29T15:29:00.684366Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:7509890072431663232:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2025-05-29T15:29:00.684370Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:7509890076726631343:2660][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [1:7509890072431663232:2116], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 
elements } 2025-05-29T15:29:00.684378Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:7509890072431663232:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7509890076726631343:2660] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:29:00.684395Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:7509890072431663232:2116], cacheItem# { Subscriber: { Subscriber: [1:7509890076726631343:2660] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:29:00.684428Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:7509890076726631350:2661], recipient# [1:7509890076726631342:2319], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDrop [GOOD] Test command err: 2025-05-29T15:29:00.144892Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890076896765957:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002444/r3tmp/tmpyorl2m/pdisk_1.dat 2025-05-29T15:29:00.192995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:29:00.215817Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:00.216177Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890076896765756:2079] 1748532540114035 != 1748532540114038 TClient is connected to server localhost:22321 WaitRootIsUp 'dc-1'... 
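
The TTxLogin trace earlier in this output prints both the issued token and a SanitizedToken in which only the signature segment is replaced by `**`, so the base64url header and payload (aud, exp, iat, sub) remain inspectable in logs while the secret part is masked. A minimal illustrative sketch of that masking, assuming a well-formed three-segment `header.payload.signature` JWT (this is not YDB's actual implementation):

```cpp
#include <iostream>
#include <string>

// Replace the third (signature) segment of a header.payload.signature
// token with "**", keeping the readable header and payload intact.
std::string SanitizeToken(const std::string& token) {
    const size_t lastDot = token.rfind('.');
    if (lastDot == std::string::npos) {
        return "**";  // not a JWT-shaped string; mask everything
    }
    return token.substr(0, lastDot + 1) + "**";
}

int main() {
    const std::string token = "hdr.payload.signature";
    std::cout << SanitizeToken(token) << "\n";  // prints "hdr.payload.**"
}
```
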
TClient::Ls request: dc-1 2025-05-29T15:29:00.242514Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509890076896765817:2101] Handle TEvNavigate describe path dc-1 2025-05-29T15:29:00.244150Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509890076896766293:2250] HANDLE EvNavigateScheme dc-1 2025-05-29T15:29:00.244192Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7509890076896766044:2115], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:00.244201Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:7509890076896766044:2115], path# /dc-1, domainOwnerId# 72057594046644480 2025-05-29T15:29:00.244240Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:7509890076896766294:2251][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:29:00.244611Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890076896765726:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890076896766298:2251] 2025-05-29T15:29:00.244632Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890076896765726:2049] Subscribe: subscriber# [1:7509890076896766298:2251], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:00.244646Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890076896765732:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890076896766300:2251] 2025-05-29T15:29:00.244649Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890076896765732:2055] Subscribe: subscriber# [1:7509890076896766300:2251], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:00.244658Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890076896766298:2251][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890076896765726:2049] 2025-05-29T15:29:00.244660Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890076896766300:2251][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890076896765732:2055] 2025-05-29T15:29:00.244673Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890076896766294:2251][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890076896766295:2251] 2025-05-29T15:29:00.244679Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890076896766294:2251][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890076896766297:2251] 2025-05-29T15:29:00.244685Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:7509890076896766294:2251][/dc-1] Set up state: owner# [1:7509890076896766044:2115], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 
72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:00.244718Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890076896766298:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076896766295:2251], cookie# 1 2025-05-29T15:29:00.244720Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890076896766299:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076896766296:2251], cookie# 1 2025-05-29T15:29:00.244722Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890076896766300:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076896766297:2251], cookie# 1 2025-05-29T15:29:00.244726Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890076896765726:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890076896766298:2251] 2025-05-29T15:29:00.244729Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890076896765726:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076896766298:2251], cookie# 1 2025-05-29T15:29:00.244732Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890076896765732:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890076896766300:2251] 2025-05-29T15:29:00.244734Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890076896765732:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076896766300:2251], cookie# 1 2025-05-29T15:29:00.246730Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890076896765729:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890076896766299:2251] 2025-05-29T15:29:00.246777Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890076896765729:2052] Subscribe: subscriber# [1:7509890076896766299:2251], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:00.246801Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890076896765729:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076896766299:2251], cookie# 1 2025-05-29T15:29:00.246821Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890076896766298:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890076896765726:2049], cookie# 1 2025-05-29T15:29:00.246829Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890076896766300:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890076896765732:2055], cookie# 1 2025-05-29T15:29:00.246838Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890076896766299:2251][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890076896765729:2052] 2025-05-29T15:29:00.246846Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890076896766299:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890076896765729:2052], cookie# 1 2025-05-29T15:29:00.246854Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: 
[main][1:7509890076896766294:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890076896766295:2251], cookie# 1 2025-05-29T15:29:00.246865Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:7509890076896766294:2251][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-05-29T15:29:00.246874Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890076896766294:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890076896766297:2251], cookie# 1 2025-05-29T15:29:00.246877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:7509890076896766294:2251][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-05-29T15:29:00.246884Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890076896766294:2251][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890076896766296:2251] 2025-05-29T15:29:00.246901Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:7509890076896766294:2251][/dc-1] Path was already updated: owner# [1:7509890076896766044:2115], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:00.246911Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890076896766294:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890076896766296:2251], cookie# 1 2025-05-29T15:29:00.246913Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:7509890076896766294:2251][/dc-1] Unexpected sync response: sender# [1:7509890076896766296:2251], cookie# 1 2025-05-29T15:29:00.246922Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890076896765729:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890076896766299:2251] 2025-05-29T15:29:00.254856Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:7509890076896766044:2115], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 
72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-05-29T15:29:00.254939Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:7509890076896766044:2115], notify ... ean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:29:00.314441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-05-29T15:29:00.314449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-29T15:29:00.314461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:29:00.314462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-05-29T15:29:00.314467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-29T15:29:00.314512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2025-05-29T15:29:00.314526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2025-05-29T15:29:00.314548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-29T15:29:00.314554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1 2025-05-29T15:29:00.314961Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509890076896765817:2101] Handle TEvNavigate describe path /dc-1 2025-05-29T15:29:00.316355Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509890076896766383:2315] HANDLE EvNavigateScheme /dc-1 2025-05-29T15:29:00.316386Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7509890076896766044:2115], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:00.316405Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:871: [main][1:7509890076896766294:2251][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7509890076896766044:2115], cookie# 4 2025-05-29T15:29:00.316416Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: subscriber.cpp:381: [replica][1:7509890076896766298:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076896766295:2251], cookie# 4 2025-05-29T15:29:00.316419Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890076896766299:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076896766296:2251], cookie# 4 2025-05-29T15:29:00.316421Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890076896766300:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076896766297:2251], cookie# 4 2025-05-29T15:29:00.316425Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890076896765729:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076896766299:2251], cookie# 4 2025-05-29T15:29:00.316431Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890076896765732:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076896766300:2251], cookie# 4 2025-05-29T15:29:00.316438Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890076896766299:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7509890076896765729:2052], cookie# 4 2025-05-29T15:29:00.316440Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890076896766300:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7509890076896765732:2055], cookie# 4 2025-05-29T15:29:00.316442Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890076896766294:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7509890076896766296:2251], cookie# 4 2025-05-29T15:29:00.316447Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:7509890076896766294:2251][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-05-29T15:29:00.316450Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890076896766294:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7509890076896766297:2251], cookie# 4 2025-05-29T15:29:00.316452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:7509890076896766294:2251][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-05-29T15:29:00.316458Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:7509890076896766044:2115], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-05-29T15:29:00.316468Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:7509890076896766044:2115], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7509890076896766294:2251] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1748532540349 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:29:00.316477Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:7509890076896766044:2115], 
cacheItem# { Subscriber: { Subscriber: [1:7509890076896766294:2251] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1748532540349 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-05-29T15:29:00.316511Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:7509890076896766384:2316], recipient# [1:7509890076896766383:2315], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:29:00.316516Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890076896765726:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076896766298:2251], cookie# 4 2025-05-29T15:29:00.316521Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:7509890076896766383:2315] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:29:00.316533Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:7509890076896766383:2315] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-05-29T15:29:00.316538Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890076896766298:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7509890076896765726:2049], cookie# 4 2025-05-29T15:29:00.316540Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890076896766294:2251][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7509890076896766295:2251], cookie# 4 2025-05-29T15:29:00.316542Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:7509890076896766294:2251][/dc-1] Unexpected sync response: sender# [1:7509890076896766295:2251], cookie# 4 2025-05-29T15:29:00.316661Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:7509890076896766383:2315] Handle TEvDescribeSchemeResult Forward to# [1:7509890076896766382:2314] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532540349 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription 
{ SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532540349 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... (TRUNCATED) >> KqpScheme::CreateExternalDataSourceWithSa ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: 2025-05-29T15:29:00.027549Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890078730350874:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:00.027572Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002453/r3tmp/tmpXe2Arm/pdisk_1.dat 2025-05-29T15:29:00.135321Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890078730350852:2079] 1748532540027318 != 1748532540027321 2025-05-29T15:29:00.141971Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:32120 WaitRootIsUp 'dc-1'... 
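
The subscriber traces in these tests repeat the same sync pattern: a versioned request fans out to three replicas with a cookie, progress is logged as `size# 3, half# 1`, the sync is declared done as soon as `successes# 2` (a strict majority) arrive, and the third, late reply is then reported as an `Unexpected sync response`. A rough sketch of that majority-quorum bookkeeping, with invented names (not the actual subscriber.cpp code):

```cpp
#include <cstdio>

// Tracks replies for one sync cookie across N replicas and reports
// completion once more than half of them have answered successfully.
struct TSyncQuorum {
    int Size = 3;  // number of replicas asked
    int Successes = 0;
    int Failures = 0;
    bool Done = false;

    int Half() const { return Size / 2; }  // logged as "half# 1" for 3 replicas

    // Returns true if this reply completed the sync; replies arriving after
    // completion correspond to the "Unexpected sync response" log line.
    bool HandleReply(bool ok) {
        if (Done) {
            std::printf("Unexpected sync response\n");
            return false;
        }
        ok ? ++Successes : ++Failures;
        if (Successes > Half()) {
            Done = true;
            std::printf("Sync is done: successes# %d\n", Successes);
            return true;
        }
        std::printf("Sync is in progress: successes# %d\n", Successes);
        return false;
    }
};

int main() {
    TSyncQuorum q;
    q.HandleReply(true);  // in progress: 1 of 3
    q.HandleReply(true);  // done: 2 > half (1)
    q.HandleReply(true);  // unexpected, quorum already reached
}
```
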
TClient::Ls request: dc-1 2025-05-29T15:29:00.176729Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509890078730351093:2092] Handle TEvNavigate describe path dc-1 2025-05-29T15:29:00.178818Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509890078730351580:2413] HANDLE EvNavigateScheme dc-1 2025-05-29T15:29:00.178880Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7509890078730351217:2147], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:00.178893Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:7509890078730351217:2147], path# /dc-1, domainOwnerId# 72057594046644480 2025-05-29T15:29:00.178942Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:7509890078730351581:2414][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:29:00.179440Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890078730350822:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890078730351585:2414] 2025-05-29T15:29:00.179472Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890078730350822:2049] Subscribe: subscriber# [1:7509890078730351585:2414], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:00.179495Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890078730350825:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890078730351586:2414] 2025-05-29T15:29:00.179504Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890078730350825:2052] Subscribe: subscriber# [1:7509890078730351586:2414], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:00.179509Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890078730350828:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890078730351587:2414] 2025-05-29T15:29:00.179525Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890078730350828:2055] Subscribe: subscriber# [1:7509890078730351587:2414], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:00.179541Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890078730351585:2414][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890078730350822:2049] 2025-05-29T15:29:00.179550Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890078730351586:2414][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890078730350825:2052] 2025-05-29T15:29:00.179554Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890078730351587:2414][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890078730350828:2055] 2025-05-29T15:29:00.179561Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: 
[main][1:7509890078730351581:2414][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890078730351582:2414] 2025-05-29T15:29:00.179569Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890078730351581:2414][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890078730351583:2414] 2025-05-29T15:29:00.179584Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:7509890078730351581:2414][/dc-1] Set up state: owner# [1:7509890078730351217:2147], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:00.179638Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890078730351581:2414][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890078730351584:2414] 2025-05-29T15:29:00.179650Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:7509890078730351581:2414][/dc-1] Path was already updated: owner# [1:7509890078730351217:2147], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:00.179658Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890078730351585:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890078730351582:2414], cookie# 1 2025-05-29T15:29:00.179662Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890078730351586:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890078730351583:2414], cookie# 1 2025-05-29T15:29:00.179665Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890078730351587:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890078730351584:2414], cookie# 1 2025-05-29T15:29:00.179671Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890078730350822:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890078730351585:2414] 2025-05-29T15:29:00.179676Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890078730350822:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890078730351585:2414], cookie# 1 2025-05-29T15:29:00.179681Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890078730350825:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890078730351586:2414] 2025-05-29T15:29:00.179685Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890078730350825:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890078730351586:2414], cookie# 1 2025-05-29T15:29:00.179689Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890078730350828:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890078730351587:2414] 
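
In the notifications just above, each TEvNotify carries a path version (`Version: 2` here); the main subscriber sets up fresh state on the first report and logs `Path was already updated` when another replica reports a version it has already recorded. A simplified sketch of that keep-the-newest-version rule, under the assumption that versions are monotonically increasing integers (field and type names invented for illustration):

```cpp
#include <cstdint>
#include <cstdio>
#include <optional>

// Minimal stand-in for the subscriber's per-path state.
struct TPathState {
    uint64_t Version = 0;
};

struct TSubscriber {
    std::optional<TPathState> State;

    // Apply a replica notification: adopt it if it is the first state seen
    // or strictly newer; otherwise the path "was already updated".
    void HandleNotify(uint64_t version) {
        if (!State) {
            State = TPathState{version};
            std::printf("Set up state: Version: %llu\n",
                        (unsigned long long)version);
        } else if (version > State->Version) {
            State->Version = version;
            std::printf("Updated state: Version: %llu\n",
                        (unsigned long long)version);
        } else {
            std::printf("Path was already updated: Version: %llu\n",
                        (unsigned long long)version);
        }
    }
};

int main() {
    TSubscriber s;
    s.HandleNotify(2);  // first replica: set up state
    s.HandleNotify(2);  // same version from another replica: already updated
}
```
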
2025-05-29T15:29:00.179692Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890078730350828:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890078730351587:2414], cookie# 1 2025-05-29T15:29:00.181139Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890078730351585:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890078730350822:2049], cookie# 1 2025-05-29T15:29:00.181152Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890078730351586:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890078730350825:2052], cookie# 1 2025-05-29T15:29:00.181154Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890078730351587:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890078730350828:2055], cookie# 1 2025-05-29T15:29:00.181160Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890078730351581:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890078730351582:2414], cookie# 1 2025-05-29T15:29:00.181166Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:7509890078730351581:2414][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-05-29T15:29:00.181170Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890078730351581:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890078730351583:2414], cookie# 1 2025-05-29T15:29:00.181174Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:7509890078730351581:2414][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-05-29T15:29:00.181178Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890078730351581:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890078730351584:2414], cookie# 1 2025-05-29T15:29:00.181181Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:7509890078730351581:2414][/dc-1] Unexpected sync response: sender# [1:7509890078730351584:2414], cookie# 1 2025-05-29T15:29:00.181313Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:7509890078730351093:2092] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:29:00.181557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:00.181575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:00.182123Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:29:00.182674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:00.190849Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:7509890078730351217:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 
72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription ... al: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:29:00.762903Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:7509890078730351217:2147], cacheItem# { Subscriber: { Subscriber: [1:7509890078730351792:2577] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1748532540286 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-05-29T15:29:00.762958Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:7509890078730351949:2659], recipient# [1:7509890078730351948:2658], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:29:00.762965Z node 1 :TX_PROXY INFO: describe.cpp:356: Actor# [1:7509890078730351948:2658] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-05-29T15:29:00.763948Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509890078730351093:2092] Handle TEvNavigate describe path /dc-1 2025-05-29T15:29:00.765616Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509890078730351951:2661] HANDLE EvNavigateScheme /dc-1 2025-05-29T15:29:00.765634Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7509890078730351217:2147], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:00.765677Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:871: [main][1:7509890078730351581:2414][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7509890078730351217:2147], cookie# 4 2025-05-29T15:29:00.765701Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890078730351585:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7509890078730351582:2414], cookie# 4 2025-05-29T15:29:00.765723Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890078730351586:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890078730351583:2414], cookie# 4 2025-05-29T15:29:00.765733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890078730351587:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890078730351584:2414], cookie# 4 2025-05-29T15:29:00.765745Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890078730350822:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890078730351585:2414], cookie# 4 2025-05-29T15:29:00.765763Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890078730350825:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890078730351586:2414], cookie# 4 2025-05-29T15:29:00.765775Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890078730350828:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890078730351587:2414], cookie# 4 2025-05-29T15:29:00.765786Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890078730351585:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7509890078730350822:2049], cookie# 4 2025-05-29T15:29:00.765795Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890078730351586:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7509890078730350825:2052], cookie# 4 2025-05-29T15:29:00.765798Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890078730351587:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7509890078730350828:2055], cookie# 4 2025-05-29T15:29:00.765804Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890078730351581:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7509890078730351582:2414], cookie# 4 2025-05-29T15:29:00.765810Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:7509890078730351581:2414][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-05-29T15:29:00.765816Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890078730351581:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7509890078730351583:2414], cookie# 4 2025-05-29T15:29:00.765820Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:7509890078730351581:2414][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-05-29T15:29:00.765825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890078730351581:2414][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7509890078730351584:2414], cookie# 4 2025-05-29T15:29:00.765833Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:7509890078730351581:2414][/dc-1] Unexpected sync response: sender# [1:7509890078730351584:2414], cookie# 4 2025-05-29T15:29:00.765843Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:7509890078730351217:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: 
/dc-1 PathId: Partial: 0 } 2025-05-29T15:29:00.765870Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:7509890078730351217:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7509890078730351581:2414] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1748532540265 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:29:00.765899Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:7509890078730351217:2147], cacheItem# { Subscriber: { Subscriber: [1:7509890078730351581:2414] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1748532540265 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-05-29T15:29:00.765966Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:7509890078730351952:2662], recipient# [1:7509890078730351951:2661], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:29:00.765981Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:7509890078730351951:2661] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:29:00.766004Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:7509890078730351951:2661] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-05-29T15:29:00.766238Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:7509890078730351951:2661] Handle TEvDescribeSchemeResult Forward to# [1:7509890078730351950:2660] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532540265 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532540265 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532540286 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046... (TRUNCATED) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> PrivateApi::Nodes [FAIL] Test command err: 2025-05-29T15:28:41.428727Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889996676184050:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:41.428747Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0529 15:28:41.458708655 4059375 dns_resolver_ares.cc:452] no server name supplied in dns URI E0529 15:28:41.458776689 4059375 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-05-29T15:28:41.460740Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12390: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12390 } ] test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0008bb/r3tmp/tmpM2YhY5/pdisk_1.dat 2025-05-29T15:28:41.761508Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7509889996676184544:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:28:41.761548Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 12390, node 1 2025-05-29T15:28:41.811485Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:18391 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:28:41.811771Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:41.811785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:41.811788Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:41.811857Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:42.079961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:42.164587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:42.164618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:42.166197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:42.468641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:28:42.469608Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/queries". Create session OK 2025-05-29T15:28:42.469620Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/queries" 2025-05-29T15:28:42.469622Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/queries" 2025-05-29T15:28:42.469640Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/nodes". 
Create session OK 2025-05-29T15:28:42.469645Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/nodes" 2025-05-29T15:28:42.469646Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/nodes" 2025-05-29T15:28:42.469865Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/tenant_acks". Create session OK 2025-05-29T15:28:42.469875Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/tenant_acks" 2025-05-29T15:28:42.469877Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/tenant_acks" 2025-05-29T15:28:42.469913Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/connections". Create session OK 2025-05-29T15:28:42.469916Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/connections" 2025-05-29T15:28:42.469917Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/connections" 2025-05-29T15:28:42.469945Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/bindings". Create session OK 2025-05-29T15:28:42.469954Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/bindings" 2025-05-29T15:28:42.469955Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/bindings" 2025-05-29T15:28:42.469987Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/mappings". Create session OK 2025-05-29T15:28:42.469988Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/mappings" 2025-05-29T15:28:42.469989Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/mappings" 2025-05-29T15:28:42.470012Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/idempotency_keys". Create session OK 2025-05-29T15:28:42.470019Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/idempotency_keys" 2025-05-29T15:28:42.470020Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/idempotency_keys" 2025-05-29T15:28:42.470226Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/jobs". Create session OK 2025-05-29T15:28:42.470233Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/jobs" 2025-05-29T15:28:42.470234Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/jobs" 2025-05-29T15:28:42.470249Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/result_sets". Create session OK 2025-05-29T15:28:42.470252Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/result_sets" 2025-05-29T15:28:42.470253Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/result_sets" 2025-05-29T15:28:42.470340Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/tenants". Create session OK 2025-05-29T15:28:42.470349Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/tenants" 2025-05-29T15:28:42.470351Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/tenants" 2025-05-29T15:28:42.470379Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/pending_small". 
Create session OK 2025-05-29T15:28:42.470386Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/pending_small" 2025-05-29T15:28:42.470387Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/pending_small" 2025-05-29T15:28:42.470610Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created directory "Root/yq" 2025-05-29T15:28:42.470616Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/quotas". Create session OK 2025-05-29T15:28:42.470618Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/quotas" 2025-05-29T15:28:42.470620Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/quotas" 2025-05-29T15:28:42.470620Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create directory "Root/yq": 2025-05-29T15:28:42.470717Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/compute_databases". Create session OK 2025-05-29T15:28:42.470724Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/compute_databases" 2025-05-29T15:28:42.470725Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/compute_databases" 2025-05-29T15:28:42.471891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:28:42.472268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:28:42.472478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:28:42.472704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:42.473057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:42.473378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:42.473591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:42.473781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:42.473967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715670:0, at schemeshard: 7205759 ... , CA Id [7:7509890059766572238:2722]. new data for read #0 seqno = 1 finished = 1 2025-05-29T15:28:56.425790Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [7:7509890059766572238:2722], TxId: 281474976715712, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZWE2NjRiZjItNTRiZDdlNjAtZGY2NGIyNzctZTAxNmI0MWM=. CustomerSuppliedId : . TraceId : 01jweaj9386ays9gffp7szcshd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2025-05-29T15:28:56.425794Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [7:7509890059766572238:2722], TxId: 281474976715712, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZWE2NjRiZjItNTRiZDdlNjAtZGY2NGIyNzctZTAxNmI0MWM=. CustomerSuppliedId : . TraceId : 01jweaj9386ays9gffp7szcshd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-05-29T15:28:56.425797Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1332: TxId: 281474976715712, task: 1, CA Id [7:7509890059766572238:2722]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-05-29T15:28:56.425800Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1228: TxId: 281474976715712, task: 1, CA Id [7:7509890059766572238:2722]. enter pack cells method shardId: 72075186224037895 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-05-29T15:28:56.425803Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1309: TxId: 281474976715712, task: 1, CA Id [7:7509890059766572238:2722]. exit pack cells method shardId: 72075186224037895 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-05-29T15:28:56.425806Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1366: TxId: 281474976715712, task: 1, CA Id [7:7509890059766572238:2722]. returned 0 rows; processed 0 rows 2025-05-29T15:28:56.425819Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1403: TxId: 281474976715712, task: 1, CA Id [7:7509890059766572238:2722]. dropping batch for read #0 2025-05-29T15:28:56.425820Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:459: TxId: 281474976715712, task: 1, CA Id [7:7509890059766572238:2722]. effective maxinflight 1024 sorted 0 2025-05-29T15:28:56.425822Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:481: TxId: 281474976715712, task: 1, CA Id [7:7509890059766572238:2722]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-05-29T15:28:56.425825Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1428: TxId: 281474976715712, task: 1, CA Id [7:7509890059766572238:2722]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-05-29T15:28:56.425841Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [7:7509890059766572238:2722], TxId: 281474976715712, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZWE2NjRiZjItNTRiZDdlNjAtZGY2NGIyNzctZTAxNmI0MWM=. CustomerSuppliedId : . TraceId : 01jweaj9386ays9gffp7szcshd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-05-29T15:28:56.425844Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [7:7509890059766572240:2723], TxId: 281474976715712, task: 2. Ctx: { TraceId : 01jweaj9386ays9gffp7szcshd. SessionId : ydb://session/3?node_id=7&id=ZWE2NjRiZjItNTRiZDdlNjAtZGY2NGIyNzctZTAxNmI0MWM=. CustomerSuppliedId : . 
CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-05-29T15:28:56.425851Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715712, task: 2. Finish input channelId: 1, from: [7:7509890059766572238:2722] 2025-05-29T15:28:56.425858Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [7:7509890059766572238:2722], TxId: 281474976715712, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZWE2NjRiZjItNTRiZDdlNjAtZGY2NGIyNzctZTAxNmI0MWM=. CustomerSuppliedId : . TraceId : 01jweaj9386ays9gffp7szcshd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-05-29T15:28:56.425860Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [7:7509890059766572240:2723], TxId: 281474976715712, task: 2. Ctx: { TraceId : 01jweaj9386ays9gffp7szcshd. SessionId : ydb://session/3?node_id=7&id=ZWE2NjRiZjItNTRiZDdlNjAtZGY2NGIyNzctZTAxNmI0MWM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-05-29T15:28:56.425864Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [7:7509890059766572238:2722], TxId: 281474976715712, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZWE2NjRiZjItNTRiZDdlNjAtZGY2NGIyNzctZTAxNmI0MWM=. CustomerSuppliedId : . TraceId : 01jweaj9386ays9gffp7szcshd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-05-29T15:28:56.425867Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:675: TxId: 281474976715712, task: 1. Tasks execution finished 2025-05-29T15:28:56.425870Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [7:7509890059766572238:2722], TxId: 281474976715712, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZWE2NjRiZjItNTRiZDdlNjAtZGY2NGIyNzctZTAxNmI0MWM=. CustomerSuppliedId : . TraceId : 01jweaj9386ays9gffp7szcshd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-05-29T15:28:56.425874Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [7:7509890059766572240:2723], TxId: 281474976715712, task: 2. Ctx: { TraceId : 01jweaj9386ays9gffp7szcshd. SessionId : ydb://session/3?node_id=7&id=ZWE2NjRiZjItNTRiZDdlNjAtZGY2NGIyNzctZTAxNmI0MWM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-05-29T15:28:56.425887Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:148: SelfId: [7:7509890059766572240:2723], TxId: 281474976715712, task: 2. Ctx: { TraceId : 01jweaj9386ays9gffp7szcshd. SessionId : ydb://session/3?node_id=7&id=ZWE2NjRiZjItNTRiZDdlNjAtZGY2NGIyNzctZTAxNmI0MWM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-05-29T15:28:56.425903Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:658: TxId: 281474976715712, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-05-29T15:28:56.425903Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715712, task: 1. pass away 2025-05-29T15:28:56.425905Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:675: TxId: 281474976715712, task: 2. 
Tasks execution finished 2025-05-29T15:28:56.425907Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [7:7509890059766572240:2723], TxId: 281474976715712, task: 2. Ctx: { TraceId : 01jweaj9386ays9gffp7szcshd. SessionId : ydb://session/3?node_id=7&id=ZWE2NjRiZjItNTRiZDdlNjAtZGY2NGIyNzctZTAxNmI0MWM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-05-29T15:28:56.425922Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715712, task: 2. pass away 2025-05-29T15:28:56.425932Z node 7 :KQP_COMPUTE DEBUG: log.cpp:784: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715712;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-29T15:28:56.425945Z node 7 :KQP_COMPUTE DEBUG: log.cpp:784: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715712;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-29T15:28:56.438918Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [7:7509890059766572245:2725], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:56.439085Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=7&id=ZWE2NjRiZjItNTRiZDdlNjAtZGY2NGIyNzctZTAxNmI0MWM=, ActorId: [7:7509890055471604814:2692], ActorState: ExecuteState, TraceId: 01jweaj93bd7vejt4a72abddzd, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweaj9381bw9dgzp0trb2m16 2025-05-29T15:28:56.439332Z node 7 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976715713. Ctx: { TraceId: 01jweaj93bd7vejt4a72abddzd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZWE2NjRiZjItNTRiZDdlNjAtZGY2NGIyNzctZTAxNmI0MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:28:56.440033Z node 7 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:599: DB Error, Status: INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, Query: --!syntax_v1 -- Query name: NodesHealthCheck(write) PRAGMA TablePathPrefix("Root/yq"); DECLARE $tenant as String; DECLARE $node_id as Uint32; DECLARE $instance_id as String; DECLARE $hostname as String; DECLARE $deadline as Timestamp; DECLARE $active_workers as Uint64; DECLARE $memory_limit as Uint64; DECLARE $memory_allocated as Uint64; DECLARE $ic_port as Uint32; DECLARE $node_address as String; DECLARE $data_center as String; UPSERT INTO `nodes` (`tenant`, `node_id`, `instance_id`, `hostname`, `expire_at`, `active_workers`, `memory_limit`, `memory_allocated`, `interconnect_port`, `node_address`, `data_center`) VALUES ($tenant ,$node_id, $instance_id, $hostname, $deadline, $active_workers, $memory_limit, $memory_allocated, $ic_port, $node_address, $data_center); 2025-05-29T15:28:56.440179Z node 7 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage_impl.h:770: NodesHealthCheckRequest - NodesHealthCheckResult: {tenant: "Tenant" node { node_id: 100500 instance_id: "60cf355f-b96f2f33-ffa50b6-afb44189" hostname: "hostname" } } ERROR: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } } 2025-05-29T15:28:56.440281Z node 7 :YQL_NODES_MANAGER ERROR: nodes_health_check.cpp:65: Failed with code: INTERNAL_ERROR Details:
: Error: Can't do NodesHealthCheck: (yexception) ydb/core/fq/libs/actors/nodes_health_check.cpp:95:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 assertion failed at ydb/services/fq/ut_integration/fq_ut.cpp:828, virtual void NTestSuitePrivateApi::TTestCaseNodes::Execute_(NUnitTest::TTestContext &): (result) the execution of the query did not end within the time limit TBackTrace::Capture()+28 (0x139D25FC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B861F9) NTestSuitePrivateApi::TTestCaseNodes::Execute_(NUnitTest::TTestContext&)+2099 (0x138B8763) NTestSuitePrivateApi::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x138C42D7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B880AE) NTestSuitePrivateApi::TCurrentTest::Execute()+428 (0x138C3C9C) NUnitTest::TTestFactory::Execute()+803 (0x13B88823) NUnitTest::RunMain(int, char**)+3021 (0x13B9A3CD) ??+0 (0x7FE37197CD90) __libc_start_main+128 (0x7FE37197CE40) _start+41 (0x129E1029) >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries [GOOD] >> BackupRestoreS3::RestoreViewQueryText >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-05-29T15:28:59.171418Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890075213317793:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:59.171464Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002496/r3tmp/tmpUoh409/pdisk_1.dat 2025-05-29T15:28:59.240031Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:13482 WaitRootIsUp 'dc-1'... 
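The PrivateApi::Nodes [FAIL] block above has a single root cause threaded through several components: the KQP compile actor fails with INTERNAL_ERROR ("yql/essentials/ast/yql_expr.h:1874: index out of range") while compiling the NodesHealthCheck(write) statement, control-plane storage surfaces the same issue as a DB Error, the nodes manager's health check then fails, and the test finally trips its deadline assertion at fq_ut.cpp:828 ("the execution of the query did not end within the time limit"). For readability, here is the statement quoted inline in the DB Error record, reflowed into a hypothetical C++ string constant (the constant name is illustrative; the production code assembles the query elsewhere in ydb/core/fq/libs/control_plane_storage):

    // The NodesHealthCheck(write) text exactly as quoted in the log,
    // only reflowed for readability.
    static const char NodesHealthCheckQuery[] = R"sql(
        --!syntax_v1
        -- Query name: NodesHealthCheck(write)
        PRAGMA TablePathPrefix("Root/yq");
        DECLARE $tenant as String;
        DECLARE $node_id as Uint32;
        DECLARE $instance_id as String;
        DECLARE $hostname as String;
        DECLARE $deadline as Timestamp;
        DECLARE $active_workers as Uint64;
        DECLARE $memory_limit as Uint64;
        DECLARE $memory_allocated as Uint64;
        DECLARE $ic_port as Uint32;
        DECLARE $node_address as String;
        DECLARE $data_center as String;
        UPSERT INTO `nodes` (`tenant`, `node_id`, `instance_id`, `hostname`,
                             `expire_at`, `active_workers`, `memory_limit`,
                             `memory_allocated`, `interconnect_port`,
                             `node_address`, `data_center`)
        VALUES ($tenant, $node_id, $instance_id, $hostname, $deadline,
                $active_workers, $memory_limit, $memory_allocated,
                $ic_port, $node_address, $data_center);
    )sql";

Nothing in the statement itself is obviously malformed, which is consistent with the error pointing into the expression AST (yql_expr.h) rather than the parser: the failure happens during compilation, not in the SQL text, so every retry of the health check hits the same INTERNAL_ERROR until the test times out.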
TClient::Ls request: dc-1 2025-05-29T15:28:59.266663Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509890075213318018:2139] Handle TEvNavigate describe path dc-1 2025-05-29T15:28:59.268004Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509890075213318430:2420] HANDLE EvNavigateScheme dc-1 2025-05-29T15:28:59.268073Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7509890075213318044:2152], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:28:59.268097Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:7509890075213318044:2152], path# /dc-1, domainOwnerId# 72057594046644480 2025-05-29T15:28:59.268148Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:7509890075213318431:2421][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:28:59.268488Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890075213317662:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890075213318435:2421] 2025-05-29T15:28:59.268509Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890075213317662:2050] Subscribe: subscriber# [1:7509890075213318435:2421], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:28:59.268508Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890075213317668:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890075213318437:2421] 2025-05-29T15:28:59.268523Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890075213317665:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890075213318436:2421] 2025-05-29T15:28:59.268526Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890075213317665:2053] Subscribe: subscriber# [1:7509890075213318436:2421], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:28:59.268526Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890075213317668:2056] Subscribe: subscriber# [1:7509890075213318437:2421], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:28:59.268535Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890075213318435:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890075213317662:2050] 2025-05-29T15:28:59.268538Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890075213318436:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890075213317665:2053] 2025-05-29T15:28:59.268543Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890075213317662:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890075213318435:2421] 2025-05-29T15:28:59.268548Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890075213318437:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: 
[OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890075213317668:2056] 2025-05-29T15:28:59.268549Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890075213317665:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890075213318436:2421] 2025-05-29T15:28:59.268554Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890075213317668:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890075213318437:2421] 2025-05-29T15:28:59.268554Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890075213318431:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890075213318432:2421] 2025-05-29T15:28:59.268559Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890075213318431:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890075213318433:2421] 2025-05-29T15:28:59.268568Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:7509890075213318431:2421][/dc-1] Set up state: owner# [1:7509890075213318044:2152], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:28:59.268614Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890075213318431:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890075213318434:2421] 2025-05-29T15:28:59.268625Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:7509890075213318431:2421][/dc-1] Path was already updated: owner# [1:7509890075213318044:2152], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:28:59.268631Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890075213318435:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890075213318432:2421], cookie# 1 2025-05-29T15:28:59.268633Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890075213318436:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890075213318433:2421], cookie# 1 2025-05-29T15:28:59.268635Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890075213318437:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890075213318434:2421], cookie# 1 2025-05-29T15:28:59.268639Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890075213317662:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890075213318435:2421], cookie# 1 2025-05-29T15:28:59.268651Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890075213317665:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890075213318436:2421], cookie# 1 
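The records above replay, for this test, the subscriber bootstrap seen earlier: the subscriber resolves the replica list, sends TEvSubscribe to each of the three replicas, each replica answers with TEvNotify carrying the current (PathId, Version), the subscriber acks with TEvNotifyAck, and the main subscriber records its state on the first notifies ("Set up state") while an identical later notify is logged as "Path was already updated". A compact sketch of that folding step, with hypothetical types (the real handling is around subscriber.cpp:807/836/854):

    #include <optional>

    // Hypothetical shape of the version a TEvNotify carries.
    struct TPathVersion {
        unsigned long long OwnerId = 0;
        unsigned long long LocalPathId = 0;
        unsigned long long Version = 0;
        // C++20 defaulted comparison.
        bool operator==(const TPathVersion&) const = default;
    };

    // Minimal model of the main subscriber's notify handling.
    struct TMainSubscriber {
        std::optional<TPathVersion> State;

        // Returns true when the notify changed the recorded state.
        bool HandleNotify(const TPathVersion& v) {
            if (!State) {        // first notify: "Set up state: ..."
                State = v;
                return true;
            }
            if (*State == v) {   // "Path was already updated"
                return false;
            }
            State = v;           // a newer version supersedes the old
            return true;
        }
    };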
2025-05-29T15:28:59.268658Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890075213317668:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890075213318437:2421], cookie# 1 2025-05-29T15:28:59.268668Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890075213318435:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890075213317662:2050], cookie# 1 2025-05-29T15:28:59.268670Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890075213318436:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890075213317665:2053], cookie# 1 2025-05-29T15:28:59.268672Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890075213318437:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890075213317668:2056], cookie# 1 2025-05-29T15:28:59.268677Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890075213318431:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890075213318432:2421], cookie# 1 2025-05-29T15:28:59.268681Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:7509890075213318431:2421][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-05-29T15:28:59.268684Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890075213318431:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890075213318433:2421], cookie# 1 2025-05-29T15:28:59.268687Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:7509890075213318431:2421][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-05-29T15:28:59.268690Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890075213318431:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890075213318434:2421], cookie# 1 2025-05-29T15:28:59.268691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:7509890075213318431:2421][/dc-1] Unexpected sync response: sender# [1:7509890075213318434:2421], cookie# 1 2025-05-29T15:28:59.270323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:59.270350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:59.272094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:59.279828Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:7509890075213318044:2152], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 
SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsL ... CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [3:7509890077111746216:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72075186224037888, LocalPathId: 1] Strong: 1 } 2025-05-29T15:29:00.538506Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [3:7509890077111746216:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72075186224037888, LocalPathId: 1] Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7509890077111746890:2605] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 8 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [3:7509890077111747259:2893] DomainOwnerId: 72057594046644480 Type: 1 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72075186224037888, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2025-05-29T15:29:00.538510Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2487: ResolveCacheItemForNotify: subdomain case: self# [3:7509890077111746216:2126], path# /dc-1/USER_0, pathId# [OwnerId: 72075186224037888, LocalPathId: 1], byPath# { Subscriber: { Subscriber: [3:7509890077111746890:2605] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 8 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, byPathId# { Subscriber: { Subscriber: [3:7509890077111747259:2893] DomainOwnerId: 72057594046644480 Type: 1 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72075186224037888, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2025-05-29T15:29:00.538512Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2563: ResolveCacheItemForNotify: path has gone, update only by pathId: self# [3:7509890077111746216:2126], path# /dc-1/USER_0, pathId# [OwnerId: 72075186224037888, LocalPathId: 1] 2025-05-29T15:29:00.538958Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [3:7509890077111745881:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [4:7509890077932852165:2282] 2025-05-29T15:29:00.538964Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [3:7509890077111745887:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [4:7509890077932852167:2282] 2025-05-29T15:29:00.538970Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [3:7509890077111745884:2053] Handle 
NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [4:7509890077932852166:2282] 2025-05-29T15:29:00.539200Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:29:00.539209Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:29:00.539211Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:29:00.539212Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:29:00.539214Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:29:00.539234Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 2025-05-29T15:29:00.539240Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 2025-05-29T15:29:00.539250Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.539259Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046644480] TDone opId# 281474976715662:0 ProgressState 2025-05-29T15:29:00.539275Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715662:0 progress is 1/1 2025-05-29T15:29:00.539280Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-05-29T15:29:00.539283Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715662:0 progress is 1/1 2025-05-29T15:29:00.539284Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-05-29T15:29:00.539287Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 1/1, is published: true 2025-05-29T15:29:00.539297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:7509890077111747177:2310] message: TxId: 281474976715662 2025-05-29T15:29:00.539305Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-05-29T15:29:00.539313Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715662:0 2025-05-29T15:29:00.539314Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715662:0 2025-05-29T15:29:00.539361Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 7 2025-05-29T15:29:00.540871Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-05-29T15:29:00.540945Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-05-29T15:29:00.540996Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-29T15:29:00.541023Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-05-29T15:29:00.541041Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-29T15:29:00.541058Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-05-29T15:29:00.541073Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-29T15:29:00.541089Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-05-29T15:29:00.541102Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-29T15:29:00.541118Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-29T15:29:00.541139Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:29:00.541141Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-05-29T15:29:00.541171Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-29T15:29:00.541203Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:29:00.541205Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-05-29T15:29:00.541218Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-29T15:29:00.542058Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 
72057594046644480:5 2025-05-29T15:29:00.542069Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-05-29T15:29:00.542089Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:2 2025-05-29T15:29:00.542090Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-29T15:29:00.542094Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:4 2025-05-29T15:29:00.542096Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-29T15:29:00.542099Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:1 2025-05-29T15:29:00.542101Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-29T15:29:00.542104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:3 2025-05-29T15:29:00.542106Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-29T15:29:00.542113Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-29T15:29:00.542118Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> KqpOlapScheme::AddColumnWithTtl >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] >> TDSProxyFaultTolerancePatchTest::mirror3dc [GOOD] >> TDSProxyPatchTest::SecuredErrorOnGet_ErasureNone |71.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |71.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSysView [GOOD] >> EncryptedBackupParamsValidationTest::BadSourcePath >> TDSProxyPatchTest::SecuredErrorOnGet_ErasureNone [GOOD] >> TDSProxyPatchTest::MovedOk_Erasure4Plus2Block >> BackupRestore::BackupUuid [FAIL] >> TDataShardLocksTest::UseLocksCache [FAIL] >> BackupRestore::RestoreViewQueryText ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-05-29T15:28:59.903514Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890072203219250:2078];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:59.903552Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002454/r3tmp/tmp65iq5u/pdisk_1.dat 2025-05-29T15:29:00.002695Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:00.005210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:00.005237Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:00.011867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1864 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-05-29T15:29:00.042358Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509890072203219467:2139] Handle TEvNavigate describe path dc-1 2025-05-29T15:29:00.043733Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509890076498187186:2429] HANDLE EvNavigateScheme dc-1 2025-05-29T15:29:00.043799Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7509890072203219491:2153], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:00.043819Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:7509890072203219491:2153], path# /dc-1, domainOwnerId# 72057594046644480 2025-05-29T15:29:00.043877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:7509890076498187187:2430][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:29:00.044245Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890072203219111:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890076498187191:2430] 2025-05-29T15:29:00.044266Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890072203219111:2050] Subscribe: subscriber# [1:7509890076498187191:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:00.044284Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890072203219114:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890076498187192:2430] 2025-05-29T15:29:00.044289Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890072203219114:2053] Subscribe: subscriber# [1:7509890076498187192:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:00.044294Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890072203219117:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890076498187193:2430] 2025-05-29T15:29:00.044296Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890072203219117:2056] 
Subscribe: subscriber# [1:7509890076498187193:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:00.044305Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890076498187191:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890072203219111:2050] 2025-05-29T15:29:00.044316Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890076498187192:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890072203219114:2053] 2025-05-29T15:29:00.044321Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890076498187193:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890072203219117:2056] 2025-05-29T15:29:00.044328Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890076498187187:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890076498187188:2430] 2025-05-29T15:29:00.044338Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890076498187187:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890076498187189:2430] 2025-05-29T15:29:00.044352Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:7509890076498187187:2430][/dc-1] Set up state: owner# [1:7509890072203219491:2153], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:00.044380Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890076498187187:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890076498187190:2430] 2025-05-29T15:29:00.044388Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:7509890076498187187:2430][/dc-1] Path was already updated: owner# [1:7509890072203219491:2153], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:00.044394Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890076498187191:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076498187188:2430], cookie# 1 2025-05-29T15:29:00.044405Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890076498187192:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076498187189:2430], cookie# 1 2025-05-29T15:29:00.044410Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890076498187193:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7509890076498187190:2430], cookie# 1 2025-05-29T15:29:00.044414Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890072203219111:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890076498187191:2430] 2025-05-29T15:29:00.044417Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890072203219111:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076498187191:2430], cookie# 1 2025-05-29T15:29:00.044420Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890072203219114:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890076498187192:2430] 2025-05-29T15:29:00.044422Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890072203219114:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076498187192:2430], cookie# 1 2025-05-29T15:29:00.044425Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890072203219117:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890076498187193:2430] 2025-05-29T15:29:00.044431Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890072203219117:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890076498187193:2430], cookie# 1 2025-05-29T15:29:00.051474Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890076498187191:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890072203219111:2050], cookie# 1 2025-05-29T15:29:00.051492Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890076498187192:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890072203219114:2053], cookie# 1 2025-05-29T15:29:00.051495Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890076498187193:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890072203219117:2056], cookie# 1 2025-05-29T15:29:00.051504Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890076498187187:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890076498187188:2430], cookie# 1 2025-05-29T15:29:00.051513Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:7509890076498187187:2430][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-05-29T15:29:00.051518Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890076498187187:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890076498187189:2430], cookie# 1 2025-05-29T15:29:00.051523Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:7509890076498187187:2430][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-05-29T15:29:00.051527Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890076498187187:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890076498187190:2430], cookie# 1 2025-05-29T15:29:00.051530Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:7509890076498187187:2430][/dc-1] Unexpected sync response: sender# [1:7509890076498187190:2430], cookie# 1 2025-05-29T15:29:00.051616Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# 
[1:7509890072203219467:2139] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:29:00.053352Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:29:00.058501Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:7509890072203219491:2153], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 7205759404638208 ... 046644480, LocalPathId: 2] was 4 2025-05-29T15:29:01.266114Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: INVALID_OWNER Origin: 72075186224037888 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1 ForwardRequest { HiveTabletId: 72057594037968897 }, at schemeshard: 72057594046644480 2025-05-29T15:29:01.266118Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:39: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-05-29T15:29:01.266120Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_shard_deleter.cpp:100: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2025-05-29T15:29:01.266125Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_shard_deleter.cpp:74: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2025-05-29T15:29:01.266133Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-29T15:29:01.266143Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72075186224037888 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-29T15:29:01.266160Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532540979 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePo... (TRUNCATED) 2025-05-29T15:29:01.267614Z node 4 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2025-05-29T15:29:01.267626Z node 4 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-05-29T15:29:01.267629Z node 4 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found 2025-05-29T15:29:01.267632Z node 4 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037892 not found 2025-05-29T15:29:01.267635Z node 4 :HIVE WARN: hive_impl.cpp:491: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037893 not found 2025-05-29T15:29:01.271210Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:5 2025-05-29T15:29:01.271224Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-05-29T15:29:01.271237Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:2 2025-05-29T15:29:01.271238Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037893 2025-05-29T15:29:01.271242Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:4 2025-05-29T15:29:01.271244Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037890 2025-05-29T15:29:01.271248Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:6 2025-05-29T15:29:01.271248Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037889 2025-05-29T15:29:01.271266Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:3 2025-05-29T15:29:01.271269Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037891 2025-05-29T15:29:01.271292Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-29T15:29:01.271367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-29T15:29:01.271420Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:29:01.271422Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 
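The teardown records above follow one pattern worth decoding: every shard freed through Hive decrements the dropped path's database reference count ("was 4", "was 3", ...), and only when the count drains do TTxCleanDroppedSubDomains and TTxCleanDroppedPaths persist the removal. A minimal sketch of that refcount-gated cleanup, with hypothetical names rather than the schemeshard's actual classes:

#include <cstdint>
#include <iostream>
#include <map>

// Hypothetical stand-in for the schemeshard's per-path bookkeeping:
// a dropped subdomain stays in the local DB until every reference
// (one per live shard, plus transaction state) has been released.
struct TPathState {
    uint64_t DbRefCount = 0;
    bool Dropped = false;
};

class TCleanupQueue {
    std::map<uint64_t, TPathState> Paths; // LocalPathId -> state
public:
    void AddRef(uint64_t pathId) { ++Paths[pathId].DbRefCount; }
    void MarkDropped(uint64_t pathId) { Paths[pathId].Dropped = true; }

    // Mirrors the "DecrementPathDbRefCount reason shard deleted" records:
    // each freed shard releases one reference, and the final release makes
    // the path a candidate for the persist-removal transaction.
    void DecRef(uint64_t pathId, const char* reason) {
        auto& st = Paths[pathId];
        std::cout << "DecrementPathDbRefCount reason " << reason
                  << " was " << st.DbRefCount << "\n";
        if (--st.DbRefCount == 0 && st.Dropped) {
            std::cout << "PersistRemovePath for PathId# " << pathId << "\n";
            Paths.erase(pathId);
        }
    }
};

int main() {
    TCleanupQueue q;
    q.MarkDropped(2);                        // subdomain LocalPathId 2 is dropped
    for (int i = 0; i < 5; ++i) q.AddRef(2); // five shards still reference it
    for (int i = 0; i < 5; ++i) q.DecRef(2, "shard deleted");
}

The counts in the log start higher because transaction state also holds references, but the shape is the same: removal is driven to completion by the last DecRef, not by the drop operation itself.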
2025-05-29T15:29:01.271449Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-29T15:29:01.271473Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-29T15:29:01.271475Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-05-29T15:29:01.271485Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-29T15:29:01.272460Z node 3 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2025-05-29T15:29:01.273191Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046644480:1 2025-05-29T15:29:01.273204Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-29T15:29:01.273222Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:245: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-29T15:29:01.273229Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-29T15:29:01.276313Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [3:7509890078469237079:2050] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [4:7509890075706068787:2102] 2025-05-29T15:29:01.276326Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:662: [3:7509890078469237079:2050] Unsubscribe: subscriber# [4:7509890075706068787:2102], path# /dc-1/USER_0 2025-05-29T15:29:01.276331Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [3:7509890078469237082:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [4:7509890075706068788:2102] 2025-05-29T15:29:01.276333Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:662: [3:7509890078469237082:2053] Unsubscribe: subscriber# [4:7509890075706068788:2102], path# /dc-1/USER_0 2025-05-29T15:29:01.276336Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1075: [3:7509890078469237085:2056] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [4:7509890075706068789:2102] 2025-05-29T15:29:01.276338Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:662: [3:7509890078469237085:2056] Unsubscribe: subscriber# [4:7509890075706068789:2102], path# /dc-1/USER_0 2025-05-29T15:29:01.276382Z node 3 :HIVE WARN: tx__status.cpp:57: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2025-05-29T15:29:01.276414Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-29T15:29:01.357545Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7509890075706068790:2103], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:01.357606Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [4:7509890080001036882:2482], recipient# [4:7509890080001036877:2350], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:01.357894Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7509890075706068790:2103], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:01.357922Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [4:7509890080001036887:2483], recipient# [4:7509890080001036886:2356], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> BackupRestore::TestAllPrimitiveTypes-UINT8 [FAIL] >> BackupRestore::TestAllPrimitiveTypes-UINT16 >> KqpScheme::SchemaVersionMissmatchWithWrite >> TDSProxyPatchTest::MovedOk_Erasure4Plus2Block [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-UTF8 [FAIL] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_1_0_0_VdiskErrors >> BackupRestoreS3::TestAllPrimitiveTypes-YSON >> KqpScheme::AddColumnFamilyWithCompressionLevel >> KqpScheme::CreateDroppedTable >> KqpScheme::InvalidationAfterDropCreate >> KqpScheme::DropChangefeedNegative >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_1_0_0_VdiskErrors [GOOD] >> KqpScheme::DropKeyColumn >> KqpScheme::CreateFamilyWithCompressionLevel >> EncryptedBackupParamsValidationTest::BadSourcePath [GOOD] >> KqpScheme::DisableExternalDataSourcesOnServerless [GOOD] >> KqpScheme::DisableResourcePools >> BsControllerConfig::MoveGroups [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_1_0_0_VdiskErrors [GOOD] Test command err: 2025-05-29T15:29:02.234057Z 
node 11 :BS_PROXY_PUT INFO: dsproxy_put.cpp:645: [7e4afa7ea38a37be] bootstrap ActorId# [11:82:2128] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-05-29T15:29:02.234172Z node 11 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-05-29T15:29:02.234180Z node 11 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-05-29T15:29:02.234187Z node 11 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:29:02.234192Z node 11 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:29:02.234197Z node 11 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-05-29T15:29:02.234202Z node 11 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-05-29T15:29:02.238262Z node 11 :BS_PROXY_PUT INFO: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2025-05-29T15:29:02.238317Z node 11 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-05-29T15:29:02.238324Z node 11 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-05-29T15:29:02.238393Z node 11 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2025-05-29T15:29:02.238412Z node 11 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2025-05-29T15:29:02.238455Z node 11 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:260: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2025-05-29T15:29:02.238484Z node 11 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-05-29T15:29:02.238494Z node 11 :BS_PROXY_PUT INFO: dsproxy_put.cpp:486: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 
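The put trace above is a compact illustration of mirror-3dc fault tolerance: one part goes to a VDisk in each of the three data-center rings, the ERROR from [0:1:0:1:0] causes only that part to be re-sent to the next fail domain of the same ring ([0:1:0:2:0]), and the blob still lands with Status# OK. A simplified sketch of that fallback rule, assuming a 3x3 ring/fail-domain layout and a toy health table (this is not the real dsproxy strategy code):

#include <cstdio>

// Simplified mirror-3dc layout: 3 rings (data centers), each with a few
// fail domains; exactly one replica of the blob must land in every ring.
constexpr int Rings = 3;
constexpr int DomainsPerRing = 3;

// Hypothetical health table standing in for real TEvVPutResult replies.
// Pretend the first disk of ring 0 answers ERROR, as in the trace above.
bool PutToVDisk(int ring, int domain) {
    return !(ring == 0 && domain == 1);
}

// Place one part per ring; on ERROR retry the same part on the next fail
// domain of the same ring, never crossing into another data center.
bool PutMirror3dc() {
    for (int ring = 0; ring < Rings; ++ring) {
        bool stored = false;
        for (int domain = 1; domain <= DomainsPerRing && !stored; ++domain) {
            stored = PutToVDisk(ring, domain);
            std::printf("part -> vdisk [0:1:%d:%d:0] %s\n",
                        ring, domain, stored ? "OK" : "ERROR, retrying");
        }
        if (!stored) return false; // a whole ring exhausted: the put fails
    }
    return true; // every ring holds a replica: TEvPutResult reports OK
}

int main() {
    std::printf("put %s\n", PutMirror3dc() ? "OK" : "FAILED");
}

This is why the test name advertises "_1_0_0_VdiskErrors": a single error in one ring is absorbed by an intra-ring retry without degrading the overall result.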
2025-05-29T15:29:02.238537Z node 11 :BS_PROXY_PUT NOTICE: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.416 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:1:0] NodeId# 11 } TEvVPut{ TimestampMs# 0.417 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:2:1:0] NodeId# 11 } TEvVPut{ TimestampMs# 0.417 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 11 } TEvVPutResult{ TimestampMs# 4.48 VDiskId# [0:1:0:1:0] NodeId# 11 Status# ERROR } TEvVPut{ TimestampMs# 4.531 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 11 } TEvVPutResult{ TimestampMs# 4.582 VDiskId# [0:1:1:1:0] NodeId# 11 Status# OK } TEvVPutResult{ TimestampMs# 4.6 VDiskId# [0:1:2:1:0] NodeId# 11 Status# OK } TEvVPutResult{ TimestampMs# 4.644 VDiskId# [0:1:0:2:0] NodeId# 11 Status# OK } ] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MoveGroups [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3015:2106] recipient: [1:2886:2116] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3015:2106] recipient: [1:2886:2116] Leader for TabletID 72057594037932033 is [1:3062:2118] sender: [1:3065:2106] recipient: [1:2886:2116] 2025-05-29T15:28:28.921628Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:28:28.922376Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:28:28.922435Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:28:28.922550Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:28:28.923003Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:28:28.923088Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:28:28.923093Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:28:28.923144Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:28:28.923966Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:28:28.923989Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:28:28.924014Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:28:28.924034Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:28:28.924044Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:28:28.924052Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3062:2118] sender: 
[1:3088:2106] recipient: [1:60:2107] 2025-05-29T15:28:28.934521Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:28:28.934562Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:28:28.944901Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:28:28.944960Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:28:28.944989Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:28:28.945004Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:28:28.945030Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:28:28.945040Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:28:28.945046Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:28:28.945056Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:28:28.955377Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:28:28.955429Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:28:28.965728Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:28:28.965790Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:28:28.965970Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:28:28.965975Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:28:28.966009Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:28:28.966027Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:28:28.967865Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 
12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 150 PDiskFilter { Property { Type: ROT } } } } } 2025-05-29T15:28:28.968107Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-05-29T15:28:28.968114Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-05-29T15:28:28.968117Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 
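Everything that follows is mechanical expansion of the request replayed above: DefineHostConfig declares the same three drives for every host, DefineBox enumerates 50 hosts sharing that config, and DefineStoragePool asks for 150 block-4-2 groups restricted to ROT drives, so fitting creates one pdisk per (host, drive) pair. A rough sketch of that expansion, with hypothetical types rather than the controller's actual protobuf API:

#include <cstdio>
#include <string>
#include <vector>

// Hypothetical mirror of the DefineHostConfig / DefineBox payload:
// a single drive list shared by every host registered in the box.
struct THostConfig {
    std::vector<std::string> Drives;
};
struct TBox {
    int NumHosts;
    THostConfig Config;
};

// Fitting a box materializes one pdisk per (host, drive) pair, numbered
// like the "Create new pdisk PDiskId# <node>:<1000+i>" records above.
void FitPDisks(const TBox& box) {
    for (int node = 1; node <= box.NumHosts; ++node) {
        for (size_t d = 0; d < box.Config.Drives.size(); ++d) {
            std::printf("Create new pdisk PDiskId# %d:%zu Path# %s\n",
                        node, 1000 + d, box.Config.Drives[d].c_str());
        }
    }
}

int main() {
    TBox box{50, {{"/dev/disk1", "/dev/disk2", "/dev/disk3"}}};
    FitPDisks(box); // 50 hosts x 3 drives = 150 pdisks, matching the log run
}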
2025-05-29T15:28:28.968120Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-05-29T15:28:28.968123Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-05-29T15:28:28.968127Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-05-29T15:28:28.968130Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-05-29T15:28:28.968134Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-05-29T15:28:28.968137Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-05-29T15:28:28.968140Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-05-29T15:28:28.968143Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-05-29T15:28:28.968146Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-05-29T15:28:28.968150Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-05-29T15:28:28.968154Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-05-29T15:28:28.968159Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-05-29T15:28:28.968163Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-05-29T15:28:28.968171Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-05-29T15:28:28.968176Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-05-29T15:28:28.968180Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-05-29T15:28:28.968185Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-05-29T15:28:28.968190Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:3 ... 
sks.cpp:340} Create new pdisk PDiskId# 177:1001 Path# /dev/disk2 2025-05-29T15:28:53.970213Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 177:1002 Path# /dev/disk3 2025-05-29T15:28:53.970218Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 178:1000 Path# /dev/disk1 2025-05-29T15:28:53.970222Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 178:1001 Path# /dev/disk2 2025-05-29T15:28:53.970227Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 178:1002 Path# /dev/disk3 2025-05-29T15:28:53.970233Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 179:1000 Path# /dev/disk1 2025-05-29T15:28:53.970238Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 179:1001 Path# /dev/disk2 2025-05-29T15:28:53.970243Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 179:1002 Path# /dev/disk3 2025-05-29T15:28:53.970248Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 180:1000 Path# /dev/disk1 2025-05-29T15:28:53.970253Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 180:1001 Path# /dev/disk2 2025-05-29T15:28:53.970257Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 180:1002 Path# /dev/disk3 2025-05-29T15:28:53.970262Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 181:1000 Path# /dev/disk1 2025-05-29T15:28:53.970266Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 181:1001 Path# /dev/disk2 2025-05-29T15:28:53.970271Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 181:1002 Path# /dev/disk3 2025-05-29T15:28:53.970275Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 182:1000 Path# /dev/disk1 2025-05-29T15:28:53.970282Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 182:1001 Path# /dev/disk2 2025-05-29T15:28:53.970287Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 182:1002 Path# /dev/disk3 2025-05-29T15:28:53.970292Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 183:1000 Path# /dev/disk1 2025-05-29T15:28:53.970297Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 183:1001 Path# /dev/disk2 2025-05-29T15:28:53.970302Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 183:1002 Path# /dev/disk3 2025-05-29T15:28:53.970307Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 184:1000 Path# /dev/disk1 2025-05-29T15:28:53.970312Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 184:1001 Path# /dev/disk2 2025-05-29T15:28:53.970316Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 184:1002 Path# /dev/disk3 2025-05-29T15:28:53.970321Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 185:1000 Path# /dev/disk1 2025-05-29T15:28:53.970326Z node 151 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 185:1001 Path# /dev/disk2
2025-05-29T15:28:53.970331Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 185:1002 Path# /dev/disk3
2025-05-29T15:28:53.970336Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 186:1000 Path# /dev/disk1
2025-05-29T15:28:53.970340Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 186:1001 Path# /dev/disk2
2025-05-29T15:28:53.970346Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 186:1002 Path# /dev/disk3
2025-05-29T15:28:53.970350Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 187:1000 Path# /dev/disk1
2025-05-29T15:28:53.970355Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 187:1001 Path# /dev/disk2
2025-05-29T15:28:53.970360Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 187:1002 Path# /dev/disk3
2025-05-29T15:28:53.970367Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 188:1000 Path# /dev/disk1
2025-05-29T15:28:53.970373Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 188:1001 Path# /dev/disk2
2025-05-29T15:28:53.970378Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 188:1002 Path# /dev/disk3
2025-05-29T15:28:53.970383Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 189:1000 Path# /dev/disk1
2025-05-29T15:28:53.970388Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 189:1001 Path# /dev/disk2
2025-05-29T15:28:53.970392Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 189:1002 Path# /dev/disk3
2025-05-29T15:28:53.970397Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 190:1000 Path# /dev/disk1
2025-05-29T15:28:53.970401Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 190:1001 Path# /dev/disk2
2025-05-29T15:28:53.970406Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 190:1002 Path# /dev/disk3
2025-05-29T15:28:53.970410Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 191:1000 Path# /dev/disk1
2025-05-29T15:28:53.970415Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 191:1001 Path# /dev/disk2
2025-05-29T15:28:53.970420Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 191:1002 Path# /dev/disk3
2025-05-29T15:28:53.970425Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 192:1000 Path# /dev/disk1
2025-05-29T15:28:53.970429Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 192:1001 Path# /dev/disk2
2025-05-29T15:28:53.970434Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 192:1002 Path# /dev/disk3
2025-05-29T15:28:53.970441Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 193:1000 Path# /dev/disk1
2025-05-29T15:28:53.970448Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 193:1001 Path# /dev/disk2
2025-05-29T15:28:53.970453Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 193:1002 Path# /dev/disk3
2025-05-29T15:28:53.970459Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 194:1000 Path# /dev/disk1
2025-05-29T15:28:53.970464Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 194:1001 Path# /dev/disk2
2025-05-29T15:28:53.970470Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 194:1002 Path# /dev/disk3
2025-05-29T15:28:53.970476Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 195:1000 Path# /dev/disk1
2025-05-29T15:28:53.970480Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 195:1001 Path# /dev/disk2
2025-05-29T15:28:53.970485Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 195:1002 Path# /dev/disk3
2025-05-29T15:28:53.970490Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 196:1000 Path# /dev/disk1
2025-05-29T15:28:53.970496Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 196:1001 Path# /dev/disk2
2025-05-29T15:28:53.970502Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 196:1002 Path# /dev/disk3
2025-05-29T15:28:53.970509Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 197:1000 Path# /dev/disk1
2025-05-29T15:28:53.970514Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 197:1001 Path# /dev/disk2
2025-05-29T15:28:53.970519Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 197:1002 Path# /dev/disk3
2025-05-29T15:28:53.970524Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 198:1000 Path# /dev/disk1
2025-05-29T15:28:53.970529Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 198:1001 Path# /dev/disk2
2025-05-29T15:28:53.970534Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 198:1002 Path# /dev/disk3
2025-05-29T15:28:53.970545Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 199:1000 Path# /dev/disk1
2025-05-29T15:28:53.970550Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 199:1001 Path# /dev/disk2
2025-05-29T15:28:53.970555Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 199:1002 Path# /dev/disk3
2025-05-29T15:28:53.970559Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 200:1000 Path# /dev/disk1
2025-05-29T15:28:53.970564Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 200:1001 Path# /dev/disk2
2025-05-29T15:28:53.970569Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 200:1002 Path# /dev/disk3
2025-05-29T15:28:54.041354Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } }
2025-05-29T15:28:54.062920Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 1 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 1 ExplicitGroupId: 2147483748 } } }
2025-05-29T15:28:54.076143Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } }
2025-05-29T15:28:54.097847Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 2 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 2 ExplicitGroupId: 2147483749 } } }
2025-05-29T15:28:54.112138Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } }
2025-05-29T15:28:54.133370Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 3 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 3 } } }
2025-05-29T15:28:54.147600Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } }
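The DEBUG records above show storage rebalancing driven through the BlobStorage controller's config interface: each MoveGroups command moves a group from storage pool 2 back to pool 1 (the final command, which names no ExplicitGroupId, presumably moves whatever remains), with a QueryBaseConfig between steps to re-read the layout. As a hedged sketch only — the message and field names are copied from the Request# dumps above, while the include path and the use of standard protobuf C++ accessors are assumptions — the first command could be built like this:

    // Sketch: rebuild the first MoveGroups request printed in the log.
    // Assumption: the Request# dump is NKikimrBlobStorage::TConfigRequest
    // from ydb/core/protos/blobstorage_config.proto (header path guessed).
    #include <ydb/core/protos/blobstorage_config.pb.h>

    NKikimrBlobStorage::TConfigRequest BuildMoveGroupsRequest() {
        NKikimrBlobStorage::TConfigRequest request;
        auto* move = request.add_command()->mutable_movegroups();
        move->set_boxid(1);                       // BoxId: 1
        move->set_originstoragepoolid(2);         // move out of pool 2 ...
        move->set_originstoragepoolgeneration(1);
        move->set_targetstoragepoolid(1);         // ... into pool 1
        move->set_targetstoragepoolgeneration(1);
        // ExplicitGroupId: 2147483748 in the log; if the proto field is
        // repeated, this would be add_explicitgroupid(2147483748u) instead.
        move->set_explicitgroupid(2147483748u);
        return request;
    }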
>> BackupRestoreS3::RestoreViewQueryText [GOOD]
>> BackupRestoreS3::RestoreViewReferenceTable
>> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD]
>> EncryptedBackupParamsValidationTest::NoDestination
>> BackupRestore::RestoreViewQueryText [GOOD]
>> BackupRestore::RestoreViewReferenceTable
>> KqpOlapScheme::TtlRunInterval
>> BackupRestore::TestAllPrimitiveTypes-UINT16 [FAIL]
>> BackupRestore::TestAllPrimitiveTypes-UINT32
>> BackupRestoreS3::TestAllPrimitiveTypes-YSON [FAIL]
>> BackupRestoreS3::TestAllPrimitiveTypes-UUID
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err:
2025-05-29T15:28:59.848542Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890071560210652:2140];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:28:59.848576Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002462/r3tmp/tmpoegzx3/pdisk_1.dat
2025-05-29T15:28:59.931145Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:28:59.947267Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:28:59.947300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:28:59.949639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:29589
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
2025-05-29T15:28:59.970990Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509890071560210742:2113] Handle TEvNavigate describe path dc-1
2025-05-29T15:28:59.972920Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509890071560211267:2423] HANDLE EvNavigateScheme dc-1
2025-05-29T15:28:59.972964Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7509890071560210834:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-05-29T15:28:59.972972Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:7509890071560210834:2127], path# /dc-1, domainOwnerId# 72057594046644480
2025-05-29T15:28:59.973014Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:7509890071560211268:2424][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList
2025-05-29T15:28:59.973404Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890071560210495:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890071560211272:2424]
2025-05-29T15:28:59.973416Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890071560210495:2050] Subscribe: subscriber# [1:7509890071560211272:2424], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true
2025-05-29T15:28:59.973430Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890071560210498:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890071560211273:2424]
2025-05-29T15:28:59.973433Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890071560210498:2053] Subscribe: subscriber# [1:7509890071560211273:2424], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true
2025-05-29T15:28:59.973440Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890071560210501:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890071560211274:2424]
2025-05-29T15:28:59.973443Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890071560210501:2056] Subscribe: subscriber# [1:7509890071560211274:2424], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true
2025-05-29T15:28:59.973452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890071560211272:2424][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890071560210495:2050]
2025-05-29T15:28:59.973456Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890071560211273:2424][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890071560210498:2053]
2025-05-29T15:28:59.973460Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890071560211274:2424][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890071560210501:2056]
2025-05-29T15:28:59.973466Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890071560211268:2424][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890071560211269:2424]
2025-05-29T15:28:59.973471Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890071560211268:2424][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890071560211270:2424]
2025-05-29T15:28:59.973480Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:7509890071560211268:2424][/dc-1] Set up state: owner# [1:7509890071560210834:2127], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }
2025-05-29T15:28:59.973501Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890071560211268:2424][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890071560211271:2424]
2025-05-29T15:28:59.973508Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:7509890071560211268:2424][/dc-1] Path was already updated: owner# [1:7509890071560210834:2127], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }
2025-05-29T15:28:59.973514Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890071560211272:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890071560211269:2424], cookie# 1
2025-05-29T15:28:59.973517Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890071560211273:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890071560211270:2424], cookie# 1
2025-05-29T15:28:59.973521Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890071560211274:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890071560211271:2424], cookie# 1
2025-05-29T15:28:59.973531Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890071560210495:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890071560211272:2424]
2025-05-29T15:28:59.973535Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890071560210495:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890071560211272:2424], cookie# 1
2025-05-29T15:28:59.973539Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890071560210498:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890071560211273:2424]
2025-05-29T15:28:59.973541Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890071560210498:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890071560211273:2424], cookie# 1
2025-05-29T15:28:59.973545Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890071560210501:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890071560211274:2424]
2025-05-29T15:28:59.973548Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890071560210501:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890071560211274:2424], cookie# 1
2025-05-29T15:28:59.977942Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890071560211272:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890071560210495:2050], cookie# 1
2025-05-29T15:28:59.977955Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890071560211273:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890071560210498:2053], cookie# 1
2025-05-29T15:28:59.977959Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890071560211274:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890071560210501:2056], cookie# 1
2025-05-29T15:28:59.977967Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890071560211268:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890071560211269:2424], cookie# 1
2025-05-29T15:28:59.977974Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:7509890071560211268:2424][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0
2025-05-29T15:28:59.977979Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890071560211268:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890071560211270:2424], cookie# 1
2025-05-29T15:28:59.977983Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:7509890071560211268:2424][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0
2025-05-29T15:28:59.977988Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890071560211268:2424][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890071560211271:2424], cookie# 1
2025-05-29T15:28:59.977993Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:7509890071560211268:2424][/dc-1] Unexpected sync response: sender# [1:7509890071560211271:2424], cookie# 1
2025-05-29T15:28:59.986326Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:7509890071560210834:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsL ...
5:29:02.558756Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7509890083286592271:2138], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-05-29T15:29:02.558769Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:871: [main][3:7509890083286592914:2596][/dc-1/USER_0] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:7509890083286592271:2138], cookie# 24
2025-05-29T15:29:02.558783Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][3:7509890083286592918:2596][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [3:7509890083286592915:2596], cookie# 24
2025-05-29T15:29:02.558788Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][3:7509890083286592919:2596][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [3:7509890083286592916:2596], cookie# 24
2025-05-29T15:29:02.558796Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][3:7509890083286592920:2596][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [3:7509890083286592917:2596], cookie# 24
2025-05-29T15:29:02.558802Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [3:7509890083286591915:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [3:7509890083286592918:2596], cookie# 24
2025-05-29T15:29:02.558810Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [3:7509890083286591918:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [3:7509890083286592919:2596], cookie# 24
2025-05-29T15:29:02.558817Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [3:7509890083286591921:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [3:7509890083286592920:2596], cookie# 24
2025-05-29T15:29:02.558826Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][3:7509890083286592918:2596][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 18446744073709551615 Partial: 0 }: sender# [3:7509890083286591915:2050], cookie# 24
2025-05-29T15:29:02.558830Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][3:7509890083286592919:2596][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 18446744073709551615 Partial: 0 }: sender# [3:7509890083286591918:2053], cookie# 24
2025-05-29T15:29:02.558833Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][3:7509890083286592920:2596][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 18446744073709551615 Partial: 0 }: sender# [3:7509890083286591921:2056], cookie# 24
2025-05-29T15:29:02.558840Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][3:7509890083286592914:2596][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 18446744073709551615 Partial: 0 }: sender# [3:7509890083286592915:2596], cookie# 24
2025-05-29T15:29:02.558845Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][3:7509890083286592914:2596][/dc-1/USER_0] Sync is in progress: cookie# 24, size# 3, half# 1, successes# 1, faulires# 0
2025-05-29T15:29:02.558851Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][3:7509890083286592914:2596][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 18446744073709551615 Partial: 0 }: sender# [3:7509890083286592916:2596], cookie# 24
2025-05-29T15:29:02.558854Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][3:7509890083286592914:2596][/dc-1/USER_0] Sync is done: cookie# 24, size# 3, half# 1, successes# 2, faulires# 0, partial# 0
2025-05-29T15:29:02.558859Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][3:7509890083286592914:2596][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 18446744073709551615 Partial: 0 }: sender# [3:7509890083286592917:2596], cookie# 24
2025-05-29T15:29:02.558867Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][3:7509890083286592914:2596][/dc-1/USER_0] Unexpected sync response: sender# [3:7509890083286592917:2596], cookie# 24
2025-05-29T15:29:02.558874Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [3:7509890083286592271:2138], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }
2025-05-29T15:29:02.558884Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [3:7509890083286592271:2138], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7509890083286592914:2596] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 4] DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr
2025-05-29T15:29:02.558895Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [3:7509890083286592271:2138], cacheItem# { Subscriber: { Subscriber: [3:7509890083286592914:2596] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 4] DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 24 IsSync: true Partial: 0 }
2025-05-29T15:29:02.558904Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [3:7509890087581561303:3467], recipient# [3:7509890087581561302:3466], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
2025-05-29T15:29:02.558910Z node 3 :TX_PROXY INFO: describe.cpp:356: Actor# [3:7509890087581561302:3466] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1
TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found"
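The subscriber records above trace the scheme board's read quorum: a sync fans out to size# 3 replicas, counts successes against half# 1, declares "Sync is done" on the second success, and logs the third, late reply as "Unexpected sync response" ("faulires" is the counter's spelling in the log itself). A minimal model of that rule — the counter names come from the log, the struct and method are invented for illustration:

    #include <cstdint>

    // Toy model of the quorum visible in the log: with size# 3 and half# 1,
    // the second success (or second failure) finishes the sync, and any
    // reply arriving after that point is "unexpected".
    struct TSyncProgress {
        uint32_t Size = 3;          // size# -- replicas queried
        uint32_t Half = Size / 2;   // half#
        uint32_t Successes = 0;     // successes#
        uint32_t Failures = 0;      // printed as "faulires#"
        bool Done = false;

        // Returns true when this reply completes the sync.
        bool OnReplicaReply(bool success) {
            if (Done) {
                return false;       // -> "Unexpected sync response"
            }
            success ? ++Successes : ++Failures;
            Done = (Successes > Half) || (Failures > Half);
            return Done;
        }
    };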
2025-05-29T15:29:02.665461Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7509890085577403383:2837], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-05-29T15:29:02.665514Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [4:7509890085577403383:2837], cacheItem# { Subscriber: { Subscriber: [4:7509890085577403451:2861] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-05-29T15:29:02.665524Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [4:7509890085577403383:2837], cacheItem# { Subscriber: { Subscriber: [4:7509890085577403452:2862] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-05-29T15:29:02.665580Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [4:7509890085577404019:3182], recipient# [4:7509890085577404018:2910], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
2025-05-29T15:29:02.682722Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7509890085577403383:2837], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-05-29T15:29:02.682790Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [4:7509890085577403383:2837], cacheItem# { Subscriber: { Subscriber: [4:7509890085577403467:2864] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-05-29T15:29:02.682824Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [4:7509890085577404021:3183], recipient# [4:7509890085577404020:2911], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
|71.9%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> EncryptedBackupParamsValidationTest::NoDestination [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Conflict-UseSink
Test command err:
Trying to start YDB, gRPC: 23049, MsgBus: 7365
2025-05-29T15:28:48.912610Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890025175045841:2202];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:28:48.912800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e79/r3tmp/tmpweHwRL/pdisk_1.dat
2025-05-29T15:28:48.990728Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 23049, node 1
2025-05-29T15:28:49.010046Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:28:49.010074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:28:49.010076Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:28:49.010131Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
2025-05-29T15:28:49.013474Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:28:49.013511Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:28:49.016245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:7365
TClient is connected to server localhost:7365
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:28:49.081548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:28:49.091457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:28:49.154915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:28:49.174432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:28:49.187237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:28:49.280908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890029470014604:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:49.280935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:49.320194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-05-29T15:28:49.328045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-05-29T15:28:49.339695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-05-29T15:28:49.394481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-05-29T15:28:49.449727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-05-29T15:28:49.458453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-05-29T15:28:49.472987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-05-29T15:28:49.488923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890029470015259:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:49.488955Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:49.488962Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890029470015264:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:49.489808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-05-29T15:28:49.492179Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890029470015266:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking }
2025-05-29T15:28:49.557813Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890029470015317:3394] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:28:49.668199Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890029470015333:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:28:49.670668Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTc3YjBhNjMtZjUzN2MyNTQtYzc0OTNkMWItM2JmZGRkNjU=, ActorId: [1:7509890029470014586:2401], ActorState: ExecuteState, TraceId: 01jweaj2ag0tb3f87gj8hxhqa5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:28:49.671784Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95
1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C
8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519
10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C
13. ??:0: ?? @ 0x7F634C711AC2
14. ??:0: ?? @ 0x7F634C7A384F
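The trace above pins the abort to NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &) at kqp_ut_common.h:375 failing on (result.IsSuccess()) while CreateSampleTables runs the test's setup queries. A hedged reconstruction of what such a helper does — only the signature and the failing condition come from the trace; the body, the stand-in TStatus, and the plain assert are assumptions standing in for the unittest machinery that actually emits the VERIFY line:

    #include <cassert>
    #include <iostream>
    #include <string>

    // Stand-in for the SDK type named in the trace; the real NYdb::TStatus
    // carries a status code plus the issue list printed as ": Fatal: ..." above.
    namespace NYdb {
        struct TStatus {
            bool Success = false;
            std::string Issues;
            bool IsSuccess() const { return Success; }
            const std::string& GetIssues() const { return Issues; }
        };
    }

    namespace NKikimr::NKqp {
        // Signature copied from the VERIFY message; the body is a plausible sketch.
        inline void AssertSuccessResult(const NYdb::TStatus& result) {
            if (!result.IsSuccess()) {
                std::cerr << result.GetIssues() << '\n'; // the "<main>: Fatal:" lines
            }
            assert(result.IsSuccess()); // the real helper VERIFYs instead of assert()
        }
    }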
Trying to start YDB, gRPC: 61144, MsgBus: 64566
2025-05-29T15:28:53.894921Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890048238557281:2073];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:28:53.895230Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e79/r3tmp/tmpbYFufn/pdisk_1.dat
2025-05-29T15:28:53.978929Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 61144, node 1
2025-05-29T15:28:53.997747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:28:53.997803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:28:53.998870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Co ... 0x15F23B95
1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C
8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519
10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C
13. ??:0: ?? @ 0x7F5512499AC2
14. ??:0: ?? @ 0x7F551252B84F
Trying to start YDB, gRPC: 12764, MsgBus: 4725
2025-05-29T15:28:58.705842Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890070241066951:2067];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:28:58.706167Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e79/r3tmp/tmprm8gPY/pdisk_1.dat
2025-05-29T15:28:58.761378Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:28:58.761493Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890070241066916:2079] 1748532538705693 != 1748532538705696
TServer::EnableGrpc on GrpcPort 12764, node 1
2025-05-29T15:28:58.780838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:28:58.780856Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:28:58.780858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:28:58.780917Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:4725
2025-05-29T15:28:58.807703Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:28:58.807734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:28:58.808765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:4725
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:28:58.852217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:28:58.866643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:28:58.886940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:28:58.912457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:28:58.924029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:28:59.096737Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890074536035845:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:59.096773Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:59.145697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:28:59.201052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:28:59.209399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:28:59.268357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:28:59.279815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:28:59.294461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:28:59.307620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:28:59.323858Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890074536036501:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:59.323881Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:59.323884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890074536036506:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:59.324734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:28:59.327424Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890074536036508:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:28:59.387848Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890074536036559:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:28:59.490874Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890074536036575:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:28:59.490988Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmNkZjgzMWItOTZiZmRjNy1kMGQxOWZkYS1mZGM5OTk3Zg==, ActorId: [1:7509890074536035827:2401], ActorState: ExecuteState, TraceId: 01jweajbxv7cj59nv52z9h0zv7, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:28:59.492256Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95
1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C
8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519
10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C
13. ??:0: ?? @ 0x7FA8AAEF5AC2
14. ??:0: ?? @ 0x7FA8AAF8784F
>> KqpOlapScheme::AddColumnLongPk
>> KqpQuery::QueryFromSqs
>> KqpScheme::CreateAndAlterTableComplex
|71.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut
|71.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut
|71.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|71.9%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut
>> BackupRestoreS3::RestoreViewReferenceTable [FAIL]
>> BackupRestoreS3::RestoreViewDependentOnAnotherView
>> EncryptedBackupParamsValidationTest::NoItemDestination
>> BackupRestore::RestoreViewReferenceTable [FAIL]
>> BackupRestore::RestoreViewToDifferentDatabase
>> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD]
>> KqpStats::SysViewCancelled
>> KqpOlapScheme::BulkError
>> BackupRestore::TestAllPrimitiveTypes-UINT32 [FAIL]
>> BackupRestore::TestAllPrimitiveTypes-UINT64
>> BackupRestoreS3::TestAllPrimitiveTypes-UUID [FAIL]
>> TReplicationWithRebootsTests::CreateInParallelWithoutInitialController [GOOD]
>> KqpScheme::CreateTableWithCompactionPolicyUncompat
>> KqpOlapScheme::NullColumnError
>> KqpOlapScheme::AddColumnLongPk [GOOD]
>> KqpOlapScheme::AddColumnSimpleReader
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141]
2025-05-29T15:27:21.175872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:27:21.175896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
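The two NOTICE records above dump the schemeshard compaction queue settings this test exercises: a background queue with a 600 s timeout, rate 1 and inflight 1, and a borrowed queue with a 15 s timeout and inflight 10. Purely as a reading aid — the parameter names and values are taken from the log lines, while the struct is invented and covers only a subset of the reported knobs:

    #include <cstdint>

    // Named view of (some of) the parameters the two NOTICE lines report;
    // the real configuration lives in YDB's protobuf config, not here.
    struct TQueueKnobs {
        double TimeoutSeconds;
        uint32_t Rate;
        double WakeupIntervalSeconds;
        uint32_t InflightLimit;
    };

    constexpr TQueueKnobs BackgroundCompactionQueue{600.0, 1, 60.0, 1};
    constexpr TQueueKnobs BorrowedCompactionQueue{15.0, 0, 1.0, 10};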
StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:27:21.175909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:27:21.175931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:27:21.175936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:27:21.175946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:21.175962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:27:21.176086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:27:21.176167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:27:21.191673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:21.191699Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:21.194600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:27:21.194734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:27:21.194797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:27:21.196631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:27:21.196801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:27:21.196919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:21.196997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:27:21.197606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:21.197654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:27:21.197987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:21.198000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:21.198022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:27:21.198033Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:21.198039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:27:21.198078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.199640Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:27:21.222065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:27:21.222151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.222213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:27:21.222274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:27:21.222287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.223087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:21.223120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:27:21.223174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.223187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:27:21.223193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:27:21.223199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:27:21.223652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.223664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:27:21.223671Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:27:21.224016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.224028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:21.224037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:21.224045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:27:21.224801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:27:21.225249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:27:21.225296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:27:21.225554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:21.225582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:21.225591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:21.225658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:27:21.225666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:21.225704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:27:21.225717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:27:21.226198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
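
The trace above walks the ESchemeOpAlterSubDomain operation through the schemeshard sub-operation state machine: TCreateParts (state 2 -> 3), TConfigureParts (3 -> 128), then TPropose parks the operation until the coordinator plans the transaction (step 5000001 here), after which it jumps 128 -> 240 (done). The sketch below models that progression pattern; the numeric state codes come straight from the "Change state for txid" lines, while TToyOperation and Progress() are illustrative stand-ins, not YDB's real TSubOperation API.

#include <cstdio>

// Toy model of the sub-operation progression seen in the trace:
// 2 (CreateParts) -> 3 (ConfigureParts) -> 128 (Propose) -> 240 (Done).
// Numeric codes match the "Change state for txid" log lines; everything
// else is a simplified illustration, not YDB's actual interface.
enum class ESubOpState { CreateParts = 2, ConfigureParts = 3, Propose = 128, Done = 240 };

struct TToyOperation {
    ESubOpState State = ESubOpState::CreateParts;
    bool PlanStepArrived = false;  // set when the coordinator delivers TEvOperationPlan

    // One ProgressState round: advance if the current unit has nothing to wait for.
    bool Progress() {
        switch (State) {
            case ESubOpState::CreateParts:
                // "no shards to create, do next state" -> 2 -> 3
                State = ESubOpState::ConfigureParts;
                return true;
            case ESubOpState::ConfigureParts:
                // nothing to configure for a subdomain alter -> 3 -> 128
                State = ESubOpState::Propose;
                return true;
            case ESubOpState::Propose:
                // parked until the coordinator plans the tx (step 5000001 in the log)
                if (!PlanStepArrived) return false;
                State = ESubOpState::Done;  // 128 -> 240
                return true;
            case ESubOpState::Done:
                return false;
        }
        return false;
    }
};

int main() {
    TToyOperation op;
    while (op.Progress()) {}
    std::printf("parked in state %d, waiting for plan step\n", (int)op.State);  // 128
    op.PlanStepArrived = true;  // FAKE_COORDINATOR: Send Plan ... at step: 5000001
    while (op.Progress()) {}
    std::printf("final state %d\n", (int)op.State);  // 240
}
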
2025-05-29T15:27:21.226210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:21.226259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... [0:0:0], Recipient [3:309:2295]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-05-29T15:29:02.123426Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3155: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-05-29T15:29:02.123457Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409546 outdated step 5000002 last cleanup 0 2025-05-29T15:29:02.123484Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:29:02.123492Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409546 2025-05-29T15:29:02.123498Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409546 has no attached operations 2025-05-29T15:29:02.123503Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409546 2025-05-29T15:29:02.133722Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:309:2295]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:29:02.133923Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-05-29T15:29:02.134257Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269553162, Sender [3:309:2295], Recipient [3:124:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 7 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 37 Memory: 124088 Storage: 14156 GroupWriteThroughput { GroupID: 0 Channel: 0 Throughput: 261 } GroupWriteThroughput { GroupID: 0 Channel: 1 Throughput: 444 } GroupWriteIops { GroupID: 0 Channel: 0 Iops: 1 } } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 40 TableOwnerId: 72057594046678944 FollowerId: 0 2025-05-29T15:29:02.134272Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4914: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-05-29T15:29:02.134292Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0037 2025-05-29T15:29:02.134315Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:570: Got periodic table stats 
at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-05-29T15:29:02.134324Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:610: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-05-29T15:29:02.134427Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435080, Sender [3:1057:2999], Recipient [3:309:2295]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-05-29T15:29:02.185907Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-05-29T15:29:02.185944Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5053: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-05-29T15:29:02.185951Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-05-29T15:29:02.185980Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:601: Will execute TTxStoreStats, queue# 1 2025-05-29T15:29:02.185987Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:610: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-05-29T15:29:02.186024Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-05-29T15:29:02.186054Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-05-29T15:29:02.186061Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:62: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2025-05-29T15:29:02.186095Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:01:20.000000Z at schemeshard 72057594046678944 2025-05-29T15:29:02.186124Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:477: Do not want to split tablet 72075186233409546 by size, its table already has 1 out of 1 partitions 2025-05-29T15:29:02.186156Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:29:02.196436Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-05-29T15:29:02.196473Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5053: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-05-29T15:29:02.196481Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-05-29T15:29:02.479084Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:02.479114Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:02.479130Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [3:124:2149], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:02.479136Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:02.848175Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:02.848227Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:02.848252Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [3:124:2149], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:02.848258Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:03.197034Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:03.197074Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:03.197098Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [3:124:2149], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:03.197103Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:03.537540Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:03.537569Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:03.537584Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [3:124:2149], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:03.537589Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:03.864446Z node 3 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:03.864479Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:03.864506Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [3:124:2149], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:03.864510Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:03.895042Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:309:2295]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:29:04.246615Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:04.246656Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:04.246682Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [3:124:2149], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:04.246688Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> KqpScheme::CreateTableWithVectorIndexPublicApi >> EncryptedBackupParamsValidationTest::NoItemDestination [GOOD] >> KqpScheme::SchemaVersionMissmatchWithIndexRead >> KqpQuery::SelectCountAsteriskFromVar >> KqpOlapScheme::BulkError [GOOD] >> KqpOlapScheme::ColumnFamilyWithFieldData >> EncryptedBackupParamsValidationTest::NoCommonDestination >> KqpOlapScheme::NullColumnError [GOOD] >> KqpOlapScheme::TenThousandColumns >> BackupRestore::TestAllPrimitiveTypes-UINT64 [FAIL] >> BackupRestore::TestAllPrimitiveTypes-TIMESTAMP >> BackupRestoreS3::RestoreViewDependentOnAnotherView [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal >> BackupRestore::RestoreViewToDifferentDatabase [FAIL] >> BackupRestore::RestoreViewDependentOnAnotherView >> KqpScheme::CreateExternalDataSourceValidationAuthMethod >> TGRpcStreamingTest::SimpleEcho ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::CreateInParallelWithoutInitialController [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader 
for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:28:23.906564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:23.906588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:23.906593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:23.906598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:23.906614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:23.906618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:23.906627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:23.906641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:23.906775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:23.906861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:23.920761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:28:23.920782Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:23.920882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:28:23.923535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:23.923562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:23.923585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:23.926156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:23.926241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:23.926377Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:23.926578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:23.927254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:23.927312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:23.927587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:23.927599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:23.927630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:23.927638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:23.927644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:23.927661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:28:23.929018Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:28:23.950084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:23.950187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:23.950256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:23.950311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:23.950323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:23.951119Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:23.951149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:23.951221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:23.951233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:23.951239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:23.951245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:23.951676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:23.951689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:23.951695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:23.952017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:23.952027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:23.952033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:23.952040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:23.952624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:23.952950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:23.952993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:23.953190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep 
Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:23.953213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:23.953220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:23.953284Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 03, at schemeshard: 72057594046678944 2025-05-29T15:29:04.790437Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [133:637:2545], Recipient [133:123:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:29:04.790440Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:29:04.790445Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:29:04.790451Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-05-29T15:29:04.790456Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [133:631:2539] 2025-05-29T15:29:04.790475Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:29:04.790479Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [133:631:2539] 2025-05-29T15:29:04.790495Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [133:630:2538], Recipient [133:123:2148]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1004 2025-05-29T15:29:04.790499Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:29:04.790504Z node 133 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:29:04.790521Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [133:635:2543], Recipient [133:123:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:29:04.790525Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:29:04.790530Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 2025-05-29T15:29:04.790537Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [133:636:2544], Recipient [133:123:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:29:04.790541Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:29:04.790544Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 
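
The waiter traffic above shows the TEvNotifyTxCompletion protocol the test harness relies on: subscribing to a txId the schemeshard no longer tracks ("unknown transaction, txId: 1004") is answered immediately, while live transactions park the subscriber until "got EvNotifyTxCompletionResult ... satisfy waiter" fires. A minimal sketch of that subscribe-or-answer-now pattern follows, with invented names (TTxNotifier and friends are not YDB types):

#include <cstdint>
#include <cstdio>
#include <functional>
#include <map>
#include <set>
#include <vector>

// Sketch of the notify-on-completion pattern from the trace: callers subscribe
// to a txId; already-finished transactions are acknowledged immediately
// ("unknown transaction" in the log means "nothing left to wait for"), pending
// ones park the waiter until completion. All names here are illustrative.
class TTxNotifier {
    std::set<uint64_t> InFlight;
    std::map<uint64_t, std::vector<std::function<void(uint64_t)>>> Waiters;
public:
    void TxStarted(uint64_t txId) { InFlight.insert(txId); }

    void Subscribe(uint64_t txId, std::function<void(uint64_t)> waiter) {
        if (!InFlight.count(txId)) { waiter(txId); return; }  // reply right away
        Waiters[txId].push_back(std::move(waiter));           // park until done
    }

    void TxCompleted(uint64_t txId) {
        InFlight.erase(txId);
        auto it = Waiters.find(txId);
        if (it == Waiters.end()) return;
        for (auto& w : it->second) w(txId);  // "satisfy waiter"
        Waiters.erase(it);
    }
};

int main() {
    TTxNotifier n;
    n.TxStarted(1003);
    n.Subscribe(1002, [](uint64_t id) { std::printf("tx %lu done (immediate)\n", (unsigned long)id); });
    n.Subscribe(1003, [](uint64_t id) { std::printf("tx %lu done (waited)\n", (unsigned long)id); });
    n.TxCompleted(1003);
}
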
2025-05-29T15:29:04.790550Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:29:04.790554Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [133:631:2539] 2025-05-29T15:29:04.790571Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [133:637:2545], Recipient [133:123:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:29:04.790574Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:29:04.790578Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 2025-05-29T15:29:04.790644Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [133:638:2546], Recipient [133:123:2148]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-05-29T15:29:04.790649Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:29:04.790661Z node 133 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:29:04.790729Z node 133 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication1" took 63us result status StatusSuccess 2025-05-29T15:29:04.790850Z node 133 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication1" PathDescription { Self { Name: "Replication1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication1" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 
LocalId: 3 } Version: 1 ControllerId: 72075186233409546 State { StandBy { } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:29:04.790978Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [133:639:2547], Recipient [133:123:2148]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-05-29T15:29:04.790982Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:29:04.790988Z node 133 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:29:04.791007Z node 133 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication2" took 17us result status StatusSuccess 2025-05-29T15:29:04.791031Z node 133 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication2" PathDescription { Self { Name: "Replication2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication2" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 ControllerId: 72075186233409547 State { StandBy { } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:29:04.791083Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [133:640:2548], Recipient [133:123:2148]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-05-29T15:29:04.791085Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:29:04.791090Z node 133 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/Replication3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:29:04.791100Z node 133 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication3" took 10us result status StatusSuccess 2025-05-29T15:29:04.791122Z node 133 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication3" PathDescription { Self { Name: "Replication3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication3" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 ControllerId: 72075186233409548 State { StandBy { } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpOlapScheme::ColumnFamilyWithFieldData [GOOD] >> KqpOlapScheme::CrateWithWrongCodec >> KqpOlapScheme::AddColumnSimpleReader [GOOD] >> KqpOlapScheme::AddColumnOldSchemeBulkUpsert >> EncryptedBackupParamsValidationTest::NoCommonDestination [GOOD] >> TGRpcStreamingTest::SimpleEcho [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexRead >> KqpOlapScheme::CrateWithWrongCodec [GOOD] >> KqpOlapScheme::AlterCompressionType >> EncryptedBackupParamsValidationTest::IncorrectKeyLengthExport >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> KqpScheme::AddDropColumn >> KqpScheme::DropIndexDataColumn >> KqpOlapScheme::AddColumnOldSchemeBulkUpsert [GOOD] >> KqpOlapScheme::AddColumnWithStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] Test command err: 2025-05-29T15:29:06.173295Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890102062299222:2145];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:06.174006Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0018ed/r3tmp/tmpESzFPI/pdisk_1.dat 2025-05-29T15:29:06.238520Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:06.241348Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890102062299090:2079] 1748532546171368 != 1748532546171371 2025-05-29T15:29:06.243203Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:227: [0x7051fefcc380] stream accepted Name# Session ok# true peer# ipv6:[::1]:34742 2025-05-29T15:29:06.243297Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:301: [0x7051fefcc380] facade attach Name# Session actor# [1:7509890102062299628:2250] peer# ipv6:[::1]:34742 2025-05-29T15:29:06.243314Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:325: [0x7051fefcc380] facade read Name# Session peer# ipv6:[::1]:34742 2025-05-29T15:29:06.243601Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:353: [0x7051fefcc380] read finished Name# Session ok# true data# peer# ipv6:[::1]:34742 2025-05-29T15:29:06.243638Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:142: Received TEvReadFinished, success = 1 2025-05-29T15:29:06.243651Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:401: [0x7051fefcc380] facade write Name# Session data# peer# ipv6:[::1]:34742 2025-05-29T15:29:06.243794Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:511: [0x7051fefcc380] facade finish Name# Session peer# ipv6:[::1]:34742 grpc status# (0) message# 2025-05-29T15:29:06.244005Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:456: [0x7051fefcc380] write finished Name# Session ok# true peer# ipv6:[::1]:34742 2025-05-29T15:29:06.244125Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:268: [0x7051fefcc380] stream done notification Name# Session ok# true peer# ipv6:[::1]:34742 2025-05-29T15:29:06.244135Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:547: [0x7051fefcc380] stream finished Name# Session ok# true peer# ipv6:[::1]:34742 grpc status# (0) message# 2025-05-29T15:29:06.244147Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:580: [0x7051fefcc380] deregistering request Name# Session peer# ipv6:[::1]:34742 (finish done) 2025-05-29T15:29:06.273202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:06.273234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:06.274275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected >> KqpScheme::AlterTableAlterIndex+UseQueryService >> KqpScheme::DropExternalDataSource >> BackupRestore::TestAllPrimitiveTypes-TIMESTAMP [FAIL] >> BackupRestore::TestAllPrimitiveTypes-TZ_DATE [GOOD] >> BackupRestore::TestAllPrimitiveTypes-TZ_DATETIME [GOOD] >> BackupRestore::TestAllPrimitiveTypes-TZ_TIMESTAMP [GOOD] >> BackupRestore::TestAllPrimitiveTypes-TIMESTAMP64 >> KqpScheme::CreateExternalTableWithSettings >> KqpScheme::CreateDropTableMultipleTime >> KqpScheme::InvalidationAfterDropCreateCompatSchema >> BackupRestore::RestoreViewDependentOnAnotherView [GOOD] >> BackupRestore::RestoreKesusResources >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: 
[1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:57:2097]) rebooted! !Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed! new actor is[2:82:2111] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! 
new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:86:2057] recipient: [7:85:2113] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:88:2057] recipient: [7:85:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! 
new actor is[7:87:2114] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:173:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:85:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:88:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:89:2057] recipient: [8:87:2116] Leader for TabletID 72057594037927937 is [8:90:2117] sender: [8:91:2057] recipient: [8:87:2116] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! new actor is[8:90:2117] Leader for TabletID 72057594037927937 is [8:90:2117] sender: [8:176:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:85:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:89:2057] recipient: [9:88:2116] Leader for TabletID 72057594037927937 is [9:90:2117] sender: [9:91:2057] recipient: [9:88:2116] !Reboot 72057594037927937 (actor [9:57:2097]) rebooted! !Reboot 72057594037927937 (actor [9:57:2097]) tablet resolver refreshed! new actor is[9:90:2117] Leader for TabletID 72057594037927937 is [9:90:2117] sender: [9:176:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:86:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:89:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:90:2057] recipient: [10:88:2116] Leader for TabletID 72057594037927937 is [10:91:2117] sender: [10:92:2057] recipient: [10:88:2116] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! 
new actor is[10:91:2117] Leader for TabletID 72057594037927937 is [10:91:2117] sender: [10:177:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:89:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:92:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:93:2057] recipient: [11:91:2119] Leader for TabletID 72057594037927937 is [11:94:2120] sender: [11:95:2057] recipient: [11:91:2119] !Reboot 72057594037927937 (actor [11:57:2097]) rebooted! !Reboot 72057594037927937 (actor [11:57:2097]) tablet resolver refreshed! new actor is[11:94:2120] Leader for TabletID 72057594037927937 is [11:94:2120] sender: [11:180:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 97]) rebooted! !Reboot 72057594037927937 (actor [44:57:2097]) tablet resolver refreshed! new actor is[44:102:2126] Leader for TabletID 72057594037927937 is [44:102:2126] sender: [44:188:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:55:2057] recipient: [45:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:55:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:57:2097] sender: [45:58:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:57:2097] sender: [45:75:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [45:57:2097] sender: [45:98:2057] recipient: [45:36:2083] Leader for TabletID 72057594037927937 is [45:57:2097] sender: [45:101:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [45:57:2097] sender: [45:102:2057] recipient: [45:100:2125] Leader for TabletID 72057594037927937 is [45:103:2126] sender: [45:104:2057] recipient: [45:100:2125] !Reboot 72057594037927937 (actor [45:57:2097]) rebooted! !Reboot 72057594037927937 (actor [45:57:2097]) tablet resolver refreshed! new actor is[45:103:2126] Leader for TabletID 72057594037927937 is [45:103:2126] sender: [45:121:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:55:2057] recipient: [46:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:55:2057] recipient: [46:51:2095] Leader for TabletID 72057594037927937 is [46:57:2097] sender: [46:58:2057] recipient: [46:51:2095] Leader for TabletID 72057594037927937 is [46:57:2097] sender: [46:75:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [46:57:2097] sender: [46:100:2057] recipient: [46:36:2083] Leader for TabletID 72057594037927937 is [46:57:2097] sender: [46:103:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [46:57:2097] sender: [46:104:2057] recipient: [46:102:2127] Leader for TabletID 72057594037927937 is [46:105:2128] sender: [46:106:2057] recipient: [46:102:2127] !Reboot 72057594037927937 (actor [46:57:2097]) rebooted! !Reboot 72057594037927937 (actor [46:57:2097]) tablet resolver refreshed! new actor is[46:105:2128] Leader for TabletID 72057594037927937 is [46:105:2128] sender: [46:191:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:55:2057] recipient: [47:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:55:2057] recipient: [47:51:2095] Leader for TabletID 72057594037927937 is [47:57:2097] sender: [47:58:2057] recipient: [47:51:2095] Leader for TabletID 72057594037927937 is [47:57:2097] sender: [47:75:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:57:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [47:57:2097] sender: [47:100:2057] recipient: [47:36:2083] Leader for TabletID 72057594037927937 is [47:57:2097] sender: [47:103:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [47:57:2097] sender: [47:104:2057] recipient: [47:102:2127] Leader for TabletID 72057594037927937 is [47:105:2128] sender: [47:106:2057] recipient: [47:102:2127] !Reboot 72057594037927937 (actor [47:57:2097]) rebooted! !Reboot 72057594037927937 (actor [47:57:2097]) tablet resolver refreshed! new actor is[47:105:2128] Leader for TabletID 72057594037927937 is [47:105:2128] sender: [47:191:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:55:2057] recipient: [48:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:55:2057] recipient: [48:52:2095] Leader for TabletID 72057594037927937 is [48:57:2097] sender: [48:58:2057] recipient: [48:52:2095] Leader for TabletID 72057594037927937 is [48:57:2097] sender: [48:75:2057] recipient: [48:14:2061] !Reboot 72057594037927937 (actor [48:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [48:57:2097] sender: [48:101:2057] recipient: [48:36:2083] Leader for TabletID 72057594037927937 is [48:57:2097] sender: [48:104:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [48:57:2097] sender: [48:105:2057] recipient: [48:103:2127] Leader for TabletID 72057594037927937 is [48:106:2128] sender: [48:107:2057] recipient: [48:103:2127] !Reboot 72057594037927937 (actor [48:57:2097]) rebooted! !Reboot 72057594037927937 (actor [48:57:2097]) tablet resolver refreshed! new actor is[48:106:2128] Leader for TabletID 72057594037927937 is [48:106:2128] sender: [48:124:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:55:2057] recipient: [49:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:55:2057] recipient: [49:51:2095] Leader for TabletID 72057594037927937 is [49:57:2097] sender: [49:58:2057] recipient: [49:51:2095] Leader for TabletID 72057594037927937 is [49:57:2097] sender: [49:75:2057] recipient: [49:14:2061] !Reboot 72057594037927937 (actor [49:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [49:57:2097] sender: [49:103:2057] recipient: [49:36:2083] Leader for TabletID 72057594037927937 is [49:57:2097] sender: [49:106:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [49:57:2097] sender: [49:107:2057] recipient: [49:105:2129] Leader for TabletID 72057594037927937 is [49:108:2130] sender: [49:109:2057] recipient: [49:105:2129] !Reboot 72057594037927937 (actor [49:57:2097]) rebooted! !Reboot 72057594037927937 (actor [49:57:2097]) tablet resolver refreshed! new actor is[49:108:2130] Leader for TabletID 72057594037927937 is [49:108:2130] sender: [49:194:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:55:2057] recipient: [50:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:55:2057] recipient: [50:51:2095] Leader for TabletID 72057594037927937 is [50:57:2097] sender: [50:58:2057] recipient: [50:51:2095] Leader for TabletID 72057594037927937 is [50:57:2097] sender: [50:75:2057] recipient: [50:14:2061] !Reboot 72057594037927937 (actor [50:57:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [50:57:2097] sender: [50:103:2057] recipient: [50:36:2083] Leader for TabletID 72057594037927937 is [50:57:2097] sender: [50:106:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [50:57:2097] sender: [50:107:2057] recipient: [50:105:2129] Leader for TabletID 72057594037927937 is [50:108:2130] sender: [50:109:2057] recipient: [50:105:2129] !Reboot 72057594037927937 (actor [50:57:2097]) rebooted! !Reboot 72057594037927937 (actor [50:57:2097]) tablet resolver refreshed! new actor is[50:108:2130] Leader for TabletID 72057594037927937 is [50:108:2130] sender: [50:194:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:55:2057] recipient: [51:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:55:2057] recipient: [51:51:2095] Leader for TabletID 72057594037927937 is [51:57:2097] sender: [51:58:2057] recipient: [51:51:2095] Leader for TabletID 72057594037927937 is [51:57:2097] sender: [51:75:2057] recipient: [51:14:2061] !Reboot 72057594037927937 (actor [51:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [51:57:2097] sender: [51:104:2057] recipient: [51:36:2083] Leader for TabletID 72057594037927937 is [51:57:2097] sender: [51:107:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [51:57:2097] sender: [51:108:2057] recipient: [51:106:2129] Leader for TabletID 72057594037927937 is [51:109:2130] sender: [51:110:2057] recipient: [51:106:2129] !Reboot 72057594037927937 (actor [51:57:2097]) rebooted! !Reboot 72057594037927937 (actor [51:57:2097]) tablet resolver refreshed! new actor is[51:109:2130] Leader for TabletID 72057594037927937 is [51:109:2130] sender: [51:127:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:55:2057] recipient: [52:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:55:2057] recipient: [52:51:2095] Leader for TabletID 72057594037927937 is [52:57:2097] sender: [52:58:2057] recipient: [52:51:2095] Leader for TabletID 72057594037927937 is [52:57:2097] sender: [52:75:2057] recipient: [52:14:2061] !Reboot 72057594037927937 (actor [52:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [52:57:2097] sender: [52:106:2057] recipient: [52:36:2083] Leader for TabletID 72057594037927937 is [52:57:2097] sender: [52:109:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [52:57:2097] sender: [52:110:2057] recipient: [52:108:2131] Leader for TabletID 72057594037927937 is [52:111:2132] sender: [52:112:2057] recipient: [52:108:2131] !Reboot 72057594037927937 (actor [52:57:2097]) rebooted! !Reboot 72057594037927937 (actor [52:57:2097]) tablet resolver refreshed! new actor is[52:111:2132] Leader for TabletID 72057594037927937 is [52:111:2132] sender: [52:197:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:55:2057] recipient: [53:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:55:2057] recipient: [53:50:2095] Leader for TabletID 72057594037927937 is [53:57:2097] sender: [53:58:2057] recipient: [53:50:2095] Leader for TabletID 72057594037927937 is [53:57:2097] sender: [53:75:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:57:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [53:57:2097] sender: [53:106:2057] recipient: [53:36:2083] Leader for TabletID 72057594037927937 is [53:57:2097] sender: [53:108:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [53:57:2097] sender: [53:110:2057] recipient: [53:109:2131] Leader for TabletID 72057594037927937 is [53:111:2132] sender: [53:112:2057] recipient: [53:109:2131] !Reboot 72057594037927937 (actor [53:57:2097]) rebooted! !Reboot 72057594037927937 (actor [53:57:2097]) tablet resolver refreshed! new actor is[53:111:2132] Leader for TabletID 72057594037927937 is [53:111:2132] sender: [53:197:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:55:2057] recipient: [54:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:55:2057] recipient: [54:51:2095] Leader for TabletID 72057594037927937 is [54:57:2097] sender: [54:58:2057] recipient: [54:51:2095] Leader for TabletID 72057594037927937 is [54:57:2097] sender: [54:75:2057] recipient: [54:14:2061] !Reboot 72057594037927937 (actor [54:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [54:57:2097] sender: [54:107:2057] recipient: [54:36:2083] Leader for TabletID 72057594037927937 is [54:57:2097] sender: [54:110:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [54:57:2097] sender: [54:111:2057] recipient: [54:109:2131] Leader for TabletID 72057594037927937 is [54:112:2132] sender: [54:113:2057] recipient: [54:109:2131] !Reboot 72057594037927937 (actor [54:57:2097]) rebooted! !Reboot 72057594037927937 (actor [54:57:2097]) tablet resolver refreshed! 
new actor is[54:112:2132] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:55:2057] recipient: [55:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:55:2057] recipient: [55:51:2095] Leader for TabletID 72057594037927937 is [55:57:2097] sender: [55:58:2057] recipient: [55:51:2095] Leader for TabletID 72057594037927937 is [55:57:2097] sender: [55:75:2057] recipient: [55:14:2061] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync >> KqpOlapScheme::AlterCompressionType [GOOD] >> KqpOlapScheme::CreateTableStoreWithFamily >> EncryptedBackupParamsValidationTest::IncorrectKeyLengthExport [GOOD] >> KqpScheme::DisableResourcePoolsOnServerless ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3015:2106] recipient: [1:2886:2116] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3015:2106] recipient: [1:2886:2116] Leader for TabletID 72057594037932033 is [1:3062:2118] sender: [1:3065:2106] recipient: [1:2886:2116] 2025-05-29T15:28:28.707779Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:28:28.708852Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:28:28.708942Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:28:28.709121Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:28:28.709638Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:28:28.709736Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:28:28.709743Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:28:28.709819Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:28:28.710958Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:28:28.710993Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:28:28.711018Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:28:28.711041Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:28:28.711056Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:28:28.711067Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3062:2118] sender: [1:3088:2106] recipient: [1:60:2107] 2025-05-29T15:28:28.721594Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:28:28.721641Z 
node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:28:28.731946Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:28:28.731990Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:28:28.732003Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:28:28.732016Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:28:28.732060Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:28:28.732071Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:28:28.732078Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:28:28.732092Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:28:28.742400Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:28:28.742443Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:28:28.752749Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:28:28.752806Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:28:28.753048Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:28:28.753060Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:28:28.753104Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:28:28.753112Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:28:28.756020Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } 
Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 60 PDiskFilter { Property { Type: ROT } } } } Command { QueryBaseConfig { } } } 2025-05-29T15:28:28.756206Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-05-29T15:28:28.756214Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-05-29T15:28:28.756220Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-05-29T15:28:28.756225Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-05-29T15:28:28.756233Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-05-29T15:28:28.756238Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-05-29T15:28:28.756243Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-05-29T15:28:28.756250Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-05-29T15:28:28.756255Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-05-29T15:28:28.756259Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-05-29T15:28:28.756265Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-05-29T15:28:28.756270Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-05-29T15:28:28.756275Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-05-29T15:28:28.756279Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-05-29T15:28:28.756285Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-05-29T15:28:28.756290Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-05-29T15:28:28.756298Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 6:1001 Path# 
/dev/disk2 2025-05-29T15:28:28.756305Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-05-29T15:28:28.756310Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-05-29T15:28:28.756315Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-05-29T15:28:28.756320Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-05-29T15:28:28.756325Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2025-05-29T15:28:28.756329Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-05-29T15:28:28.756334Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2025-05-29T15:28:28.756339Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2025-05-29T15:28:28.756344Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-05-29T15:28:28.756349Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2025-05-29T15:28:28.756354Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2025-05-29T15:28:28.756359Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2025-05-29T15:28:28.756363Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2025-05-29T15:28:28.756367Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2025-05-29T15:28:28.756372Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2025-05-29T15:28:28.756378Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Cr ... 
9T15:28:57.344423Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 203:1001 Path# /dev/disk2 2025-05-29T15:28:57.344428Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 203:1002 Path# /dev/disk3 2025-05-29T15:28:57.344431Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 204:1000 Path# /dev/disk1 2025-05-29T15:28:57.344434Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 204:1001 Path# /dev/disk2 2025-05-29T15:28:57.344438Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 204:1002 Path# /dev/disk3 2025-05-29T15:28:57.344441Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 205:1000 Path# /dev/disk1 2025-05-29T15:28:57.344445Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 205:1001 Path# /dev/disk2 2025-05-29T15:28:57.344448Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 205:1002 Path# /dev/disk3 2025-05-29T15:28:57.344451Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 206:1000 Path# /dev/disk1 2025-05-29T15:28:57.344454Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 206:1001 Path# /dev/disk2 2025-05-29T15:28:57.344458Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 206:1002 Path# /dev/disk3 2025-05-29T15:28:57.344461Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 207:1000 Path# /dev/disk1 2025-05-29T15:28:57.344465Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 207:1001 Path# /dev/disk2 2025-05-29T15:28:57.344468Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 207:1002 Path# /dev/disk3 2025-05-29T15:28:57.344472Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 208:1000 Path# /dev/disk1 2025-05-29T15:28:57.344475Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 208:1001 Path# /dev/disk2 2025-05-29T15:28:57.344478Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 208:1002 Path# /dev/disk3 2025-05-29T15:28:57.344481Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 209:1000 Path# /dev/disk1 2025-05-29T15:28:57.344484Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 209:1001 Path# /dev/disk2 2025-05-29T15:28:57.344487Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 209:1002 Path# /dev/disk3 2025-05-29T15:28:57.344491Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 210:1000 Path# /dev/disk1 2025-05-29T15:28:57.344494Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 210:1001 Path# /dev/disk2 2025-05-29T15:28:57.344497Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 210:1002 Path# /dev/disk3 2025-05-29T15:28:57.416917Z node 161 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { 
DefineHostConfig { HostConfigId: 4 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12057 } 
HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 4 } ItemConfigGeneration: 1 } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 180 PDiskFilter { Property { Type: ROT } } ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } 2025-05-29T15:28:57.418941Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 211:1000 Path# /dev/disk1 2025-05-29T15:28:57.418974Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 211:1001 Path# /dev/disk2 2025-05-29T15:28:57.418980Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 211:1002 Path# /dev/disk3 2025-05-29T15:28:57.418986Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 212:1000 Path# /dev/disk1 2025-05-29T15:28:57.418991Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 212:1001 Path# /dev/disk2 2025-05-29T15:28:57.418996Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 212:1002 Path# /dev/disk3 2025-05-29T15:28:57.419001Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 213:1000 Path# /dev/disk1 2025-05-29T15:28:57.419006Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 213:1001 Path# /dev/disk2 2025-05-29T15:28:57.419014Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 213:1002 Path# /dev/disk3 2025-05-29T15:28:57.419019Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 214:1000 Path# /dev/disk1 2025-05-29T15:28:57.419023Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 214:1001 Path# /dev/disk2 2025-05-29T15:28:57.419028Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 214:1002 Path# /dev/disk3 2025-05-29T15:28:57.419033Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 215:1000 Path# /dev/disk1 2025-05-29T15:28:57.419038Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 215:1001 Path# /dev/disk2 2025-05-29T15:28:57.419042Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 215:1002 Path# /dev/disk3 2025-05-29T15:28:57.419047Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 216:1000 Path# /dev/disk1 2025-05-29T15:28:57.419051Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 216:1001 Path# /dev/disk2 2025-05-29T15:28:57.419056Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 216:1002 Path# /dev/disk3 2025-05-29T15:28:57.419060Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 217:1000 Path# /dev/disk1 2025-05-29T15:28:57.419065Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 217:1001 Path# /dev/disk2 2025-05-29T15:28:57.419069Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 
217:1002 Path# /dev/disk3
2025-05-29T15:28:57.419074Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 218:1000 Path# /dev/disk1
2025-05-29T15:28:57.419078Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 218:1001 Path# /dev/disk2
2025-05-29T15:28:57.419083Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 218:1002 Path# /dev/disk3
2025-05-29T15:28:57.419087Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 219:1000 Path# /dev/disk1
2025-05-29T15:28:57.419092Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 219:1001 Path# /dev/disk2
2025-05-29T15:28:57.419097Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 219:1002 Path# /dev/disk3
2025-05-29T15:28:57.419101Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 220:1000 Path# /dev/disk1
2025-05-29T15:28:57.419108Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 220:1001 Path# /dev/disk2
2025-05-29T15:28:57.419113Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 220:1002 Path# /dev/disk3
>> KqpOlapScheme::WithoutDefaultColumnFamily
>> BackupRestore::RestoreKesusResources [GOOD]
>> BackupRestore::RestoreReplicationWithoutSecret
>> EncryptedBackupParamsValidationTest::EmptyImportItem
>> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD]
>> KqpOlapScheme::CreateTableStoreWithFamily [GOOD]
>> KqpOlapScheme::CreateTableNonDefaultFamilyWithoutCompression
>> KqpAcl::FailNavigate
>> BackupRestore::TestAllPrimitiveTypes-TIMESTAMP64 [FAIL]
>> BackupRestore::TestAllPrimitiveTypes-STRING
>> KqpOlapTypes::Timestamp
>> KqpOlapScheme::WithoutDefaultColumnFamily [GOOD]
>> KqpOlapScheme::UnknownColumnFamily
>> KqpOlapScheme::AddColumnWithStore [GOOD]
>> KqpOlapScheme::AddColumnWithColumnFamily
|71.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut
|71.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut
|71.9%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut
>> KqpScheme::BuildingUniqIndexDeniesTableModificationsPublicApi
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryFromSqs Test command err:
Trying to start YDB, gRPC: 5955, MsgBus: 22121
2025-05-29T15:28:49.722088Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890030935290921:2062];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:28:49.722133Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001406/r3tmp/tmpb1VKDg/pdisk_1.dat
2025-05-29T15:28:49.812110Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890030935290901:2079] 1748532529721932 != 1748532529721935
2025-05-29T15:28:49.814539Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 5955, node 1
2025-05-29T15:28:49.834985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:49.835000Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:49.835002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:49.835052Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22121 2025-05-29T15:28:49.879402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:49.879432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:49.879833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:49.920628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:49.923885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:28:49.931596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:49.964583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:50.003960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:50.031888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:28:50.213421Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890035230259832:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:50.213452Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:50.273850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:28:50.283553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:28:50.291094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:28:50.348805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:28:50.360697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:28:50.417209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:28:50.429419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:28:50.445551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890035230260488:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:50.445555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890035230260493:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:50.445574Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:28:50.446491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:28:50.452410Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890035230260495:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:28:50.546360Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890035230260549:3401] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:28:50.651468Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890035230260565:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:28:50.653050Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWUxZTE1ZjQtOGE3OWUzNjctNzM2ZDRhNjktMjIzMTgwNzA=, ActorId: [1:7509890035230259829:2401], ActorState: ExecuteState, TraceId: 01jweaj38devbtt106n09a8dke, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:28:50.653830Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C
8. /-S/util/thread/pool.h:71: Process @ 0x2651596C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9
10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C
13. ??:0: ?? @ 0x7F8953BE5AC2
14. ??:0: ?? @ 0x7F8953C7784F
Trying to start YDB, gRPC: 25378, MsgBus: 19485
2025-05-29T15:28:54.953854Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890050479412715:2276];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:28:54.953873Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001406/r3tmp/tmpb1JheR/pdisk_1.dat
2025-05-29T15:28:55.034348Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 25378, node 1
2025-05-29T15:28:55.052917 ... alPanicImpl @ 0x13DD8F65
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C
8. /-S/util/thread/pool.h:71: Process @ 0x2651596C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9
10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C
13. ??:0: ?? @ 0x7FCB1A343AC2
14. ??:0: ??
@ 0x7FCB1A3D584F Trying to start YDB, gRPC: 26450, MsgBus: 9621 2025-05-29T15:29:04.181369Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890093310080946:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:04.181408Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001406/r3tmp/tmpVJJKSE/pdisk_1.dat 2025-05-29T15:29:04.258628Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26450, node 1 2025-05-29T15:29:04.278729Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:04.278770Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:04.278772Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:04.278824Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:04.283258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:04.283291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:04.284418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9621 TClient is connected to server localhost:9621 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:04.371225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:04.375374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:04.384484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:04.458529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:04.482519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:04.494647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:04.611082Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890093310082550:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:04.611113Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:04.658349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:29:04.665321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:29:04.676174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:29:04.691048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:29:04.751924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:29:04.761936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:29:04.775122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:29:04.795853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890093310083204:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:04.795912Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:04.795989Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890093310083209:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:04.797054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:29:04.807337Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890093310083211:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:29:04.871126Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890093310083262:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:29:04.969660Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890093310083278:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:04.970327Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjViM2RhZGItZTk2NDU3OS03ZTFiZGUzNC0yYjI1MmVkOA==, ActorId: [1:7509890093310082532:2401], ActorState: ExecuteState, TraceId: 01jweajh8v9458ngw8341ac72g, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:04.971461Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F155CA73AC2 14. ??:0: ?? @ 0x7F155CB0584F >> KqpOlapScheme::CreateTableNonDefaultFamilyWithoutCompression [GOOD] >> KqpScheme::CreateAlterUserWithHash >> EncryptedBackupParamsValidationTest::EmptyImportItem [GOOD] >> KqpOlapScheme::UnknownColumnFamily [GOOD] >> KqpOlapScheme::TwoSimilarColumnFamilies >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllPrimitiveTypes-UUID [FAIL] Test command err: 2025-05-29T15:28:56.824167Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890060057141073:2084];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:56.824492Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cdb/r3tmp/tmpOF2OH6/pdisk_1.dat 2025-05-29T15:28:56.889559Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14053, node 1 2025-05-29T15:28:56.907266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:56.907289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:56.907292Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:56.907345Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:56.923710Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:56.923743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:56.925389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2153 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:56.963354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:57.158357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890064352109296:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:57.158404Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:57.190989Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509890060057141279:2140] Handle TEvProposeTransaction 2025-05-29T15:28:57.191024Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509890060057141279:2140] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:28:57.191047Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509890060057141279:2140] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509890064352109317:2607] 2025-05-29T15:28:57.202398Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509890064352109317:2607] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-05-29T15:28:57.202433Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509890064352109317:2607] txid# 281474976715658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:28:57.202605Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:7509890064352109317:2607] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:28:57.202623Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509890064352109317:2607] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:28:57.202683Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509890064352109317:2607] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:28:57.202731Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509890064352109317:2607] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:28:57.202765Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509890064352109317:2607] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:28:57.202829Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509890064352109317:2607] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:28:57.203231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:28:57.204209Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509890064352109317:2607] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-05-29T15:28:57.204239Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509890064352109317:2607] txid# 281474976715658 SEND to# [1:7509890064352109316:2340] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-05-29T15:28:57.271610Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890064352109461:2347], DatabaseId: /Root, PoolId: 
default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:57.271633Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:57.278539Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509890060057141279:2140] Handle TEvProposeTransaction 2025-05-29T15:28:57.278563Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509890060057141279:2140] TxId# 281474976715659 ProcessProposeTransaction 2025-05-29T15:28:57.278586Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509890060057141279:2140] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [1:7509890064352109473:2727] 2025-05-29T15:28:57.279429Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509890064352109473:2727] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateCdcStream CreateCdcStream { TableName: "table" StreamDescription { Name: "a" Mode: ECdcStreamModeUpdate Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" } } } } UserToken: "" DatabaseName: "" PeerName: "" 2025-05-29T15:28:57.279445Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509890064352109473:2727] txid# 281474976715659 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:28:57.279460Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509890064352109473:2727] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:28:57.279559Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509890064352109473:2727] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:28:57.279602Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509890064352109473:2727] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:28:57.279612Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509890064352109473:2727] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-05-29T15:28:57.279686Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509890064352109473:2727] txid# 281474976715659 HANDLE EvClientConnected 2025-05-29T15:28:57.281040Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509890064352109473:2727] txid# 281474976715659 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-05-29T15:28:57.281054Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509890064352109473:2727] txid# 281474976715659 SEND to# [1:7509890064352109472:2352] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 53} 2025-05-29T15:28:57.341032Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][1:7509890064352109647:2360] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-05-29T15:28:57.345994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890064352109744:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:57.346015Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:57.348875Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509890060057141279:2140] Handle TEvProposeTransaction 2025-05-29T15:28:57.348888Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509890060057141279:2140] TxId# 281474976715660 ProcessProposeTransaction 2025-05-29T15:28:57.348902Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509890060057141279:2140] Cookie# 0 userReqId# "" txid# 281474976715660 SEND to# [1:7509890064352109756:2931] 2025-05-29 ... 50: Actor# [16:7509890091187303862:2599] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:29:03.874483Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [16:7509890091187303862:2599] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:29:03.874496Z node 16 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [16:7509890091187303862:2599] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:29:03.874538Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [16:7509890091187303862:2599] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:29:03.874887Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:29:03.875569Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [16:7509890091187303862:2599] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-05-29T15:29:03.875582Z node 16 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [16:7509890091187303862:2599] txid# 281474976715658 SEND to# [16:7509890091187303861:2341] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-05-29T15:29:03.878314Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7509890091187303861:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:29:03.966610Z node 16 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [16:7509890091187303128:2138] Handle TEvProposeTransaction 2025-05-29T15:29:03.966628Z node 16 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [16:7509890091187303128:2138] TxId# 281474976715659 ProcessProposeTransaction 2025-05-29T15:29:03.966642Z node 16 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [16:7509890091187303128:2138] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [16:7509890091187303934:2651] 2025-05-29T15:29:03.967548Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [16:7509890091187303934:2651] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:29:03.967563Z node 16 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [16:7509890091187303934:2651] txid# 281474976715659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:29:03.967566Z node 16 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [16:7509890091187303934:2651] txid# 281474976715659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:29:03.967758Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [16:7509890091187303934:2651] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:29:03.967769Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [16:7509890091187303934:2651] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:29:03.967797Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [16:7509890091187303934:2651] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:29:03.967826Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [16:7509890091187303934:2651] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:29:03.967837Z node 16 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [16:7509890091187303934:2651] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-05-29T15:29:03.967877Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [16:7509890091187303934:2651] txid# 281474976715659 HANDLE EvClientConnected 2025-05-29T15:29:03.969396Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1374: 
Actor# [16:7509890091187303934:2651] txid# 281474976715659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:29:03.969430Z node 16 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [16:7509890091187303934:2651] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:03.969434Z node 16 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [16:7509890091187303934:2651] txid# 281474976715659 SEND to# [16:7509890091187303861:2341] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48} 2025-05-29T15:29:03.975995Z node 16 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [16:7509890091187303128:2138] Handle TEvProposeTransaction 2025-05-29T15:29:03.976008Z node 16 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [16:7509890091187303128:2138] TxId# 281474976715660 ProcessProposeTransaction 2025-05-29T15:29:03.976021Z node 16 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [16:7509890091187303128:2138] Cookie# 0 userReqId# "" txid# 281474976715660 SEND to# [16:7509890091187303967:2664] 2025-05-29T15:29:03.976897Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [16:7509890091187303967:2664] txid# 281474976715660 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "UuidTable" Columns { Name: "Key" Type: "Uuid" NotNull: false } Columns { Name: "Value" Type: "Int32" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } FailedOnAlreadyExists: true } } UserToken: "" DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:37400" 2025-05-29T15:29:03.976903Z node 16 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [16:7509890091187303967:2664] txid# 281474976715660 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:29:03.976986Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [16:7509890091187303967:2664] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:29:03.976995Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [16:7509890091187303967:2664] txid# 281474976715660 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:29:03.977019Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [16:7509890091187303967:2664] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:29:03.977040Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [16:7509890091187303967:2664] HANDLE EvNavigateKeySetResult, txid# 281474976715660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:29:03.977049Z node 16 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# 
[16:7509890091187303967:2664] txid# 281474976715660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-05-29T15:29:03.977087Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [16:7509890091187303967:2664] txid# 281474976715660 HANDLE EvClientConnected 2025-05-29T15:29:03.977447Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.979318Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [16:7509890091187303967:2664] txid# 281474976715660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715660} 2025-05-29T15:29:03.979335Z node 16 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [16:7509890091187303967:2664] txid# 281474976715660 SEND to# [16:7509890091187303966:2334] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 53} 2025-05-29T15:29:04.031848Z node 16 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [16:7509890095482271407:2357], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:04.031966Z node 16 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=16&id=ZDMxYjY1NTktYTJlZTlkZmQtZWNlOGFmOGItZmY5MWFiZjE=, ActorId: [16:7509890091187303843:2334], ActorState: ExecuteState, TraceId: 01jweajggq0nf9d3a4v3bjc85g, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:177, NQuery::TExecuteQueryResult (anonymous namespace)::ExecuteQuery(NQuery::TSession &, const TString &, bool): (result.IsSuccess()) query: UPSERT INTO `/Root/UuidTable` (Key, Value) VALUES (RandomUuid(1), 1); issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x139A39CC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B57539) ??+0 (0x1384FC23) ??+0 (0x138603AA) NTestSuiteBackupRestoreS3::TTestCaseTestAllPrimitiveTypes::Execute_(NUnitTest::TTestContext&)+716 (0x1387193C) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1389E0A7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B593EE) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()+436 (0x1389DA64) NUnitTest::TTestFactory::Execute()+803 (0x13B59B63) NUnitTest::RunMain(int, char**)+3021 (0x13B6B70D) ??+0 (0x7F7867157D90) __libc_start_main+128 (0x7F7867157E40) _start+41 (0x12914029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewCancelled Test command err: Trying to start YDB, gRPC: 1765, MsgBus: 13541 2025-05-29T15:28:50.196027Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890032859406052:2265];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:50.196068Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001400/r3tmp/tmpcyTfT9/pdisk_1.dat 2025-05-29T15:28:50.257943Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890032859405826:2079] 1748532530195025 != 1748532530195028 2025-05-29T15:28:50.259674Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1765, node 1 2025-05-29T15:28:50.275280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:50.275299Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:50.275302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:50.275357Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13541 TClient is connected to server localhost:13541 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:28:50.335327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:50.335362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:50.336513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:50.342663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:50.345341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:28:50.357960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:50.424327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:50.489338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:28:50.560430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:28:50.623269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890032859407487:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:50.623302Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:50.671489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:50.678498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:50.693336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:50.705221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:50.760966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:50.770235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:50.782455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:50.798725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890032859408141:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:50.798823Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:50.798835Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890032859408146:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:50.799600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:28:50.804386Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890032859408148:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:28:50.884669Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890032859408199:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:51.018953Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890032859408215:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:51.020080Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzRiODRkMjktNDYwNTE2NTgtYzYyZTVmZjUtZmU1NmJmNA==, ActorId: [1:7509890032859407469:2401], ActorState: ExecuteState, TraceId: 01jweaj3ke1m4dxs4ztgbp362c, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:28:51.021030Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FEF90994AC2 14. ??:0: ?? @ 0x7FEF90A2684F Trying to start YDB, gRPC: 20968, MsgBus: 17032 2025-05-29T15:28:55.153878Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890056259691028:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:55.153935Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001400/r3tmp/tmpy3zKs8/pdisk_1.dat 2025-05-29T15:28:55.219362Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890056259691007:2079] 17485325351537 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F46424FAAC2 14. ??:0: ?? 
@ 0x7F464258C84F Trying to start YDB, gRPC: 20697, MsgBus: 29720 2025-05-29T15:29:04.568578Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890093535784084:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:04.568603Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001400/r3tmp/tmpMgLApu/pdisk_1.dat 2025-05-29T15:29:04.625054Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890093535784057:2079] 1748532544568421 != 1748532544568424 2025-05-29T15:29:04.628057Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20697, node 1 2025-05-29T15:29:04.638624Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:04.638644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:04.638645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:04.638681Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29720 2025-05-29T15:29:04.670401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:04.670428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:04.671481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29720 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:04.701091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:04.708353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:04.776095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:04.800070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:29:04.812014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:29:04.941999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890093535785695:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:04.942044Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:04.987475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:04.995729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:05.006239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:05.020438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:05.079851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:05.089778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:05.103966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:05.120436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890097830753646:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:05.120462Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:05.120548Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890097830753651:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:05.121390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:05.131162Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890097830753653:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:05.200214Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890097830753704:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:05.317771Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890097830753720:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:05.319530Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmIyNmY2MTYtYTUzOTIwZGEtZThlYmRlZTgtM2M0ZjcxNjQ=, ActorId: [1:7509890093535785691:2401], ActorState: ExecuteState, TraceId: 01jweajhk04j4dp5g7b3tx165f, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:05.322936Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F137D761AC2 14. ??:0: ?? @ 0x7F137D7F384F >> KqpOlapScheme::AddColumnWithColumnFamily [GOOD] >> KqpOlapScheme::AddColumnFamilyWithNotSupportedCodec ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::CreateTableNonDefaultFamilyWithoutCompression [GOOD] Test command err: Trying to start YDB, gRPC: 23845, MsgBus: 7945 2025-05-29T15:29:04.690358Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890093947794453:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:04.690379Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001176/r3tmp/tmp29RYtc/pdisk_1.dat 2025-05-29T15:29:04.750399Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23845, node 1 2025-05-29T15:29:04.768919Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:04.768933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:04.768935Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:04.769001Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7945 2025-05-29T15:29:04.791639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:04.791670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:04.792706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7945 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:04.839158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:04.842173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, level Uuid, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:29:05.095761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890098242762378:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:05.095785Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:05.152404Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890098242762398:2296] txid# 281474976715658, issues: { message: "Column \'level\': Type error: unsupported type Uuid" severity: 1 } Trying to start YDB, gRPC: 16627, MsgBus: 9178 2025-05-29T15:29:05.427643Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890097680362731:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:05.427740Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001176/r3tmp/tmptPXpbw/pdisk_1.dat 2025-05-29T15:29:05.442395Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16627, node 2 2025-05-29T15:29:05.459046Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:05.459059Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:05.459062Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:05.459111Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9178 TClient is connected to server localhost:9178 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:05.527875Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:05.527911Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:05.528964Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:05.531730Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:05.532847Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/TableWithoutColumnFamily` (Key Uint64 NOT NULL, Value1 String, Value2 Uint32, PRIMARY KEY (Key), FAMILY default (DATA="test", COMPRESSION="off")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:29:05.783629Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890097680363339:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:05.783647Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } CREATE TABLE `/Root/TableWithoutColumnFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (DATA="test", COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:29:05.787891Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890097680363360:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:05.787914Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } CREATE TABLE `/Root/TableWithoutColumnFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:29:05.791859Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890097680363368:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:05.791886Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:05.795232Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:05.803730Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509890097680363430:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:29:05.803774Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509890097680363430:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:05.803803Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509890097680363430:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:05.803822Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509890097680363430:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:05.803843Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[2:7509890097680363430:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fl ... : log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:29:07.259496Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:29:07.259512Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:29:07.259517Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:29:07.259527Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:29:07.259532Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:29:07.259538Z node 4 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:29:07.259543Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:29:07.259547Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:29:07.259706Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:29:07.259711Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:29:07.303825Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:07.307656Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509890105947187168:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:07.307678Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:07.311157Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:29:07.314138Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; Trying to start YDB, gRPC: 31259, MsgBus: 5604 2025-05-29T15:29:07.560369Z node 5 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7509890107713258918:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:07.560402Z node 5 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001176/r3tmp/tmpzg3qV7/pdisk_1.dat 2025-05-29T15:29:07.580271Z node 5 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31259, node 5 2025-05-29T15:29:07.603589Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:07.603599Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:07.603602Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:07.603655Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5604 2025-05-29T15:29:07.663124Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:07.663153Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:07.664117Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5604 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
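The CREATE TABLE statements on `/Root/TableWithoutColumnFamily` earlier in this run, and the CREATE TABLESTORE just below, exercise column-family declarations on column-store objects: a family is declared in the table body as FAMILY <name> (...), a column opts into it with <column> <type> FAMILY <name>, and the per-family settings seen here are DATA (presumably a storage pool kind) plus COMPRESSION, where "off", "lz4", and "zstd" appear as codecs and COMPRESSION_LEVEL accompanies zstd. A condensed sketch assembled from those statements:

-- Shape under test: per-column families with per-family codec settings.
CREATE TABLE `/Root/TableWithoutColumnFamily` (
    Key Uint64 NOT NULL,
    Value1 String FAMILY family1,   -- column assigned to a named family
    Value2 Uint32,                  -- stays in the default family
    PRIMARY KEY (Key),
    FAMILY default (COMPRESSION="off"),
    FAMILY family1 (COMPRESSION="zstd", COMPRESSION_LEVEL=1)  -- level only appears with zstd
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1);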
2025-05-29T15:29:07.687635Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:07.691528Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLESTORE `/Root/TableStoreWithColumnFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family2, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="zstd", COMPRESSION_LEVEL=1), FAMILY family2 (COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:29:08.035114Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7509890112008226832:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:08.035148Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 15735, MsgBus: 1305 2025-05-29T15:29:08.268088Z node 6 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7509890112772144237:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:08.268113Z node 6 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001176/r3tmp/tmpFjgqD3/pdisk_1.dat 2025-05-29T15:29:08.283528Z node 6 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15735, node 6 2025-05-29T15:29:08.299976Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:08.299990Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:08.300006Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:08.300059Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1305 TClient is connected to server localhost:1305 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:08.372444Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:08.372471Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:08.372799Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:08.373525Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:08.602350Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509890112772144871:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:08.602394Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpOlapTypes::Timestamp [GOOD] >> KqpOlapTypes::Decimal35 >> KqpOlapScheme::TwoSimilarColumnFamilies [GOOD] >> KqpOlapTypes::Decimal >> EncryptedBackupParamsValidationTest::IncorrectKeyImport >> BackupRestore::TestAllPrimitiveTypes-STRING [FAIL] >> BackupRestore::TestAllPrimitiveTypes-UTF8 >> KqpScheme::DisableResourcePoolsOnServerless [GOOD] >> KqpScheme::DisableResourcePoolClassifiers >> KqpOlapScheme::AddColumnFamilyWithNotSupportedCodec [GOOD] >> KqpScheme::CreateTableWithCompactionPolicyCompat >> KqpScheme::FamilyColumnTest >> KqpConstraints::AlterTableAddColumnWithDefaultValue >> KqpScheme::SchemaVersionMissmatchWithIndexWrite >> KqpScheme::AlterIndexImplTable-VectorIndex >> KqpScheme::CreateUserWithPassword ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::AddColumnFamilyWithNotSupportedCodec [GOOD] Test command err: Trying to start YDB, gRPC: 11481, MsgBus: 12463 2025-05-29T15:29:04.099594Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890093253797214:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:04.099615Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00117d/r3tmp/tmpaYnp7J/pdisk_1.dat 2025-05-29T15:29:04.214861Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11481, node 1 2025-05-29T15:29:04.254682Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:04.254695Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:04.254697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:04.254769Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:04.272131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:04.272163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:04.273594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12463 TClient is connected to server localhost:12463 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:04.344490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:04.351873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id, id_second)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:29:04.563728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890093253797843:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:04.563750Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:04.611823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:04.623509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890093253797919:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:29:04.623595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890093253797919:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:04.623664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890093253797919:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:04.623692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890093253797919:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:04.623728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890093253797919:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:29:04.623754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890093253797919:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:29:04.623782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890093253797919:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:29:04.623803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890093253797919:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:29:04.623823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890093253797919:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:29:04.623846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890093253797919:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:29:04.623875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890093253797919:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:29:04.623900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509890093253797919:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:29:04.624442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:29:04.624455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:29:04.624467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:29:04.624476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:29:04.624494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:29:04.624503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:29:04.624521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:29:04.624531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:29:04.624537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:29:04.624544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:29:04.624549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:29:04.624552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:29:04.624565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:29:04.624573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:29:04.624585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:29:04.624594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:29:04.624601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:29:04.624604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switche ... 5:29:09.442581Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:09.442614Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:09.443043Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:09.444120Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected CREATE TABLE `/Root/TableWithFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family1, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="lz4"), FAMILY family2 (COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:29:09.709160Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509890116124900015:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:09.709198Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:09.713148Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:09.721928Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890116124900061:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:29:09.721978Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890116124900061:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:09.722038Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890116124900061:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:09.722063Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890116124900061:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:09.722090Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890116124900061:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:29:09.722124Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890116124900061:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:29:09.722143Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890116124900061:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:29:09.722168Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890116124900061:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:29:09.722193Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890116124900061:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:29:09.722217Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890116124900061:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:29:09.722240Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890116124900061:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:29:09.722264Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[6:7509890116124900061:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:29:09.722824Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:29:09.722839Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:29:09.722854Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:29:09.722860Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:29:09.722877Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:29:09.722885Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:29:09.722896Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:29:09.722904Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:29:09.722914Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:29:09.722922Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:29:09.722929Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:29:09.722937Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:29:09.722959Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:29:09.722969Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:29:09.722987Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:29:09.722995Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:29:09.723006Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:29:09.723015Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:29:09.723022Z node 6 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:29:09.723031Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:29:09.723035Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:29:09.723126Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:29:09.723134Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:29:09.769594Z node 6 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:09.772535Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509890116124900132:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:09.772563Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:09.775852Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509890116124900138:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:09.775874Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::SelectCountAsteriskFromVar Test command err: Trying to start YDB, gRPC: 30347, MsgBus: 15132 2025-05-29T15:28:50.538536Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890035189541491:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:50.538572Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013e2/r3tmp/tmphRBK6F/pdisk_1.dat 2025-05-29T15:28:50.622903Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:50.622974Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890035189541470:2079] 1748532530538404 != 1748532530538407 TServer::EnableGrpc on GrpcPort 30347, node 1 2025-05-29T15:28:50.642700Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:50.642715Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:50.642718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:50.642786Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:50.642798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:50.642861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:50.643917Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15132 TClient is connected to server localhost:15132 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:50.707233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
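The ydb/core/kqp/ut/scheme log that ends above belongs to KqpOlapScheme::AddColumnFamilyWithNotSupportedCodec, which finishes [GOOD]; the statements it prints use only supported codecs, so the rejected DDL itself is not shown. A hypothetical illustration of what such a negative check might submit — the codec name is invented, and the ADD FAMILY syntax is an assumption rather than quoted from the test:

-- Hypothetical: a codec the engine does not support should make this DDL fail.
ALTER TABLE `/Root/TableWithFamily`
    ADD FAMILY family3 (COMPRESSION="snappy");  -- "snappy" is a placeholder, not a codec seen in this log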
2025-05-29T15:28:50.712922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:50.736667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:50.758474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:50.771858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:50.960576Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890035189543108:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:50.960605Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:51.013102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.037011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.048503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.065217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.076893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.092424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.108065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.138901Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890039484511055:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:51.138925Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:51.139231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890039484511060:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:51.140137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:28:51.143745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:28:51.143883Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890039484511062:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:28:51.225004Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890039484511113:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:28:51.356710Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890039484511129:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:28:51.356878Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjVkYTJiNzgtNzA5MWFhMTYtYWVmYWE1NS01MTIzYjg2MQ==, ActorId: [1:7509890035189543081:2400], ActorState: ExecuteState, TraceId: 01jweaj3xy3s0nanrw7cxr63m2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:28:51.359373Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F3994B34AC2 14. ??:0: ?? @ 0x7F3994BC684F Trying to start YDB, gRPC: 19918, MsgBus: 17559 2025-05-29T15:28:55.565097Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890058078133825:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:55.565303Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013e2/r3tmp/tmpn76rWy/pdisk_1.dat 2025-05-29T15:28:55.619584Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890058078133663:2079] 174853253556 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FD0879F1AC2 14. ??:0: ?? 
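The abort above happens before the named test's own query runs: per the stack trace, compilation of a setup statement inside CreateSampleTables (kqp_ut_common.cpp:365) fails with the internal YQL error yql/essentials/ast/yql_expr.h:1874: index out of range (code 1060), AssertSuccessResult sees the INTERNAL_ERROR status, and the VERIFY tears down the unittest thread. A guess at the flavor of statement being compiled at that point, based only on the helper's name; table, columns, and values are illustrative, not taken from the log:

-- Presumed shape of a CreateSampleTables seeding statement (illustrative only):
REPLACE INTO `/Root/KeyValue` (Key, Value) VALUES
    (1u, "One"),
    (2u, "Two");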
@ 0x7FD087A8384F Trying to start YDB, gRPC: 17573, MsgBus: 21764 2025-05-29T15:29:05.446449Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890099673335660:2259];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:05.446535Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013e2/r3tmp/tmpsQ97qH/pdisk_1.dat 2025-05-29T15:29:05.509707Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:05.509812Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890099673335440:2079] 1748532545445500 != 1748532545445503 TServer::EnableGrpc on GrpcPort 17573, node 1 2025-05-29T15:29:05.521354Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:05.521372Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:05.521375Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:05.521437Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21764 2025-05-29T15:29:05.548270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:05.548307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:05.549331Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21764 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:05.591729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:05.604034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:05.624145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:05.647135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:05.659374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:05.827781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890099673337073:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:05.827807Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:05.877419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-05-29T15:29:05.889071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-05-29T15:29:05.902300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-05-29T15:29:05.916065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-05-29T15:29:05.971795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-05-29T15:29:05.986307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-05-29T15:29:06.000332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-05-29T15:29:06.023720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890103968305021:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:06.023764Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:06.023772Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890103968305026:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:06.024835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-05-29T15:29:06.027581Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890103968305028:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-05-29T15:29:06.083382Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890103968305079:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:29:06.209734Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890103968305095:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:29:06.209888Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTEwZjg5ZS05YWU4NTY5NC1kM2FiZDA1Ny03YzdjZWVlMg==, ActorId: [1:7509890099673337054:2401], ActorState: ExecuteState, TraceId: 01jweajjf7a31fjk6snseck895, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:29:06.214926Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C
8. /-S/util/thread/pool.h:71: Process @ 0x2651596C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9
10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C
13. ??:0: ?? @ 0x7F3F4C95DAC2
14. ??:0: ?? @ 0x7F3F4C9EF84F
>> KqpScheme::MoveTableWithSerialTypes
>> KqpScheme::CreateExternalDataSourceValidationLocation
>> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD]
>> BackupRestoreS3::PrefixedVectorIndex
>> EncryptedBackupParamsValidationTest::IncorrectKeyImport [GOOD]
>> KqpOlapTypes::Decimal35 [GOOD]
>> KqpOlapTypes::DecimalCsv
>> KqpOlapTypes::Decimal [GOOD]
>> KqpOlapTypes::AttributeNegative
>> BackupRestore::TestAllPrimitiveTypes-UTF8 [FAIL]
>> BackupRestore::TestAllPrimitiveTypes-YSON
>> EncryptedBackupParamsValidationTest::EncryptionSettingsWithoutKeyImport
>> KqpOlapScheme::CreateTableWithTtl
>> KqpScheme::TouchIndexAfterMoveIndexWrite
>> KqpScheme::CreateAndAlterTableWithPartitioningBySizeUncompat
>> KqpOlapTypes::DecimalCsv [GOOD]
>> KqpOlapTypes::NegativeTimestampErr
>> KqpOlapScheme::TenThousandColumns [FAIL]
>> KqpOlapScheme::NullKeySchema
>> KqpScheme::AddChangefeedWhenDisabled
>> KqpScheme::DropNonExistingExternalDataSource
>> KqpScheme::InvalidationAfterDropCreateTable2
>> KqpScheme::DropExternalTable
>> KqpScheme::CreateExternalTableWithUpperCaseSettings
>> EncryptedBackupParamsValidationTest::EncryptionSettingsWithoutKeyImport [GOOD]
>> BackupRestoreS3::PrefixedVectorIndex [GOOD]
>> BackupRestoreS3::TestAllPrimitiveTypes-BOOL
>> KqpScheme::AlterTableAlterIndex-UseQueryService
>> KqpOlapScheme::CreateTableWithTtl [GOOD]
>> KqpOlapScheme::CreateTableWithoutTtl
>> BackupRestore::TestAllPrimitiveTypes-YSON [FAIL]
>> BackupRestore::TestAllPrimitiveTypes-UUID
>> KqpScheme::CreateDropTableViaApiMultipleTime
>> KqpOlapTypes::NegativeTimestampErr [GOOD]
>> KqpOlapTypes::JsonImport
>> KqpOlapScheme::NullKeySchema [GOOD]
>> KqpOlapScheme::SetColumnFamily
>> BsControllerConfig::ManyPDisksRestarts [GOOD]
>> BsControllerConfig::MergeBoxes
>> KqpOlapScheme::CreateTableWithoutTtl [GOOD]
>> KqpOlapScheme::CreateWithDefaultColumnFamily
>> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD]
>> KqpAcl::FailResolve
>> KqpOlapTypes::JsonImport [GOOD]
>> KqpScheme::AddChangefeed

------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::UseLocksCache [FAIL]
Test command err:
2025-05-29T15:23:28.815027Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:23:28.815061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:23:28.815073Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b3a/r3tmp/tmpeUe09c/pdisk_1.dat 2025-05-29T15:23:28.931808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:23:28.947113Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:23:28.951004Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532208390846 != 1748532208390850 2025-05-29T15:23:28.994449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:23:28.994497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:23:29.005403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:23:29.080734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:23:29.099243Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:671:2572]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:23:29.099509Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:673:2574]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:23:29.099655Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:671:2572]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:23:29.099744Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:671:2572] 2025-05-29T15:23:29.099814Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:23:29.109651Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:671:2572]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:23:29.109763Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:673:2574]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:23:29.109913Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:673:2574] 2025-05-29T15:23:29.109957Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:23:29.111581Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:657:2564], 
Recipient [1:673:2574]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:23:29.111734Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:23:29.111762Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:23:29.111926Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:23:29.111935Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:23:29.111943Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:23:29.111999Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:23:29.112029Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:23:29.112041Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:704:2572] in generation 1 2025-05-29T15:23:29.112116Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:23:29.112129Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:23:29.112248Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-05-29T15:23:29.112256Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037889 2025-05-29T15:23:29.112262Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037889 2025-05-29T15:23:29.112289Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:23:29.112303Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:23:29.112310Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037889 persisting started state actor id [1:705:2574] in generation 1 2025-05-29T15:23:29.127079Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:23:29.132593Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:23:29.132702Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:23:29.132737Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:708:2593] 2025-05-29T15:23:29.132744Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:23:29.132749Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:23:29.132756Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:23:29.132863Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:671:2572], Recipient [1:671:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:23:29.132875Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:23:29.132908Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:23:29.132918Z node 1 :TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037889 2025-05-29T15:23:29.132933Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:23:29.132944Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037889, actorId: [1:709:2594] 2025-05-29T15:23:29.132948Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037889 2025-05-29T15:23:29.132952Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-05-29T15:23:29.132956Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-29T15:23:29.133052Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:673:2574], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:23:29.133059Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:23:29.133103Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:23:29.133131Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:23:29.133162Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:23:29.133169Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:23:29.133177Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:23:29.133183Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:23:29.133187Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:23:29.133192Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:23:29.133199Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:23:29.133207Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-05-29T15:23:29.133221Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-05-29T15:23:29.133246Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:688:2582], Recipient [1:671:2572]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:29.133252Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:23:29.133259Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:688:2582], sessionId# [0:0:0] 2025-05-29T15:23:29.133266Z node 1 :TX_DATASHARD 
DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-05-29T15:23:29.133271Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:23:29.133274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037889 2025-05-29T15:23:29.133277Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-05-29T15:23:29.133281Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037889 2025-05-29T15:23:29.133284Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-05-29T15:23:29.133290Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-05-29T15:23:29.133400Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:688:2582] 2025-05-29T15:23:29.133409Z node 1 :TX_DATASHARD TRACE: datashard_impl.h ... 87Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:53.385509Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037889, FollowerId 0, tableId 2 2025-05-29T15:28:54.120182Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:671:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:54.120250Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:54.928763Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [1:671:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:54.928806Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3155: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:54.928847Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037888 outdated step 2070000 last cleanup 0 2025-05-29T15:28:54.928873Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:28:54.928883Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:28:54.928889Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:28:54.928894Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:28:54.928942Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:54.928947Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3155: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:54.928958Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No 
cleanup at 72075186224037889 outdated step 2070000 last cleanup 0 2025-05-29T15:28:54.928964Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:28:54.928968Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037889 2025-05-29T15:28:54.928972Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-05-29T15:28:54.928975Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037889 2025-05-29T15:28:54.928995Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:671:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:54.929037Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 2025-05-29T15:28:54.929054Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:54.929070Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037889, FollowerId 0, tableId 2 2025-05-29T15:28:55.698369Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:671:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:55.698433Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:56.431191Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:671:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:56.431295Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 2025-05-29T15:28:56.431317Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:56.431332Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037889, FollowerId 0, tableId 2 2025-05-29T15:28:57.199795Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [1:671:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:57.199832Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3155: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:57.199878Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037888 outdated step 2085000 last cleanup 0 2025-05-29T15:28:57.199902Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:28:57.199912Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:28:57.199917Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 
72075186224037888 has no attached operations 2025-05-29T15:28:57.199921Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:28:57.199967Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:57.199973Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3155: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:57.199982Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037889 outdated step 2085000 last cleanup 0 2025-05-29T15:28:57.199989Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:28:57.199993Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037889 2025-05-29T15:28:57.199998Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-05-29T15:28:57.200002Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037889 2025-05-29T15:28:57.200041Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:671:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:57.200055Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:57.918886Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:671:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:57.919008Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 2025-05-29T15:28:57.919037Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:57.919059Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037889, FollowerId 0, tableId 2 2025-05-29T15:28:58.655151Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:671:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:58.655202Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:59.405299Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [1:671:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:59.405341Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3155: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:59.405381Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037888 outdated step 2100000 last cleanup 0 2025-05-29T15:28:59.405408Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: 
GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:28:59.405420Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:28:59.405427Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:28:59.405432Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:28:59.405472Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:59.405477Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3155: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-05-29T15:28:59.405487Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037889 outdated step 2100000 last cleanup 0 2025-05-29T15:28:59.405493Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:28:59.405496Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037889 2025-05-29T15:28:59.405500Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-05-29T15:28:59.405504Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037889 2025-05-29T15:28:59.405532Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:671:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:59.405574Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 2025-05-29T15:28:59.405591Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:28:59.405606Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037889, FollowerId 0, tableId 2 (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.ydb/library/actors/testlib/test_runtime.cpp:716: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:227:2060] recipient: [1:221:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:227:2060] recipient: [1:221:2142] Leader for TabletID 72057594046678944 is [1:237:2152] sender: [1:238:2060] recipient: [1:221:2142] 2025-05-29T15:28:02.270333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:02.270354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:02.270358Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:02.270361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:02.270372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:02.270374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:02.270380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:02.270390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:02.270464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:02.270524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:02.282503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:02.282527Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:02.286193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:02.286261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:02.286296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:02.288095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:02.288240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:02.288341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:02.288405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:02.289107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:02.289142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:02.289392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:02.289403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:02.289420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2025-05-29T15:28:02.289427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:02.289433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:02.289446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:02.290983Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:237:2152] sender: [1:351:2060] recipient: [1:17:2064] 2025-05-29T15:28:02.308124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:02.308196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:02.308261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:02.308305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:02.308316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:02.309117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:02.309148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:02.309221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:02.309230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:02.309236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:02.309242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:02.309668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:02.309682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at 
schemeshard: 72057594046678944 2025-05-29T15:28:02.309688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:02.310057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:02.310072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:02.310078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:02.310085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:02.310690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:02.311138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:02.311180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:02.311374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:02.311397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 246 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:02.311404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:02.311469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:02.311477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:02.311505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:02.311517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:02.312009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:02.312019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:02.312058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... d EvNotifyTxCompletion 2025-05-29T15:29:12.276612Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-05-29T15:29:12.276676Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [7:686:2506], Recipient [7:238:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:29:12.276683Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:29:12.276687Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:29:12.276710Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [7:581:2401], Recipient [7:238:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2025-05-29T15:29:12.276714Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:29:12.276725Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-05-29T15:29:12.276745Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-05-29T15:29:12.276749Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:684:2504] 2025-05-29T15:29:12.276771Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [7:686:2506], Recipient [7:238:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:29:12.276778Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:29:12.276782Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 2025-05-29T15:29:12.276863Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [8:557:2102], Recipient [7:238:2153] 2025-05-29T15:29:12.276868Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:29:12.277437Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 557 RawX2: 34359740470 } AllowCreateInTempDir: false } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:29:12.277490Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 107:0, at 
schemeshard: 72057594046678944 2025-05-29T15:29:12.277510Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 107:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:134, at schemeshard: 72057594046678944 2025-05-29T15:29:12.277556Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:29:12.278063Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 107, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/test/tmp/a/b\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:134" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:29:12.278095Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:134, operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2025-05-29T15:29:12.278111Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-05-29T15:29:12.278177Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-05-29T15:29:12.278182Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-05-29T15:29:12.278226Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [7:692:2512], Recipient [7:238:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:29:12.278231Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:29:12.278235Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:29:12.278253Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [7:581:2401], Recipient [7:238:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 107 2025-05-29T15:29:12.278257Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:29:12.278267Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-05-29T15:29:12.278284Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-05-29T15:29:12.278288Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- 
TTxNotificationSubscriber for txId 107: satisfy waiter [7:690:2510]
2025-05-29T15:29:12.278304Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [7:692:2512], Recipient [7:238:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected
2025-05-29T15:29:12.278308Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected
2025-05-29T15:29:12.278312Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944
TestWaitNotification: OK eventTxId 107
TestModificationResults wait txId: 108
2025-05-29T15:29:12.278366Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [8:557:2102], Recipient [7:238:2153]
2025-05-29T15:29:12.278370Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction
2025-05-29T15:29:12.278774Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 557 RawX2: 34359740470 } AllowCreateInTempDir: true } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:29:12.278800Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 108:0, at schemeshard: 72057594046678944
2025-05-29T15:29:12.278805Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., at schemeshard: 72057594046678944
2025-05-29T15:29:12.278828Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944
2025-05-29T15:29:12.279206Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "Can\'t create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can\'t be created in another temporary directory." TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:29:12.279238Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2
2025-05-29T15:29:12.279244Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944
TestModificationResult got TxId: 108, wait until txId: 108
TestWaitNotification wait txId: 108
2025-05-29T15:29:12.279312Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion
2025-05-29T15:29:12.279317Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108
2025-05-29T15:29:12.279362Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [7:698:2518], Recipient [7:238:2153]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-05-29T15:29:12.279367Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-05-29T15:29:12.279371Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944
2025-05-29T15:29:12.279386Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [7:581:2401], Recipient [7:238:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 108
2025-05-29T15:29:12.279390Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion
2025-05-29T15:29:12.279399Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944
2025-05-29T15:29:12.279415Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult
2025-05-29T15:29:12.279419Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:696:2516]
2025-05-29T15:29:12.279436Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [7:698:2518], Recipient [7:238:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected
2025-05-29T15:29:12.279440Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected
2025-05-29T15:29:12.279444Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944
TestWaitNotification: OK eventTxId 108
>> KqpOlapScheme::SetColumnFamily [GOOD]
>> KqpOlapScheme::PrimaryKeyNotDefaultColumnFamily
>> BackupRestoreS3::TestAllPrimitiveTypes-BOOL [FAIL]
>> BackupRestoreS3::TestAllPrimitiveTypes-DATE
>> KqpScheme::CreateAlterUserLoginNoLogin
>> BackupRestore::TestAllPrimitiveTypes-UUID [FAIL]
>> KqpOlapScheme::CreateWithDefaultColumnFamily [GOOD]
>> KqpOlapScheme::CreateWithColumnFamily
>> KqpOlapScheme::PrimaryKeyNotDefaultColumnFamily [GOOD]
>> KqpOlapScheme::SetNotDefaultColumnFamilyForPrimaryKey
>> KqpScheme::DisableResourcePoolClassifiersOnServerless
>> KqpScheme::QueryWithAlter
>> KqpOlapScheme::CreateWithColumnFamily [GOOD]
>> KqpOlapScheme::CreateTableWithDefaultFamilyWithoutSettings
>> KqpScheme::CreateTableWithReadReplicasUncompat
>> KqpScheme::AlterIndexImplTableUsingPublicAPI
>> KqpScheme::Int8Int16
>> KqpScheme::CreateTableWithDefaultFamily
>> KqpConstraints::DefaultValuesForTable
>> BackupRestoreS3::TestAllPrimitiveTypes-DATE [FAIL]
>> BackupRestoreS3::TestAllPrimitiveTypes-DATETIME
>> KqpScheme::ResourcePoolsValidation
>> KqpOlapScheme::SetNotDefaultColumnFamilyForPrimaryKey [GOOD]
>> KqpScheme::ModifyPermissions
>> KqpScheme::CreateExternalTable
>> TSchemeShardLoginTest::ChangeAccountLockoutParameters [GOOD]
>> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb
>> KqpScheme::CreateUserWithoutPassword
>> KqpOlapScheme::CreateTableWithDefaultFamilyWithoutSettings [GOOD]
>> KqpOlapScheme::CreateTableWithFamilyWithOnlyCompressionLevel
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapTypes::AttributeNegative
Test command err:
Trying to start YDB, gRPC: 21489, MsgBus: 7190
2025-05-29T15:29:03.199156Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890091044031245:2081];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:03.199335Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001182/r3tmp/tmp0rpfVq/pdisk_1.dat
2025-05-29T15:29:03.273912Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 21489, node 1
2025-05-29T15:29:03.293322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:29:03.293337Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:29:03.293339Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:29:03.293401Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
2025-05-29T15:29:03.298403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:29:03.298435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:29:03.299553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:7190
TClient is connected to server localhost:7190
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:29:03.350547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:03.360186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:03.379857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:29:03.407915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:03.431471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.619980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890091044032823:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:03.620008Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:03.659383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.666196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.675701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.690053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.745602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.759951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.773512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.789329Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890091044033481:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:03.789364Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:03.789386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890091044033486:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:03.790071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:03.793137Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890091044033488:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:29:03.855478Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890091044033539:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:29:03.953020Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890091044033555:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:29:03.953151Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjkxYTU5YmEtNDJkY2QxYTctMTA5NGM4Y2QtZWQzZGY2NjY=, ActorId: [1:7509890091044032805:2401], ActorState: ExecuteState, TraceId: 01jweajg9d0zfwhcaftzhs46md, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:29:03.953834Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055
1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C
8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9
10. /-S/util/thread/factory.h:15: Execute @ 0x16344199
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C
13. ??:0: ?? @ 0x7EFDE8534AC2
14. ??:0: ?? @ 0x7EFDE85C684F
Trying to start YDB, gRPC: 16205, MsgBus: 19685
2025-05-29T15:29:07.954459Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890109249310727:2196];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:07.954574Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001182/r3tmp/tmpzsbCB5/pdisk_1.dat
2025-05-29T15:29:08.058160Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:29:08.059422Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890109249310570:2079] 1748532547952598 != 1748532547952601
TServer::EnableGrpc on GrpcPort 16205, node 1
2025-05-29T15:29:08.089844Z node 1 :NET_CLASSIFIER WARN ...
apshotManager: discarding snapshot; our snapshot: [step: 1748532550422, txId: 18446744073709551615] shutting down 2025-05-29T15:29:10.470218Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532550464, txId: 18446744073709551615] shutting down 2025-05-29T15:29:10.524452Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532550506, txId: 18446744073709551615] shutting down 2025-05-29T15:29:10.563699Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532550555, txId: 18446744073709551615] shutting down 2025-05-29T15:29:10.603463Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532550597, txId: 18446744073709551615] shutting down 2025-05-29T15:29:10.647066Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532550639, txId: 18446744073709551615] shutting down Trying to start YDB, gRPC: 9258, MsgBus: 11316 2025-05-29T15:29:10.774570Z node 5 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7509890119691315042:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:10.774592Z node 5 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001182/r3tmp/tmprGsCRu/pdisk_1.dat 2025-05-29T15:29:10.788384Z node 5 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9258, node 5 2025-05-29T15:29:10.801577Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:10.801594Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:10.801596Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:10.801650Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11316 TClient is connected to server localhost:11316 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:29:10.874991Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:10.875025Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:10.876067Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:10.878889Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.889214Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.902026Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.919376Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.928660Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:11.178128Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7509890123986283919:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:11.178149Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:11.187621Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.195482Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.208131Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.221658Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.235319Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.250920Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.264151Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.279910Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7509890123986284572:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:11.279940Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:11.279956Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7509890123986284577:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:11.280819Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:11.283411Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7509890123986284579:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:29:11.339802Z node 5 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [5:7509890123986284630:3393] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:29:11.602918Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [5:7509890123986284646:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:29:11.603064Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=5&id=ZjM4OTczMjgtODI5NTYzYTYtZWQ2ZmZmYjItNTNjNmNlOWY=, ActorId: [5:7509890123986283916:2401], ActorState: ExecuteState, TraceId: 01jweajqkf0n2m3069z7hwxc34, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:29:11.603723Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055
1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C
8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9
10. /-S/util/thread/factory.h:15: Execute @ 0x16344199
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C
13. ??:0: ?? @ 0x7F1A50D49AC2
14. ??:0: ?? @ 0x7F1A50DDB84F
>> KqpScheme::CreateAndAlterTableWithPartitioningBySizeCompat
>> KqpOlapScheme::CreateTableWithFamilyWithOnlyCompressionLevel [GOOD]
>> BackupRestoreS3::TestAllPrimitiveTypes-DATETIME [FAIL]
>> BackupRestoreS3::TestAllPrimitiveTypes-DATE32
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> EncryptedBackupParamsValidationTest::EncryptionSettingsWithoutKeyImport [GOOD]
Test command err:
2025-05-29T15:28:56.336394Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890060214942776:2075];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:28:56.336424Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cea/r3tmp/tmp1kVCFf/pdisk_1.dat
2025-05-29T15:28:56.413247Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 8807, node 1
2025-05-29T15:28:56.422784Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0
2025-05-29T15:28:56.422800Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0
2025-05-29T15:28:56.426672Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:28:56.426685Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:28:56.426688Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:28:56.426755Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
2025-05-29T15:28:56.436910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:28:56.436936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:28:56.438496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:20598
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:56.481048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:56.702667Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890060214943723:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:56.702696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:56.743067Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509890060214943004:2140] Handle TEvProposeTransaction 2025-05-29T15:28:56.743089Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509890060214943004:2140] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:28:56.743113Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509890060214943004:2140] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509890060214943744:2604] 2025-05-29T15:28:56.752968Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509890060214943744:2604] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-05-29T15:28:56.753001Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509890060214943744:2604] txid# 281474976715658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:28:56.753223Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:7509890060214943744:2604] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:28:56.753248Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509890060214943744:2604] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:28:56.753299Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509890060214943744:2604] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:28:56.753342Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509890060214943744:2604] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:28:56.753359Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509890060214943744:2604] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:28:56.753416Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509890060214943744:2604] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:28:56.753827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:28:56.754846Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509890060214943744:2604] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-05-29T15:28:56.754867Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509890060214943744:2604] txid# 281474976715658 SEND to# [1:7509890060214943743:2340] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-05-29T15:28:56.824535Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890060214943887:2347], DatabaseId: /Root, PoolId: 
default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:56.824581Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:56.824631Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890060214943892:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:56.824718Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509890060214943004:2140] Handle TEvProposeTransaction 2025-05-29T15:28:56.824726Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509890060214943004:2140] TxId# 281474976715659 ProcessProposeTransaction 2025-05-29T15:28:56.824743Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509890060214943004:2140] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [1:7509890060214943895:2722] 2025-05-29T15:28:56.825842Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509890060214943895:2722] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:28:56.825858Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509890060214943895:2722] txid# 281474976715659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:28:56.825862Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509890060214943895:2722] txid# 281474976715659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:28:56.826219Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:7509890060214943895:2722] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:28:56.826241Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509890060214943895:2722] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:28:56.826281Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509890060214943895:2722] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:28:56.826317Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509890060214943895:2722] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:28:56.826328Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509890060214943895:2722] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-05-29T15:28:56.826375Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509890060214943895:2722] txid# 281474976715659 HANDLE EvClientConnected 2025-05-29T15:28:56.826734Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:28:56.828387Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509890060214943895:2722] txid# 281474976715659 Status Statu ... AT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7509890124344718059 RawX2: 4503745656260921 } Origin: 72075186224037890 State: 2 TxId: 281474976710761 Step: 0 Generation: 1 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-29T15:29:11.833574Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 281474976710761, tablet: 72075186224037890, partId: 0 2025-05-29T15:29:11.833605Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976710761:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7509890124344718059 RawX2: 4503745656260921 } Origin: 72075186224037890 State: 2 TxId: 281474976710761 Step: 0 Generation: 1 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-29T15:29:11.833626Z node 34 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710761:0 HandleReply TEvSchemaChanged at tablet# 72057594046644480 message# Source { RawX1: 7509890124344718059 RawX2: 4503745656260921 } Origin: 72075186224037890 State: 2 TxId: 281474976710761 Step: 0 Generation: 1 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-29T15:29:11.833640Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710761:0, shardIdx: 72057594046644480:3, datashard: 72075186224037890, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-05-29T15:29:11.833646Z node 34 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 281474976710761:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.833648Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 281474976710761:0, datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-05-29T15:29:11.833653Z node 34 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976710761:0 129 -> 240 2025-05-29T15:29:11.833702Z node 34 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710761:0, reason# domain is not a serverless db, domain# /Root, domainPathId# [OwnerId: 72057594046644480, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046644480, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-29T15:29:11.833743Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:29:11.834234Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976710761:0, at schemeshard: 72057594046644480 
2025-05-29T15:29:11.834244Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:29:11.834247Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 281474976710761:0 2025-05-29T15:29:11.834263Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [34:7509890124344718059:2361] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710761 at schemeshard: 72057594046644480 2025-05-29T15:29:11.834303Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [34:7509890124344716853:2206], Recipient [34:7509890124344716853:2206]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:29:11.834311Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:29:11.834319Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710761:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.834324Z node 34 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046644480] TDone opId# 281474976710761:0 ProgressState 2025-05-29T15:29:11.834339Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:29:11.834346Z node 34 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-29T15:29:11.834349Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:29:11.834354Z node 34 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-29T15:29:11.834355Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:29:11.834359Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: true 2025-05-29T15:29:11.834370Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [34:7509890124344716853:2206] message: TxId: 281474976710761 2025-05-29T15:29:11.834379Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-29T15:29:11.834383Z node 34 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-29T15:29:11.834386Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710761:0 2025-05-29T15:29:11.834426Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 14] was 3 2025-05-29T15:29:11.834801Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:29:11.834818Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send 
to actor: [34:7509890124344716853:2206] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710761 at schemeshard: 72057594046644480 2025-05-29T15:29:11.834849Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124998, Sender [34:7509890124344716853:2206], Recipient [34:7509890124344716853:2206]: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710761 2025-05-29T15:29:11.834857Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5035: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletionResult 2025-05-29T15:29:11.834859Z node 34 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-29T15:29:11.834862Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710761 2025-05-29T15:29:11.834870Z node 34 :IMPORT DEBUG: schemeshard_import__create.cpp:361: TImport::TTxProgress: DoExecute 2025-05-29T15:29:11.834872Z node 34 :IMPORT DEBUG: schemeshard_import__create.cpp:1472: TImport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-29T15:29:11.834905Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_xxport__tx_base.h:63: SendNotifications: : id# 281474976715662, subscribers count# 0 2025-05-29T15:29:11.835262Z node 34 :IMPORT DEBUG: schemeshard_import__create.cpp:385: TImport::TTxProgress: DoComplete 2025-05-29T15:29:11.835466Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [34:7509890124344718201:3203], Recipient [34:7509890124344716853:2206]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:29:11.835476Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:29:11.835479Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480 2025-05-29T15:29:11.857967Z node 34 :TX_PROXY DEBUG: rpc_operation_request_base.h:50: [GetImport] [34:7509890124344718215:2365] [0] Resolve database: name# /Root 2025-05-29T15:29:11.858132Z node 34 :TX_PROXY DEBUG: rpc_operation_request_base.h:66: [GetImport] [34:7509890124344718215:2365] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:29:11.858140Z node 34 :TX_PROXY DEBUG: rpc_operation_request_base.h:106: [GetImport] [34:7509890124344718215:2365] [0] Send request: schemeShardId# 72057594046644480 2025-05-29T15:29:11.858220Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [34:7509890124344718218:3218], Recipient [34:7509890124344716853:2206]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:29:11.858230Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:29:11.858232Z node 34 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:29:11.858256Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 275251202, Sender [34:7509890124344718215:2365], Recipient [34:7509890124344716853:2206]: NKikimrImport.TEvGetImportRequest Request { Id: 281474976715662 } DatabaseName: "/Root" 2025-05-29T15:29:11.858261Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4990: StateWork, processing event TEvImport::TEvGetImportRequest 2025-05-29T15:29:11.858357Z node 34 :TX_PROXY DEBUG: rpc_get_operation.cpp:220: [GetImport] [34:7509890124344718215:2365] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715662 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:8807" scheme: HTTP bucket: "test_bucket" source_prefix: "Prefix" destination_path: "Root//RestorePrefix/" encryption_settings { symmetric_key { key: "Cool random key!" } } } StartTime { seconds: 1748532551 } EndTime { seconds: 1748532551 } } 2025-05-29T15:29:11.858519Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [34:7509890124344718218:3218], Recipient [34:7509890124344716853:2206]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:29:11.858526Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:29:11.858528Z node 34 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480 >> KqpScheme::AlterUser >> KqpScheme::TouchIndexAfterMoveIndexReadReplace ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::CreateTableWithFamilyWithOnlyCompressionLevel [GOOD] Test command err: Trying to start YDB, gRPC: 12610, MsgBus: 14628 2025-05-29T15:29:11.474038Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890124162129420:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:11.474064Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001156/r3tmp/tmpCcmyn2/pdisk_1.dat 2025-05-29T15:29:11.533500Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12610, node 1 2025-05-29T15:29:11.548420Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:11.548437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:11.548439Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:11.548491Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14628 2025-05-29T15:29:11.575715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:11.575739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:11.576904Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14628 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:11.606696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (created_at, id_second)) PARTITION BY HASH(created_at) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1, TTL = Interval("PT1H") ON created_at); 2025-05-29T15:29:11.801612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890124162130051:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:11.801659Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:11.853148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.866509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890124162130127:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:29:11.866608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890124162130127:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:11.866668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890124162130127:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:11.866703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890124162130127:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:11.866731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890124162130127:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:29:11.866778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890124162130127:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:29:11.866810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890124162130127:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:29:11.866840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890124162130127:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:29:11.866871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890124162130127:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:29:11.866905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890124162130127:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:29:11.866932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890124162130127:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:29:11.866961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509890124162130127:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:29:11.867653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:29:11.867674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:29:11.867693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:29:11.867698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:29:11.867722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:29:11.867728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:29:11.867741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:29:11.867747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:29:11.867761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:29:11.867768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:29:11.867777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:29:11.867782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:29:11.867811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:29:11.867824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:29:11.867847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:29:11.867856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:29:11.867870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:29:11.867882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:29:11.867890Z node 1 :TX_COLUMN ... ecute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.835807Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.836736Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.836832Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.837737Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037916;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.837850Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.838761Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.838913Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.839691Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.839800Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.840564Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.840672Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: 
tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.841661Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.841723Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.842633Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.842657Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.843708Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.843708Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.844734Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.844758Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.845776Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.845778Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.846778Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.846840Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.847760Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.847842Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.848721Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: 
tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.848738Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.849652Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.849664Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.850675Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.850680Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.851661Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.851668Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.854614Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; Trying to start YDB, gRPC: 22680, MsgBus: 30725 2025-05-29T15:29:15.215458Z node 6 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7509890141893079309:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:15.215490Z node 6 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001156/r3tmp/tmp4HSShq/pdisk_1.dat 2025-05-29T15:29:15.228348Z node 6 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22680, node 6 2025-05-29T15:29:15.243002Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:15.243019Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:15.243021Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:15.243067Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30725 TClient is connected to server localhost:30725 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:15.315824Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:15.315854Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:15.316955Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:15.319160Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:15.521711Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509890141893079927:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:15.521735Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpScheme::CreateResourcePool >> KqpScheme::InvalidationAfterDropCreateTable2MultiStageTx >> TKeyValueTest::TestRewriteThenLastValue [GOOD] >> TKeyValueTest::TestRewriteThenLastValueNewApi >> KqpScheme::DropResourcePool >> KqpScheme::CreateTableWithPartitionAtKeysSimpleUncompat >> KqpScheme::CreateDropColumnTable >> KqpScheme::DropDependentExternalDataSource >> KqpScheme::AddChangefeedNegative ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::SetNotDefaultColumnFamilyForPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 20329, MsgBus: 3139 2025-05-29T15:29:04.999700Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890094532972768:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:04.999749Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001172/r3tmp/tmpPHuuKA/pdisk_1.dat 2025-05-29T15:29:05.071428Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890094532972731:2079] 1748532544999558 != 1748532544999561 2025-05-29T15:29:05.073881Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20329, node 1 2025-05-29T15:29:05.090106Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:05.090115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:05.090117Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:05.090156Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:05.101339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:05.101367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:05.102423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3139 TClient is connected to server localhost:3139 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:29:05.151646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, resource_id Utf8 NOT NULL, level Int32 NOT NULL, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:29:05.370383Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890098827940693:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:05.370413Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:05.413566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:05.425881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890098827940769:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:29:05.425953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890098827940769:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:05.426008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890098827940769:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:05.426039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890098827940769:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:05.426107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890098827940769:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:29:05.426142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890098827940769:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:29:05.426167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890098827940769:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:29:05.426192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890098827940769:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:29:05.426223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890098827940769:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:29:05.426254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890098827940769:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:29:05.426294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890098827940769:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:29:05.426323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509890098827940769:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:29:05.426965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:29:05.426982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:29:05.426998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:29:05.427003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:29:05.427022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:29:05.427028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:29:05.427040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:29:05.427048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:29:05.427073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:29:05.427077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:29:05.427084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:29:05.427089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:29:05.427114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:29:05.427130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:29:05.427151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:29:05.427156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:29:05.427175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:29:05.427186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descri ... 7968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:14.089930Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:14.091086Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:14.095554Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:14.099203Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/TableWithFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family1, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="zstd", COMPRESSION_LEVEL=1)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:29:14.358456Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509890139071008577:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:14.358483Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:14.362457Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:14.371428Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890139071008623:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:29:14.371460Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890139071008623:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:14.371507Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890139071008623:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:14.371531Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890139071008623:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:14.371558Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890139071008623:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:29:14.371587Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890139071008623:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:29:14.371619Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890139071008623:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:29:14.371646Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890139071008623:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:29:14.371674Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890139071008623:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:29:14.371702Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890139071008623:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:29:14.371728Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890139071008623:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:29:14.371753Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[6:7509890139071008623:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:29:14.372276Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:29:14.372292Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:29:14.372307Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:29:14.372312Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:29:14.372333Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:29:14.372342Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:29:14.372354Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:29:14.372359Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:29:14.372371Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:29:14.372379Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:29:14.372387Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:29:14.372396Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:29:14.372418Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:29:14.372428Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:29:14.372455Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:29:14.372464Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:29:14.372478Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:29:14.372486Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:29:14.372495Z node 6 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:29:14.372505Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:29:14.372510Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:29:14.372599Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:29:14.372609Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:29:14.417847Z node 6 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:14.420624Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509890139071008694:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:14.420645Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:14.423675Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:29:14.425924Z node 6 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; >> KqpScheme::AlterTableAddImplicitSyncIndex >> KqpScheme::AlterTableAlterVectorIndex >> KqpConstraints::SerialTypeNegative1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllPrimitiveTypes-UUID [FAIL] Test command err: 2025-05-29T15:28:58.796620Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890070412183515:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:58.796651Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cc7/r3tmp/tmpWGMA6a/pdisk_1.dat 2025-05-29T15:28:58.868092Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25727, node 1 2025-05-29T15:28:58.879366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:58.879379Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:58.879381Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:58.879424Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2505 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:28:58.895642Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:58.895690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-29T15:28:58.898702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:28:58.941428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Backup "/Root" to "/home/runner/.ya/build/build_root/ciyv/001cc7/r3tmp/tmpSZy980/"Create temporary directory "/Root/~backup_20250529T152858" in databaseProcess "/home/runner/.ya/build/build_root/ciyv/001cc7/r3tmp/tmpSZy980/dir"Create directory "/Root/~backup_20250529T152858/dir" in databaseWrite ACL into "/home/runner/.ya/build/build_root/ciyv/001cc7/r3tmp/tmpSZy980/dir/permissions.pb"Remove directory "/Root/~backup_20250529T152858/dir"2025-05-29T15:28:58.997124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715661:0, at schemeshard: 72057594046644480 Remove temporary directory "/Root/~backup_20250529T152858" in database2025-05-29T15:28:59.003405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715662:0, at schemeshard: 72057594046644480 Backup completed successfully2025-05-29T15:28:59.014457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715663:0, at schemeshard: 72057594046644480 Restore "/home/runner/.ya/build/build_root/ciyv/001cc7/r3tmp/tmpSZy980/" to "/Root"Resolved db base path: "/Root"List of entries in the backup: [{"type":"Directory","path":"/home/runner/.ya/build/build_root/ciyv/001cc7/r3tmp/tmpSZy980/"},{"type":"Directory","path":"/home/runner/.ya/build/build_root/ciyv/001cc7/r3tmp/tmpSZy980/dir"}]Process "/home/runner/.ya/build/build_root/ciyv/001cc7/r3tmp/tmpSZy980/dir"Restore empty directory "/home/runner/.ya/build/build_root/ciyv/001cc7/r3tmp/tmpSZy980/dir" to "/Root/dir"Restore ACL "/home/runner/.ya/build/build_root/ciyv/001cc7/r3tmp/tmpSZy980/dir" to "/Root/dir"Read ACL from "/home/runner/.ya/build/build_root/ciyv/001cc7/r3tmp/tmpSZy980/dir/permissions.pb"2025-05-29T15:28:59.043280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-05-29T15:28:59.663697Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509890072446455125:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:59.663714Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cc7/r3tmp/tmpRSt7W8/pdisk_1.dat 2025-05-29T15:28:59.684488Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6083, node 4 2025-05-29T15:28:59.700595Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:59.700615Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2025-05-29T15:28:59.700618Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:59.700673Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:59.766963Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:59.766994Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:59.775320Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:59.775671Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:59.785177Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:00.047850Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509890076741423352:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:00.047876Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:00.090156Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.174913Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509890076741423513:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:00.174938Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:00.203648Z node 4 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][4:7509890076741423693:2356] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-05-29T15:29:00.218944Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509890076741423793:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:00.218966Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:00.244675Z node 4 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][4:7509890076741423966:2377] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:6:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-05-29T15:29:00.256083Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509890076741424069:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29: ... 9T15:29:11.492741Z node 31 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [31:7509890125979606095:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:11.492760Z node 31 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:11.493613Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:29:11.498796Z node 31 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [31:7509890125979606109:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:29:11.597338Z node 31 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [31:7509890125979606180:2645] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:11.601970Z node 31 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [31:7509890125979606213:2658] txid# 281474976715660, issues: { message: "Column Key has wrong key type Yson" severity: 1 } 2025-05-29T15:29:11.602066Z node 31 :KQP_SESSION WARN: kqp_session_actor.cpp:2583: SessionId: ydb://session/3?node_id=31&id=OTQ0MmE4NjgtZjM0Y2I1ODktYjc0MzY5MS03ZWM1YzhjOQ==, ActorId: [31:7509890125979606076:2332], ActorState: ExecuteState, TraceId: 01jweajqt4b66zemaghhm4830p, Create QueryResponse for error on request, msg: 2025-05-29T15:29:11.602285Z node 31 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976715661. Ctx: { TraceId: 01jweajqt4b66zemaghhm4830p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=OTQ0MmE4NjgtZjM0Y2I1ODktYjc0MzY5MS03ZWM1YzhjOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:29:11.608191Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.680058Z node 31 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [31:7509890125979606384:2359], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:11.680172Z node 31 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=31&id=OTQ0MmE4NjgtZjM0Y2I1ODktYjc0MzY5MS03ZWM1YzhjOQ==, ActorId: [31:7509890125979606076:2332], ActorState: ExecuteState, TraceId: 01jweajqznahkmh9pj4ra7n6ba, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:177, NQuery::TExecuteQueryResult (anonymous namespace)::ExecuteQuery(NQuery::TSession &, const TString &, bool): (result.IsSuccess()) query: UPSERT INTO `/Root/YsonTable` (Key, Value) VALUES (1, Yson("{ foo = bar }")); issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x139A39CC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B57539) ??+0 (0x1384FC23) ??+0 (0x138603AA) NTestSuiteBackupRestore::TTestCaseTestAllPrimitiveTypes::Execute_(NUnitTest::TTestContext&)+1979 (0x1385EC2B) NTestSuiteBackupRestore::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13875F87) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B593EE) NTestSuiteBackupRestore::TCurrentTest::Execute()+436 (0x13875944) NUnitTest::TTestFactory::Execute()+803 (0x13B59B63) NUnitTest::RunMain(int, char**)+3021 (0x13B6B70D) ??+0 (0x7F53FE4C9D90) __libc_start_main+128 (0x7F53FE4C9E40) _start+41 (0x12914029) 2025-05-29T15:29:12.345244Z node 34 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[34:7509890128479541890:2078];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:12.345851Z node 34 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cc7/r3tmp/tmplinvui/pdisk_1.dat 2025-05-29T15:29:12.364936Z node 34 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29267, node 34 2025-05-29T15:29:12.381345Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:12.381359Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:12.381362Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:12.381403Z node 34 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10723 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:29:12.445965Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:12.446006Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:12.448422Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:12.449215Z node 34 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:12.680238Z node 34 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [34:7509890128479542817:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:12.680263Z node 34 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [34:7509890128479542825:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:12.680270Z node 34 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:12.680980Z node 34 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:29:12.684953Z node 34 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [34:7509890128479542831:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:29:12.757637Z node 34 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [34:7509890128479542902:2646] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:12.761409Z node 34 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:12.826922Z node 34 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [34:7509890128479543080:2355], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:12.827019Z node 34 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=34&id=ZjBjMjc3MTgtNzAyMjllYTItYzc5MDEyYWUtMTQ0ZDk4NDc=, ActorId: [34:7509890128479542813:2332], ActorState: ExecuteState, TraceId: 01jweajs3m7gedjx8wajeszfbd, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:177, NQuery::TExecuteQueryResult (anonymous namespace)::ExecuteQuery(NQuery::TSession &, const TString &, bool): (result.IsSuccess()) query: UPSERT INTO `/Root/UuidTable` (Key, Value) VALUES (RandomUuid(1), 1); issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x139A39CC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B57539) ??+0 (0x1384FC23) ??+0 (0x138603AA) NTestSuiteBackupRestore::TTestCaseTestAllPrimitiveTypes::Execute_(NUnitTest::TTestContext&)+1979 (0x1385EC2B) NTestSuiteBackupRestore::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13875F87) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B593EE) NTestSuiteBackupRestore::TCurrentTest::Execute()+436 (0x13875944) NUnitTest::TTestFactory::Execute()+803 (0x13B59B63) NUnitTest::RunMain(int, char**)+3021 (0x13B6B70D) ??+0 (0x7F53FE4C9D90) __libc_start_main+128 (0x7F53FE4C9E40) _start+41 (0x12914029) >> BackupRestoreS3::TestAllPrimitiveTypes-DATE32 [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-DATETIME64 >> KqpScheme::CreateDropColumnTable [GOOD] >> KqpScheme::CreateDropColumnTableNegative >> KqpAcl::FailedReadAccessDenied ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AddChangefeed Test command err: Trying to start YDB, gRPC: 18885, MsgBus: 19084 2025-05-29T15:29:08.632728Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890113309654012:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:08.632911Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001168/r3tmp/tmpxp5uIx/pdisk_1.dat 2025-05-29T15:29:08.701913Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18885, node 1 2025-05-29T15:29:08.719137Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:08.719151Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:08.719153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:08.719194Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:08.734068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:08.734105Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:08.735178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19084 TClient is connected to server localhost:19084 WaitRootIsUp 'Root'... 
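The "Resource pool default not found or you don't have access permissions" warnings that open nearly every test in this log are the workload service lazily bootstrapping its default pool: the TPoolFetcherActor lookup races the ESchemeOpCreateResourcePool transaction, the creator schedules a retry ("Transaction ... completed, doublechecking"), and the follow-up propose answers "path exist, request accepts it". Noisy, but expected on a fresh database. For comparison, a pool can also be created explicitly in YQL; a sketch with a hypothetical name and settings (the parameter names follow YDB's workload-management DDL, but treat the concrete values as assumptions):

    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- queries admitted to run at once
        QUEUE_SIZE = 100              -- queries allowed to wait for admission
    );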
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:08.783866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int64 NOT NULL, timestamp Timestamp NOT NULL, ui64_type Uint64 NOT NULL, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:29:09.005907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890117604621931:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:09.005933Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:09.061258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:09.077544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890117604622007:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:29:09.077599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890117604622007:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:09.077636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890117604622007:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:09.077654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890117604622007:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:09.077679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890117604622007:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:29:09.077706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890117604622007:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:29:09.077723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890117604622007:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:29:09.077749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890117604622007:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:29:09.077772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890117604622007:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:29:09.077791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890117604622007:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:29:09.077821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890117604622007:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:29:09.077837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509890117604622007:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:29:09.078652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:29:09.078670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:29:09.078687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:29:09.078693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:29:09.078713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:29:09.078718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:29:09.078731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:29:09.078747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:29:09.078762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:29:09.078769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:29:09.078776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:29:09.078781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:29:09.078807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:29:09.078814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:29:09.078838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:29:09.078843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:29:09.078856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:29:09.078861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:29:09.078870Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema:: ... :12.643305Z node 5 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:29:12.690011Z node 5 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=512;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=512;columns=3; Trying to start YDB, gRPC: 32091, MsgBus: 15731 2025-05-29T15:29:12.981637Z node 6 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7509890128752101905:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:12.981661Z node 6 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001168/r3tmp/tmpsQljYx/pdisk_1.dat 2025-05-29T15:29:12.995137Z node 6 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32091, node 6 2025-05-29T15:29:13.007080Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:13.007093Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:13.007095Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:13.007136Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15731 TClient is connected to server 
localhost:15731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:13.081908Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:13.081940Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:13.083105Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:13.084825Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:13.094595Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:13.109257Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:13.124252Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:13.133811Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:13.362208Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509890133047070788:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:13.362242Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:13.371719Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:13.378428Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:13.391663Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:13.405266Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:13.419429Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:13.434210Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:13.447892Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:13.463065Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509890133047071444:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:13.463092Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:13.463096Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509890133047071449:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:13.463735Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:13.467171Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7509890133047071451:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:13.541999Z node 6 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [6:7509890133047071502:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:13.644489Z node 6 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [6:7509890133047071518:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:13.644611Z node 6 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=6&id=ZjA5NjU5Mi03MTg5ZTM1Zi05ZGYwYjNiZC01ODFiMTA1NA==, ActorId: [6:7509890133047070770:2401], ActorState: ExecuteState, TraceId: 01jweajsqpf1nxw36j4pcdt3n0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:13.645350Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F9DBA159AC2 14. ??:0: ?? @ 0x7F9DBA1EB84F >> KqpScheme::AlterCompressionLevelInColumnFamily >> KqpScheme::CreateDropColumnTableNegative [GOOD] >> KqpScheme::CreateExternalDataSource >> KqpAcl::AclForOltpAndOlap+isOlap >> KqpScheme::ChangefeedTopicPartitions >> BackupRestoreS3::TestAllPrimitiveTypes-DATETIME64 [FAIL] >> KqpConstraints::DropCreateSerial >> KqpScheme::RenameTable >> TChargeBTreeIndex::FewNodes_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky >> KqpScheme::AlterResourcePool >> KqpScheme::Int8Int16Olap >> KqpScheme::CreateTableWithDecimalColumn >> KqpScheme::CreateTableWithReadReplicasCompat >> KqpScheme::ResourcePoolClassifiersValidation >> KqpConstraints::DefaultValuesForTableNegative2 >> KqpScheme::CreateTransfer >> KqpScheme::CreateExternalTableCheckPrimaryKey >> KqpScheme::ModifyUnknownPermissions >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitionSizeUncompat >> KqpScheme::CreateResourcePoolClassifier >> KqpScheme::TouchIndexAfterMoveIndexWriteReplace ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:28:53.283700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:53.283726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:53.283731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:53.283734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:53.283738Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:53.283741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:53.283747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:53.283758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:53.283842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:53.283896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:53.294560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:53.294585Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:53.297368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:53.297489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:53.297537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:53.299077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:53.299213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:53.299328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:53.299385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:53.299815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:53.299850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:53.300103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:53.300113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:53.300132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:53.300140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:53.300146Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:53.300180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.301475Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:28:53.322447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:53.322527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.322596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:53.322651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:53.322662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.323567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:53.323599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:53.323659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.323669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:53.323676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:53.323682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:53.324153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.324164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:53.324170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:53.324490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
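The test logged here, TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb, drives the login-lockout path: user1 fails enough password attempts to be locked out ("User user1 login denied: too many failed password attempts" appears further down), the schemeshard tablet is restarted (visible as the TTxInit replay below), and the lockout must still hold afterwards because its parameters are persisted in the local DB; once it expires, the login succeeds and returns the PS256-signed token printed at the end of this block. The account setup itself is a single YQL statement (the password is hypothetical; the lockout thresholds come from cluster auth configuration, not from YQL):

    -- The account whose lockout state must survive a schemeshard restart.
    CREATE USER user1 PASSWORD 'correct-password';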
2025-05-29T15:28:53.324501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:53.324508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:53.324516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:53.325182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:53.325579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:53.325623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:53.325817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:53.325844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:53.325855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:53.325920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:53.325928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:53.325966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:53.325978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:53.326415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:53.326424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-05-29T15:28:53.326468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... Table profiles were not loaded 2025-05-29T15:29:17.233945Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:29:17.234028Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234045Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1457: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234053Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1483: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234105Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1785: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234132Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_data_erasure_manager.cpp:452: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-05-29T15:29:17.234161Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2033: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234172Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234181Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2151: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234193Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2237: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234201Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2303: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234218Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2453: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234248Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2832: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234260Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2911: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234301Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3409: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234309Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3445: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234328Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3659: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234337Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3804: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234346Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3821: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234379Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:3981: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234389Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3997: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234406Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4282: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234435Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4587: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234443Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4645: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234458Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4740: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234463Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4767: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.234470Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4794: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-05-29T15:29:17.236637Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:29:17.236990Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:17.237004Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:17.237211Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:29:17.237223Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:17.237230Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:29:17.237487Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:374:2342] sender: [5:432:2058] recipient: [5:15:2062] 2025-05-29T15:29:17.288818Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:43: TTxLogin Execute at schemeshard: 72057594046678944 2025-05-29T15:29:17.288837Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:46: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-05-29T15:29:17.407960Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:85: TTxLogin Complete, result: Error: "User user1 login denied: too many failed password attempts", at schemeshard: 72057594046678944 2025-05-29T15:29:17.408007Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:17.408016Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:17.408063Z node 5 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:17.408070Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:424:2381], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-05-29T15:29:17.408188Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 0 2025-05-29T15:29:19.408453Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:43: TTxLogin Execute at schemeshard: 72057594046678944 2025-05-29T15:29:19.413414Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:85: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ4NTc1NzU5LCJpYXQiOjE3NDg1MzI1NTksInN1YiI6InVzZXIxIn0.Hpr-oD7gAncAG7kfxJq4P3FFO9hwVy9zI1GxHuCCCJ-rR2wQjPmRMqAX2CxdXaXnOr3g17NGXJiRmvmma5xzs-viHi2B3VgUPhV8AvPK_xfhzegphb98O8QnEaz01GF1i6eLeX14YtRRjeA4OlpHGEiYEY1szYAxVzY_4Cnr17uNEOPj-72sH0xiMgmGGtoKV4zPdpUNyQU7PMQH_j9RJUnm7tyQd0b_M_mn1JTPxDYqQVquReXt4hivTNCOmi_DiJljI4JmI61mI5oMtGHJ2ShAl2u4haufw2vbCN4R96M6sSUFGagef2l8bmVbkFvEYS63GmJAZyjyQadRoSfFeQ" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQ4NTc1NzU5LCJpYXQiOjE3NDg1MzI1NTksInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-05-29T15:29:19.413538Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:29:19.413581Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 45us result status StatusSuccess 2025-05-29T15:29:19.413661Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzW4enZtvrPwR+MRzBZ9E\nVvAANNfmHKC9p3vdcR7LS1Jlupn1YtC0xVMOaplNLRCSE0aGXu5SyctpzNp8U26h\nE8H3sdHvvsv9/3f5KOSpd+fJfzoJHNR4fF0KGFiNxTNsQBpu5aI22HVnTN+aYlXh\nXXYHCi/LPks6lIpNwh50L88TQk4qLAFdpRS/YKUadSMxC3Ep40E0cBbGiwDtyiHR\nz5Tl1I103MjWXvrFXKKt7UM9o0UECqyPeKPuLop4Z8mkuS3ZoMh1b36eEkI/u5sR\nxRCMaOBau21xvcDiAk0olriby6DRIoBZOnSONxcKw6Olsb8bTdDU84laxbY/fIqF\nsQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1748618955152 } PublicKeys { KeyId: 2 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk8vcb9dfxVEPdzi7ABEv\nO7V288N6F+2lP8a8bUywiyP0SUz9Syi3ApqDpyWfeegsYZsLkfDk4WwQuOUipBda\nM5X4BiiY3NamlJQNfWZ76sEF4YMWj5a49ekA1p/3Fc6fnyNSMC3XcbF4/sov4SIL\nWyqSJeZPWSS9XiS9xNJtxwIA3qd+csLAo1Ituh2jij5QcU2GKf4d5IAr4ajUlovU\nk7i9LoCedtTfGBHkfhtlLfa4ySEjRhHxuz+hB4O13GFnp4oxiY5+mEesed8vYFgl\nBWC3NAIitv9nyip12eEUC/2N8QZG/CP7rhfieMt8HO+RE6GLPZTHc8+WGWb0N2B2\nEwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1748618955218 } PublicKeys { KeyId: 3 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2NcVBdoHmhHhu39w9stO\nS5qCNP9XmcXVqS2rxw89bvx65+/d3IEywuiSDcknbVB4fuz4aAO8jW4BKQ01KeEi\n1gIQlBmVDOTKvaJxIkFOg1TrjWz4y4621qUgxWL5VRk8cpGdrF6sw6eN+i2u+x5x\n6fqVpBVc1BnUxRYbgrycAuvZMA2ek+xToYDorVNoiwe5g2vWzoHboKGU07cFUQh8\nCQ9kcICjr50mXwqxTTyXjJpTZzkJYQNZNti/h9TS3N2g8VwEj6v7V1G4z08aMvwc\n9hPcjLBm81KmW/cihZ4XHFMkd7gygpiQ6aomb16DeqZVVe2ZZcr/MSaCfNdF98p4\nIwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1748618957407 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpScheme::AsyncReplicationConnectionString >> KqpScheme::InvalidationAfterDropCreateTable2NoEffects >> KqpScheme::CreateTableWithPartitionAtKeysSimpleCompat >> KqpScheme::DropAsyncReplication >> KqpScheme::DropNonExistingResourcePool >> KqpScheme::CreateTableWithTtlSettingsUncompat >> KqpScheme::AlterTableAddExplicitSyncIndex >> KqpConstraints::SerialTypeForNonKeyColumn >> KqpScheme::AlterColumnTableTiering >> KqpAcl::FailedWriteAccessDenied >> KqpScheme::AlterTableAlterMissedIndex >> KqpScheme::AlterIndexImplTable+VectorIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllPrimitiveTypes-DATETIME64 [FAIL] Test command err: 2025-05-29T15:28:52.346363Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890043000291384:2080];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:52.346396Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cf2/r3tmp/tmpSopD9p/pdisk_1.dat 2025-05-29T15:28:52.422394Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31676, node 1 2025-05-29T15:28:52.437472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:52.437481Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:52.437483Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:52.437519Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:28:52.446498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:52.446542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:52.448217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16882 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:52.493732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:28:52.768907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890043000292314:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:52.768942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:52.814516Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509890043000291595:2140] Handle TEvProposeTransaction 2025-05-29T15:28:52.814541Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509890043000291595:2140] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:28:52.814564Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509890043000291595:2140] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509890043000292335:2603] 2025-05-29T15:28:52.827034Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509890043000292335:2603] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { PartitioningPolicy { MinPartitionsCount: 10 SplitByLoadSettings { Enabled: true } } } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-05-29T15:28:52.827064Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509890043000292335:2603] txid# 281474976715658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:28:52.827206Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:7509890043000292335:2603] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:28:52.827222Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509890043000292335:2603] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:28:52.827250Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509890043000292335:2603] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:28:52.827285Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509890043000292335:2603] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:28:52.827298Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509890043000292335:2603] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:28:52.827350Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509890043000292335:2603] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:28:52.827769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:28:52.829387Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509890043000292335:2603] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-05-29T15:28:52.829409Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509890043000292335:2603] txid# 281474976715658 SEND to# [1:7509890043000292334:2340] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-05-29T15:28:52.925420Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509890043000291595:2140] 
Handle TEvNavigate describe path /Root/table 2025-05-29T15:28:52.927536Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509890043000292475:2715] HANDLE EvNavigateScheme /Root/table 2025-05-29T15:28:52.927649Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:7509890043000292475:2715] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:28:52.927693Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:7509890043000292475:2715] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false } 2025-05-29T15:28:52.928116Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:7509890043000292475:2715] Handle TEvDescribeSchemeResult Forward to# [1:7509890043000292473:2346] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532532950 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 10 SplitByLoadSettings { Enabled: true } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 720 ... 98] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:29:17.510144Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [52:7509890152050308660:2598] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:29:17.510160Z node 52 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [52:7509890152050308660:2598] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:29:17.510213Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [52:7509890152050308660:2598] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:29:17.510561Z node 52 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:29:17.511569Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [52:7509890152050308660:2598] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-05-29T15:29:17.511590Z node 52 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [52:7509890152050308660:2598] txid# 281474976715658 SEND to# [52:7509890152050308659:2341] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-05-29T15:29:17.514714Z node 52 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [52:7509890152050308659:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:29:17.584083Z node 52 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [52:7509890152050307926:2138] Handle TEvProposeTransaction 2025-05-29T15:29:17.584103Z node 52 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [52:7509890152050307926:2138] TxId# 281474976715659 ProcessProposeTransaction 2025-05-29T15:29:17.584145Z node 52 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [52:7509890152050307926:2138] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [52:7509890152050308732:2650] 2025-05-29T15:29:17.585019Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [52:7509890152050308732:2650] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:29:17.585041Z node 52 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [52:7509890152050308732:2650] txid# 281474976715659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:29:17.585045Z node 52 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [52:7509890152050308732:2650] txid# 281474976715659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:29:17.585229Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [52:7509890152050308732:2650] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:29:17.585260Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [52:7509890152050308732:2650] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:29:17.585313Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [52:7509890152050308732:2650] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:29:17.585359Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [52:7509890152050308732:2650] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:29:17.585381Z node 52 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [52:7509890152050308732:2650] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-05-29T15:29:17.585443Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [52:7509890152050308732:2650] txid# 281474976715659 HANDLE EvClientConnected 2025-05-29T15:29:17.586483Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1374: 
Actor# [52:7509890152050308732:2650] txid# 281474976715659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:29:17.586527Z node 52 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [52:7509890152050308732:2650] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:17.586535Z node 52 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [52:7509890152050308732:2650] txid# 281474976715659 SEND to# [52:7509890152050308659:2341] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48} 2025-05-29T15:29:17.590257Z node 52 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [52:7509890152050307926:2138] Handle TEvProposeTransaction 2025-05-29T15:29:17.590275Z node 52 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [52:7509890152050307926:2138] TxId# 281474976715660 ProcessProposeTransaction 2025-05-29T15:29:17.590293Z node 52 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [52:7509890152050307926:2138] Cookie# 0 userReqId# "" txid# 281474976715660 SEND to# [52:7509890152050308765:2663] 2025-05-29T15:29:17.591072Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [52:7509890152050308765:2663] txid# 281474976715660 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Datetime64Table" Columns { Name: "Key" Type: "Datetime64" NotNull: false } Columns { Name: "Value" Type: "Int32" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } FailedOnAlreadyExists: true } } UserToken: "" DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:41112" 2025-05-29T15:29:17.591089Z node 52 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [52:7509890152050308765:2663] txid# 281474976715660 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:29:17.591186Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [52:7509890152050308765:2663] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:29:17.591201Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [52:7509890152050308765:2663] txid# 281474976715660 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:29:17.591260Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [52:7509890152050308765:2663] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:29:17.591291Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [52:7509890152050308765:2663] HANDLE EvNavigateKeySetResult, txid# 281474976715660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:29:17.591308Z node 52 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# 
[52:7509890152050308765:2663] txid# 281474976715660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-05-29T15:29:17.591371Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [52:7509890152050308765:2663] txid# 281474976715660 HANDLE EvClientConnected 2025-05-29T15:29:17.591772Z node 52 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:17.592661Z node 52 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [52:7509890152050308765:2663] txid# 281474976715660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715660} 2025-05-29T15:29:17.592676Z node 52 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [52:7509890152050308765:2663] txid# 281474976715660 SEND to# [52:7509890152050308764:2334] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 53} 2025-05-29T15:29:17.616273Z node 52 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [52:7509890152050308907:2357], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:17.616409Z node 52 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=52&id=ZjE2NjQzNjgtZDZhNjAxYTQtZDVmZjg3NDgtZjlmZjlkZmI=, ActorId: [52:7509890152050308641:2334], ActorState: ExecuteState, TraceId: 01jweajxs8dyh0vk7vjmd5hwza, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:177, NQuery::TExecuteQueryResult (anonymous namespace)::ExecuteQuery(NQuery::TSession &, const TString &, bool): (result.IsSuccess()) query: UPSERT INTO `/Root/Datetime64Table` (Key, Value) VALUES (CAST("2020-01-01" AS Datetime64), 1); issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x139A39CC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B57539) ??+0 (0x1384FC23) ??+0 (0x138603AA) NTestSuiteBackupRestoreS3::TTestCaseTestAllPrimitiveTypes::Execute_(NUnitTest::TTestContext&)+716 (0x1387193C) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1389E0A7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B593EE) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()+436 (0x1389DA64) NUnitTest::TTestFactory::Execute()+803 (0x13B59B63) NUnitTest::RunMain(int, char**)+3021 (0x13B6B70D) ??+0 (0x7F5DF5953D90) __libc_start_main+128 (0x7F5DF5953E40) _start+41 (0x12914029) >> KqpAcl::AclForOltpAndOlap-isOlap >> KqpConstraints::DefaultsAndDeleteAndUpdate >> KqpScheme::CreateAlterDropTableStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateExternalDataSource Test command err: Trying to start YDB, gRPC: 11811, MsgBus: 9824 2025-05-29T15:29:02.393234Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890084859803139:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:02.393260Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00118b/r3tmp/tmpbbOtcF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11811, node 1 2025-05-29T15:29:02.473083Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:02.478936Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890084859803110:2079] 1748532542393129 != 1748532542393132 2025-05-29T15:29:02.479144Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:02.479154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:02.479156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:02.479204Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:02.495016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:02.495049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:02.496508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9824 TClient is connected to server localhost:9824 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:02.561983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.567155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:02.579705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.605952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.679609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.695274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.792162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890084859804759:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.792185Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.848395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.856900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.870928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.926284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.940326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.954922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.968861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.986230Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890084859805412:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.986257Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890084859805417:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.986264Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.987063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:02.996103Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890084859805419:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:03.057612Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890089154772766:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:03.138671Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890089154772782:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:03.138793Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzJiYTI2N2YtNWIyZTAxYTUtYjRhMDZiMWMtYmExNmZkOWQ=, ActorId: [1:7509890084859804741:2401], ActorState: ExecuteState, TraceId: 01jweajfg9bg05qvpg6b03mxpb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:03.139884Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F8B0CDE3AC2 14. ??:0: ?? @ 0x7F8B0CE7584F Trying to start YDB, gRPC: 14862, MsgBus: 62123 2025-05-29T15:29:07.309153Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890107078777360:2081];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:07.309375Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00118b/r3tmp/tmpaFlH0O/pdisk_1.dat 2025-05-29T15:29:07.386791Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14862, node 1 2025-05-29T15:29:07.407872Z ... ce] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:17.609425Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509890152072185482:2296] txid# 281474976715658, issues: { message: "Nullable key column \'Key\'" severity: 1 } 2025-05-29T15:29:17.610851Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890152072185490:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:17.610867Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:17.614196Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509890152072185496:2304] txid# 281474976715659, issues: { message: "Nullable key column \'Key\'" severity: 1 } Trying to start YDB, gRPC: 20397, MsgBus: 10814 2025-05-29T15:29:17.882973Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509890150743256836:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:17.883012Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00118b/r3tmp/tmppcwYXW/pdisk_1.dat 2025-05-29T15:29:17.896759Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20397, node 3 2025-05-29T15:29:17.910661Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:17.910676Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:17.910678Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:17.910727Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10814 TClient is connected to server localhost:10814 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:17.983549Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:17.983575Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:17.984709Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:17.986951Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:17.990238Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:18.052799Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:18.068088Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:18.077617Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:18.201458Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509890155038225738:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.201486Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.212284Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.219144Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.228086Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.242593Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.256276Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.270842Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.284621Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.300636Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509890155038226389:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.300673Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.300698Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509890155038226394:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.301691Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:18.304427Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7509890155038226396:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:18.374359Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:7509890155038226447:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:18.466852Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:7509890155038226463:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:18.466972Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=ZDUyNmY0ZWItOTkwYTNkMDItZmYxN2U5MDktNTA0OTM2OWM=, ActorId: [3:7509890155038225720:2401], ActorState: ExecuteState, TraceId: 01jweajyew3x9k2mp5j7sjyy4q, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:18.467771Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F09B4C26AC2 14. ??:0: ?? @ 0x7F09B4CB884F >> KqpScheme::PathWithNoRoot |72.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScheme::AlterNonExistingResourcePool >> KqpScheme::CreateTableStoreNegative >> KqpScheme::AlterTableRenameIndex >> KqpScheme::ResourcePoolClassifiersRankValidation >> KqpScheme::CreateTableWithTtlOnIntColumn >> KqpScheme::CreateAlterDropTableStore [GOOD] >> KqpScheme::CreateAlterDropColumnTableInStore >> KqpConstraints::DefaultValuesForTableNegative3 >> KqpScheme::CreateTransfer_QueryService >> KqpScheme::ModifyPermissionsByRelativePath >> KqpScheme::DropTransfer >> KqpScheme::CreateTableStoreNegative [GOOD] >> KqpScheme::CreateExternalTableValidation >> KqpScheme::UseUnauthorizedTable |72.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |72.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableStoreNegative [GOOD] Test command err: Trying to start YDB, gRPC: 63305, MsgBus: 12253 2025-05-29T15:29:00.384995Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890077253374290:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:00.385491Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001212/r3tmp/tmpyZMZ1V/pdisk_1.dat 2025-05-29T15:29:00.512772Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:00.513129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:00.513145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:00.514901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63305, node 1 2025-05-29T15:29:00.546451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-05-29T15:29:00.546470Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:00.546473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:00.546529Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12253 TClient is connected to server localhost:12253 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:00.611269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:00.635677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:29:00.699920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.766058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:00.781033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:00.847332Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890077253375900:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:00.847358Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:00.890349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.898018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.910725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.967337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.982244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.995636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.009545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.031737Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890081548343852:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:01.031756Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:01.031767Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890081548343857:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:01.032626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:01.037025Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890081548343859:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:01.109254Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890081548343910:3400] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:01.234584Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890081548343926:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:01.234700Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=N2NkNGY1OTQtZjAxOWQ0OTItYjQ5NDZiZGMtYzFjOTQ3YjE=, ActorId: [1:7509890077253375896:2401], ActorState: ExecuteState, TraceId: 01jweajdk7ak47nqsxsfyspyzr, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:01.239280Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FA04C234AC2 14. ??:0: ?? @ 0x7FA04C2C684F Trying to start YDB, gRPC: 2366, MsgBus: 15900 2025-05-29T15:29:04.977697Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890094152595185:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:04.977770Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001212/r3tmp/tmp7N36zo/pdisk_1.dat 2025-05-29T15:29:05.056097Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2366, node 1 2025-05-29T15:29:05.076755Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:05.076769Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:05.076770Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2 ... cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:19.150408Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890157996819169:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:19.150435Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:19.200756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:19.255455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:19.264033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:19.271074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:19.278357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:19.292478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:19.306250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:19.322517Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890157996819825:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:19.322544Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:19.322557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890157996819830:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:19.323235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:19.326003Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890157996819832:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:19.396614Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890157996819883:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:19.478024Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890157996819899:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:19.478125Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGUyNjJjZDItNzViMjk1OTQtY2E0Mzc5NWUtMWMxMzM0NmY=, ActorId: [1:7509890157996819151:2401], ActorState: ExecuteState, TraceId: 01jweajzetdzgw7h80c3w3hg7d, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:19.478640Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FE199C9CAC2 14. ??:0: ?? @ 0x7FE199D2E84F Trying to start YDB, gRPC: 20482, MsgBus: 13252 2025-05-29T15:29:22.893350Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890170697326805:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:22.893389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001212/r3tmp/tmpOjF69S/pdisk_1.dat 2025-05-29T15:29:22.949746Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20482, node 1 2025-05-29T15:29:22.967218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:22.967233Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:22.967235Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:22.967276Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13252 2025-05-29T15:29:22.994532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:22.994557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:22.995686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13252 WaitRootIsUp 'Root'... 
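Both aborts above land inside NKikimr::NKqp::AssertSuccessResult (ydb/core/kqp/ut/common/kqp_ut_common.h:375) once the compile error surfaces as a failed NYdb::TStatus: the helper checks (result.IsSuccess()) and panics the non-unittest thread. Below is a minimal, self-contained sketch of such a success-asserting helper; the Status stand-in and its fields are assumptions for illustration, not the real NYdb::TStatus API.

// Minimal sketch of a success-asserting test helper, assuming a simplified
// Status stand-in; the real helper is NKikimr::NKqp::AssertSuccessResult and
// takes an NYdb::TStatus.
#include <cstdlib>
#include <iostream>
#include <string>
#include <vector>

struct Status {                       // hypothetical stand-in for NYdb::TStatus
    bool Success = false;
    std::vector<std::string> Issues;  // e.g. "Fatal: Execution, code: 1060"
    bool IsSuccess() const { return Success; }
};

void AssertSuccessResult(const Status& result) {
    if (!result.IsSuccess()) {
        // Mirrors the log above: dump the attached issues, then abort the
        // process, which is what a VERIFY in a non-unittest thread does.
        for (const auto& issue : result.Issues) {
            std::cerr << issue << '\n';
        }
        std::cerr << "assertion failed: (result.IsSuccess())" << '\n';
        std::abort();
    }
}

int main() {
    AssertSuccessResult(Status{true, {}});                                 // passes
    AssertSuccessResult(Status{false, {"Fatal: Execution, code: 1060"}});  // aborts
}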
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:23.034120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.346165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890174992294727:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.346198Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.383618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890174992294739:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.383643Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.386462Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890174992294744:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.386480Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.389684Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890174992294748:2297] txid# 281474976715658, issues: { message: "trying to create OLAP store with zero shards" severity: 1 } >> KqpScheme::CreateAlterDropColumnTableInStore [GOOD] >> KqpScheme::CreateResourcePoolClassifierOnServerless >> KqpScheme::TouchIndexAfterMoveTableRead >> KqpScheme::InvalidationAfterDropCreateTable2MultiStageTxNoEffects >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadUncompat |72.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |72.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |72.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |72.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateAlterDropColumnTableInStore [GOOD] Test command err: Trying to start YDB, gRPC: 25149, MsgBus: 11949 2025-05-29T15:29:04.149496Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890094094800636:2206];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:04.149556Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00117b/r3tmp/tmpCwV8jr/pdisk_1.dat 2025-05-29T15:29:04.220898Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:04.221181Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890094094800457:2079] 1748532544146631 != 1748532544146634 TServer::EnableGrpc on GrpcPort 25149, node 1 2025-05-29T15:29:04.249820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:04.249834Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:04.249837Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:04.249886Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:04.250937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:04.250971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:04.251980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11949 TClient is connected to server localhost:11949 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:29:04.332551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:04.336127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:04.346824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:04.379469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:04.402624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:04.415065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:04.543352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890094094802109:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:04.543378Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:04.584581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:04.593338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:04.606622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:04.620559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:04.635221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:04.648947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:04.662846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:04.679589Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890094094802762:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:04.679619Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890094094802767:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:04.679629Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:04.680398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:04.689862Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890094094802769:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:04.783810Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890094094802820:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:04.901077Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890094094802836:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:04.903576Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWJlMTBhNjItZDc4OTI3ODAtNjZhYjA1ZWEtMjEyOTU0ZGI=, ActorId: [1:7509890094094802106:2401], ActorState: ExecuteState, TraceId: 01jweajh576wezzbtvgfdjxcd6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:04.904666Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F787955CAC2 14. ??:0: ?? @ 0x7F78795EE84F Trying to start YDB, gRPC: 7221, MsgBus: 21676 2025-05-29T15:29:09.134297Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890117858653662:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:09.134320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00117b/r3tmp/tmpzcRnj9/pdisk_1.dat 2025-05-29T15:29:09.194972Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890117858653631:2079] 1748532549134 ... X_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:23.718883Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:23.718889Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:23.722579Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890174865923397:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.722602Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.726807Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.731469Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:29:23.731780Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:29:23.732436Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:29:23.732721Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:29:23.733420Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:29:23.733707Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:29:23.734221Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:29:23.734819Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:29:23.734879Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:29:23.735642Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:29:23.738404Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890174865923533:2452], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.738420Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.740908Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.743453Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:23.743483Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:23.743516Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:23.743546Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:23.743564Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:23.743606Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:23.743641Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:23.743665Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:23.743710Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:23.743719Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:23.748675Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890174865923604:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.748694Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.750486Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropColumnStore, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.758715Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;self_id=[2:7509890174865923051:2332];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:23.758808Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[2:7509890174865923073:2336];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:23.759526Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;self_id=[2:7509890174865923088:2339];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:23.759841Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037896;self_id=[2:7509890174865923076:2337];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:23.759982Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037895 not found 2025-05-29T15:29:23.759996Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-29T15:29:23.759998Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037896 not found 2025-05-29T15:29:23.760000Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-29T15:29:23.760002Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-05-29T15:29:23.760005Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2025-05-29T15:29:23.760309Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[2:7509890174865923047:2331];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:23.760516Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[2:7509890174865923061:2334];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:23.760652Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-05-29T15:29:23.760661Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-29T15:29:23.760662Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037897 not found 2025-05-29T15:29:23.760891Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037897;self_id=[2:7509890174865923068:2335];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:23.761315Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[2:7509890174865923053:2333];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:23.761514Z node 2 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-05-29T15:29:23.761634Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[2:7509890174865923159:2340];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:23.762264Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037894;self_id=[2:7509890174865923087:2338];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; |72.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [FAIL] Test command err: 2025-05-29T15:25:47.566844Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509889247361748392:2207];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:25:47.566889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026ea/r3tmp/tmpvuyLDM/pdisk_1.dat 2025-05-29T15:25:47.786484Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4872, node 1 2025-05-29T15:25:47.818389Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:25:47.818399Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:25:47.818401Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:25:47.818447Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18835 WaitRootIsUp 'Root'... 
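The WorkloadService lines repeated throughout this log (fetch pool default, NOT_FOUND, create, "Transaction ... completed, doublechecking", then "path exist, request accepts it") describe an idempotent ensure-exists flow for /Root/.metadata/workload_manager/pools/default. A rough, self-contained sketch under that reading follows; EnsureDefaultPool, FetchPool, CreatePool, and the Code enum are illustrative names, not YDB APIs.

// Rough sketch of an idempotent "fetch, create on NOT_FOUND, tolerate
// ALREADY_EXISTS" flow; all names here are hypothetical, and the static
// flag merely simulates a scheme shard where a concurrent creator can win.
#include <iostream>
#include <string>

enum class Code { Success, NotFound, AlreadyExists };

static bool g_poolExists = false;  // simulated scheme state

Code FetchPool(const std::string&) {
    return g_poolExists ? Code::Success : Code::NotFound;
}

Code CreatePool(const std::string&) {
    if (g_poolExists) return Code::AlreadyExists;
    g_poolExists = true;
    return Code::Success;
}

Code EnsureDefaultPool(const std::string& path) {
    if (FetchPool(path) == Code::Success) return Code::Success;
    const Code created = CreatePool(path);
    // "path exist, request accepts it": losing the creation race is fine,
    // the doublecheck treats an existing pool as success.
    return created == Code::AlreadyExists ? Code::Success : created;
}

int main() {
    const auto rc = EnsureDefaultPool("/Root/.metadata/workload_manager/pools/default");
    std::cout << static_cast<int>(rc) << '\n';  // 0 == Success
}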
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:25:47.871434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:47.871460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:47.873078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:25:47.879426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:18835 2025-05-29T15:25:48.083260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889251656716539:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.083286Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.143174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:25:48.232081Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889251656716699:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.232141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.233555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1748532348248 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1748532348248 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-05-29T15:25:48.316506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889251656716786:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.316534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.316833Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889251656716791:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.317588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889251656716801:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.317603Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889251656716808:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.317609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889251656716809:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.317615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889251656716810:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.317637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509889251656716811:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.317643Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:25:48.317729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-29T15:25:48.317782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:25:48.317786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710660:1, at schemeshard: 72057594046644480 2025-05-29T15:25:48.317801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:25:48.317810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710660:2, at schemeshard: 72057594046644480 2025-05-29T15:25:48.317820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:25:48.317830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:3, path# /Root/.metadata/workload_manager/pools/default 2025-05-29T15:25:48.317873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976710660:3 1 -> 128 2025-05-29T15:25:48.317939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976710660:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:25:48.317943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propo ... Zjk4Yi1kNDNhZjE0OA== BAD_SESSION:
: Error: Session not found: ydb://session/3?node_id=1&id=ZWIzMTk0OGUtOGFiNjlmMy01MjYxZjk4Yi1kNDNhZjE0OA== TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1748532348248 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1748532348248 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1748532348248 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1748532348248 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1748532348248 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards assertion failed at ydb/services/ydb/ydb_table_split_ut.cpp:193, void NTestSuiteYdbTableSplit::DoTestSplitByLoad(TKikimrWithGrpcAndRootSchema &, TString, bool, size_t): (shardsAfter >= shardsBefore + minSplits) Table didn't split!!11 O_O TBackTrace::Capture()+28 (0x1397789C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B2EC59) NTestSuiteYdbTableSplit::DoTestSplitByLoad(NYdb::TBasicKikimrWithGrpcAndRootSchema&, TBasicString>, bool, unsigned long)+5967 (0x1385D22F) NTestSuiteYdbTableSplit::TTestCaseSplitByLoadWithDeletes::Execute_(NUnitTest::TTestContext&)+430 (0x13860B9E) NTestSuiteYdbTableSplit::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13872467) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B30B0E) NTestSuiteYdbTableSplit::TCurrentTest::Execute()+436 (0x13871D84) NUnitTest::TTestFactory::Execute()+803 (0x13B31283) NUnitTest::RunMain(int, char**)+3021 (0x13B42E2D) ??+0 (0x7FF1088E1D90) __libc_start_main+128 (0x7FF1088E1E40) _start+41 (0x128BE029) |72.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |72.1%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut >> KqpScheme::CreateTableWithPartitionAtKeysSigned >> KqpScheme::DropAsyncReplicationCascade >> KqpScheme::AsyncReplicationConnectionStringWithSsl >> KqpScheme::DropNonExistingResourcePoolClassifier >> KqpScheme::DisableS3ExternalDataSource >> KqpScheme::AlterTableAddExplicitAsyncIndex >> KqpConstraints::SerialTypeSerial2 |72.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpScheme::CreateTableWithUniqConstraintPublicApi >> KqpAcl::AclRevoke-UseSink-IsOlap >> KqpScheme::CreateAndDropUser+StrictAclCheck >> KqpScheme::AlterTableAddUniqIndexSqlFeatureOff >> KqpScheme::CreateTableWithTtlSettingsCompat >> KqpScheme::AlterDatabaseChangeOwner+EnableAlterDatabase >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] >> KqpAcl::AclDml-UseSink-IsOlap >> KqpScheme::AlterResourcePoolClassifier >> KqpConstraints::DefaultValuesForTableNegative4 >> KqpScheme::AlterTableReplaceIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::ResourcePoolClassifiersRankValidation Test command err: Trying to start YDB, gRPC: 16715, MsgBus: 12963 2025-05-29T15:29:00.352576Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890077502142080:2079];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:00.352926Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00129d/r3tmp/tmpCbWo6w/pdisk_1.dat 2025-05-29T15:29:00.429568Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16715, node 1 2025-05-29T15:29:00.454360Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:00.454377Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:00.454379Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:00.454433Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:00.493696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:00.493724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:00.494691Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12963 TClient is connected to server localhost:12963 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
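The YdbTableSplit::SplitByLoadWithDeletes failure above boils down to the assertion (shardsAfter >= shardsBefore + minSplits): the table is polled via TClient::Ls before and after the delete load and still reports 1 shard. A self-contained sketch of that check follows; CountTableShards, RunDeleteLoad, and the stubbed shard counter are assumptions, since in the real test these talk to the cluster.

// Sketch of the shard-count assertion from DoTestSplitByLoad
// (ydb/services/ydb/ydb_table_split_ut.cpp:193); the two stub functions are
// hypothetical stand-ins for TClient::Ls-based counting and the workload.
#include <cstddef>
#include <iostream>
#include <stdexcept>
#include <string>

static std::size_t g_shards = 1;  // simulated partition count; never grows here

std::size_t CountTableShards(const std::string&) { return g_shards; }
void RunDeleteLoad(const std::string&) { /* load that should trigger a split */ }

void ExpectSplitByLoad(const std::string& tablePath, std::size_t minSplits) {
    const std::size_t shardsBefore = CountTableShards(tablePath);
    RunDeleteLoad(tablePath);
    const std::size_t shardsAfter = CountTableShards(tablePath);
    // The exact condition that fails in the log above.
    if (!(shardsAfter >= shardsBefore + minSplits)) {
        throw std::runtime_error("Table didn't split");
    }
}

int main() {
    try {
        ExpectSplitByLoad("/Root/Foo", 1);
    } catch (const std::exception& e) {
        std::cerr << e.what() << '\n';  // reproduces the failure mode above
    }
}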
2025-05-29T15:29:00.564062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:00.566698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:00.573226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.596378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:00.616553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:00.635501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:00.809711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890077502143660:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:00.809739Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:00.874191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.881926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.889544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.946288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.960350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.973916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.988732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.051450Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890081797111618:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:01.051479Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:01.051658Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890081797111623:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:01.052634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:01.061003Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890081797111625:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:01.124573Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890081797111676:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:01.230006Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890081797111685:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:01.230218Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YmJhOTBhN2MtODhiNjg5NDMtOGI5ZDAzYi05MmUzOTgxMQ==, ActorId: [1:7509890077502143633:2400], ActorState: ExecuteState, TraceId: 01jweajdktbbdf40q0avk19mjq, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:01.236620Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FE312850AC2 14. ??:0: ?? @ 0x7FE3128E284F Trying to start YDB, gRPC: 21845, MsgBus: 2395 2025-05-29T15:29:05.399201Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890098406219656:2078];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:05.399233Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00129d/r3tmp/tmpeJ8zv7/pdisk_1.dat 2025-05-29T15:29:05.458287Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21845, node 1 2025-05-29T15:29:05.474933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:05.474947Z node 1 :NET_CLASSIFIER WARN: net_classi ... st/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F1BE66B3AC2 14. ??:0: ?? 
@ 0x7F1BE674584F Trying to start YDB, gRPC: 31698, MsgBus: 21062 2025-05-29T15:29:22.977452Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890171665032721:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:22.977478Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00129d/r3tmp/tmpSBa5hR/pdisk_1.dat 2025-05-29T15:29:23.056002Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31698, node 1 2025-05-29T15:29:23.071419Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:23.071434Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:23.071436Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:23.071477Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:23.078596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:23.078621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:23.079761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21062 TClient is connected to server localhost:21062 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:23.136821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.142065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:23.206872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.233353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.247354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.316518Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890175960001611:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.316551Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.365876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.373793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.387200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.401557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.415786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.429800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.446186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.459512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890175960002262:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.459533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.459548Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890175960002267:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.460165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:23.463368Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890175960002269:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:23.556229Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890175960002320:3394] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:23.661313Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890175960002336:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:23.661444Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2Y0ODU5NDEtYTI5ODIxNjYtYWEwYTk5YjMtZjNlZTNmODk=, ActorId: [1:7509890175960001593:2401], ActorState: ExecuteState, TraceId: 01jweak3g395bf66qdcdrd4r8v, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:23.662118Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F5145F59AC2 14. ??:0: ?? @ 0x7F5145FEB84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:27:20.918806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:27:20.918835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:20.918841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:27:20.918846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:27:20.918863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:27:20.918867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:27:20.918876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:27:20.918890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:27:20.918997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:27:20.919067Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:27:20.932312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:27:20.932337Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:20.934906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:27:20.935031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:27:20.935078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:27:20.936570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:27:20.936754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:27:20.936865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:20.936936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:27:20.937432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:20.937480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:27:20.937759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:20.937769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:27:20.937791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:27:20.937799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:20.937804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:27:20.937839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:27:20.939207Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:27:20.961065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:27:20.961147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: 
//MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:20.961211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:27:20.961275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:27:20.961285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:20.963064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:20.963097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:27:20.963148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:20.963173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:27:20.963178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:27:20.963185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:27:20.965141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:20.965162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:27:20.965171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:27:20.965664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:20.965677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:27:20.965684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:20.965692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:27:20.966485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:27:20.967082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet 
strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:27:20.967156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:27:20.967389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:27:20.967420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:27:20.967429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:20.967493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:27:20.967502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:27:20.967539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:27:20.967551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:27:20.968121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:27:20.968134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:27:20.968191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
ed: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 91 Memory: 124088 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 40 TableOwnerId: 72057594046678944 FollowerId: 0 2025-05-29T15:29:25.584847Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4914: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-05-29T15:29:25.584866Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0091 2025-05-29T15:29:25.584900Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:570: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-05-29T15:29:25.584908Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:610: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-05-29T15:29:25.625970Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-05-29T15:29:25.626006Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5053: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-05-29T15:29:25.626014Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-05-29T15:29:25.626042Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:601: Will execute TTxStoreStats, queue# 1 2025-05-29T15:29:25.626050Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:610: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-05-29T15:29:25.626085Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-05-29T15:29:25.626112Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-05-29T15:29:25.626119Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:62: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2025-05-29T15:29:25.626167Z 
node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:477: Do not want to split tablet 72075186233409546 by size, its table already has 1 out of 1 partitions 2025-05-29T15:29:25.626197Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:29:25.636435Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-05-29T15:29:25.636470Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5053: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-05-29T15:29:25.636477Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-05-29T15:29:25.667066Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:714:2679]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:29:25.667200Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 3 2025-05-29T15:29:25.667321Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269553162, Sender [3:714:2679], Recipient [3:124:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 3 Generation: 2 Round: 12 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 18 Memory: 124088 } ShardState: 2 UserTablePartOwners: 72075186233409547 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 211 TableOwnerId: 72057594046678944 FollowerId: 0 2025-05-29T15:29:25.667332Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4914: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-05-29T15:29:25.667351Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:563: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0018 2025-05-29T15:29:25.667376Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:570: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-05-29T15:29:25.667385Z node 3 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:610: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-05-29T15:29:25.708146Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:92: Operation queue wakeup 2025-05-29T15:29:25.708198Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:65: Borrowed compaction timeout for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, in queue# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-05-29T15:29:25.708212Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046678944 2025-05-29T15:29:25.708241Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 3 seconds 2025-05-29T15:29:25.708247Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__borrowed_compaction.cpp:100: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-05-29T15:29:25.708290Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-05-29T15:29:25.708300Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5053: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-05-29T15:29:25.708305Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-05-29T15:29:25.708330Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:601: Will execute TTxStoreStats, queue# 1 2025-05-29T15:29:25.708337Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:610: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-05-29T15:29:25.708364Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 13940 row count 100 2025-05-29T15:29:25.708389Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=CopyTable, is column=0, is olap=0, RowCount 100, DataSize 13940, with borrowed parts 2025-05-29T15:29:25.708395Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:62: BuildStatsForCollector: datashardId 72075186233409547, followerId 0 2025-05-29T15:29:25.708433Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:472: Want to split tablet 72075186233409547 by size split by size (shardCount: 1, maxShardCount: 2, shardSize: 13940, maxShardSize: 1) 2025-05-29T15:29:25.708449Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:505: Postpone split tablet 72075186233409547 because it has borrow parts, enqueue compact them first 2025-05-29T15:29:25.708454Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__borrowed_compaction.cpp:100: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-05-29T15:29:25.708485Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 
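The trace above spells out the schemeshard split-by-size decision: tablet 72075186233409546 is skipped because its table already has 1 out of 1 partitions, while tablet 72075186233409547 qualifies for a split (shardSize 13940 > maxShardSize 1, shardCount 1 < maxShardCount 2) but is postponed because the shard still holds borrowed parts, which get enqueued for borrowed compaction first. A pseudocode sketch of that ordering follows; the thresholds and the postpone rule are from the log lines, all names and types are assumed rather than taken from schemeshard__table_stats.cpp.

// Illustrative sketch only; mirrors the order of checks visible in the
// trace: partition-count cap, then size threshold, then the
// borrowed-parts postponement.
#include <cstdint>

struct TShardStats {
    uint64_t ShardCount;
    uint64_t MaxShardCount;
    uint64_t ShardSize;    // bytes
    uint64_t MaxShardSize; // bytes
    bool HasBorrowedParts;
};

enum class ESplitDecision { None, Split, CompactBorrowedFirst };

ESplitDecision DecideSplitBySize(const TShardStats& s) {
    if (s.ShardCount >= s.MaxShardCount) {
        return ESplitDecision::None; // "already has 1 out of 1 partitions"
    }
    if (s.ShardSize <= s.MaxShardSize) {
        return ESplitDecision::None; // below the size threshold
    }
    if (s.HasBorrowedParts) {
        // "Postpone split tablet ... because it has borrow parts,
        // enqueue compact them first": borrowed compaction must finish
        // before the datashard can be split.
        return ESplitDecision::CompactBorrowedFirst;
    }
    return ESplitDecision::Split;
}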
2025-05-29T15:29:25.718736Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-05-29T15:29:25.718792Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5053: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-05-29T15:29:25.718800Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:590: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-05-29T15:29:25.963847Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:25.963886Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:25.963909Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [3:124:2149], Recipient [3:124:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:25.963915Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> KqpScheme::AlterDatabaseChangeOwner+EnableAlterDatabase [GOOD] >> KqpScheme::AlterDatabaseChangeOwner-EnableAlterDatabase >> KqpScheme::CreateTableWithTtlOnDatetime64Column ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTransfer_QueryService Test command err: Trying to start YDB, gRPC: 15659, MsgBus: 11675 2025-05-29T15:29:00.447044Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890079312852139:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:00.447992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00126a/r3tmp/tmpE55Hvl/pdisk_1.dat 2025-05-29T15:29:00.560929Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890079312851971:2079] 1748532540445711 != 1748532540445714 2025-05-29T15:29:00.565795Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15659, node 1 2025-05-29T15:29:00.591440Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:00.591457Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:00.591459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:00.591511Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11675 2025-05-29T15:29:00.634430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:00.634465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:00.635930Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11675 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:00.669702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:00.673964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:00.692560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:00.715345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:00.728642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:00.923841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890079312853612:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:00.923879Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:00.984873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.998513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.009048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.023724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.037591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.051743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.064695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.088056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890083607821560:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:01.088079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890083607821565:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:01.088080Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:01.089087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:01.093154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-05-29T15:29:01.093206Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890083607821567:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:01.162546Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890083607821618:3401] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:01.275193Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890083607821627:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:01.277187Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzdiZmI5NTMtZTQxMWI4ZjYtMTZmYWViZDAtZmZmZmRhNTI=, ActorId: [1:7509890079312853594:2401], ActorState: ExecuteState, TraceId: 01jweajdmz7mryzbfgdrgg9bg5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:01.278442Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FBA9D9BFAC2 14. ??:0: ?? @ 0x7FBA9DA5184F Trying to start YDB, gRPC: 17926, MsgBus: 17843 2025-05-29T15:29:05.187665Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890100856542210:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:05.187859Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00126a/r3tmp/tmprhEZtz/pdisk_1.dat 2025-05-29T15:29:05.265414Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17926, node 1 2025-05-29T15:29:05.2869 ... st/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F266EAA5AC2 14. ??:0: ?? @ 0x7F266EB3784F
Trying to start YDB, gRPC: 11048, MsgBus: 25173 2025-05-29T15:29:23.371184Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890174245721883:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:23.371206Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00126a/r3tmp/tmpxRZuso/pdisk_1.dat 2025-05-29T15:29:23.420076Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11048, node 1 2025-05-29T15:29:23.440932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:23.440943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:23.440945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:23.440982Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25173 2025-05-29T15:29:23.472542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:23.472569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:23.473684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25173 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:23.508739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.516919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting...
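The assertion at frame 3 of every trace above lives at ydb/core/kqp/ut/common/kqp_ut_common.h:375 and is what turns the INTERNAL_ERROR compile status into a test-wide VERIFY failure. A minimal sketch of such a helper, consistent with the trace but with include paths and the exact macro assumed rather than taken from the repository:

    #include <library/cpp/testing/unittest/registar.h>              // UNIT_ASSERT_C
    #include <ydb/public/sdk/cpp/client/ydb_types/status/status.h>  // NYdb::TStatus (path assumed)

    namespace NKikimr::NKqp {

    // Sketch only: fail the current test if a YDB SDK call did not succeed,
    // attaching the accumulated issues (here ": Fatal: Execution, code: 1060"
    // and ": Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range").
    inline void AssertSuccessResult(const NYdb::TStatus& result) {
        UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
    }

    } // namespace NKikimr::NKqp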
2025-05-29T15:29:23.582495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.601877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.612899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.714396Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890174245723484:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.714431Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.762164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.769852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.779948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.793898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.849426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.863960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.877545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.892859Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890174245724138:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.892885Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890174245724143:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.892888Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.893591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:23.897337Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890174245724145:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:23.952788Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890174245724196:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:24.043601Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890174245724212:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:24.043689Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWUwMzdhNmUtZDVlZTM2NTEtMmQxNTUxZmQtN2JmZDliY2E=, ActorId: [1:7509890174245723481:2401], ActorState: ExecuteState, TraceId: 01jweak3xm6e1z6zyhghgge8da, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:24.044288Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F645FB21AC2 14. ??:0: ?? @ 0x7F645FBB384F >> KqpConstraints::DefaultAndIndexesTestDefaultColumnNotIncludedInIndex >> KqpScheme::ModifyPermissionsByRelativePathQueryClient >> KqpScheme::DropTransfer_QueryService >> KqpScheme::RenameTableWithVectorIndex >> KqpScheme::AlterDatabaseChangeOwner-EnableAlterDatabase [GOOD] >> KqpScheme::AlterGroup >> KqpScheme::CreateTableWithTtlOnDatetime64Column [GOOD] >> KqpScheme::CreateTableWithStoreExternalBlobs >> KqpScheme::DescribeIndexTable >> KqpScheme::CreateAndAlterTableWithPartitionBy ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateExternalTableValidation Test command err: Trying to start YDB, gRPC: 23786, MsgBus: 10539 2025-05-29T15:29:01.291765Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890080904151003:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:01.291958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0011fb/r3tmp/tmpQUttam/pdisk_1.dat 2025-05-29T15:29:01.383787Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23786, node 1 2025-05-29T15:29:01.392812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:01.392854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:01.393868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:01.400410Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:01.400423Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:01.400425Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:01.400467Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10539 TClient is connected to server localhost:10539
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:01.465755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:01.474398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:01.490078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:01.506924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:01.518027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:01.700091Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890080904152591:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:01.700109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:01.767824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.779971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.794887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.814326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.832538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.850810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.866829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.886345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890080904153244:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:01.886377Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:01.886416Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890080904153249:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:01.889750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:01.894229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:29:01.894309Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890080904153251:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:01.951453Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890080904153302:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:02.068473Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890080904153318:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:02.068565Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjkwNTc0N2YtMzkzYWJkNjctOWIzM2U4YmYtNzY5ZTdjY2M=, ActorId: [1:7509890080904152573:2401], ActorState: ExecuteState, TraceId: 01jweajedw3789bpj9ctar8s03, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:02.070799Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F3520EA7AC2 14. ??:0: ?? @ 0x7F3520F3984F Trying to start YDB, gRPC: 17478, MsgBus: 31876 2025-05-29T15:29:06.049772Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890102633206599:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:06.049854Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0011fb/r3tmp/tmp4F6rLy/pdisk_1.dat 2025-05-29T15:29:06.109702Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:06.109802Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890102633206561:2079] 1748532546049622 != 1748532546049625 TServer::EnableGrpc on GrpcPort 17478, node 1 2025-05-29T15:29:06.1285 ... st/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FCA9E2ECAC2 14. ??:0: ?? @ 0x7FCA9E37E84F
Trying to start YDB, gRPC: 22688, MsgBus: 62731 2025-05-29T15:29:23.613783Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890177757234326:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:23.613847Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0011fb/r3tmp/tmpaBnhpv/pdisk_1.dat 2025-05-29T15:29:23.678959Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22688, node 1 2025-05-29T15:29:23.695536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:23.695549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:23.695551Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:23.695592Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62731 2025-05-29T15:29:23.714984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:23.715017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:23.716100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:23.746045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.754527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting...
2025-05-29T15:29:23.818010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.841282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.853932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:24.074273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890182052203230:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:24.074306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:24.134717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.143279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.151732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.164500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.178793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.192807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.206842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.223143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890182052203882:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:24.223181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890182052203887:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:24.223186Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:24.224090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:24.226221Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890182052203889:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:24.320408Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890182052203940:3399] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:24.430895Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890182052203956:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:24.431032Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjAxOTAxYjktZGExNzY5MzAtZjBiM2MzZjYtZGM5ZjE4ZDU=, ActorId: [1:7509890182052203227:2401], ActorState: ExecuteState, TraceId: 01jweak47yb1tfqtzb9pg6dcbm, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:24.431785Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FAE66606AC2 14. ??:0: ?? @ 0x7FAE6669884F >> KqpAcl::ReadSuccess >> KqpScheme::UseNonexistentTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::InvalidationAfterDropCreateTable2MultiStageTxNoEffects Test command err: Trying to start YDB, gRPC: 6886, MsgBus: 63192 2025-05-29T15:29:02.436721Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890086024769756:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:02.436738Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0011cb/r3tmp/tmpAUzV01/pdisk_1.dat 2025-05-29T15:29:02.499600Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6886, node 1 2025-05-29T15:29:02.517331Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:02.517340Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:02.517342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:02.517374Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63192 2025-05-29T15:29:02.537938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:02.537974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:02.539398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63192 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:29:02.601190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.607100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:02.618508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.659079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.687539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.702845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.825223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890086024771352:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.825249Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.882658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.891034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.898844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.912719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.926273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.940327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.954457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.971079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890086024772004:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.971113Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.971335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890086024772009:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.972159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:02.974455Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890086024772011:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:03.057085Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890090319739358:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:03.201525Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890090319739367:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:03.202965Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTFlZWUzODAtZWEwNGNhNzAtMTMwODcwYWYtZmNmN2IyNGI=, ActorId: [1:7509890086024771334:2401], ActorState: ExecuteState, TraceId: 01jweajffta736n63wmas2386d, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:03.211070Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FF92A3F7AC2 14. ??:0: ?? @ 0x7FF92A48984F Trying to start YDB, gRPC: 4913, MsgBus: 10682 2025-05-29T15:29:07.309972Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890106406409641:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:07.310020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0011cb/r3tmp/tmp0D2O81/pdisk_1.dat 2025-05-29T15:29:07.375663Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4913, node 1 2025-05-29T15:29:07.390219Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:07.390233Z node 1 :NET_CLASSIFIER WARN: net_classifie ... st/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FD3AA5E8AC2 14. ??:0: ?? @ 0x7FD3AA67A84F
Trying to start YDB, gRPC: 27661, MsgBus: 27401 2025-05-29T15:29:24.661883Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890181910026583:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:24.661901Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0011cb/r3tmp/tmp01xwMS/pdisk_1.dat 2025-05-29T15:29:24.721815Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27661, node 1 2025-05-29T15:29:24.736916Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:24.736927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:24.736929Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:24.736968Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27401 2025-05-29T15:29:24.763565Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:24.763585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:24.764631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27401 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:24.793663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:24.804066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting...
2025-05-29T15:29:24.868950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:24.888869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:24.900906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:24.984119Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890181910028201:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:24.984143Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.025505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.033772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.046801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.060364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.067257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.081534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.095618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.111891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890186204996148:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.111922Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.112010Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890186204996153:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.112758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:25.115175Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890186204996155:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:25.212544Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890186204996206:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:25.311217Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890186204996222:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:25.311464Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmY4MmViZjgtZDEyOGE0YzktMjBiNGUyNGEtYTVjZjMzZTc=, ActorId: [1:7509890181910028183:2401], ActorState: ExecuteState, TraceId: 01jweak53q88g20575r03haefe, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:25.312129Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FBEBAC1CAC2 14. ??:0: ?? @ 0x7FBEBACAE84F >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadCompat >> KqpScheme::AsyncReplicationEndpointAndDatabase ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::TouchIndexAfterMoveTableRead Test command err: Trying to start YDB, gRPC: 24683, MsgBus: 7171 2025-05-29T15:29:02.223679Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890086227336907:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:02.223881Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0011e7/r3tmp/tmp6xTFEX/pdisk_1.dat 2025-05-29T15:29:02.282519Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24683, node 1 2025-05-29T15:29:02.302937Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:02.302952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:02.302955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:02.302993Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7171 2025-05-29T15:29:02.324913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:02.324940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:02.325944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7171 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:02.364093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.369888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.388815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.409033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.420817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.613662Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890086227338498:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.613705Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.666431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.679477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.688267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.745382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.759568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.772542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.787132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.802984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890086227339152:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.803008Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.803061Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890086227339157:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.803748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:02.806358Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890086227339159:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:02.893166Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890086227339210:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:03.011564Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890086227339226:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:29:03.011708Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2UwMjM3MDItN2ExMjEwOTMtNmY5NjY4YmItODhiNDdmY2E=, ActorId: [1:7509890086227338479:2401], ActorState: ExecuteState, TraceId: 01jweajfaj2xqw74f7cbwwazmv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:29:03.012657Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055
1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C
8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9
10. /-S/util/thread/factory.h:15: Execute @ 0x16344199
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C
13. ??:0: ?? @ 0x7FE3B247FAC2
14. ??:0: ?? @ 0x7FE3B251184F
Trying to start YDB, gRPC: 14896, MsgBus: 5978
2025-05-29T15:29:06.798821Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890104238949682:2068];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:06.798852Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0011e7/r3tmp/tmpFgHYry/pdisk_1.dat
2025-05-29T15:29:06.879019Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 14896, node 1
2025-05-29T15:29:06.894236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:29:06.894252Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:29:06.894254Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 202 ... st/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055
1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C
8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9
10. /-S/util/thread/factory.h:15: Execute @ 0x16344199
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C
13. ??:0: ?? @ 0x7F81E10CEAC2
14. ??:0: ?? @ 0x7F81E116084F
Trying to start YDB, gRPC: 27442, MsgBus: 29966
2025-05-29T15:29:24.348265Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890181163823029:2070];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:24.348287Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0011e7/r3tmp/tmpKwNxjr/pdisk_1.dat
2025-05-29T15:29:24.405952Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 27442, node 1
2025-05-29T15:29:24.424144Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:29:24.424159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:29:24.424162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:29:24.424210Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:29966
2025-05-29T15:29:24.449440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:29:24.449483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:29:24.450548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:29966
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:29:24.485696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:24.497830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
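The TPoolCreatorActor lines in this output show the create-then-doublecheck idiom: the scheduled retry after "Transaction ... completed, doublechecking" runs into "path exist, request accepts it", which is still success for the caller. A sketch of that idempotent-create handling, with all names invented for illustration (this is not the actual YDB actor interface):

#include <iostream>
#include <string>

enum class ECreateResult { Ok, AlreadyExists, Error };

// Hypothetical stand-in for the scheme operation; the static flag fakes a
// second caller losing the race within one process.
ECreateResult CreateResourcePool(const std::string& path) {
    static bool exists = false;
    if (exists) {
        return ECreateResult::AlreadyExists;
    }
    exists = true;
    return ECreateResult::Ok;
}

bool EnsureResourcePool(const std::string& path) {
    switch (CreateResourcePool(path)) {
        case ECreateResult::Ok:
            return true;
        case ECreateResult::AlreadyExists:
            // The "path exist, request accepts it" case from the log:
            // someone created the default pool first, which satisfies us too.
            return true;
        case ECreateResult::Error:
            return false;
    }
    return false;
}

int main() {
    const std::string path = "/Root/.metadata/workload_manager/pools/default";
    std::cout << EnsureResourcePool(path) << EnsureResourcePool(path) << '\n';
    // Prints 11: the creator and the racing doublechecker both succeed.
}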
2025-05-29T15:29:24.517399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:24.537017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:24.548574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:24.696487Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890181163824622:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:24.696513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:24.738784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.794008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.848585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.903041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.913519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.927577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.983463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.000330Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890185458792577:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.000363Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890185458792582:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.000394Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.001117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:25.003193Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890185458792584:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:25.093974Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890185458792635:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:25.205015Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890185458792651:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:29:25.205153Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmNjMWZjMmItOTJiNjhiMDctOWU5MDZhMTItMzg0NTJiZjk=, ActorId: [1:7509890181163824604:2401], ActorState: ExecuteState, TraceId: 01jweak5077vxrqde1nr6w3ws5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:29:25.205917Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055
1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C
8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9
10. /-S/util/thread/factory.h:15: Execute @ 0x16344199
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C
13. ??:0: ?? @ 0x7F755F276AC2
14. ??:0: ?? @ 0x7F755F30884F
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DropAsyncReplicationCascade
Test command err:
Trying to start YDB, gRPC: 5397, MsgBus: 13704
2025-05-29T15:29:02.505054Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890088026499009:2073];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:02.505081Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001186/r3tmp/tmplG3ndl/pdisk_1.dat
2025-05-29T15:29:02.571918Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 5397, node 1
2025-05-29T15:29:02.592045Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:29:02.592057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:29:02.592059Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:29:02.592110Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
2025-05-29T15:29:02.606303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:29:02.606328Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:29:02.607584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:13704
TClient is connected to server localhost:13704
WaitRootIsUp 'Root'...
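Frames 5 through 12 of these traces show CreateSampleTables running on a util thread pool behind a future, which is why every panic is reported as "assertion failed in non-unittest thread". A small standard-library sketch of the same shape (std::async instead of the util/threading classes used in the real harness):

#include <future>
#include <iostream>
#include <stdexcept>

// Stand-in for the real setup step; pretend query compilation failed.
bool CreateSampleTables() {
    throw std::runtime_error("Fatal: Execution, code: 1060");
}

int main() {
    // Setup runs on a pool thread, like the async.h frame in the traces.
    std::future<bool> done = std::async(std::launch::async, CreateSampleTables);
    try {
        done.get();  // the worker's exception crosses back to this thread here
    } catch (const std::exception& e) {
        // Unlike std::abort() in a worker, which kills the whole process,
        // a propagated exception lets the framework fail just this test.
        std::cout << "setup failed: " << e.what() << '\n';
    }
}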
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:02.670489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.673183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:02.686510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.702956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.730520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.741742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.871161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890088026500596:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.871187Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.916512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.924178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.932981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.940212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.947296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.954427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.969028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.984791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890088026501248:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.984809Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890088026501253:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.984816Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.985594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:02.988632Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890088026501255:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:03.053104Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890092321468602:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:03.167867Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890092321468618:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:29:03.168950Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODUyNWFhZmQtZmEwYjc1MDMtNWFlOTY0ZDUtZWJlZmJiOQ==, ActorId: [1:7509890088026500578:2401], ActorState: ExecuteState, TraceId: 01jweajfg80ps53zak86d70xv6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:29:03.170003Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055
1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C
8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9
10. /-S/util/thread/factory.h:15: Execute @ 0x16344199
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C
13. ??:0: ?? @ 0x7F0E3413DAC2
14. ??:0: ?? @ 0x7F0E341CF84F
Trying to start YDB, gRPC: 10920, MsgBus: 20496
2025-05-29T15:29:07.195289Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890105498181036:2068];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:07.195311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001186/r3tmp/tmpFByEPY/pdisk_1.dat
2025-05-29T15:29:07.253674Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 10920, node 1
2025-05-29T15:29:07.272002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:29:07.272015Z node 1 :NET_CLASSIFIER WARN: net_classif ... /ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055
1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C
8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9
10. /-S/util/thread/factory.h:15: Execute @ 0x16344199
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C
13. ??:0: ?? @ 0x7FC78FFA1AC2
14. ??:0: ?? @ 0x7FC79003384F
Trying to start YDB, gRPC: 9641, MsgBus: 63195
2025-05-29T15:29:25.024866Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890184429195001:2065];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:25.024900Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001186/r3tmp/tmpIWZL6d/pdisk_1.dat
2025-05-29T15:29:25.086653Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 9641, node 1
2025-05-29T15:29:25.107360Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:29:25.107376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:29:25.107378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:29:25.107429Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:63195
2025-05-29T15:29:25.125942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:29:25.125969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:29:25.126994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:63195
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:29:25.167856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:25.176053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
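Every event in this output shares one prefix: an ISO-8601 timestamp, "node N", then ":COMPONENT SEVERITY:". When triaging runs like this one it helps to split the stream on that prefix; a parser sketch follows, with the format inferred from the lines above (adjust the pattern if the real layout differs):

#include <iostream>
#include <regex>
#include <string>

struct TLogEvent {
    std::string Time;
    int Node = 0;
    std::string Component;
    std::string Severity;
    std::string Text;
};

bool ParseEvent(const std::string& line, TLogEvent& out) {
    // Matches e.g. "2025-05-29T15:29:25.024866Z node 1 :METADATA_PROVIDER WARN: ..."
    static const std::regex re(
        R"(^(\S+Z) node (\d+) :(\w+) (WARN|ERROR|INFO|DEBUG): (.*)$)");
    std::smatch m;
    if (!std::regex_match(line, m, re)) {
        return false;
    }
    out.Time = m[1].str();
    out.Node = std::stoi(m[2].str());
    out.Component = m[3].str();
    out.Severity = m[4].str();
    out.Text = m[5].str();
    return true;
}

int main() {
    TLogEvent ev;
    const std::string sample =
        "2025-05-29T15:29:25.024866Z node 1 :METADATA_PROVIDER WARN: "
        "log.cpp:784: fline=table_exists.cpp:54;event=undelivered;";
    if (ParseEvent(sample, ev)) {
        std::cout << ev.Severity << " from " << ev.Component
                  << " at " << ev.Time << '\n';
    }
}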
2025-05-29T15:29:25.192521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:25.209877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:25.222896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:25.369804Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890184429196617:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.369833Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.419324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.427095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.438538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.492814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.546939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.557057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.571239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.587547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890184429197274:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.587564Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890184429197279:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.587573Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.588253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:25.591154Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890184429197281:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:25.668462Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890184429197332:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:25.750681Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890184429197348:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:29:25.750796Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTJmZjk4NzUtYmMxNGQ3NWQtMjBkNjZmZWUtYWI3NzBmNGQ=, ActorId: [1:7509890184429196599:2401], ActorState: ExecuteState, TraceId: 01jweak5jkdtt31wnxmwhfsw03, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:29:25.751358Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055
1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C
8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9
10. /-S/util/thread/factory.h:15: Execute @ 0x16344199
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C
13. ??:0: ?? @ 0x7F4E99963AC2
14. ??:0: ?? @ 0x7F4E999F584F
>> KqpScheme::BuildingUniqIndexDeniesTableModificationsSql
>> KqpScheme::DoubleCreateExternalDataSource
>> KqpConstraints::SerialTypeSerial
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DropNonExistingResourcePoolClassifier
Test command err:
Trying to start YDB, gRPC: 23315, MsgBus: 61401
2025-05-29T15:29:02.484995Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890085525118121:2205];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:02.485098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001189/r3tmp/tmpyYDHSf/pdisk_1.dat
2025-05-29T15:29:02.571625Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 23315, node 1
2025-05-29T15:29:02.599273Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:29:02.599289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:29:02.599292Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:29:02.599342Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:61401
2025-05-29T15:29:02.635830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:29:02.635860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:29:02.636599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:61401
WaitRootIsUp 'Root'...
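Every one of these failures funnels through the same message, "yql_expr.h:1874: index out of range". The header itself is not reproduced in this log, but errors of that form usually come out of a bounds-guarded child accessor on an expression-tree node; a generic sketch of that accessor pattern, with all types invented here (not the real YQL classes):

#include <memory>
#include <stdexcept>
#include <string>
#include <vector>

class TExprNode {  // illustrative node type, not the real yql_expr.h class
public:
    explicit TExprNode(std::string content)
        : Content(std::move(content)) {}

    void AddChild(std::shared_ptr<TExprNode> child) {
        Children.push_back(std::move(child));
    }

    const TExprNode& Child(size_t index) const {
        if (index >= Children.size()) {
            // The analogue of the log's "index out of range, code: 1".
            throw std::out_of_range("index out of range");
        }
        return *Children[index];
    }

private:
    std::string Content;
    std::vector<std::shared_ptr<TExprNode>> Children;
};

int main() {
    TExprNode root("lambda");
    root.AddChild(std::make_shared<TExprNode>("arg"));
    root.Child(0);  // in range, fine
    try {
        root.Child(1);  // out of range, like the compiler walking a bad tree
    } catch (const std::out_of_range&) {
        // A production compiler would surface this as an INTERNAL_ERROR issue.
    }
}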
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:02.670967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.673976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:02.685730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.708243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.727033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.739985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.883985Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890085525119579:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.884012Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.930386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.944857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.956932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.014318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.025070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.040115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.054784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.077139Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890089820087532:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:03.077169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:03.077267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890089820087537:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:03.078214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:03.081003Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890089820087539:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:03.155280Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890089820087590:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:03.295414Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890089820087599:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:29:03.295569Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWEzY2JmOWUtNjM2NzNiMzMtOWE3MTcyZDAtOWY1ZGZlMDc=, ActorId: [1:7509890085525119552:2400], ActorState: ExecuteState, TraceId: 01jweajfk4680aw5t634d2wr30, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:29:03.299189Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055
1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C
8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9
10. /-S/util/thread/factory.h:15: Execute @ 0x16344199
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C
13. ??:0: ?? @ 0x7F074F96DAC2
14. ??:0: ?? @ 0x7F074F9FF84F
Trying to start YDB, gRPC: 14415, MsgBus: 10129
2025-05-29T15:29:07.134242Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890106063879957:2076];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:07.134848Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001189/r3tmp/tmpSc28P9/pdisk_1.dat
2025-05-29T15:29:07.211357Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 14415, node 1
2025-05-29T15:29:07.231851Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:29:07.231865Z node 1 :NET_CLASSIFIER WARN: net_class ... s/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055
1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C
8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9
10. /-S/util/thread/factory.h:15: Execute @ 0x16344199
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C
13. ??:0: ?? @ 0x7F3B4A48AAC2
14. ??:0: ?? @ 0x7F3B4A51C84F
Trying to start YDB, gRPC: 20024, MsgBus: 6445
2025-05-29T15:29:25.225641Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890185368390112:2072];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:25.225657Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001189/r3tmp/tmp40cGXJ/pdisk_1.dat
2025-05-29T15:29:25.314794Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 20024, node 1
2025-05-29T15:29:25.331400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:29:25.331425Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:29:25.331427Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:29:25.331466Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:6445
TClient is connected to server localhost:6445
2025-05-29T15:29:25.370681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:29:25.370707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:29:25.371882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:29:25.385305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:25.393648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:25.455901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:25.475709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:25.488014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:25.538648Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890185368391705:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.538677Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.577926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.632853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.688654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.697531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.711734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.725455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.739987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.755542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890185368392362:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.755579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.755585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890185368392367:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.756251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:25.759610Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890185368392369:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:25.850671Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890185368392420:3400] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:25.945533Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890185368392436:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:25.945649Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODNkYmYwNTMtYWI1ZTA1NzUtODgzNjgzZWItZjAwMDM4Mg==, ActorId: [1:7509890185368391702:2401], ActorState: ExecuteState, TraceId: 01jweak5qv43wzryhphp0gbdvb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:25.946361Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FE8B142EAC2 14. ??:0: ?? @ 0x7FE8B14C084F >> KqpScheme::CreateTableWithPartitionAtKeysComplex >> KqpOlapScheme::DropColumn >> KqpScheme::AlterTableAddExplicitSyncVectorKMeansTreeIndex >> BsControllerConfig::MergeBoxes [GOOD] >> KqpScheme::CreateTableWithVectorIndex |72.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |72.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |72.1%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut >> KqpScheme::CreateAndDropUser-StrictAclCheck >> KqpConstraints::SerialTypeSmallSerial >> KqpOlapScheme::DropTable >> KqpAcl::AclRevoke+UseSink-IsOlap ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MergeBoxes [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11015:2156] recipient: [1:10814:2166] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11015:2156] recipient: [1:10814:2166] Leader for TabletID 72057594037932033 is [1:11112:2168] sender: [1:11115:2156] recipient: [1:10814:2166] 2025-05-29T15:28:29.627737Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:28:29.628393Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:28:29.628455Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:28:29.628712Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:28:29.628783Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:28:29.628831Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:28:29.628834Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:28:29.628906Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:28:29.629566Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:28:29.629589Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:28:29.629610Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:28:29.629627Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:28:29.629635Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:28:29.629644Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:11112:2168] sender: [1:11138:2156] recipient: [1:110:2157] 2025-05-29T15:28:29.640394Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:28:29.640434Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:28:29.650841Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:28:29.650900Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:28:29.650916Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:28:29.650934Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:28:29.650965Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:28:29.650980Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:28:29.650987Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:28:29.651000Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:28:29.661362Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:28:29.661413Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:28:29.671748Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:28:29.671804Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:28:29.671959Z node 1 :BS_CONTROLLER 
DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:28:29.671964Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:28:29.671999Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:28:29.672004Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:28:29.673858Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk0" } Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" } Drive { Path: "/dev/disk3" } Drive { Path: "/dev/disk4" } Drive { Path: "/dev/disk5" } Drive { Path: "/dev/disk6" } Drive { Path: "/dev/disk7" } Drive { Path: "/dev/disk8" Type: SSD } Drive { Path: "/dev/disk9" Type: SSD } Drive { Path: "/dev/disk10" Type: SSD } Drive { Path: "/dev/disk11" Type: SSD } Drive { Path: "/dev/disk12" Type: SSD } Drive { Path: "/dev/disk13" Type: SSD } Drive { Path: "/dev/disk14" Type: SSD } Drive { Path: "/dev/disk15" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 
1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12061 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12062 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12063 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12064 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12065 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12066 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12067 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12068 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12069 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12070 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12071 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12072 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12073 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12074 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12075 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12076 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12077 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12078 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12079 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12080 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12081 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12082 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12083 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12084 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12085 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12086 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12087 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12088 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12089 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12090 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12091 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12092 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12093 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12094 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12095 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12096 } HostConfigId: 1 } Host { Ke ... 
0} Create new pdisk PDiskId# 275:1002 Path# /dev/disk3 2025-05-29T15:29:21.560908Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 276:1000 Path# /dev/disk1 2025-05-29T15:29:21.560911Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 276:1001 Path# /dev/disk2 2025-05-29T15:29:21.560914Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 276:1002 Path# /dev/disk3 2025-05-29T15:29:21.560917Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 277:1000 Path# /dev/disk1 2025-05-29T15:29:21.560920Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 277:1001 Path# /dev/disk2 2025-05-29T15:29:21.560923Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 277:1002 Path# /dev/disk3 2025-05-29T15:29:21.560926Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 278:1000 Path# /dev/disk1 2025-05-29T15:29:21.560929Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 278:1001 Path# /dev/disk2 2025-05-29T15:29:21.560932Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 278:1002 Path# /dev/disk3 2025-05-29T15:29:21.560935Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 279:1000 Path# /dev/disk1 2025-05-29T15:29:21.560937Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 279:1001 Path# /dev/disk2 2025-05-29T15:29:21.560940Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 279:1002 Path# /dev/disk3 2025-05-29T15:29:21.560943Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 280:1000 Path# /dev/disk1 2025-05-29T15:29:21.560946Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 280:1001 Path# /dev/disk2 2025-05-29T15:29:21.560948Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 280:1002 Path# /dev/disk3 2025-05-29T15:29:21.560951Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 281:1000 Path# /dev/disk1 2025-05-29T15:29:21.560955Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 281:1001 Path# /dev/disk2 2025-05-29T15:29:21.560958Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 281:1002 Path# /dev/disk3 2025-05-29T15:29:21.560960Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 282:1000 Path# /dev/disk1 2025-05-29T15:29:21.560963Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 282:1001 Path# /dev/disk2 2025-05-29T15:29:21.560968Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 282:1002 Path# /dev/disk3 2025-05-29T15:29:21.560971Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 283:1000 Path# /dev/disk1 2025-05-29T15:29:21.560973Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 283:1001 Path# /dev/disk2 2025-05-29T15:29:21.560976Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} 
Create new pdisk PDiskId# 283:1002 Path# /dev/disk3 2025-05-29T15:29:21.560979Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 284:1000 Path# /dev/disk1 2025-05-29T15:29:21.639707Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 284:1001 Path# /dev/disk2 2025-05-29T15:29:21.639743Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 284:1002 Path# /dev/disk3 2025-05-29T15:29:21.639748Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 285:1000 Path# /dev/disk1 2025-05-29T15:29:21.639751Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 285:1001 Path# /dev/disk2 2025-05-29T15:29:21.639756Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 285:1002 Path# /dev/disk3 2025-05-29T15:29:21.639760Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 286:1000 Path# /dev/disk1 2025-05-29T15:29:21.639764Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 286:1001 Path# /dev/disk2 2025-05-29T15:29:21.639768Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 286:1002 Path# /dev/disk3 2025-05-29T15:29:21.639773Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 287:1000 Path# /dev/disk1 2025-05-29T15:29:21.639777Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 287:1001 Path# /dev/disk2 2025-05-29T15:29:21.639789Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 287:1002 Path# /dev/disk3 2025-05-29T15:29:21.639793Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 288:1000 Path# /dev/disk1 2025-05-29T15:29:21.639798Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 288:1001 Path# /dev/disk2 2025-05-29T15:29:21.639802Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 288:1002 Path# /dev/disk3 2025-05-29T15:29:21.639806Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 289:1000 Path# /dev/disk1 2025-05-29T15:29:21.639809Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 289:1001 Path# /dev/disk2 2025-05-29T15:29:21.639817Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 289:1002 Path# /dev/disk3 2025-05-29T15:29:21.639821Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 290:1000 Path# /dev/disk1 2025-05-29T15:29:21.639825Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 290:1001 Path# /dev/disk2 2025-05-29T15:29:21.639829Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 290:1002 Path# /dev/disk3 2025-05-29T15:29:21.639833Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 291:1000 Path# /dev/disk1 2025-05-29T15:29:21.639836Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 291:1001 Path# /dev/disk2 2025-05-29T15:29:21.639840Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} 
Create new pdisk PDiskId# 291:1002 Path# /dev/disk3 2025-05-29T15:29:21.639843Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 292:1000 Path# /dev/disk1 2025-05-29T15:29:21.639849Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 292:1001 Path# /dev/disk2 2025-05-29T15:29:21.639853Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 292:1002 Path# /dev/disk3 2025-05-29T15:29:21.639857Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 293:1000 Path# /dev/disk1 2025-05-29T15:29:21.639866Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 293:1001 Path# /dev/disk2 2025-05-29T15:29:21.639872Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 293:1002 Path# /dev/disk3 2025-05-29T15:29:21.639876Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 294:1000 Path# /dev/disk1 2025-05-29T15:29:21.639880Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 294:1001 Path# /dev/disk2 2025-05-29T15:29:21.639883Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 294:1002 Path# /dev/disk3 2025-05-29T15:29:21.639888Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 295:1000 Path# /dev/disk1 2025-05-29T15:29:21.639899Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 295:1001 Path# /dev/disk2 2025-05-29T15:29:21.639902Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 295:1002 Path# /dev/disk3 2025-05-29T15:29:21.639906Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 296:1000 Path# /dev/disk1 2025-05-29T15:29:21.639910Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 296:1001 Path# /dev/disk2 2025-05-29T15:29:21.639914Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 296:1002 Path# /dev/disk3 2025-05-29T15:29:21.639918Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 297:1000 Path# /dev/disk1 2025-05-29T15:29:21.639921Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 297:1001 Path# /dev/disk2 2025-05-29T15:29:21.639925Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 297:1002 Path# /dev/disk3 2025-05-29T15:29:21.639929Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 298:1000 Path# /dev/disk1 2025-05-29T15:29:21.639932Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 298:1001 Path# /dev/disk2 2025-05-29T15:29:21.639935Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 298:1002 Path# /dev/disk3 2025-05-29T15:29:21.639939Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 299:1000 Path# /dev/disk1 2025-05-29T15:29:21.639943Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 299:1001 Path# /dev/disk2 2025-05-29T15:29:21.639949Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} 
Create new pdisk PDiskId# 299:1002 Path# /dev/disk3 2025-05-29T15:29:21.639954Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 300:1000 Path# /dev/disk1 2025-05-29T15:29:21.639960Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 300:1001 Path# /dev/disk2 2025-05-29T15:29:21.639966Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 300:1002 Path# /dev/disk3 2025-05-29T15:29:21.671291Z node 251 :BS_CONTROLLER ERROR: {BSC07@impl.h:2181} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.111033s 2025-05-29T15:29:21.671373Z node 251 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:689} StateWork event processing took too much time Type# 2146435078 Duration# 0.111132s 2025-05-29T15:29:21.684450Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MergeBoxes { OriginBoxId: 2 OriginBoxGeneration: 1 TargetBoxId: 1 TargetBoxGeneration: 1 StoragePoolIdMap { OriginStoragePoolId: 1 TargetStoragePoolId: 2 } } } } 2025-05-29T15:29:21.699553Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { ReadBox { BoxId: 1 } } Command { QueryBaseConfig { } } } >> KqpScheme::AlterTableAddUniqIndexPublicApiFeatureOff >> KqpScheme::CreateTableWithUniformPartitionsUncompat |72.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> KqpOlapScheme::DropColumn [GOOD] >> KqpOlapScheme::DropColumnAfterAdd >> KqpOlapScheme::DropTable [GOOD] >> KqpOlapScheme::DropColumnOldSchemeBulkUpsert >> KqpAcl::AclDml+UseSink-IsOlap >> KqpScheme::AlterNonExistingResourcePoolClassifier >> KqpScheme::AlterTableRenameVectorIndex >> KqpConstraints::IndexedTableAndNotNullColumn >> KqpOlapScheme::DropColumnAfterAdd [GOOD] >> KqpOlapScheme::DropColumnErrors |72.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> TSchemeShardSplitBySizeTest::SplitShardsWithPgKey >> KqpOlapScheme::DropColumnOldSchemeBulkUpsert [GOOD] >> KqpOlapScheme::DropThenAddColumn >> TInterconnectTest::TestCrossConnect [GOOD] >> KqpConstraints::AlterTableAddNotNullWithDefault >> TInterconnectTest::TestManyEventsWithReconnect >> KqpScheme::DropResourcePoolClassifier >> KqpScheme::NEG_CreateTableWithUnsupportedStoreType >> KqpScheme::ModifyPermissionsByIncorrectPaths >> TInterconnectTest::TestManyEventsWithReconnect [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization >> KqpScheme::CreateTableWithPgColumn >> KqpScheme::AlterColumnTableTtl >> KqpOlapScheme::DropColumnErrors [GOOD] >> KqpOlapScheme::DropColumnAfterInsert >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] >> KqpScheme::CreatedAt >> KqpScheme::NEG_CreateTableWithUnsupportedStoreType [GOOD] >> KqpScheme::OlapSharding_KeyOnly >> KqpOlapScheme::DropThenAddColumn [GOOD] >> KqpOlapScheme::DropThenAddColumnCompaction >> TSchemeShardSplitBySample::NoResultOnEmptySample [GOOD] >> TSchemeShardSplitBySample::NoResultOnSampleTooSmall [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitionSizeCompat >> KqpAcl::WriteSuccess |72.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestBootProgress [GOOD] Test command err: 2025-05-29T15:26:04.912766Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:26:04.913713Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:04.913786Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:26:04.914024Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:48:2073] ControllerId# 72057594037932033 2025-05-29T15:26:04.914032Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:26:04.914065Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:26:04.914092Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:26:04.918487Z node 2 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:26:04.918596Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:26:04.918607Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:294: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:26:04.918999Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:47:2072] Create Queue# [2:56:2077] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:04.919030Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:47:2072] Create Queue# [2:57:2078] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:04.919061Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:47:2072] Create Queue# [2:58:2079] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:04.919094Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:47:2072] Create Queue# [2:59:2080] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:04.919129Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:47:2072] Create Queue# [2:60:2081] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:04.919161Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:47:2072] Create Queue# [2:61:2082] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:04.919193Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:47:2072] Create Queue# [2:62:2083] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:04.919198Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:26:04.919218Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037932033] ::Bootstrap [2:48:2073] 2025-05-29T15:26:04.919224Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037932033] lookup [2:48:2073] 2025-05-29T15:26:04.919240Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:26:04.919253Z node 2 :BS_NODE DEBUG: 
{NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:26:04.919362Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:321} Bootstrap 2025-05-29T15:26:04.920007Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-05-29T15:26:04.920047Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-05-29T15:26:04.920200Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-05-29T15:26:04.920481Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-05-29T15:26:04.920493Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:23} StartLocalProxy GroupId# 0 2025-05-29T15:26:04.920676Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:71:2076] ControllerId# 72057594037932033 2025-05-29T15:26:04.920681Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-05-29T15:26:04.920700Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:296} StartInvalidGroupProxy GroupId# 4294967295 2025-05-29T15:26:04.920725Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:308} StartRequestReportingThrottler 2025-05-29T15:26:04.920799Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [2:48:2073] 2025-05-29T15:26:04.920812Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:04.920817Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:26:04.920840Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:26:04.924015Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037936129] ::Bootstrap [1:41:2064] 2025-05-29T15:26:04.924033Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037936129] lookup [1:41:2064] 2025-05-29T15:26:04.924047Z node 1 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:26:04.926422Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:146: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-05-29T15:26:04.926443Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:294: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-05-29T15:26:04.928342Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:70:2075] Create Queue# [1:78:2081] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:04.928382Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:70:2075] Create Queue# [1:79:2082] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:04.928414Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:70:2075] 
Create Queue# [1:80:2083] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:04.928455Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:70:2075] Create Queue# [1:81:2084] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:04.928489Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:70:2075] Create Queue# [1:82:2085] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:04.928515Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:70:2075] Create Queue# [1:83:2086] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:04.928546Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:70:2075] Create Queue# [1:84:2087] targetNodeId# 1 Marker# DSP01 2025-05-29T15:26:04.928552Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:29: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-05-29T15:26:04.928573Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72057594037932033] ::Bootstrap [1:71:2076] 2025-05-29T15:26:04.928579Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72057594037932033] lookup [1:71:2076] 2025-05-29T15:26:04.928590Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:234: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-05-29T15:26:04.928604Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:20} Bootstrap 2025-05-29T15:26:04.928794Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-05-29T15:26:04.928825Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:04.928897Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:26:04.971809Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:71:2076] 2025-05-29T15:26:04.971845Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:04.971853Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-05-29T15:26:04.972199Z node 1 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:26:04.972287Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:04.972297Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-05-29T15:26:04.973216Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-05-29T15:26:04.973258Z node 2 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:26:04.973267Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-05-29T15:26:04.973311Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:149: TClient[72057594037932033] queue send [1:71:2076] 2025-05-29T15:26:04.973320Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:04.973326Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 
NumItemsRead# 0 2025-05-29T15:26:04.973341Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-05-29T15:26:04.973477Z node 1 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-05-29T15:26:04.973492Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:26:04.973510Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-05-29T15:26:04.973518Z node 1 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:26:04.973523Z node 1 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:26:04.973560Z node 1 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:97:2093] 2025-05-29T15:26:04.973582Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:300} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-05-29T15:26:04.973641Z node 2 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-05-29T15:26:04.973648Z node 2 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:26:04.973652Z node 2 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05 ... ikimr::(anonymous namespace)::TDummyFlatTablet::TTxInit} queued, type NKikimr::(anonymous namespace)::TDummyFlatTablet::TTxInit 2025-05-29T15:28:57.097570Z node 71 :TABLET_EXECUTOR DEBUG: Leader{72075186224037893:1:3} Tx{2, NKikimr::(anonymous namespace)::TDummyFlatTablet::TTxInit} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:28:57.097590Z node 71 :TABLET_EXECUTOR DEBUG: Leader{72075186224037893:1:3} Tx{2, NKikimr::(anonymous namespace)::TDummyFlatTablet::TTxInit} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-05-29T15:28:57.097594Z node 71 :TABLET_EXECUTOR DEBUG: Leader{72075186224037893:1:3} Tx{2, NKikimr::(anonymous namespace)::TDummyFlatTablet::TTxInit} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:28:57.097609Z node 71 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:172: [312c30a25b7db152] bootstrap ActorId# [71:634:2562] Group# 2147483653 TabletId# 72075186224037893 Channel# 1 RecordGeneration# 1 PerGenerationCounter# 1 Deadline# 586524-01-19T08:01:49.551615Z CollectGeneration# 1 CollectStep# 0 Collect# true Hard# false RestartCounter# 0 Marker# DSPC03 2025-05-29T15:28:57.097614Z node 71 :BS_PROXY DEBUG: group_sessions.h:165: Send to queueActorId# [71:598:2531] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[72075186224037893:1:1:1] collect=[1:0] cookie# 0 2025-05-29T15:28:57.097647Z node 71 :TABLET_MAIN INFO: tablet_sys.cpp:1009: Tablet: 72075186224037893 Active! 
Generation: 1, Type: Dummy started in 9msec Marker# TSYS24 2025-05-29T15:28:57.097653Z node 71 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [72075186224037893] Activate 2025-05-29T15:28:57.097696Z node 71 :BS_PROXY_COLLECT DEBUG: dsproxy_collect.cpp:44: [52d29a5e326db0d3] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037893 RecordGeneration# 1 Channel# 0 VDisk# [80000000:1:0:0:0]} Marker# DSPC01 2025-05-29T15:28:57.097703Z node 71 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:112: [52d29a5e326db0d3] Result# TEvCollectGarbageResult {TabletId# 72075186224037893 RecordGeneration# 1 PerGenerationCounter# 1 Channel# 0 Status# OK} Marker# DSPC02 2025-05-29T15:28:57.097786Z node 71 :LOCAL DEBUG: local.cpp:765: TLocalNodeRegistrar: Handle TEvTablet::TEvReady tablet 72075186224037893 generation 1 2025-05-29T15:28:57.097790Z node 71 :LOCAL DEBUG: local.cpp:740: TLocalNodeRegistrar: tablet (72075186224037893,0) marked as running at generation 1 2025-05-29T15:28:57.097812Z node 71 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:155: TClient[72057594037927937] send [71:52:2092] 2025-05-29T15:28:57.097815Z node 71 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:629: TClient[72057594037927937] push event to server [71:52:2092] 2025-05-29T15:28:57.097829Z node 71 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [71:51:2092] EventType# 268960257 2025-05-29T15:28:57.097839Z node 71 :BS_PROXY_COLLECT DEBUG: dsproxy_collect.cpp:44: [312c30a25b7db152] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037893 RecordGeneration# 1 Channel# 1 VDisk# [80000005:1:0:0:0]} Marker# DSPC01 2025-05-29T15:28:57.097842Z node 71 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:112: [312c30a25b7db152] Result# TEvCollectGarbageResult {TabletId# 72075186224037893 RecordGeneration# 1 PerGenerationCounter# 1 Channel# 1 Status# OK} Marker# DSPC02 2025-05-29T15:28:57.097866Z node 71 :HIVE DEBUG: hive_impl.cpp:480: HIVE#72057594037927937 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037893 2025-05-29T15:28:57.097875Z node 71 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{37, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2025-05-29T15:28:57.097878Z node 71 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{37, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:28:57.097885Z node 71 :HIVE DEBUG: tx__update_tablet_status.cpp:77: HIVE#72057594037927937 THive::TTxUpdateTabletStatus::Execute for tablet Dummy.72075186224037893.Leader.1 status 0 generation 1 follower 0 from local [71:51:2092] 2025-05-29T15:28:57.097895Z node 71 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037927937 Tablet(Dummy.72075186224037893.Leader.1) VolatileState: Starting -> Running (Node 71) 2025-05-29T15:28:57.097901Z node 71 :HIVE TRACE: node_info.cpp:118: HIVE#72057594037927937 Node(71, (0,1048576,0,0)->(0,0,0,0)) 2025-05-29T15:28:57.097915Z node 71 :HIVE TRACE: hive_impl.cpp:2557: HIVE#72057594037927937 UpdateTotalResources: ObjectId (72057594037927937,0): {Memory: 1048576} -> {} 2025-05-29T15:28:57.097920Z node 71 :HIVE TRACE: hive_impl.cpp:2563: HIVE#72057594037927937 UpdateTotalResources: Type Dummy: {Memory: 1048576} -> {} 2025-05-29T15:28:57.097926Z node 71 :HIVE TRACE: node_info.cpp:118: HIVE#72057594037927937 Node(71, (0,0,0,0)->(0,1048576,0,0)) 2025-05-29T15:28:57.097930Z node 71 :HIVE TRACE: hive_impl.cpp:2557: 
HIVE#72057594037927937 UpdateTotalResources: ObjectId (72057594037927937,0): {} -> {Memory: 1048576} 2025-05-29T15:28:57.097934Z node 71 :HIVE TRACE: hive_impl.cpp:2563: HIVE#72057594037927937 UpdateTotalResources: Type Dummy: {} -> {Memory: 1048576} 2025-05-29T15:28:57.097958Z node 71 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72057594037927937 ProcessBootQueue (0) 2025-05-29T15:28:57.097962Z node 71 :HIVE TRACE: hive_impl.cpp:344: HIVE#72057594037927937 ProcessBootQueue - sending 2025-05-29T15:28:57.097969Z node 71 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{37, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{25, redo 162b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-05-29T15:28:57.097973Z node 71 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{37, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:28:57.097994Z node 71 :HIVE TRACE: hive_impl.cpp:328: HIVE#72057594037927937 ProcessBootQueue - executing 2025-05-29T15:28:57.097996Z node 71 :HIVE DEBUG: hive_impl.cpp:361: HIVE#72057594037927937 ProcessWaitQueue (5) 2025-05-29T15:28:57.097998Z node 71 :HIVE DEBUG: hive_impl.cpp:342: HIVE#72057594037927937 ProcessBootQueue (0) 2025-05-29T15:28:57.098003Z node 71 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{38, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-05-29T15:28:57.098005Z node 71 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{38, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-29T15:28:57.098008Z node 71 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037927937 THive::TTxProcessBootQueue()::Execute 2025-05-29T15:28:57.098011Z node 71 :HIVE DEBUG: hive_impl.cpp:222: HIVE#72057594037927937 Handle ProcessBootQueue (size: 0) 2025-05-29T15:28:57.098014Z node 71 :HIVE DEBUG: hive_impl.cpp:225: HIVE#72057594037927937 Handle ProcessWaitQueue (size: 5) 2025-05-29T15:28:57.098018Z node 71 :HIVE TRACE: hive_impl.cpp:238: HIVE#72057594037927937 Tablet 72075186224037891.0 has priority 4.000002048 2025-05-29T15:28:57.098023Z node 71 :HIVE DEBUG: hive_impl.cpp:1211: HIVE#72057594037927937 [FBN] Finding best node for tablet Hive.72075186224037891.Leader.0 2025-05-29T15:28:57.098028Z node 71 :HIVE TRACE: hive_impl.cpp:1212: HIVE#72057594037927937 [FBN] Tablet Hive.72075186224037891.Leader.0 family {Hive.72075186224037891.Leader.0 Booting} 2025-05-29T15:28:57.098037Z node 71 :HIVE TRACE: hive_impl.cpp:1335: HIVE#72057594037927937 [FBN] Node 71 is not allowed to run the tablet Hive.72075186224037891.Leader.0 node domains [72057594046678944:1] tablet object domain 52:42 tablet allowed domains [52:42] tablet effective allowed domains [52:42] 2025-05-29T15:28:57.098040Z node 71 :HIVE TRACE: hive_impl.cpp:1343: HIVE#72057594037927937 [FBN] Tablet Hive.72075186224037891.Leader.0 selected nodes count 0 2025-05-29T15:28:57.098043Z node 71 :HIVE TRACE: hive_impl.cpp:1375: HIVE#72057594037927937 [FBN] Tablet Hive.72075186224037891.Leader.0 no node was selected 2025-05-29T15:28:57.098050Z node 71 :HIVE DEBUG: hive_impl.cpp:302: HIVE#72057594037927937 ProcessBootQueue - BootQueue empty (WaitQueue: 5) 2025-05-29T15:28:57.098053Z node 71 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{38, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{26, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-05-29T15:28:57.098056Z node 71 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{38, 
NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-05-29T15:28:57.098122Z node 71 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:50: TClient[72075186224037893] ::Bootstrap [71:636:2564] 2025-05-29T15:28:57.098126Z node 71 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:533: TClient[72075186224037893] lookup [71:636:2564] 2025-05-29T15:28:57.098143Z node 71 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:484: Handle TEvForward tabletId: 72075186224037893 entry.State: StInit ev: {EvForward TabletID: 72075186224037893 Ev: nullptr Flags: 1:2:0} 2025-05-29T15:28:57.098166Z node 71 :STATESTORAGE DEBUG: statestorage_proxy.cpp:246: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037893 Cookie: 0 ProxyOptions: SigNone} 2025-05-29T15:28:57.098179Z node 71 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037893 Cookie: 0} 2025-05-29T15:28:57.098183Z node 71 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037893 Cookie: 1} 2025-05-29T15:28:57.098186Z node 71 :STATESTORAGE DEBUG: statestorage_replica.cpp:183: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037893 Cookie: 2} 2025-05-29T15:28:57.098192Z node 71 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037893 CurrentLeader: [71:574:2515] CurrentLeaderTablet: [71:589:2527] CurrentGeneration: 1 CurrentStep: 0} 2025-05-29T15:28:57.098201Z node 71 :STATESTORAGE DEBUG: statestorage_proxy.cpp:355: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037893 CurrentLeader: [71:574:2515] CurrentLeaderTablet: [71:589:2527] CurrentGeneration: 1 CurrentStep: 0} 2025-05-29T15:28:57.098213Z node 71 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:610: Handle TEvInfo tabletId: 72075186224037893 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037893 Cookie: 0 CurrentLeader: [71:574:2515] CurrentLeaderTablet: [71:589:2527] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 2 Signature: {{[71:24343667:0] : 2}, {[71:1099535971443:0] : 5}}}} 2025-05-29T15:28:57.098217Z node 71 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:354: ApplyEntry leader tabletId: 72075186224037893 followers: 0 2025-05-29T15:28:57.098223Z node 71 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:279: SelectForward node 71 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [71:574:2515] 2025-05-29T15:28:57.098232Z node 71 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:190: TClient[72075186224037893] forward result local node, try to connect [71:636:2564] 2025-05-29T15:28:57.098235Z node 71 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:671: TClient[72075186224037893]::SendEvent [71:636:2564] 2025-05-29T15:28:57.098245Z node 71 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037893] Accept Connect Originator# [71:636:2564] 2025-05-29T15:28:57.098260Z node 71 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:310: TClient[72075186224037893] connected with status OK role: Leader [71:636:2564] 2025-05-29T15:28:57.098263Z node 71 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:325: TClient[72075186224037893] send queued [71:636:2564] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> 
TInterconnectTest::TestEventWithPayloadSerialization [GOOD] Test command err: Starting iteration 0 Starting iteration 1 Starting iteration 2 Starting iteration 3 Starting iteration 4 Starting iteration 5 Starting iteration 6 Starting iteration 7 Starting iteration 8 Starting iteration 9 Starting iteration 10 Starting iteration 11 Starting iteration 12 Starting iteration 13 Starting iteration 14 Starting iteration 15 Starting iteration 16 Starting iteration 17 Starting iteration 18 Starting iteration 19 Starting iteration 20 Starting iteration 21 Starting iteration 22 Starting iteration 23 Starting iteration 24 Starting iteration 25 Starting iteration 26 Starting iteration 27 Starting iteration 28 Starting iteration 29 Starting iteration 30 Starting iteration 31 Starting iteration 32 Starting iteration 33 Starting iteration 34 Starting iteration 35 Starting iteration 36 Starting iteration 37 Starting iteration 38 Starting iteration 39 Starting iteration 40 Starting iteration 41 Starting iteration 42 Starting iteration 43 Starting iteration 44 Starting iteration 45 Starting iteration 46 Starting iteration 47 Starting iteration 48 Starting iteration 49 Starting iteration 50 Starting iteration 51 Starting iteration 52 Starting iteration 53 Starting iteration 54 Starting iteration 55 Starting iteration 56 Starting iteration 57 Starting iteration 58 Starting iteration 59 Starting iteration 60 Starting iteration 61 Starting iteration 62 Starting iteration 63 Starting iteration 64 Starting iteration 65 Starting iteration 66 Starting iteration 67 Starting iteration 68 Starting iteration 69 Starting iteration 70 Starting iteration 71 Starting iteration 72 Starting iteration 73 Starting iteration 74 Starting iteration 75 Starting iteration 76 Starting iteration 77 Starting iteration 78 Starting iteration 79 Starting iteration 80 Starting iteration 81 Starting iteration 82 Starting iteration 83 Starting iteration 84 Starting iteration 85 Starting iteration 86 Starting iteration 87 Starting iteration 88 Starting iteration 89 Starting iteration 90 Starting iteration 91 Starting iteration 92 Starting iteration 93 Starting iteration 94 Starting iteration 95 Starting iteration 96 Starting iteration 97 Starting iteration 98 Starting iteration 99 Starting iteration 100 Starting iteration 101 Starting iteration 102 Starting iteration 103 Starting iteration 104 Starting iteration 105 Starting iteration 106 Starting iteration 107 Starting iteration 108 Starting iteration 109 Starting iteration 110 Starting iteration 111 Starting iteration 112 Starting iteration 113 Starting iteration 114 Starting iteration 115 Starting iteration 116 Starting iteration 117 Starting iteration 118 Starting iteration 119 Starting iteration 120 Starting iteration 121 Starting iteration 122 Starting iteration 123 Starting iteration 124 Starting iteration 125 Starting iteration 126 Starting iteration 127 Starting iteration 128 Starting iteration 129 Starting iteration 130 Starting iteration 131 Starting iteration 132 Starting iteration 133 Starting iteration 134 Starting iteration 135 Starting iteration 136 Starting iteration 137 Starting iteration 138 Starting iteration 139 Starting iteration 140 Starting iteration 141 Starting iteration 142 Starting iteration 143 Starting iteration 144 Starting iteration 145 Starting iteration 146 Starting iteration 147 Starting iteration 148 Starting iteration 149 Starting iteration 150 Starting iteration 151 Starting iteration 152 Starting iteration 153 Starting iteration 154 Starting iteration 
155 Starting iteration 156 Starting iteration 157 Starting iteration 158 Starting iteration 159 Starting iteration 160 Starting iteration 161 Starting iteration 162 Starting iteration 163 Starting iteration 164 Starting iteration 165 Starting iteration 166 Starting iteration 167 Starting iteration 168 Starting iteration 169 Starting iteration 170 Starting iteration 171 Starting iteration 172 Starting iteration 173 Starting iteration 174 Starting iteration 175 Starting iteration 176 Starting iteration 177 Starting iteration 178 Starting iteration 179 Starting iteration 180 Starting iteration 181 Starting iteration 182 Starting iteration 183 Starting iteration 184 Starting iteration 185 Starting iteration 186 Starting iteration 187 Starting iteration 188 Starting iteration 189 Starting iteration 190 Starting iteration 191 Starting iteration 192 Starting iteration 193 Starting iteration 194 Starting iteration 195 Starting iteration 196 Starting iteration 197 Starting iteration 198 Starting iteration 199 0 0 0 1 0 3 0 7 0 15 0 31 0 63 0 127 0 255 0 511 0 1023 0 2047 0 4095 0 8191 0 16383 0 32767 0 65535 1 0 1 1 1 3 1 7 1 15 1 31 1 63 1 127 1 255 1 511 1 1023 1 2047 1 4095 1 8191 1 16383 1 32767 1 65535 3 0 3 1 3 3 3 7 3 15 3 31 3 63 3 127 3 255 3 511 3 1023 3 2047 3 4095 3 8191 3 16383 3 32767 3 65535 7 0 7 1 7 3 7 7 7 15 7 31 7 63 7 127 7 255 7 511 7 1023 7 2047 7 4095 7 8191 7 16383 7 32767 7 65535 15 0 15 1 15 3 15 7 15 15 15 31 15 63 15 127 15 255 15 511 15 1023 15 2047 15 4095 15 8191 15 16383 15 32767 15 65535 31 0 31 1 31 3 31 7 31 15 31 31 31 63 31 127 31 255 31 511 31 1023 31 2047 31 4095 31 8191 31 16383 31 32767 31 65535 63 0 63 1 63 3 63 7 63 15 63 31 63 63 63 127 63 255 63 511 63 1023 63 2047 63 4095 63 8191 63 16383 63 32767 63 65535 127 0 127 1 127 3 127 7 127 15 127 31 127 63 127 127 127 255 127 511 127 1023 127 2047 127 4095 127 8191 127 16383 127 32767 127 65535 255 0 255 1 255 3 255 7 255 15 255 31 255 63 255 127 255 255 255 511 255 1023 255 2047 255 4095 255 8191 255 16383 255 32767 255 65535 511 0 511 1 511 3 511 7 511 15 511 31 511 63 511 127 511 255 511 511 511 1023 511 2047 511 4095 511 8191 511 16383 511 32767 511 65535 1023 0 1023 1 1023 3 1023 7 1023 15 1023 31 1023 63 1023 127 1023 255 1023 511 1023 1023 1023 2047 1023 4095 1023 8191 1023 16383 1023 32767 1023 65535 2047 0 2047 1 2047 3 2047 7 2047 15 2047 31 2047 63 2047 127 2047 255 2047 511 2047 1023 2047 2047 2047 4095 2047 8191 2047 16383 2047 32767 2047 65535 4095 0 4095 1 4095 3 4095 7 4095 15 4095 31 4095 63 4095 127 4095 255 4095 511 4095 1023 4095 2047 4095 4095 4095 8191 4095 16383 4095 32767 4095 65535 8191 0 8191 1 8191 3 8191 7 8191 15 8191 31 8191 63 8191 127 8191 255 8191 511 8191 1023 8191 2047 8191 4095 8191 8191 8191 16383 8191 32767 8191 65535 16383 0 16383 1 16383 3 16383 7 16383 15 16383 31 16383 63 16383 127 16383 255 16383 511 16383 1023 16383 2047 16383 4095 16383 8191 16383 16383 16383 32767 16383 65535 32767 0 32767 1 32767 3 32767 7 32767 15 32767 31 32767 63 32767 127 32767 255 32767 511 32767 1023 32767 2047 32767 4095 32767 8191 32767 16383 32767 32767 32767 65535 65535 0 65535 1 65535 3 65535 7 65535 15 65535 31 65535 63 65535 127 65535 255 65535 511 65535 1023 65535 2047 65535 4095 65535 8191 65535 16383 65535 32767 65535 65535 |72.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySample::NoResultOnSampleTooSmall [GOOD] >> KqpScheme::UseDroppedTable |72.2%| [LD] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init >> KqpScheme::AlterColumnTableTtl [GOOD] |72.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init >> KqpScheme::OlapSharding_KeyOnly [GOOD] >> KqpScheme::AlterTransfer_QueryService |72.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |72.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |72.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterColumnTableTtl [GOOD] Test command err: Trying to start YDB, gRPC: 27435, MsgBus: 5120 2025-05-29T15:29:17.704026Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890152334153701:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:17.704066Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001136/r3tmp/tmpnk4nRq/pdisk_1.dat 2025-05-29T15:29:17.776446Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27435, node 1 2025-05-29T15:29:17.798091Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:17.798103Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:17.798106Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:17.798169Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:17.805592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:17.805637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:17.806695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5120 TClient is connected to server localhost:5120 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:29:17.854571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:17.860621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:17.926513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:17.947825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:17.961450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:18.045844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890156629122599:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.045867Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.099398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.106997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.116037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.130654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.144473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.158826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.173023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.189077Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890156629123251:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.189111Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.189110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890156629123256:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.189872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:18.192492Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890156629123258:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:18.252695Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890156629123309:3399] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:18.323166Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890156629123325:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:18.323649Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzNjM2ZjMjQtY2YxNmEyNjItODA1ZDU0MjAtOTQ0MThlZQ==, ActorId: [1:7509890156629122596:2401], ActorState: ExecuteState, TraceId: 01jweajybc7qj8r2h9m1hshbsd, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:18.324366Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FB0C6C28AC2 14. ??:0: ?? @ 0x7FB0C6CBA84F Trying to start YDB, gRPC: 20633, MsgBus: 15343 2025-05-29T15:29:21.869102Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890165900407931:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:21.869130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001136/r3tmp/tmpMKp3p1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20633, node 1 2025-05-29T15:29:21.931072Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:21.937303Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:21.937315Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:21.937316Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 20 ... 
state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:29:33.311956Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:29:33.311958Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:29:33.312012Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:29:33.312014Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:29:33.312068Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:29:33.312075Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:29:33.315087Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890217992127009:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:33.315107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:33.317983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:33.325934Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:29:33.325935Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:29:33.326004Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:29:33.326015Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:29:33.326070Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:29:33.326093Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:29:33.326154Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:29:33.326173Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:29:33.326215Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:29:33.326226Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:29:33.329268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890217992127076:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:33.329286Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:33.331753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:33.339825Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715665; 2025-05-29T15:29:33.339916Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715665; 2025-05-29T15:29:33.339984Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715665; 2025-05-29T15:29:33.340054Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715665; 2025-05-29T15:29:33.340278Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715665; 2025-05-29T15:29:33.340353Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715665; 2025-05-29T15:29:33.340370Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715665; 2025-05-29T15:29:33.340419Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715665; 2025-05-29T15:29:33.340433Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715665; 2025-05-29T15:29:33.340494Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715665; 2025-05-29T15:29:33.343985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509890217992126284:2336];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:33.344429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890217992126281:2333];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:33.344835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037894;self_id=[1:7509890217992126282:2334];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:33.344868Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 
2025-05-29T15:29:33.344872Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-05-29T15:29:33.344874Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-05-29T15:29:33.344876Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-05-29T15:29:33.344878Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-05-29T15:29:33.344881Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2025-05-29T15:29:33.344883Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-05-29T15:29:33.345313Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-05-29T15:29:33.345316Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-05-29T15:29:33.345318Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-05-29T15:29:33.345351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037895;self_id=[1:7509890217992126287:2339];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:33.345530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890217992126286:2338];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:33.346267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037897;self_id=[1:7509890217992126296:2340];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:33.346641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037892;self_id=[1:7509890217992126280:2332];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:33.346932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890217992126285:2337];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:33.347183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037896;self_id=[1:7509890217992126352:2341];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-29T15:29:33.347678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890217992126283:2335];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; |72.2%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |72.2%| [TA] $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::OlapSharding_KeyOnly [GOOD] Test command err: Trying to start YDB, gRPC: 28660, MsgBus: 3872 2025-05-29T15:29:14.181105Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890138429202020:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:14.181123Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00114d/r3tmp/tmpTkFUiz/pdisk_1.dat 2025-05-29T15:29:14.234139Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28660, node 1 2025-05-29T15:29:14.254836Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:14.254849Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:14.254851Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:14.254894Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3872 2025-05-29T15:29:14.282546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:14.282567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:14.283669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3872 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:14.311235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:14.319976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:14.335275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:14.358051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:14.368744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:14.575556Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890138429203615:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:14.575586Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:14.635029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:14.644073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:14.699805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:14.756306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:14.763152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:14.778578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:14.792279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:14.811783Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890138429204272:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:14.811828Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:14.811950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890138429204277:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:14.812871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:14.820160Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890138429204279:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:14.910665Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890138429204330:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:15.033636Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890138429204346:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:15.033772Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzI4YmM5ZmQtMzVmYjVlZTMtNjNlNzg0NWItOTQwOWQxOGI=, ActorId: [1:7509890138429203597:2401], ActorState: ExecuteState, TraceId: 01jweajv1vc99d8k8bvqf74g6f, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:15.034565Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F01F49FDAC2 14. ??:0: ?? @ 0x7F01F4A8F84F Trying to start YDB, gRPC: 28912, MsgBus: 15968 2025-05-29T15:29:18.542506Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890155794562454:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:18.542529Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00114d/r3tmp/tmp0KiAyI/pdisk_1.dat 2025-05-29T15:29:18.597213Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28912, node 1 2025-05-29T15:29:18.617697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:18.617709Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:18.617710Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 20 ... d: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.170706Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.170803Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890195784244923:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.171716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:28.174800Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890195784244925:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:28.248861Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890195784244976:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:28.335703Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890195784244992:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:28.335809Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDJiZWZlYTQtMmRiMzVjYjMtZDFiMjliLTk5NDU4NGYz, ActorId: [1:7509890195784244246:2401], ActorState: ExecuteState, TraceId: 01jweak83acb2z2s61m5100m98, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:28.336453Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F6BA31A5AC2 14. ??:0: ?? @ 0x7F6BA323784F Trying to start YDB, gRPC: 1716, MsgBus: 13901 2025-05-29T15:29:32.295228Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890213185133591:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:32.295253Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00114d/r3tmp/tmpyBbZ4Y/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1716, node 1 2025-05-29T15:29:32.357721Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:32.365431Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:32.365445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:32.365447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:32.365500Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13901 2025-05-29T15:29:32.396845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:32.396867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:32.397981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13901 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:32.424133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:32.655205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890213185134214:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.655232Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 13693, MsgBus: 18534 2025-05-29T15:29:33.048959Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890218712493003:2150];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00114d/r3tmp/tmp4iqbHe/pdisk_1.dat 2025-05-29T15:29:33.058887Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:29:33.073558Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13693, node 2 2025-05-29T15:29:33.098237Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:33.098249Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:33.098251Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:33.098306Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18534 2025-05-29T15:29:33.156026Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:33.156068Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:33.157263Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18534 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:33.172164Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:33.174982Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:33.501961Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890218712493535:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:33.501992Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:33.506862Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509890218712493555:2295] txid# 281474976715658, issues: { message: "sharding column name have to been primary key column: Value1" severity: 1 } |72.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |72.3%| [TA] {RESULT} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScheme::CreateAndDropGroup >> TExtSubDomainTest::DeclareAndLs >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false >> KqpScheme::DoubleCreateExternalTable >> KqpScheme::CreateTableWithPartitionAtKeysUuid >> KqpConstraints::SerialTypeSerial4 >> TExtSubDomainTest::DeclareAndLs [GOOD] >> KqpScheme::AlterSequence >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false >> KqpScheme::CreateAsyncReplication >> KqpConstraints::SerialTypeSerial8 |72.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |72.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |72.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs [GOOD] Test command err: 2025-05-29T15:29:34.542111Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890222239908474:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:34.543058Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002437/r3tmp/tmp7XBN44/pdisk_1.dat 2025-05-29T15:29:34.622879Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:8437 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-05-29T15:29:34.652175Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509890222239908562:2101] Handle TEvNavigate describe path dc-1 2025-05-29T15:29:34.653969Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509890222239908842:2251] HANDLE EvNavigateScheme dc-1 2025-05-29T15:29:34.654019Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7509890222239908589:2115], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:34.654030Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:7509890222239908589:2115], path# /dc-1, domainOwnerId# 72057594046644480 2025-05-29T15:29:34.654078Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:7509890222239908843:2252][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:29:34.654471Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890222239908271:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890222239908847:2252] 2025-05-29T15:29:34.654489Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890222239908271:2049] Subscribe: subscriber# [1:7509890222239908847:2252], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:34.654513Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890222239908274:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890222239908848:2252] 2025-05-29T15:29:34.654516Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890222239908274:2052] Subscribe: subscriber# [1:7509890222239908848:2252], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:34.654521Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890222239908277:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890222239908849:2252] 2025-05-29T15:29:34.654524Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890222239908277:2055] Subscribe: subscriber# [1:7509890222239908849:2252], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:34.654534Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890222239908847:2252][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890222239908271:2049] 2025-05-29T15:29:34.654538Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890222239908848:2252][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890222239908274:2052] 2025-05-29T15:29:34.654542Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890222239908849:2252][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890222239908277:2055] 2025-05-29T15:29:34.654556Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: 
[main][1:7509890222239908843:2252][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890222239908844:2252] 2025-05-29T15:29:34.654561Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890222239908843:2252][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890222239908845:2252] 2025-05-29T15:29:34.654570Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:7509890222239908843:2252][/dc-1] Set up state: owner# [1:7509890222239908589:2115], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:34.654607Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890222239908843:2252][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890222239908846:2252] 2025-05-29T15:29:34.654614Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:7509890222239908843:2252][/dc-1] Path was already updated: owner# [1:7509890222239908589:2115], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:34.654621Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890222239908847:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890222239908844:2252], cookie# 1 2025-05-29T15:29:34.654624Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890222239908848:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890222239908845:2252], cookie# 1 2025-05-29T15:29:34.654627Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890222239908849:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890222239908846:2252], cookie# 1 2025-05-29T15:29:34.654631Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890222239908271:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890222239908847:2252] 2025-05-29T15:29:34.654635Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890222239908271:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890222239908847:2252], cookie# 1 2025-05-29T15:29:34.654639Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890222239908274:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890222239908848:2252] 2025-05-29T15:29:34.654641Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890222239908274:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890222239908848:2252], cookie# 1 2025-05-29T15:29:34.654644Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890222239908277:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890222239908849:2252] 
2025-05-29T15:29:34.654646Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890222239908277:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890222239908849:2252], cookie# 1 2025-05-29T15:29:34.656691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890222239908847:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890222239908271:2049], cookie# 1 2025-05-29T15:29:34.656716Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890222239908848:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890222239908274:2052], cookie# 1 2025-05-29T15:29:34.656722Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890222239908849:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890222239908277:2055], cookie# 1 2025-05-29T15:29:34.656731Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890222239908843:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890222239908844:2252], cookie# 1 2025-05-29T15:29:34.656738Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:7509890222239908843:2252][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-05-29T15:29:34.656743Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890222239908843:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890222239908845:2252], cookie# 1 2025-05-29T15:29:34.656748Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:7509890222239908843:2252][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-05-29T15:29:34.656753Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890222239908843:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890222239908846:2252], cookie# 1 2025-05-29T15:29:34.656756Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:7509890222239908843:2252][/dc-1] Unexpected sync response: sender# [1:7509890222239908846:2252], cookie# 1 2025-05-29T15:29:34.665513Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:7509890222239908589:2115], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 
1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-05-29T15:29:34.665631Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:7509890222239908589:2115], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: " ... 057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:29:34.710136Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:7509890222239908589:2115], cacheItem# { Subscriber: { Subscriber: [1:7509890222239908904:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1748532574754 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-05-29T15:29:34.710194Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:7509890222239908911:2298], recipient# [1:7509890222239908903:2296], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:29:34.710205Z node 1 :TX_PROXY INFO: describe.cpp:356: Actor# [1:7509890222239908903:2296] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2025-05-29T15:29:34.710992Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509890222239908562:2101] Handle TEvNavigate describe path /dc-1 2025-05-29T15:29:34.711873Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509890222239908913:2300] HANDLE EvNavigateScheme /dc-1 2025-05-29T15:29:34.711911Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7509890222239908589:2115], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:34.711925Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:871: [main][1:7509890222239908843:2252][/dc-1] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7509890222239908589:2115], cookie# 4 2025-05-29T15:29:34.711933Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890222239908847:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890222239908844:2252], cookie# 4 2025-05-29T15:29:34.711936Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890222239908848:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890222239908845:2252], cookie# 4 2025-05-29T15:29:34.711938Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890222239908849:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890222239908846:2252], cookie# 4 2025-05-29T15:29:34.711949Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890222239908271:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890222239908847:2252], cookie# 4 2025-05-29T15:29:34.711954Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890222239908274:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890222239908848:2252], cookie# 4 2025-05-29T15:29:34.711958Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890222239908277:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890222239908849:2252], cookie# 4 2025-05-29T15:29:34.711970Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890222239908847:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7509890222239908271:2049], cookie# 4 2025-05-29T15:29:34.711973Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890222239908848:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7509890222239908274:2052], cookie# 4 2025-05-29T15:29:34.711974Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890222239908849:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7509890222239908277:2055], cookie# 4 2025-05-29T15:29:34.711977Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890222239908843:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7509890222239908844:2252], cookie# 4 2025-05-29T15:29:34.711981Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:7509890222239908843:2252][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-05-29T15:29:34.711983Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890222239908843:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7509890222239908845:2252], cookie# 4 2025-05-29T15:29:34.711985Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:7509890222239908843:2252][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-05-29T15:29:34.711987Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890222239908843:2252][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7509890222239908846:2252], cookie# 4 2025-05-29T15:29:34.711988Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: 
[main][1:7509890222239908843:2252][/dc-1] Unexpected sync response: sender# [1:7509890222239908846:2252], cookie# 4 2025-05-29T15:29:34.712009Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:7509890222239908589:2115], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-05-29T15:29:34.712018Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [1:7509890222239908589:2115], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7509890222239908843:2252] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1748532574747 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:29:34.712026Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [1:7509890222239908589:2115], cacheItem# { Subscriber: { Subscriber: [1:7509890222239908843:2252] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1748532574747 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-05-29T15:29:34.712052Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [1:7509890222239908914:2301], recipient# [1:7509890222239908913:2300], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:29:34.712057Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:7509890222239908913:2300] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:29:34.712068Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:7509890222239908913:2300] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-05-29T15:29:34.712167Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:7509890222239908913:2300] Handle TEvDescribeSchemeResult Forward to# [1:7509890222239908912:2299] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532574747 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1748532574747 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1748532574754 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 ... (TRUNCATED) >> KqpScheme::CreateTableWithVectorIndexCovered >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true >> KqpScheme::CreateTableWithUniformPartitionsCompat |72.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |72.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |72.3%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut >> EncryptedBackupParamsValidationTest::NoSourcePrefix >> KqpAcl::AclRevoke-UseSink+IsOlap >> KqpScheme::AlterSequence [GOOD] >> KqpScheme::AlterSequenceRestartWith >> BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree |72.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |72.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |72.3%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |72.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |72.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |72.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterTableAddUniqIndexPublicApiFeatureOff Test command err: Trying to start YDB, 
gRPC: 15700, MsgBus: 19426 2025-05-29T15:29:07.244262Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890107289117440:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:07.244286Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00116f/r3tmp/tmpx43eJQ/pdisk_1.dat 2025-05-29T15:29:07.321446Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15700, node 1 2025-05-29T15:29:07.334447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:07.334467Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:07.334469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:07.334525Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:07.345243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:07.345273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:07.346301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19426 TClient is connected to server localhost:19426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:07.398322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:07.401184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:07.405726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-29T15:29:07.424595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:29:07.447586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:07.461374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:07.631308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890107289119027:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:07.631349Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:07.690187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:07.699264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:07.758517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:07.781944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:07.806028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:07.873855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:07.893883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:07.982720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890107289119688:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:07.982772Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:07.982888Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890107289119693:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:07.983936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:07.987491Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890107289119695:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:08.081626Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890111584087042:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:08.201661Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890111584087058:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:08.202146Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjE1YTIwZC1mMTE3OWYwMy03YTk1MjZhZC1iZjNkNThlMA==, ActorId: [1:7509890107289119009:2401], ActorState: ExecuteState, TraceId: 01jweajmcebxzvp6dhqcj01cy7, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:08.202930Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FEB34373AC2 14. ??:0: ?? @ 0x7FEB3440584F Trying to start YDB, gRPC: 31589, MsgBus: 8563 2025-05-29T15:29:12.095092Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890130426641600:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:12.095114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00116f/r3tmp/tmpBj6xRO/pdisk_1.dat 2025-05-29T15:29:12.152704Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31589, node 1 2025-05-29T15:29:12.174983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:12.174996Z node 1 :NET_CLASSIFIER WARN: net_classi ... :375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F5B99AC0AC2 14. ??:0: ?? 
@ 0x7F5B99B5284F Trying to start YDB, gRPC: 63414, MsgBus: 25941 2025-05-29T15:29:30.747534Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890206349415528:2196];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:30.747647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00116f/r3tmp/tmpnQeft7/pdisk_1.dat 2025-05-29T15:29:30.844936Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890206349415371:2079] 1748532570742577 != 1748532570742580 2025-05-29T15:29:30.846896Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63414, node 1 2025-05-29T15:29:30.862285Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:30.862296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:30.862298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:30.862343Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25941 TClient is connected to server localhost:25941 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:30.915510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:30.917878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:30.917899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:30.919048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:30.923093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:30.987934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:31.004335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:31.017310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:31.096394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890210644384304:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:31.096419Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:31.156582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:31.164604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:31.221033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:31.228510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:31.244373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:31.256415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:31.276820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:31.296173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890210644384959:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:31.296204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:31.296306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890210644384964:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:31.297506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:31.301611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-05-29T15:29:31.302100Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890210644384966:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:31.371677Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890210644385017:3399] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:31.509676Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890210644385026:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:31.511872Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTJjNzJmYmMtOTViZjFkZDYtMzI1NWI5YTUtNzIzOWJjNTU=, ActorId: [1:7509890210644384286:2401], ActorState: ExecuteState, TraceId: 01jweakb4zcmevz981a134tb47, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:31.515093Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F02ED8C7AC2 14. ??:0: ?? @ 0x7F02ED95984F |72.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/config/ut/ydb-services-config-ut |72.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut |72.3%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:227:2060] recipient: [1:221:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:227:2060] recipient: [1:221:2142] Leader for TabletID 72057594046678944 is [1:238:2153] sender: [1:239:2060] recipient: [1:221:2142] 2025-05-29T15:28:14.813270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:14.813291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:14.813295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:14.813299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:14.813308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:14.813311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:14.813318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:14.813329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:14.813408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:14.813470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:14.823422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:14.823443Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:14.825676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:14.825701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:14.825723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:14.827526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:14.827569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:14.827666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:14.827761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:14.828625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:14.828659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:14.829017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:14.829029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:14.829057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:14.829063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:14.829068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:14.829084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.830110Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:238:2153] sender: [1:351:2060] recipient: [1:17:2064] 2025-05-29T15:28:14.847017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain 
{ Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:14.847109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.847176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:14.847218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:14.847228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.848194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:14.848227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:14.848291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.848303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:14.848309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:14.848314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:14.848788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.848798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:14.848803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:14.849152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.849164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.849170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:14.849178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:14.849763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:14.850219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:14.850256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:14.850447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:14.850472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 246 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:14.850480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:14.850544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:14.850551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:14.850585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:14.850595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:14.851071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:14.851079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:14.851127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
RD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-05-29T15:29:07.599447Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:29:07.599452Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:29:07.599456Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 104:0 2025-05-29T15:29:07.599465Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [7:984:2746] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2025-05-29T15:29:07.599479Z node 7 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269552132, Sender [7:238:2153], Recipient [7:984:2746]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2025-05-29T15:29:07.599484Z node 7 :TX_DATASHARD TRACE: datashard_impl.h:3133: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-05-29T15:29:07.599488Z node 7 :TX_DATASHARD DEBUG: datashard.cpp:2953: Handle TEvSchemaChangedResult 104 datashard 72075186233409551 state Ready 2025-05-29T15:29:07.599494Z node 7 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186233409551 Got TEvSchemaChangedResult from SS at 72075186233409551 2025-05-29T15:29:07.599520Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [7:238:2153], Recipient [7:238:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:29:07.599524Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:29:07.599530Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:29:07.599534Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 104:0 ProgressState 2025-05-29T15:29:07.599541Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:29:07.599545Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 3/3 2025-05-29T15:29:07.599549Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-05-29T15:29:07.599553Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#104:0 progress is 3/3 2025-05-29T15:29:07.599557Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-05-29T15:29:07.599563Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-05-29T15:29:07.599574Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:584:2404] message: TxId: 104 2025-05-29T15:29:07.599580Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-05-29T15:29:07.599587Z node 7 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:0 2025-05-29T15:29:07.599591Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:0 2025-05-29T15:29:07.599621Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2025-05-29T15:29:07.599626Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:1 2025-05-29T15:29:07.599629Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:1 2025-05-29T15:29:07.599635Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2025-05-29T15:29:07.599639Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 104:2 2025-05-29T15:29:07.599642Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 104:2 2025-05-29T15:29:07.599648Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 2025-05-29T15:29:07.600130Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:29:07.600155Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:29:07.600169Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [7:584:2404] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 104 at schemeshard: 72057594046678944 2025-05-29T15:29:07.600200Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-29T15:29:07.600206Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:1035:2782] 2025-05-29T15:29:07.600243Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [7:1037:2784], Recipient [7:238:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:29:07.600249Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:29:07.600254Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-05-29T15:29:07.600441Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [8:558:2102], Recipient [7:238:2153] 2025-05-29T15:29:07.600447Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:29:07.601233Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/tmp" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "NotTempTable" Columns { 
Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Utf8" } KeyColumnNames: "key" } IndexDescription { Name: "ValueIndex" KeyColumnNames: "value" } } AllowCreateInTempDir: false } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:29:07.601323Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp:66, at schemeshard: 72057594046678944 2025-05-29T15:29:07.601330Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp:66, at schemeshard: 72057594046678944 2025-05-29T15:29:07.616797Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:29:07.621817Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/tmp\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp:66" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:29:07.621896Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp:66, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/tmp/NotTempTable 2025-05-29T15:29:07.621911Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-05-29T15:29:07.622069Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-05-29T15:29:07.622079Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-05-29T15:29:07.622180Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [7:1107:2854], Recipient [7:238:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:29:07.622189Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:29:07.622194Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:29:07.622219Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, 
Sender [7:584:2404], Recipient [7:238:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 105 2025-05-29T15:29:07.622225Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:29:07.622249Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-05-29T15:29:07.622284Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-29T15:29:07.622290Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:1105:2852] 2025-05-29T15:29:07.622313Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [7:1107:2854], Recipient [7:238:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:29:07.622319Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:29:07.622324Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 >> TConsistentOpsWithReboots::DropIndexedTableWithReboots [GOOD] >> KqpAcl::AclDml-UseSink+IsOlap >> KqpConstraints::IndexAutoChooseAndNonReadyIndex >> KqpScheme::AlterTableWithDecimalColumn ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterNonExistingResourcePoolClassifier Test command err: Trying to start YDB, gRPC: 18311, MsgBus: 13309 2025-05-29T15:29:10.298040Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890118715701388:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:10.298074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00115c/r3tmp/tmpS4C321/pdisk_1.dat 2025-05-29T15:29:10.358531Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18311, node 1 2025-05-29T15:29:10.375355Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:10.375367Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:10.375369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:10.375410Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13309 2025-05-29T15:29:10.399007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:10.399032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:10.400211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13309 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:10.440057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.447370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.467233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.491022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.502569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.698634Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890118715702985:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.698669Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.754386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.761804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.773562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.787069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.842078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.850529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.864896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.881133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890118715703639:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.881155Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890118715703644:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.881157Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.881901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:10.884138Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890118715703646:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:10.951373Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890118715703697:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:11.025464Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890118715703713:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:11.025592Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Mzc0YTkzNDQtNWFhNzZhZWItNGE0OTE0YzItNTM5MDE1M2E=, ActorId: [1:7509890118715702982:2401], ActorState: ExecuteState, TraceId: 01jweajq709grvk5g45vngm3a0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:11.026246Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F5AAEE39AC2 14. ??:0: ?? @ 0x7F5AAEECB84F Trying to start YDB, gRPC: 32380, MsgBus: 17608 2025-05-29T15:29:14.429353Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890138410525632:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:14.429407Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00115c/r3tmp/tmp0qW0rL/pdisk_1.dat 2025-05-29T15:29:14.481437Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32380, node 1 2025-05-29T15:29:14.496888Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:14.496898Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:14.496900Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) ... mon.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FBD89C29AC2 14. ??:0: ?? 
@ 0x7FBD89CBB84F Trying to start YDB, gRPC: 4478, MsgBus: 5017 2025-05-29T15:29:31.581317Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890208618961339:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:31.581508Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00115c/r3tmp/tmp7byq94/pdisk_1.dat 2025-05-29T15:29:31.689299Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:31.690016Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890208618961319:2079] 1748532571581046 != 1748532571581049 TServer::EnableGrpc on GrpcPort 4478, node 1 2025-05-29T15:29:31.708070Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:31.708084Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:31.708086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:31.708129Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:31.722151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:31.722185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:31.723103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5017 TClient is connected to server localhost:5017 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:31.787848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:31.795511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:31.820186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:31.895138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:29:31.920305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:31.933908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:32.031131Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890212913930248:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.031162Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.091413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.098481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.115701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.126239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.139674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.200937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.215659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.232134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890212913930908:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.232164Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.232255Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890212913930913:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.233193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:32.242542Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890212913930915:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:32.331662Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890212913930966:3400] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:32.443537Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890212913930982:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:32.445858Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDEwNGYwMTQtOWUyYzYzMC01NDA2MDEzNy0xODFjZjg0YQ==, ActorId: [1:7509890212913930230:2401], ActorState: ExecuteState, TraceId: 01jweakc27crradrzc7j4x34ct, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:32.446565Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F7BBBEB3AC2 14. ??:0: ?? @ 0x7F7BBBF4584F |72.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-05-29T15:29:34.701220Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890223903325911:2210];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:34.702121Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00243d/r3tmp/tmpeLM6sp/pdisk_1.dat 2025-05-29T15:29:34.794895Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:34.803550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:34.803584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:34.815386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26073 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-05-29T15:29:34.830873Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509890223903325996:2139] Handle TEvNavigate describe path dc-1 2025-05-29T15:29:34.832332Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509890223903326375:2397] HANDLE EvNavigateScheme dc-1 2025-05-29T15:29:34.832385Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7509890223903326022:2152], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:34.832395Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:7509890223903326022:2152], path# /dc-1, domainOwnerId# 72057594046644480 2025-05-29T15:29:34.832436Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:7509890223903326376:2398][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:29:34.832787Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890223903325639:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890223903326380:2398] 2025-05-29T15:29:34.832805Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890223903325639:2050] Subscribe: subscriber# [1:7509890223903326380:2398], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:34.832819Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890223903325642:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890223903326381:2398] 2025-05-29T15:29:34.832823Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890223903325642:2053] Subscribe: subscriber# [1:7509890223903326381:2398], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:34.832826Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890223903325645:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890223903326382:2398] 2025-05-29T15:29:34.832828Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890223903325645:2056] Subscribe: subscriber# [1:7509890223903326382:2398], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:34.832837Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890223903326380:2398][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890223903325639:2050] 2025-05-29T15:29:34.832840Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890223903326381:2398][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890223903325642:2053] 2025-05-29T15:29:34.832850Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890223903326382:2398][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890223903325645:2056] 2025-05-29T15:29:34.832856Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: 
[main][1:7509890223903326376:2398][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890223903326377:2398] 2025-05-29T15:29:34.832860Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890223903326376:2398][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890223903326378:2398] 2025-05-29T15:29:34.832868Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:7509890223903326376:2398][/dc-1] Set up state: owner# [1:7509890223903326022:2152], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:34.832901Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890223903326376:2398][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890223903326379:2398] 2025-05-29T15:29:34.832906Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:7509890223903326376:2398][/dc-1] Path was already updated: owner# [1:7509890223903326022:2152], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:34.832911Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890223903326380:2398][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890223903326377:2398], cookie# 1 2025-05-29T15:29:34.832913Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890223903326381:2398][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890223903326378:2398], cookie# 1 2025-05-29T15:29:34.832914Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890223903326382:2398][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890223903326379:2398], cookie# 1 2025-05-29T15:29:34.832918Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890223903325639:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890223903326380:2398] 2025-05-29T15:29:34.832920Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890223903325639:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890223903326380:2398], cookie# 1 2025-05-29T15:29:34.832923Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890223903325642:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890223903326381:2398] 2025-05-29T15:29:34.832925Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890223903325642:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890223903326381:2398], cookie# 1 2025-05-29T15:29:34.832928Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890223903325645:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890223903326382:2398] 
2025-05-29T15:29:34.832929Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890223903325645:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890223903326382:2398], cookie# 1 2025-05-29T15:29:34.834789Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890223903326380:2398][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890223903325639:2050], cookie# 1 2025-05-29T15:29:34.834807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890223903326381:2398][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890223903325642:2053], cookie# 1 2025-05-29T15:29:34.834811Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890223903326382:2398][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890223903325645:2056], cookie# 1 2025-05-29T15:29:34.834818Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890223903326376:2398][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890223903326377:2398], cookie# 1 2025-05-29T15:29:34.834825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:7509890223903326376:2398][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-05-29T15:29:34.834829Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890223903326376:2398][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890223903326378:2398], cookie# 1 2025-05-29T15:29:34.834833Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:7509890223903326376:2398][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-05-29T15:29:34.834838Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890223903326376:2398][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890223903326379:2398], cookie# 1 2025-05-29T15:29:34.834840Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:7509890223903326376:2398][/dc-1] Unexpected sync response: sender# [1:7509890223903326379:2398], cookie# 1 2025-05-29T15:29:34.839757Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:7509890223903326022:2152], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 
1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsL ... 2057594046644480 }: sender# [3:7509890232822437195:2758] 2025-05-29T15:29:36.112348Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [3:7509890228527468681:2056] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2025-05-29T15:29:36.112353Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:7509890228527468681:2056] Subscribe: subscriber# [3:7509890232822437195:2758], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:36.112353Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][3:7509890232822437191:2758][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7509890228527468675:2050] 2025-05-29T15:29:36.112359Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][3:7509890232822437194:2758][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7509890228527468678:2053] 2025-05-29T15:29:36.112366Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:7509890228527468681:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7509890232822437198:2759] 2025-05-29T15:29:36.112366Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][3:7509890232822437195:2758][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7509890228527468681:2056] 2025-05-29T15:29:36.112369Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [3:7509890228527468681:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-05-29T15:29:36.112374Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][3:7509890232822437185:2758][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7509890232822437187:2758] 2025-05-29T15:29:36.112375Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:7509890228527468681:2056] Subscribe: subscriber# [3:7509890232822437198:2759], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:36.112382Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [3:7509890228527468681:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7509890232822437195:2758] 2025-05-29T15:29:36.112388Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][3:7509890232822437185:2758][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7509890232822437188:2758] 2025-05-29T15:29:36.112389Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][3:7509890232822437197:2759][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 
}: sender# [3:7509890228527468678:2053] 2025-05-29T15:29:36.112394Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][3:7509890232822437196:2759][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7509890228527468675:2050] 2025-05-29T15:29:36.112398Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][3:7509890232822437185:2758][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [3:7509890228527468984:2114], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:36.112400Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][3:7509890232822437198:2759][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7509890228527468681:2056] 2025-05-29T15:29:36.112404Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][3:7509890232822437185:2758][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7509890232822437189:2758] 2025-05-29T15:29:36.112408Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][3:7509890232822437186:2759][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7509890232822437192:2759] 2025-05-29T15:29:36.112409Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][3:7509890232822437185:2758][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7509890228527468984:2114], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:36.112415Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [3:7509890228527468984:2114], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-05-29T15:29:36.112420Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][3:7509890232822437186:2759][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7509890232822437190:2759] 2025-05-29T15:29:36.112425Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [3:7509890228527468984:2114], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7509890232822437185:2758] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:29:36.112426Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][3:7509890232822437186:2759][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7509890228527468984:2114], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:36.112430Z node 3 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][3:7509890232822437186:2759][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7509890232822437193:2759] 2025-05-29T15:29:36.112435Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][3:7509890232822437186:2759][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7509890228527468984:2114], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:36.112439Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [3:7509890228527468675:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7509890232822437191:2758] 2025-05-29T15:29:36.112442Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [3:7509890228527468675:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7509890232822437196:2759] 2025-05-29T15:29:36.112444Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [3:7509890228527468984:2114], cacheItem# { Subscriber: { Subscriber: [3:7509890232822437185:2758] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:29:36.112446Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [3:7509890228527468678:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7509890232822437194:2758] 2025-05-29T15:29:36.112448Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [3:7509890228527468984:2114], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-05-29T15:29:36.112449Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [3:7509890228527468678:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7509890232822437197:2759] 2025-05-29T15:29:36.112452Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [3:7509890228527468681:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7509890232822437198:2759] 2025-05-29T15:29:36.112455Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [3:7509890228527468984:2114], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7509890232822437186:2759] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:29:36.112462Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [3:7509890228527468984:2114], cacheItem# { Subscriber: { Subscriber: [3:7509890232822437186:2759] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 
Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:29:36.112491Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [3:7509890232822437199:2760], recipient# [3:7509890232822437184:2318], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |72.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |72.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false [GOOD] >> EncryptedBackupParamsValidationTest::NoSourcePrefix [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable >> TSchemeShardSplitBySizeTest::ConcurrentSplitOneToOne >> BackupPathTest::ExportWholeDatabase >> KqpScheme::ChangefeedAwsRegion >> EncryptedBackupParamsValidationTest::NoSourcePrefixEncrypted >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] >> BackupRestore::TestAllPrimitiveTypes-PRIMITIVE_TYPE_ID_UNSPECIFIED [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true >> BackupRestore::TestAllPrimitiveTypes-BOOL >> TSchemeShardSplitBySizeTest::Test [GOOD] |72.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |72.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |72.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |72.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers >> KqpScheme::CreateAndAlterTableWithMinMaxPartitionsUncompat >> KqpOlapScheme::DropColumnAfterInsert [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-PRIMITIVE_TYPE_ID_UNSPECIFIED [GOOD] >> TSchemeShardSplitBySizeTest::ConcurrentSplitOneToOne [GOOD] >> KqpScheme::TouchIndexAfterMoveTableWrite >> KqpAcl::RecursiveCreateTableShouldSuccess >> KqpScheme::DisableCreateExternalDataSource >> EncryptedBackupParamsValidationTest::NoSourcePrefixEncrypted [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] >> KqpOlapScheme::CreateWithoutColumnFamily >> BackupRestoreS3::TestAllPrimitiveTypes-INT8 >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable [FAIL] >> BackupPathTest::ExportWholeDatabase [GOOD] >> 
KqpOlapScheme::CreateWithoutColumnFamily [GOOD] |72.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::Test [GOOD] >> KqpOlapScheme::DropColumnAndResetTtl >> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup >> EncryptedBackupParamsValidationTestFeatureDisabled::EncryptionParamsSpecifiedExport >> BackupRestore::TestAllPrimitiveTypes-BOOL [FAIL] >> BackupRestore::TestAllPrimitiveTypes-INT8 >> BackupPathTest::ExportWholeDatabaseWithEncryption >> KqpScheme::CreateTableWithFamiliesRegular >> KqpScheme::DoubleCreateResourcePool >> KqpScheme::AlterSequenceRestartWith [FAIL] >> EncryptedBackupParamsValidationTestFeatureDisabled::EncryptionParamsSpecifiedExport [GOOD] >> KqpOlapScheme::DropColumnAndResetTtl [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-INT8 [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-INT16 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateAndDropGroup Test command err: Trying to start YDB, gRPC: 20545, MsgBus: 10140 2025-05-29T15:29:11.574348Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890124555719512:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:11.574370Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001151/r3tmp/tmp1H8pY0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20545, node 1 2025-05-29T15:29:11.638420Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:11.645225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:11.645239Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:11.645241Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:11.645296Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10140 2025-05-29T15:29:11.675263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:11.675291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:11.676333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10140 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:11.709066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:11.712821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:11.716155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:11.734809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:11.794720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:11.806542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:11.902023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890124555721143:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:11.902049Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:11.944905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.952302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.958715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.970820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.985180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.998687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:12.012728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:12.028752Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890128850689093:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:12.028774Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890128850689098:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:12.028780Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:12.029479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:12.032283Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890128850689100:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:12.130985Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890128850689151:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:12.229937Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890128850689167:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:12.230065Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWVhNzE3YTctNDQ0N2IzZGYtMjA1MTk3MTUtZTMwZmRmNmY=, ActorId: [1:7509890124555721125:2401], ActorState: ExecuteState, TraceId: 01jweajraw6tjmd1p86fke3c9t, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:12.230798Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F25EBE12AC2 14. ??:0: ?? @ 0x7F25EBEA484F Trying to start YDB, gRPC: 5173, MsgBus: 1665 2025-05-29T15:29:15.681543Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890141739360319:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:15.681571Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001151/r3tmp/tmpPKV93d/pdisk_1.dat 2025-05-29T15:29:15.735682Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5173, node 1 2025-05-29T15:29:15.751701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:15.751714Z node 1 :NET_CLASSIFIER WARN: net_classifi ... anicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F45C78E1AC2 14. ??:0: ?? 
@ 0x7F45C797384F Trying to start YDB, gRPC: 10167, MsgBus: 15264 2025-05-29T15:29:34.375323Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890223748592489:2081];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:34.375593Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001151/r3tmp/tmpidezAO/pdisk_1.dat 2025-05-29T15:29:34.506062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:34.506091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:34.510194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:34.510964Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10167, node 1 2025-05-29T15:29:34.534930Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:34.534944Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:34.534948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:34.534993Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15264 TClient is connected to server localhost:15264 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:34.615803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:34.619478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:34.630099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:34.650794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:29:34.675705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:34.691826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:34.873786Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890223748594055:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:34.873817Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:34.921807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:34.979174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:34.990117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:35.001517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:35.016489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:35.028979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:35.043026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:35.059166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890228043562010:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:35.059191Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:35.059371Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890228043562015:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:35.060171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:35.062398Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890228043562017:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:35.156208Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890228043562068:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:35.287803Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890228043562077:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:35.288383Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTRlNTNjYjQtNWNhZWMwODItMTFhMmQzMzItNjhkYjZjMWI=, ActorId: [1:7509890223748594037:2401], ActorState: ExecuteState, TraceId: 01jweaketj75twt9197ceyqf4z, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:35.289965Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F273F78CAC2 14. ??:0: ?? @ 0x7F273F81E84F |72.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-05-29T15:29:35.402269Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890227981268564:2274];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:35.402292Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002431/r3tmp/tmpah5Yxa/pdisk_1.dat 2025-05-29T15:29:35.478075Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TClient is connected to server localhost:31989 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-05-29T15:29:35.502550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:35.502586Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:35.502800Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:7509890227981268580:2138] Handle TEvNavigate describe path dc-1 2025-05-29T15:29:35.504519Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:7509890227981268998:2420] HANDLE EvNavigateScheme dc-1 2025-05-29T15:29:35.504554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:35.504587Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7509890227981268604:2152], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:35.504613Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [1:7509890227981268604:2152], path# /dc-1, domainOwnerId# 72057594046644480 2025-05-29T15:29:35.504671Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][1:7509890227981268999:2421][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:29:35.505089Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890227981268228:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890227981269003:2421] 2025-05-29T15:29:35.505108Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890227981268231:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890227981269004:2421] 2025-05-29T15:29:35.505132Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890227981268231:2053] Subscribe: subscriber# [1:7509890227981269004:2421], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:35.505131Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890227981268228:2050] Subscribe: subscriber# [1:7509890227981269003:2421], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:35.505152Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [1:7509890227981268234:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7509890227981269005:2421] 2025-05-29T15:29:35.505156Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [1:7509890227981268234:2056] Subscribe: subscriber# [1:7509890227981269005:2421], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:35.505158Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890227981269004:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890227981268231:2053] 2025-05-29T15:29:35.505195Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890227981268231:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890227981269004:2421] 2025-05-29T15:29:35.505182Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890227981269003:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890227981268228:2050] 2025-05-29T15:29:35.505202Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][1:7509890227981269005:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890227981268234:2056] 2025-05-29T15:29:35.505216Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890227981268999:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890227981269001:2421] 2025-05-29T15:29:35.505225Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890227981268999:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890227981269000:2421] 2025-05-29T15:29:35.505247Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: [main][1:7509890227981268999:2421][/dc-1] Set up state: owner# [1:7509890227981268604:2152], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:35.505268Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890227981268228:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890227981269003:2421] 2025-05-29T15:29:35.505272Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [1:7509890227981268234:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7509890227981269005:2421] 2025-05-29T15:29:35.505291Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][1:7509890227981268999:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7509890227981269002:2421] 2025-05-29T15:29:35.505315Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][1:7509890227981268999:2421][/dc-1] Path was already updated: owner# [1:7509890227981268604:2152], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:35.505346Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890227981269003:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890227981269000:2421], cookie# 1 2025-05-29T15:29:35.505368Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890227981269004:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890227981269001:2421], cookie# 1 2025-05-29T15:29:35.505389Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:381: [replica][1:7509890227981269005:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890227981269002:2421], cookie# 1 2025-05-29T15:29:35.505455Z 
node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890227981268228:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890227981269003:2421], cookie# 1 2025-05-29T15:29:35.505474Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890227981268231:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890227981269004:2421], cookie# 1 2025-05-29T15:29:35.505495Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1128: [1:7509890227981268234:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7509890227981269005:2421], cookie# 1 2025-05-29T15:29:35.505506Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890227981269003:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890227981268228:2050], cookie# 1 2025-05-29T15:29:35.505509Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890227981269004:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890227981268231:2053], cookie# 1 2025-05-29T15:29:35.505519Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:390: [replica][1:7509890227981269005:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890227981268234:2056], cookie# 1 2025-05-29T15:29:35.505541Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890227981268999:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890227981269000:2421], cookie# 1 2025-05-29T15:29:35.505552Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:932: [main][1:7509890227981268999:2421][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-05-29T15:29:35.505573Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890227981268999:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890227981269001:2421], cookie# 1 2025-05-29T15:29:35.505594Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:946: [main][1:7509890227981268999:2421][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-05-29T15:29:35.505597Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:892: [main][1:7509890227981268999:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7509890227981269002:2421], cookie# 1 2025-05-29T15:29:35.505617Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:906: [main][1:7509890227981268999:2421][/dc-1] Unexpected sync response: sender# [1:7509890227981269002:2421], cookie# 1 2025-05-29T15:29:35.518474Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [1:7509890227981268604:2152], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } 
ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsL ... c: false Partial: 0 } 2025-05-29T15:29:38.231781Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [4:7509890236799851815:2903], cacheItem# { Subscriber: { Subscriber: [4:7509890241094819302:3010] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:29:38.231842Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [4:7509890241094819755:3290], recipient# [4:7509890241094819754:2910], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:38.350984Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7509890237462429615:2147], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:38.351032Z node 3 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2322: Create subscriber: self# [3:7509890237462429615:2147], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-05-29T15:29:38.351117Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:960: [main][3:7509890241757399164:3993][/dc-1/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-05-29T15:29:38.351217Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:7509890237462429249:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [3:7509890241757399169:3993] 2025-05-29T15:29:38.351222Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [3:7509890237462429249:2053] Upsert description: 
path# /dc-1/.metadata/initialization/migrations 2025-05-29T15:29:38.351224Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:7509890237462429246:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [3:7509890241757399168:3993] 2025-05-29T15:29:38.351229Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [3:7509890237462429246:2050] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-05-29T15:29:38.351246Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:7509890237462429249:2053] Subscribe: subscriber# [3:7509890241757399169:3993], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:38.351247Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:7509890237462429246:2050] Subscribe: subscriber# [3:7509890241757399168:3993], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:38.351259Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1061: [3:7509890237462429252:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [3:7509890241757399170:3993] 2025-05-29T15:29:38.351270Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][3:7509890241757399169:3993][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7509890237462429249:2053] 2025-05-29T15:29:38.351271Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:520: [3:7509890237462429252:2056] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-05-29T15:29:38.351275Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][3:7509890241757399168:3993][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7509890237462429246:2050] 2025-05-29T15:29:38.351277Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:646: [3:7509890237462429252:2056] Subscribe: subscriber# [3:7509890241757399170:3993], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-05-29T15:29:38.351283Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][3:7509890241757399164:3993][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7509890241757399166:3993] 2025-05-29T15:29:38.351287Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [3:7509890237462429249:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7509890241757399169:3993] 2025-05-29T15:29:38.351291Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [3:7509890237462429246:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7509890241757399168:3993] 2025-05-29T15:29:38.351296Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][3:7509890241757399164:3993][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7509890241757399165:3993] 2025-05-29T15:29:38.351303Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:836: 
[main][3:7509890241757399164:3993][/dc-1/.metadata/initialization/migrations] Set up state: owner# [3:7509890237462429615:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:38.351310Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:365: [replica][3:7509890241757399170:3993][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7509890237462429252:2056] 2025-05-29T15:29:38.351318Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2589: HandleNotify: self# [3:7509890237462429615:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2025-05-29T15:29:38.351325Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:807: [main][3:7509890241757399164:3993][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7509890241757399167:3993] 2025-05-29T15:29:38.351330Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:854: [main][3:7509890241757399164:3993][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [3:7509890237462429615:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-05-29T15:29:38.351332Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2464: ResolveCacheItem: self# [3:7509890237462429615:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7509890241757399164:3993] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-05-29T15:29:38.351336Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1089: [3:7509890237462429252:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7509890241757399170:3993] 2025-05-29T15:29:38.351356Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [3:7509890237462429615:2147], cacheItem# { Subscriber: { Subscriber: [3:7509890241757399164:3993] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:29:38.351387Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [3:7509890241757399171:3994], recipient# [3:7509890241757399161:2336], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:38.718644Z node 3 
:TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2702: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7509890237462429615:2147], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-05-29T15:29:38.718685Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1842: FillEntry for TNavigate: self# [3:7509890237462429615:2147], cacheItem# { Subscriber: { Subscriber: [3:7509890237462430909:3146] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-05-29T15:29:38.718706Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:265: Send result: self# [3:7509890241757399187:3997], recipient# [3:7509890241757399186:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::DropIndexedTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:28:26.358769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:26.358792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:26.358798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: 
StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:26.358803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:26.358810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:28:26.358813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:26.358822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:26.358836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:26.358933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:26.359010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:26.373718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:28:26.373743Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:26.373857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:28:26.376761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:26.376795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:26.376828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:26.379966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:26.380059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:26.380170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:26.380361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:26.381133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:26.381173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:26.381417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:26.381430Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:26.381458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:26.381467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:26.381472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:26.381491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:28:26.382908Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:28:26.404053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:26.404133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:26.404187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:26.404242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:26.404254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:26.404936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:26.404964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:26.405015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:26.405026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:26.405031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:26.405037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:26.405606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:26.405620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:26.405627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:26.406055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:26.406071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:26.406077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:26.406084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:26.406727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:26.407203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:26.407242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:26.407440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:26.407468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:26.407488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:26.407551Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
94046678944, LocalPathId: 4] was 4 2025-05-29T15:29:36.470085Z node 239 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409547 2025-05-29T15:29:36.470702Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:29:36.470771Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-29T15:29:36.470821Z node 239 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-05-29T15:29:36.471299Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:29:36.471345Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 1 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409549 2025-05-29T15:29:36.472015Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:29:36.472029Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 8], at schemeshard: 72057594046678944 2025-05-29T15:29:36.472046Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-05-29T15:29:36.472054Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-05-29T15:29:36.472059Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:29:36.472063Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-29T15:29:36.472069Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:29:36.472073Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:29:36.472077Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:29:36.472339Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 
TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:29:36.472389Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 1 2025-05-29T15:29:36.472637Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:29:36.472650Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:29:36.472950Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:29:36.472962Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:29:36.472990Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:29:36.472995Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-29T15:29:36.473328Z node 239 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 4 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-05-29T15:29:36.473355Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:29:36.473360Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 10], at schemeshard: 72057594046678944 2025-05-29T15:29:36.473375Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 1 2025-05-29T15:29:36.473381Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 9], at schemeshard: 72057594046678944 2025-05-29T15:29:36.473386Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:29:36.473390Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:29:36.473396Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:29:36.473420Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:29:36.473427Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-29T15:29:36.473744Z node 239 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 
72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:29:36.473803Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:29:36.473811Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:29:36.473880Z node 239 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:29:36.473898Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:29:36.473903Z node 239 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [239:823:2754] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:29:36.473971Z node 239 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:29:36.474017Z node 239 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 56us result status StatusSuccess 2025-05-29T15:29:36.474091Z node 239 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:29:36.474169Z node 239 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:29:36.474190Z node 239 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB/Table1" took 41us result status StatusPathDoesNotExist 2025-05-29T15:29:36.474208Z node 239 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirB/Table1\', error: path 
hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirB\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/DirB/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirB" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2025-05-29T15:29:36.474251Z node 239 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:29:36.474263Z node 239 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-05-29T15:29:36.474269Z node 239 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::AlterTableAddNotNullWithDefault Test command err: Trying to start YDB, gRPC: 8202, MsgBus: 31986 2025-05-29T15:29:10.162984Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890119354123923:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:10.163009Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001162/r3tmp/tmpc52F7Z/pdisk_1.dat 2025-05-29T15:29:10.220407Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8202, node 1 2025-05-29T15:29:10.236283Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:10.236297Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:10.236300Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:10.236342Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31986 2025-05-29T15:29:10.264841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:10.264873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:10.265939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31986 WaitRootIsUp 'Root'... 
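[Editor's note: the "WaitRootIsUp 'Root'..." marker above, together with the TClient::Ls request/response exchange that follows, is the test harness polling the scheme root until it describes successfully. A minimal standalone sketch of that readiness loop follows; TClient, TLsResult, and the status code are stand-ins for illustration, not the real ydb test client API.]

#include <string>

struct TLsResult {
    int Status;        // the log prints "Status: 1 StatusCode: SUCCESS"
};

struct TClient {
    // Stub: the real client performs an Ls RPC against the scheme shard.
    TLsResult Ls(const std::string& /*path*/) { return TLsResult{1}; }
};

// Poll until the root path describes successfully, as "WaitRootIsUp ... success" implies.
bool WaitRootIsUp(TClient& client, const std::string& root, int attempts) {
    for (int i = 0; i < attempts; ++i) {
        if (client.Ls(root).Status == 1) {  // 1 == SUCCESS in the log above
            return true;
        }
    }
    return false;
}

[The loop is deliberately bounded: a cluster that never brings its root up should fail the test rather than hang it.]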
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:10.297366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.314571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.379653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.439868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.451861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.539375Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890119354125531:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.539426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.586404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.593204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.605452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.619469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.674577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.682450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.696632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.712795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890119354126184:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.712828Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.712844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890119354126189:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.713567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:10.716550Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890119354126191:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:10.771063Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890119354126242:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:10.843069Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890119354126258:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:10.843178Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjVkNjdiM2ItN2ZjZmJmZGMtMTkzNDlhN2ItNzE0ODY1ODE=, ActorId: [1:7509890119354125513:2401], ActorState: ExecuteState, TraceId: 01jweajq1rec02j3rgpb9c9e6g, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:10.843765Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FBCF2FF3AC2 14. ??:0: ?? @ 0x7FBCF308584F Trying to start YDB, gRPC: 10914, MsgBus: 1553 2025-05-29T15:29:14.644436Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890136028363173:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:14.645159Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001162/r3tmp/tmpUDEJ7N/pdisk_1.dat 2025-05-29T15:29:14.712136Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890136028363005:2079] 1748532554643697 != 1748532554643700 2025-05-29T15:29:14.714613Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10914, node 1 2025-05-29T15:29:14.727213Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:14.727225Z node 1 :NET_CLASSIFIER WARN ... erator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F82DC567AC2 14. ??:0: ?? 
@ 0x7F82DC5F984F Trying to start YDB, gRPC: 15485, MsgBus: 20139 2025-05-29T15:29:32.212386Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890214594912200:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:32.212406Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001162/r3tmp/tmpMgXPd6/pdisk_1.dat 2025-05-29T15:29:32.288463Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:32.288529Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890214594912158:2079] 1748532572212205 != 1748532572212208 TServer::EnableGrpc on GrpcPort 15485, node 1 2025-05-29T15:29:32.302027Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:32.302039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:32.302041Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:32.302080Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:32.313075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:32.313100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:32.319087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20139 TClient is connected to server localhost:20139 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:32.360804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
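[Editor's note: the VERIFY block above names the exact failing check: void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &) asserting (result.IsSuccess()) at kqp_ut_common.h:375. A self-contained sketch of that pattern follows, with TStatus as a stand-in for NYdb::TStatus; the real helper's body may differ.]

#include <cstdio>
#include <cstdlib>
#include <string>

// Stand-in for NYdb::TStatus: a success flag plus accumulated issue text.
struct TStatus {
    bool Success;
    std::string Issues;
    bool IsSuccess() const { return Success; }
    const std::string& GetIssues() const { return Issues; }
};

// Mirrors the helper named in the trace: abort the test run (the "VERIFY
// failed ... in non-unittest thread" panic) when a query result is not OK.
void AssertSuccessResult(const TStatus& result) {
    if (!result.IsSuccess()) {
        std::fprintf(stderr, "assertion failed: (result.IsSuccess())\n%s\n",
                     result.GetIssues().c_str());
        std::abort();
    }
}

[Aborting from a non-unittest thread is exactly what turns one failed query in CreateSampleTables into the whole-process panic and stack trace seen above.]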
2025-05-29T15:29:32.367326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:32.369697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:32.431631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:32.456296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:32.469252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:32.548158Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890214594913797:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.548178Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.602158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.609903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.621082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.634958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.650008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.663498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.683959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.711839Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890214594914448:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.711861Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.711992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890214594914453:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.712716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:32.715287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:29:32.715324Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890214594914455:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:32.811658Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890214594914507:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:32.913276Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890214594914516:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:32.914841Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmNlN2UyOGUtNWYwZDczYmMtNWMxMTA3MWUtMWZiMDMwY2Q=, ActorId: [1:7509890214594913779:2401], ActorState: ExecuteState, TraceId: 01jweakch65fg5mj1zvztpph8r, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:32.916327Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F29CF09DAC2 14. ??:0: ?? @ 0x7F29CF12F84F >> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithPgColumn Test command err: Trying to start YDB, gRPC: 21623, MsgBus: 26639 2025-05-29T15:29:14.327013Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890136402001950:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:14.327034Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001146/r3tmp/tmpclO9sr/pdisk_1.dat 2025-05-29T15:29:14.381704Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21623, node 1 2025-05-29T15:29:14.395830Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:14.395847Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:14.395849Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:14.395891Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26639 2025-05-29T15:29:14.428151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:14.428183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:14.429215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26639 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:14.459669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:14.470160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:14.534032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:14.555684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:14.567395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:14.654131Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890136402003546:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:14.654157Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:14.712240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:14.720949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:14.779948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:14.792331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:14.805694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:14.819807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:14.834216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:14.855777Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890136402004199:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:14.855806Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:14.855834Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890136402004204:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:14.856629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:14.860656Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890136402004206:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:14.933942Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890136402004257:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:15.038192Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890136402004266:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:15.038311Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWExM2MzLWM0OGZjYmUtMjU5ZWYxZDktNjRhMTcyNmU=, ActorId: [1:7509890136402003543:2401], ActorState: ExecuteState, TraceId: 01jweajv37e41g1cy58z7rh7vk, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:15.039008Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FF96011AAC2 14. ??:0: ?? @ 0x7FF9601AC84F Trying to start YDB, gRPC: 7709, MsgBus: 26387 2025-05-29T15:29:18.892603Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890155861084050:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:18.892620Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001146/r3tmp/tmpZmBuzf/pdisk_1.dat 2025-05-29T15:29:18.945350Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890155861084014:2079] 1748532558892456 != 1748532558892459 2025-05-29T15:29:18.946675Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7709, node 1 2025-05-29T15:29:18.963136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:18.963149Z node 1 :NET_CLASSIFIER WARN: n ... 633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F2586620AC2 14. ??:0: ?? 
@ 0x7F25866B284F Trying to start YDB, gRPC: 29820, MsgBus: 19348 2025-05-29T15:29:32.633624Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890216975977248:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:32.633642Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001146/r3tmp/tmpYxEPxh/pdisk_1.dat 2025-05-29T15:29:32.731314Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:32.733833Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890216975977206:2079] 1748532572633428 != 1748532572633431 TServer::EnableGrpc on GrpcPort 29820, node 1 2025-05-29T15:29:32.734914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:32.734934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:32.736209Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:32.748275Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:32.748289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:32.748291Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:32.748337Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19348 TClient is connected to server localhost:19348 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:29:32.847153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.855850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
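Every failing test in this report dies the same way: query compilation aborts with "yql/essentials/ast/yql_expr.h:1874: index out of range", KQP replies INTERNAL_ERROR, and the test harness assertion then panics the binary. The actual yql_expr.h code is not shown in this log; the sketch below is only a hypothetical reconstruction of what a bounds check producing that exact message shape looks like, with all names (TNode, Child) illustrative:

```cpp
// Hypothetical sketch only: the real yql_expr.h source is not in this log.
// "index out of range" is the style of guard a child-node accessor performs.
#include <util/generic/vector.h>
#include <util/generic/yexception.h>

struct TNode {
    TVector<TNode*> Children;

    TNode* Child(size_t index) const {
        // Throwing here (rather than returning null) is what surfaces in
        // the log as ": Fatal: ... index out of range, code: 1".
        Y_ENSURE(index < Children.size(), "index out of range");
        return Children[index];
    }
};
```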
2025-05-29T15:29:32.936475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:32.963299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:29:32.985712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:29:33.106973Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890221270946134:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:33.107034Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:33.155982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:33.166530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:33.182154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:33.196599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:33.213750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:33.227139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:33.239054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:33.255184Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890221270946786:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:33.255209Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:33.255256Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890221270946791:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:33.256397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:33.263913Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890221270946793:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:33.345367Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890221270946844:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:33.459961Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890221270946853:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:33.460093Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWU0ZDRmNWUtOTE0Nzg1ZDctZTlhNjUzY2QtOTc1MDc1YzE=, ActorId: [1:7509890221270946108:2401], ActorState: ExecuteState, TraceId: 01jweakd26c0v6r7drg6mrcpg1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:33.460780Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F1997BEDAC2 14. ??:0: ?? @ 0x7F1997C7F84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::ConcurrentSplitOneToOne [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:29:38.247181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:29:38.247209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:38.247215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:29:38.247221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:29:38.247234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:29:38.247239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:29:38.247249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:38.247263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:29:38.247378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:29:38.247437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:29:38.263262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:29:38.263288Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:38.266417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:29:38.266599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:29:38.266638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:29:38.268689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:29:38.268842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:29:38.268955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:38.269004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:29:38.269566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:38.269622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:29:38.269912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:38.269922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:38.269944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:29:38.269953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:38.269960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:29:38.269995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:29:38.271567Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:29:38.294044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:29:38.294124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at 
schemeshard: 72057594046678944 2025-05-29T15:29:38.294210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:29:38.294262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:29:38.294275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:38.295145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:38.295181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:29:38.295234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:38.295245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:29:38.295251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:29:38.295256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:29:38.295755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:38.295768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:29:38.295774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:29:38.296173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:38.296184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:38.296190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:38.296208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:29:38.296937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:29:38.297475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 
1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:29:38.297526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:29:38.297727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:38.297756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:29:38.297764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:38.297825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:29:38.297833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:38.297868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:29:38.297880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:29:38.298499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:38.298513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:38.298560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
44, LocalPathId: 2], version: 4 2025-05-29T15:29:38.534430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:29:38.534446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-05-29T15:29:38.536252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269553158 2025-05-29T15:29:38.536625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:29:38.537451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: OperationCookie: 102 TabletId: 72075186233409546 2025-05-29T15:29:38.537464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 102:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:29:38.537482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:29:38.537495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:29:38.537501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:29:38.537505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:29:38.537510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-29T15:29:38.537518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:29:38.537524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:29:38.537529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:29:38.537564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:29:38.538359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:29:38.538369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:260: Unable to activate 102:0 Leader for TabletID 72057594046678944 is [1:468:2416] sender: [1:637:2058] recipient: [1:15:2062] 2025-05-29T15:29:38.538841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 4294969592 } TabletId: 72075186233409546 State: 4 2025-05-29T15:29:38.538860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, 
datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:29:38.539380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:29:38.539496Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-05-29T15:29:38.539950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:38.540007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:29:38.540710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:29:38.540729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 TestWaitNotification wait txId: 102 2025-05-29T15:29:38.561096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:29:38.561121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-05-29T15:29:38.561141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:29:38.561145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:29:38.561272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:29:38.561310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:29:38.561317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:647:2567] 2025-05-29T15:29:38.561337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:29:38.561361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:29:38.561366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:647:2567] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 wait until 72075186233409546 is deleted 2025-05-29T15:29:38.561435Z node 1 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 Deleted tabletId 72075186233409546 2025-05-29T15:29:38.561560Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:29:38.561636Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 110us result status StatusSuccess 2025-05-29T15:29:38.561867Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnNames: "Value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false 
IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DropResourcePoolClassifier Test command err: Trying to start YDB, gRPC: 10247, MsgBus: 20083 2025-05-29T15:29:10.203008Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890119742478401:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:10.203033Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001161/r3tmp/tmpS6ITtg/pdisk_1.dat 2025-05-29T15:29:10.259888Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10247, node 1 2025-05-29T15:29:10.274320Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:10.274336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:10.274338Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:10.274377Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20083 2025-05-29T15:29:10.305164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:10.305185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:10.306379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20083 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:10.323724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.331813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.350511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.370623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.383784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.576469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890119742480020:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.576500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.631367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.639406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.647541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.661305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.675340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.689558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.703862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.719691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890119742480673:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.719717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890119742480678:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.719728Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.720300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:10.723169Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890119742480680:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:10.822034Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890119742480731:3398] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:10.899671Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890119742480747:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:10.899824Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=N2E4MDczYTItMWIxZmMzNmItMmIyMWFiY2YtYTFlMmM5MzI=, ActorId: [1:7509890119742480017:2401], ActorState: ExecuteState, TraceId: 01jweajq1z9d4mccc0a2hgaadw, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:10.900521Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F61540F1AC2 14. ??:0: ?? @ 0x7F615418384F Trying to start YDB, gRPC: 26609, MsgBus: 1373 2025-05-29T15:29:14.474673Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890138103340758:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:14.474692Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001161/r3tmp/tmpTS3NEE/pdisk_1.dat 2025-05-29T15:29:14.528037Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26609, node 1 2025-05-29T15:29:14.547435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:14.547451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:14.547454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) ... alPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F1A4C820AC2 14. ??:0: ?? 
@ 0x7F1A4C8B284F Trying to start YDB, gRPC: 13342, MsgBus: 6667 2025-05-29T15:29:32.240978Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890213742769576:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:32.240996Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001161/r3tmp/tmpXQXj95/pdisk_1.dat 2025-05-29T15:29:32.297041Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13342, node 1 2025-05-29T15:29:32.318317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:32.318329Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:32.318331Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:32.318389Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6667 2025-05-29T15:29:32.343627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:32.343676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:32.345048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6667 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:32.392026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:32.404860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
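Frames 4-8 of the repeated stack trace also explain why the failure aborts the whole process instead of just failing one test: CreateSampleTables is dispatched through NThreading::Async onto a thread pool, so the assertion fires on a pool thread and the unittest runtime escalates it to a VERIFY panic ("assertion failed in non-unittest thread"). A minimal sketch of that dispatch pattern, assuming nothing about the real helper beyond what the trace shows (SetupTables is a stand-in name):

```cpp
// Minimal sketch of the async-setup pattern visible in trace frames 5-8
// (library/cpp/threading/future/async.h, util/thread/pool.h). Any assertion
// that fails inside the callable runs on a pool thread, which the unittest
// runtime turns into a hard VERIFY abort, not an ordinary test failure.
#include <library/cpp/threading/future/async.h>
#include <util/thread/pool.h>

void SetupTables() {
    // ... create tables and assert each result is successful ...
}

int main() {
    TThreadPool pool;
    pool.Start(/*threadCount=*/1);
    NThreading::TFuture<void> done = NThreading::Async(SetupTables, pool);
    done.GetValueSync(); // an exception would rethrow here; a VERIFY aborts first
    pool.Stop();
}
```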
2025-05-29T15:29:32.467960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:32.489439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:32.501198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:32.613083Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890213742771177:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.613134Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.672764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.683176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.691757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.704397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.721391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.734950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.750415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.771309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890213742771829:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.771336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.771483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890213742771834:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.772542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:32.777927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:29:32.778047Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890213742771836:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:32.851903Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890213742771887:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:32.994500Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890213742771903:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:32.994819Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NGFhYzM4MTMtNGY5OGI1OWEtNTg1Y2UzOTktYWNlYjNkMmU=, ActorId: [1:7509890213742771159:2401], ActorState: ExecuteState, TraceId: 01jweakck25k94kprgvtzzdbkd, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:32.998734Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F55CD2A0AC2 14. ??:0: ?? @ 0x7F55CD33284F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterTransfer_QueryService Test command err: Trying to start YDB, gRPC: 13039, MsgBus: 7031 2025-05-29T15:29:09.272927Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:29:09.272959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:29:09.272969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001167/r3tmp/tmpwskpul/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13039, node 1 TClient is connected to server localhost:7031 TClient is connected to server localhost:7031 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:09.463619Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:09.463647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:09.463652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:09.463737Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:09.463831Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532548906907 != 1748532548906911 2025-05-29T15:29:09.527529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:09.527566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:09.528330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:09.530565Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:09.618535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:09.804395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.085739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.288598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.616672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1722:3316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.616737Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.621719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.784041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.007680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.211340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.426392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.634262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.893264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:12.131657Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2392:3811], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:12.131713Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:12.131769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2397:3816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:12.132887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:12.280955Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2399:3818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:12.323661Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:2457:3857] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:12.432959Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2467:3866], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:12.435833Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDY3ZGM5NTMtNDUwOGUxZGYtMTJmZmI0YS05OWU0NTNhZA==, ActorId: [1:1708:3303], ActorState: ExecuteState, TraceId: 01jweajre36bzfpdnxwmhq54bp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:12.437034Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:29:13.902707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:29:13.902785Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F0D4F5DBAC2 14. ??:0: ?? @ 0x7F0D4F66D84F Trying to start YDB, gRPC: 24319, MsgBus: 11663 2025-05-29T15:29:15.967699Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890139746262157:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:15.967733Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001167/r3tmp/tmpMhSPKl/pdisk_1.dat 2025-05-29T15:29:16.032879Z node 1 :IMPORT WARN: schemeshard_import.cp ... anicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F5160F0AAC2 14. ??:0: ?? 
@ 0x7F5160F9C84F Trying to start YDB, gRPC: 24978, MsgBus: 16390 2025-05-29T15:29:34.094925Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890222147140252:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:34.094950Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001167/r3tmp/tmpe45xnt/pdisk_1.dat 2025-05-29T15:29:34.185167Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24978, node 1 2025-05-29T15:29:34.196203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:34.196235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:34.197283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:34.204086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:34.204100Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:34.204102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:34.204137Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16390 TClient is connected to server localhost:16390 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:34.287895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:34.299235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:34.387980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:34.423880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:34.487923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:34.549511Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890222147141858:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:34.549538Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:34.619297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:34.679114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:34.688503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:34.705121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:34.716099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:34.728425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:34.785652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:34.815751Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890222147142515:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:34.815781Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:34.815918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890222147142520:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:34.817110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:34.821137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:29:34.821220Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890222147142522:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:34.884005Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890222147142573:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:34.987408Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890222147142582:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:34.987524Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NGUwYzMwNi00NzE2NmJiZi01N2YwNTMyYy1iMDBjM2ZmMA==, ActorId: [1:7509890222147141855:2401], ActorState: ExecuteState, TraceId: 01jweakejzds1wg7ztdrsyxt1m, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:34.988402Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FC50EB6BAC2 14. ??:0: ?? @ 0x7FC50EBFD84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::ModifyPermissionsByIncorrectPaths Test command err: Trying to start YDB, gRPC: 65280, MsgBus: 28864 2025-05-29T15:29:10.539080Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890118265389580:2087];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:10.539358Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00115b/r3tmp/tmp6tN03C/pdisk_1.dat 2025-05-29T15:29:10.599565Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65280, node 1 2025-05-29T15:29:10.617332Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:10.617345Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:10.617347Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:10.617387Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28864 2025-05-29T15:29:10.638095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:10.638131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:10.639234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28864 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:10.681145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.688655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.710920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.729148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.739917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:10.883241Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890118265391135:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.883270Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:10.923135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.930639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.941802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:10.955445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.010494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.017889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.032586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:11.048912Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890122560359085:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:11.048939Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:11.048967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890122560359090:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:11.049649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:11.052051Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890122560359092:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:11.150307Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890122560359143:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:11.249385Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890122560359159:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:11.249539Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDJlNTUzZDYtMmRiMzIzOWYtYTNhMmRmOS01Njg5Nzc5YQ==, ActorId: [1:7509890118265391117:2401], ActorState: ExecuteState, TraceId: 01jweajqc8aqn5q220nbzqvr2n, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:11.250213Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FE9785FBAC2 14. ??:0: ?? @ 0x7FE97868D84F Trying to start YDB, gRPC: 25894, MsgBus: 28227 2025-05-29T15:29:14.880071Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890138067408796:2138];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:14.880327Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00115b/r3tmp/tmpkXk78p/pdisk_1.dat 2025-05-29T15:29:14.955480Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25894, node 1 2025-05-29T15:29:14.974525Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:14.974535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:14.974537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) ... st/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FF7964A5AC2 14. ??:0: ?? 
@ 0x7FF79653784F Trying to start YDB, gRPC: 63657, MsgBus: 20072 2025-05-29T15:29:32.322175Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890216987681302:2210];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:32.322289Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00115b/r3tmp/tmp6Woffy/pdisk_1.dat 2025-05-29T15:29:32.395731Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63657, node 1 2025-05-29T15:29:32.412105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:32.412114Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:32.412116Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:32.412161Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:32.421555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:32.421596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:32.422662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20072 TClient is connected to server localhost:20072 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:32.478758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:32.492473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:32.513845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:32.536429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:32.549046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:32.781115Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890216987682757:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.781161Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.839159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.869344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.926287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.937584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.950728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.966864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.980758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:33.042601Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890221282650715:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:33.042624Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:33.042761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890221282650720:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:33.043723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:33.046981Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890221282650722:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:33.131808Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890221282650773:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:33.227397Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890221282650782:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:33.229990Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWQ0Y2NjZmEtOGYwMDllMzYtMWNhNzQ2ZWUtMjViZGRiZDg=, ActorId: [1:7509890216987682730:2400], ActorState: ExecuteState, TraceId: 01jweakcvh3xqbxw58yw2gphga, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:33.231619Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F0899ABDAC2 14. ??:0: ?? @ 0x7F0899B4F84F |72.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction >> KqpConstraints::SerialTypeBigSerial >> EncryptedBackupParamsValidationTestFeatureDisabled::CommonSourcePathSpecified |72.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |72.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction >> BackupPathTest::ExportWholeDatabaseWithEncryption [GOOD] >> ReadAttributesUtils::ReplaceAttributesEmpty [GOOD] >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::DropColumnAndResetTtl [GOOD] Test command err: Trying to start YDB, gRPC: 13506, MsgBus: 22033 2025-05-29T15:29:30.037206Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890204214167197:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:30.037257Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010f7/r3tmp/tmpayCSpl/pdisk_1.dat 2025-05-29T15:29:30.108747Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13506, node 1 2025-05-29T15:29:30.123621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:30.123645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:30.123648Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:30.123695Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:30.138977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:30.139023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-05-29T15:29:30.140092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22033 TClient is connected to server localhost:22033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:30.182299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:29:30.384507Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890204214167831:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:30.384539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:30.435678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:30.445671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890204214167906:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:29:30.445721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890204214167906:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:30.445754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890204214167906:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:30.445776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890204214167906:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:30.445792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890204214167906:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:29:30.445808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890204214167906:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:29:30.445846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890204214167906:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:29:30.445864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890204214167906:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:29:30.445877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890204214167906:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:29:30.445897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890204214167906:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:29:30.445912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890204214167906:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:29:30.445934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509890204214167906:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:29:30.446311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:29:30.446323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:29:30.446332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:29:30.446335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:29:30.446347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:29:30.446349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:29:30.446356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:29:30.446359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:29:30.446365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:29:30.446371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:29:30.446376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:29:30.446378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:29:30.446396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:29:30.446404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:29:30.446416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:29:30.446422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:29:30.446430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:29:30.446438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:29:30.446447Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprec ... on_start;last_saved_id=0; 2025-05-29T15:29:39.706940Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890244162128197:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:39.706976Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890244162128197:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:39.706995Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890244162128197:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:39.707011Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890244162128197:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:29:39.707034Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890244162128197:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:29:39.707055Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890244162128197:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:29:39.707072Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890244162128197:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:29:39.707089Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890244162128197:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:29:39.707105Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[6:7509890244162128197:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:29:39.707118Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890244162128197:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:29:39.707141Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890244162128197:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:29:39.707626Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:29:39.707643Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:29:39.707658Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:29:39.707664Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:29:39.707684Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:29:39.707693Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:29:39.707705Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:29:39.707714Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:29:39.707726Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:29:39.707731Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:29:39.707738Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:29:39.707748Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:29:39.707772Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:29:39.707784Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:29:39.707805Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:29:39.707816Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:29:39.707831Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:29:39.707839Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:29:39.707847Z node 6 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:29:39.707858Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:29:39.707863Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:29:39.707949Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:29:39.707957Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:29:39.756793Z node 6 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:39.762979Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509890244162128268:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:39.763014Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:39.765682Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:29:39.768427Z node 6 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:29:39.771559Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509890244162128295:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:39.771582Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:39.776927Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:39.779472Z node 6 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:39.782657Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509890244162128327:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:39.782682Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:39.785719Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:29:39.794218Z node 6 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; >> KqpScheme::CreateAsyncReplicationWithSecret >> BackupRestore::TestAllPrimitiveTypes-INT8 [FAIL] >> BackupRestore::TestAllPrimitiveTypes-INT16 |72.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |72.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |72.5%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut >> KqpScheme::CreateTableWithVectorIndexCaseIncentive >> KqpConstraints::Utf8AndDefault >> KqpScheme::CreateTableWithUniformPartitionsUuid >> EncryptedBackupParamsValidationTestFeatureDisabled::CommonSourcePathSpecified [GOOD] |72.5%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |72.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> BackupPathTest::ExportWithCommonSourcePath >> KikimrIcGateway::TestLoadTableMetadata >> KikimrIcGateway::TestCreateSameExternalTable >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex >> TSchemeShardSplitBySample::FlatlistNoResultWhenMedianKeyIsAtBoundary [GOOD] >> TSchemeShardSplitBySample::Histogram [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-INT16 [FAIL] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata >> TSchemeShardSplitBySizeTest::Merge111Shards >> BackupRestoreS3::TestAllPrimitiveTypes-UINT16 |72.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |72.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] >> ReadAttributesUtils::AttributesGatheringFilter [GOOD] >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpAcl::AclRevoke-UseSink+IsOlap Test command err: Trying to start YDB, gRPC: 22897, MsgBus: 25990 2025-05-29T15:29:08.346368Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890112318955269:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:08.346387Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00116d/r3tmp/tmpnki3vI/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22897, node 1 2025-05-29T15:29:08.405558Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:08.417310Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:08.417323Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:08.417324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:08.417367Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25990 2025-05-29T15:29:08.449296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:08.449326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:08.450353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25990 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:08.476631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:08.480907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:08.481981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:08.500917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:08.521530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:08.534755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:08.713946Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890112318956879:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:08.713996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:08.772269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:08.780103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:08.793544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:08.808514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:08.821115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:08.837383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:08.849516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-05-29T15:29:08.865616Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890112318957532:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:08.865638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:08.865649Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890112318957537:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:08.866487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480 2025-05-29T15:29:08.876772Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890112318957539:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-05-29T15:29:08.936227Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890112318957590:3404] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:09.061371Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890112318957606:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:09.061489Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDU2OWM3ZDctNTk2Y2IzYTEtNjJmNzkzNTgtZjUwZjllNjE=, ActorId: [1:7509890112318956876:2402], ActorState: ExecuteState, TraceId: 01jweajn810y3k4rx7ds6zz170, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:09.063598Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F993687FAC2 14. ??:0: ?? @ 0x7F993691184F Trying to start YDB, gRPC: 24086, MsgBus: 63752 2025-05-29T15:29:12.906674Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890127251825076:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:12.906696Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00116d/r3tmp/tmp1rKGtM/pdisk_1.dat 2025-05-29T15:29:12.963050Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24086, node 1 2025-05-29T15:29:12.980447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) ... st/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F8714570AC2 14. ??:0: ?? 
@ 0x7F871460284F Trying to start YDB, gRPC: 26527, MsgBus: 27689 2025-05-29T15:29:36.247840Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890230935946182:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:36.247867Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00116d/r3tmp/tmp0ELpyo/pdisk_1.dat 2025-05-29T15:29:36.360215Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:36.369119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:36.369148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:36.375192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26527, node 1 2025-05-29T15:29:36.410983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:36.411004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:36.411007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:36.411061Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27689 TClient is connected to server localhost:27689 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:29:36.523980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:36.592251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:36.664107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:29:36.696135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:29:36.711717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:29:36.791427Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890230935947779:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:36.791469Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:36.850422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:36.860272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:36.870298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:36.886425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:36.898475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:36.956714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:36.970313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:36.990731Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890230935948439:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:36.990783Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:36.990873Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890230935948444:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:36.991867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:36.995527Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890230935948446:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:37.070756Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890235230915793:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:37.197426Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890235230915802:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:37.198833Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTM2YjFjODItNDhmODhlZGUtZThmZGJjMjItZGVhNjlmN2Q=, ActorId: [1:7509890230935947753:2401], ActorState: ExecuteState, TraceId: 01jweakgpyfqxd29mzez7cp87h, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:37.199811Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F69A6D38AC2 14. ??:0: ?? @ 0x7F69A6DCA84F |72.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest |72.5%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |72.5%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |72.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest |72.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> KikimrIcGateway::TestCreateSameExternalTable [GOOD] >> EncryptedBackupParamsValidationTestFeatureDisabled::CommonDestPrefixSpecified >> KikimrIcGateway::TestDropExternalTable |72.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySample::Histogram [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterSequenceRestartWith [FAIL] Test command err: Trying to start YDB, gRPC: 28384, MsgBus: 11221 2025-05-29T15:29:16.934866Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890146502432335:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:16.934889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00113f/r3tmp/tmpid1Qz2/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28384, node 1 2025-05-29T15:29:16.991572Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:16.991724Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890146502432308:2079] 1748532556934735 != 1748532556934738 2025-05-29T15:29:16.997207Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:16.997219Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:16.997221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:16.997263Z node 1 :NET_CLASSIFIER 
ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11221 2025-05-29T15:29:17.036090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:17.036118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:17.037288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11221 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:17.062577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:17.067280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:17.133357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:17.154444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:17.166170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:17.291268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890150797401262:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:17.291290Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:17.331319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:17.338426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:17.345820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:17.400617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:17.408810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:17.416127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:17.430208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:17.446796Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890150797401915:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:17.446823Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:17.446844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890150797401920:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:17.447632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:17.450485Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890150797401922:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:17.541331Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890150797401973:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:17.622111Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890150797401989:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:17.622250Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2UzZWJkM2MtOTBlZWJiYTEtOWY1ZTMwOGYtYjU0NTg5MWQ=, ActorId: [1:7509890150797401244:2401], ActorState: ExecuteState, TraceId: 01jweajxm6b9bkb83hrwvyfa7b, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:17.622908Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FA0B81C7AC2 14. ??:0: ?? @ 0x7FA0B825984F Trying to start YDB, gRPC: 12618, MsgBus: 9167 2025-05-29T15:29:21.000392Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890164541737476:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:21.000465Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00113f/r3tmp/tmpjlqk8I/pdisk_1.dat 2025-05-29T15:29:21.061974Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:21.062054Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890164541737452:2079] 1748532561000235 != 1748532561000238 TServer::EnableGrpc on GrpcPort 12618, node 1 2025-05-29T ... drenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:35.340465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:35.343818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:29:35.530761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890229513924877:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:35.530767Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890229513924885:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:35.530790Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:35.531583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-05-29T15:29:35.533987Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890229513924891:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-05-29T15:29:35.604311Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890229513924942:2325] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:35.656188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:35.735243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSequence, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-05-29T15:29:35.751260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSequence, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:35.764724Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890229513925150:2453] txid# 281474976710663, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-05-29T15:29:35.768738Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890229513925165:2458] txid# 281474976710664, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-05-29T15:29:35.770300Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2583: SessionId: ydb://session/3?node_id=1&id=ZGY0ZDY4MjItYzc4YzRmNS01OTlmZTlkMC04M2I2N2ZjOQ==, ActorId: [1:7509890229513924873:2326], ActorState: ExecuteState, TraceId: 01jweakfgnec4eget9ymnfb5ph, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 4081, MsgBus: 29759 2025-05-29T15:29:36.151541Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890229978118474:2085];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:36.151563Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00113f/r3tmp/tmpgph6Aq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4081, node 2 2025-05-29T15:29:36.180096Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:36.193886Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:36.193899Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:36.193902Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:36.193950Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29759 TClient is connected to server localhost:29759 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:29:36.259079Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:36.259109Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:36.260552Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:29:36.263741Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:36.265728Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:36.623198Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890229978119064:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:36.623225Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:36.623328Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890229978119076:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:36.624162Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:29:36.626828Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:29:36.626880Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509890229978119078:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:29:36.703560Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509890229978119129:2323] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:36.709156Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:36.736455Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509890229978119277:2351], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:36.737216Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=NDM4MzEzZTYtZGE2MjY1YmYtYzdhNmQ0NWUtYzM3MWFkZmI=, ActorId: [2:7509890229978119061:2329], ActorState: ExecuteState, TraceId: 01jweakgej7wz3me1cfqa82j78, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp:6743, virtual void NKikimr::NKqp::NTestSuiteKqpScheme::TTestCaseAlterSequenceRestartWith::Execute_(NUnitTest::TTestContext &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1631F88B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x164D74C8 2. /tmp//-S/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp:6743: Execute_ @ 0x15FD67D2 3. /tmp//-S/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp:34: operator() @ 0x161789E6 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x164D937D 5. /tmp//-S/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp:34: Execute @ 0x161783A7 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x164D9AF2 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x164EB69C 8. ??:0: ?? @ 0x7F628305DD8F 9. ??:0: ?? @ 0x7F628305DE3F 10. ??:0: ?? @ 0x14E48028 >> KqpAcl::AclDml+UseSink+IsOlap >> BackupRestore::TestAllPrimitiveTypes-INT16 [FAIL] >> BackupRestore::TestAllPrimitiveTypes-INT32 >> BackupPathTest::ExportWithCommonSourcePath [GOOD] >> KikimrIcGateway::TestDropExternalTable [GOOD] >> KqpScheme::AlterTableWithPgColumn >> EncryptedBackupParamsValidationTestFeatureDisabled::CommonDestPrefixSpecified [GOOD] >> KikimrIcGateway::TestDropExternalDataSource >> TSchemeShardSplitBySizeTest::Merge1KShards >> KqpScheme::ChangefeedRetentionPeriod >> TSchemeShardSplitBySizeTest::AutoMergeInOne >> BackupPathTest::ExportWithCommonSourcePathAndExplicitTableInside >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence >> EncryptedBackupParamsValidationTestFeatureDisabled::EncryptionParamsSpecifiedImport >> KikimrIcGateway::TestLoadExternalTable >> KikimrProvider::TestFillAuthPropertiesNone [GOOD] >> KikimrProvider::TestFillAuthPropertiesServiceAccount [GOOD] >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> KikimrIcGateway::TestDropExternalDataSource [GOOD] >> KikimrIcGateway::TestCreateExternalTable >> KikimrIcGateway::TestListPath |72.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropExternalDataSource [GOOD] Test command err: Trying to start YDB, gRPC: 1639, MsgBus: 28122 2025-05-29T15:29:41.376428Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890252431870191:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:41.376445Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002493/r3tmp/tmpZJcuYg/pdisk_1.dat 2025-05-29T15:29:41.436254Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890252431870167:2079] 1748532581376213 != 1748532581376216 2025-05-29T15:29:41.438054Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1639, node 1 2025-05-29T15:29:41.451374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:41.451388Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:41.451390Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:41.451451Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28122 TClient is connected to server localhost:28122 2025-05-29T15:29:41.509274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:41.509311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:41.510358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:41.527615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:41.535239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:29:41.543064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-05-29T15:29:41.547060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:29:41.558776Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890252431870862:2333] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/f1/f2/external_table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132" severity: 1 }
: Error: Scheme operation failed, status: ExecComplete, reason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132 2025-05-29T15:29:41.560659Z node 1 :KQP_GATEWAY ERROR: scheme.h:178: Unexpected error on scheme request, TxId: 281474976710660, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132 Trying to start YDB, gRPC: 15131, MsgBus: 20989 2025-05-29T15:29:41.879724Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890252856602679:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:41.880156Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002493/r3tmp/tmpePKPe6/pdisk_1.dat 2025-05-29T15:29:41.907188Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:41.910851Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509890252856602634:2079] 1748532581879155 != 1748532581879158 TServer::EnableGrpc on GrpcPort 15131, node 2 2025-05-29T15:29:41.926665Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:41.926685Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:41.926687Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:41.926755Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20989 2025-05-29T15:29:41.991465Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:41.991498Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:41.995959Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20989 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:42.055478Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:42.057402Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:42.064457Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-05-29T15:29:42.076279Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19299, MsgBus: 25928 2025-05-29T15:29:42.471834Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509890257141456436:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:42.471850Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002493/r3tmp/tmpd3pCHg/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19299, node 3 2025-05-29T15:29:42.534136Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:42.541266Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509890257141456411:2079] 1748532582471135 != 1748532582471138 2025-05-29T15:29:42.543338Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:42.543351Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:42.543353Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:42.543408Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25928 2025-05-29T15:29:42.583938Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:42.583968Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:42.585477Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25928 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:42.616270Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:42.618454Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:42.623759Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 >> KikimrProvider::TestFillAuthPropertiesBasic [GOOD] >> KikimrProvider::TestFillAuthPropertiesAws [GOOD] >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] >> KqpScheme::CreateAndAlterTableWithMinMaxPartitionsCompat >> EncryptedBackupParamsValidationTestFeatureDisabled::EncryptionParamsSpecifiedImport [GOOD] >> BackupRestore::TestAllPrimitiveTypes-INT32 [FAIL] >> BackupRestore::TestAllPrimitiveTypes-INT64 >> KikimrIcGateway::TestCreateExternalTable [GOOD] >> KikimrIcGateway::TestCreateResourcePool |72.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] >> KqpScheme::DisableDropExternalDataSource >> KqpConstraints::AddSerialColumnForbidden >> KqpScheme::UnknownFamilyTest >> EncryptedBackupParamsValidationTestFeatureDisabled::CommonSourcePrefixSpecified >> BackupPathTest::ExportWithCommonSourcePathAndExplicitTableInside [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence [FAIL] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication >> KikimrIcGateway::TestCreateResourcePool [GOOD] >> KikimrIcGateway::TestALterResourcePool >> KqpExplain::LimitOffset >> KqpLimits::DatashardProgramSize+useSink >> BackupPathTest::EmptyDirectoryIsOk >> EncryptedBackupParamsValidationTestFeatureDisabled::CommonSourcePrefixSpecified [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-UINT16 [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-INT32 >> 
KqpConstraints::IndexedTableAndNotNullColumnAddNotNullColumn >> KikimrIcGateway::TestALterResourcePool [GOOD] >> KqpLimits::StreamWrite+Allowed >> BackupRestore::TestAllPrimitiveTypes-INT64 [FAIL] >> BackupRestore::TestAllPrimitiveTypes-FLOAT |72.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |72.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |72.6%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut >> EncryptedBackupParamsValidationTestFeatureDisabled::CommonDestPathSpecified ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestALterResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 7403, MsgBus: 18500 2025-05-29T15:29:43.365610Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890262046620258:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:43.365916Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00245e/r3tmp/tmpB0VdBh/pdisk_1.dat 2025-05-29T15:29:43.507125Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:43.507550Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890262046620237:2079] 1748532583365317 != 1748532583365320 TServer::EnableGrpc on GrpcPort 7403, node 1 2025-05-29T15:29:43.516294Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:43.516306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:43.516308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:43.516356Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18500 2025-05-29T15:29:43.558833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:43.558861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:43.560524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18500 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:43.597189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:43.602251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:29:43.616219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-05-29T15:29:43.620290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21108, MsgBus: 4318 2025-05-29T15:29:43.943432Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890260980033301:2205];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00245e/r3tmp/tmpyEt1I6/pdisk_1.dat 2025-05-29T15:29:43.960901Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:29:43.982924Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:43.983157Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509890260980033133:2079] 1748532583942579 != 1748532583942582 TServer::EnableGrpc on GrpcPort 21108, node 2 2025-05-29T15:29:44.009605Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:44.009619Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:44.009621Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:44.009674Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4318 2025-05-29T15:29:44.045411Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:44.045442Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:44.049918Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4318 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:29:44.087158Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:44.088813Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:44.104922Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28922, MsgBus: 5651 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00245e/r3tmp/tmp3w2tMf/pdisk_1.dat 2025-05-29T15:29:44.433702Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509890264433433736:2200];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:44.434010Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:29:44.449565Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:44.449782Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509890264433433574:2079] 1748532584431984 != 1748532584431987 TServer::EnableGrpc on GrpcPort 28922, node 3 2025-05-29T15:29:44.459278Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:44.459294Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:44.459296Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:44.459345Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
TClient is connected to server localhost:5651 TClient is connected to server localhost:5651 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:44.536801Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:44.536833Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-29T15:29:44.537225Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:44.537902Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:44.549861Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:29:44.552771Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky [GOOD] >> TClockProCache::Lifecycle [GOOD] >> TClockProCache::EvictNext [GOOD] >> TClockProCache::Erase [GOOD] >> TClockProCache::Random [GOOD] >> NFwd_TFlatIndexCache::Skip_Wait [GOOD] >> NFwd_TFlatIndexCache::Trace [GOOD] >> NFwd_TFlatIndexCache::Slices [GOOD] >> NFwd_TLoadedPagesCircularBuffer::Basics [GOOD] >> NOther::Blocks [GOOD] >> NPage::Encoded [GOOD] >> NPage::ABI_002 >> NPage::ABI_002 [GOOD] >> NPage::GroupIdEncoding [GOOD] >> NPageCollection::Align [GOOD] >> NPageCollection::Meta ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithFamiliesRegular Test command err: Trying to start YDB, gRPC: 28820, MsgBus: 13563 2025-05-29T15:29:16.436291Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890145226009833:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:16.436326Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/ciyv/001141/r3tmp/tmp9k2dk0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28820, node 1 2025-05-29T15:29:16.495346Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:16.500972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:16.500985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:16.500987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:16.501027Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13563 TClient is connected to server localhost:13563 2025-05-29T15:29:16.537524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:16.537549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:16.538682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:16.563807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:16.569942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:16.631070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:16.649723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:16.661186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:16.768502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890145226011451:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:16.768540Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:16.809506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:16.817197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:16.828300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:16.882969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:16.890881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:16.905964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:16.919841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:16.935435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890145226012104:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:16.935459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890145226012109:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:16.935464Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:16.936126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:16.939221Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890145226012111:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:17.034138Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890149520979458:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:17.135892Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890149520979474:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:17.135989Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=N2Q0NWMzN2UtNjRiNzRiOTgtNzEzZDAwOWItNTIxZmRhN2M=, ActorId: [1:7509890145226011433:2401], ActorState: ExecuteState, TraceId: 01jweajx47913s46h563zmjeax, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:17.136702Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F2B00719AC2 14. ??:0: ?? @ 0x7F2B007AB84F Trying to start YDB, gRPC: 22595, MsgBus: 10835 2025-05-29T15:29:20.550663Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890163586123335:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:20.550696Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001141/r3tmp/tmpMXrf0U/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22595, node 1 2025-05-29T15:29:20.608355Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:20.617223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:20.617238Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:20.617240Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) ... 41702610:2401], ActorState: ExecuteState, TraceId: 01jweakf8qbxjr0zqp0dywkb57, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FA7A7D01AC2 14. ??:0: ?? 
@ 0x7FA7A7D9384F Trying to start YDB, gRPC: 64754, MsgBus: 9168 2025-05-29T15:29:39.666954Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890246306781410:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:39.667053Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001141/r3tmp/tmpXP39uL/pdisk_1.dat 2025-05-29T15:29:39.725588Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64754, node 1 2025-05-29T15:29:39.745711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:39.745727Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:39.745730Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:39.745789Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9168 2025-05-29T15:29:39.768761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:39.768789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:39.769909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9168 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:39.812893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:39.820078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:39.835801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:39.854796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:39.869340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:40.029983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890250601750302:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:40.030015Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:40.079152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:40.087464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:40.096088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:40.110949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:40.124693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:40.139499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:40.153569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:40.168912Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890250601750954:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:40.168943Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:40.168952Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890250601750959:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:40.169730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:40.173519Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890250601750961:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:40.246589Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890250601751012:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:40.360713Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890250601751021:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:40.362470Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjQ1YjExYWMtMjQ5MGM5ZS02MDQzOTRmNi0xMWFlMTRmNA==, ActorId: [1:7509890250601750284:2401], ActorState: ExecuteState, TraceId: 01jweakkt82qqjed49jcyxpdvp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:40.367046Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F3C9767DAC2 14. ??:0: ?? @ 0x7F3C9770F84F >> KikimrIcGateway::TestLoadTableMetadata [FAIL] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata >> NPageCollection::Meta [GOOD] >> NPageCollection::PagesToBlobsConverter [GOOD] >> NPageCollection::Grow [GOOD] >> NPageCollection::Groups [GOOD] >> NPageCollection::Chop [GOOD] >> NPageCollection::CookieAllocator [GOOD] >> NProto::LargeGlobId [GOOD] >> Redo::ABI_008 [GOOD] >> Self::Literals [GOOD] >> KqpScheme::DoubleCreateResourcePoolClassifier+UseSink >> BackupPathTest::EmptyDirectoryIsOk [GOOD] >> EncryptedBackupParamsValidationTestFeatureDisabled::CommonDestPathSpecified [GOOD] >> KqpScheme::CreateBackupCollectionDisabledByDefault >> BackupRestoreS3::TestAllPrimitiveTypes-INT32 [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-UINT32 >> KqpScheme::CreateTableWithVectorIndexNoFeatureFlag >> BackupPathTest::CommonPrefixButExplicitImportItems ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::SerialTypeBigSerial Test command err: Trying to start YDB, gRPC: 25047, MsgBus: 2480 2025-05-29T15:29:17.044576Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890151921314739:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:17.044598Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001138/r3tmp/tmpjPJcwF/pdisk_1.dat 2025-05-29T15:29:17.100651Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25047, node 1 2025-05-29T15:29:17.118512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:17.118527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:17.118529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:17.118572Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2480 2025-05-29T15:29:17.145892Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:17.145920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:17.147015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2480 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:17.184059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:17.191645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:17.254338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:17.273965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:17.285556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:17.457656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890151921316338:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:17.457692Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:17.510496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:17.517523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:17.572263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:17.584232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:17.639205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:17.647702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:17.661817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:17.677103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890151921316995:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:17.677121Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890151921317000:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:17.677126Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:17.677755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:17.681876Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890151921317002:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:17.742021Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890151921317053:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:17.850544Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890151921317069:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:17.850656Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTYxODcyYWYtN2ZiMWQyNmYtODY2ZmJjMTMtMTY3NDQxNDM=, ActorId: [1:7509890151921316320:2401], ActorState: ExecuteState, TraceId: 01jweajxvcfq47ppnskwc01shv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:17.851386Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F745A496AC2 14. ??:0: ?? @ 0x7F745A52884F Trying to start YDB, gRPC: 61336, MsgBus: 3703 2025-05-29T15:29:21.249802Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890169056446848:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:21.249828Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001138/r3tmp/tmpDu0g6Q/pdisk_1.dat 2025-05-29T15:29:21.300179Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:21.300268Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890169056446812:2079] 1748532561249633 != 1748532561249636 TServer::EnableGrpc on GrpcPort 61336, node 1 2025-05-29T15:29:21.319048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:21.319062Z node 1 :NET_CLASSIFIER WARN: ... n.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F7AC2DA6AC2 14. ??:0: ?? 
@ 0x7F7AC2E3884F Trying to start YDB, gRPC: 21918, MsgBus: 9094 2025-05-29T15:29:40.534432Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890248630927618:2218];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001138/r3tmp/tmpmmWQeh/pdisk_1.dat 2025-05-29T15:29:40.639896Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:29:40.705488Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890248630927414:2079] 1748532580521568 != 1748532580521571 2025-05-29T15:29:40.712034Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21918, node 2025-05-29T15:29:40.720442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:40.720472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:40.721123Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 1 2025-05-29T15:29:40.743000Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:40.743013Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:40.743015Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:40.743059Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9094 TClient is connected to server localhost:9094 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:40.814664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
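------- [EDITOR NOTE] Recurring crash pattern in this section
Each aborted kqp scheme/constraints test above and below fails identically: the KQP compile actor reports ": Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1" (wrapped in execution code 1060), the session replies INTERNAL_ERROR, and CreateSampleTables then trips AssertSuccessResult on a thread-pool worker, which VERIFY-aborts the whole test binary instead of failing a single test. A minimal sketch of that helper follows, assuming the standard unittest macros; only the signature and the failing condition (result.IsSuccess()) are taken from the VERIFY message itself, and the real body at ydb/core/kqp/ut/common/kqp_ut_common.h:375 may differ:

#include <library/cpp/testing/unittest/registar.h>
// NYdb::TStatus comes from the YDB C++ SDK; the exact include path is assumed.

namespace NKikimr::NKqp {

// Hedged reconstruction, not the verbatim source: abort with the accumulated
// YQL issues when a query result is not successful.
inline void AssertSuccessResult(const NYdb::TStatus& result) {
    UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
}

} // namespace NKikimr::NKqp

Because the check runs on a pool thread (frames 5-12 of the traces: SetValue -> async -> TThreadPool Process), the framework prints "assertion failed in non-unittest thread" and panics, which is why one compile failure takes down every test still queued in the same binary.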
2025-05-29T15:29:40.817925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:29:40.820581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:40.910577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:40.967921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:40.982047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:41.104920Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890252925896362:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:41.104954Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:41.172702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:41.232315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:41.256883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:41.273124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:41.283446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:41.294938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:41.308126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:41.324698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890252925897017:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:41.324728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890252925897022:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:41.324733Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:41.325395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:41.334956Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890252925897024:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:41.413974Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890252925897075:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:41.531033Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890252925897091:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:41.532703Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=N2RlNDdhY2UtODgzNDI0MS01ZjA2Y2RjYi1iMTI0Y2FjNQ==, ActorId: [1:7509890252925896359:2401], ActorState: ExecuteState, TraceId: 01jweakmycecpcg86se6qn5jam, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:41.535205Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FD6B0C49AC2 14. ??:0: ?? @ 0x7FD6B0CDB84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> Self::Literals [GOOD] Test command err: + BTreeIndex{PageId: 0 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385, 13 rev 1, 683b} | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | > {0, a, false, 0} | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | > {1, b, true, 10} | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | > {2, c, false, 20} | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | > {3, d, true, 30} | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | > {4, e, false, 40} | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | > {5, f, true, 50} | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | > {6, g, false, 60} | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | > {7, h, true, 70} | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | > {9, j, true, 90} | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 + BTreeIndex{PageId: 9 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 116b} | + BTreeIndex{PageId: 5 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | + BTreeIndex{PageId: 0 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93, 13 rev 1, 179b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, a, false, 0} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, b, true, 10} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | > {2, c, false, 20} | | + BTreeIndex{PageId: 1 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195, 13 rev 1, 179b} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, d, true, 30} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, e, false, 40} | | | PageId: 10005 
RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | > {5, f, true, 50} | | + BTreeIndex{PageId: 2 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, g, false, 60} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, h, true, 70} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | + BTreeIndex{PageId: 8 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 242b} | | + BTreeIndex{PageId: 3 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 179b} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, j, true, 90} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, k, false, 100} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, l, true, 110} | | + BTreeIndex{PageId: 4 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555, 13 rev 1, 179b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, m, false, 120} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, n, true, 130} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | > {14, o, false, 140} | | + BTreeIndex{PageId: 6 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693, 13 rev 1, 179b} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, p, true, 150} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, q, false, 160} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | > {17, r, true, 170} | | + BTreeIndex{PageId: 7 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 179b} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, s, false, 180} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, t, true, 190} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 + BTreeIndex{PageId: 15 RowCount: 15150 DataSize: 106050 GroupDataSize: 207050 ErasedRowCount: 8080, 13 rev 1, 174b} | + BTreeIndex{PageId: 12 RowCount: 9078 DataSize: 70278 GroupDataSize: 138278 ErasedRowCount: 4318, 13 rev 1, 690b} | | + BTreeIndex{PageId: 0 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 702b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, x, NULL, NULL} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, xx, NULL, NULL} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | | > {2, xxx, NULL, NULL} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, xxxx, NULL, NULL} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, xxxxx, NULL, NULL} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 
| | | > {5, xxxxxx, NULL, NULL} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, xxxxxxx, NULL, NULL} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, xxxxxxxx, NULL, NULL} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | | | > {8, xxxxxxxxx, NULL, NULL} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, xxxxxxxxxx, NULL, NULL} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 1 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891, 13 rev 1, 683b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | | > {14, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | | > {17, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 | | | > {20, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10021 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891 | | > {21, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 2 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395, 13 rev 1, 689b} | | | PageId: 10022 RowCount: 2553 DataSize: 23253 GroupDataSize: 46253 ErasedRowCount: 943 | | | > {22, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10023 RowCount: 2676 DataSize: 24276 GroupDataSize: 48276 ErasedRowCount: 996 | | | > {23, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10024 RowCount: 2800 DataSize: 25300 GroupDataSize: 50300 ErasedRowCount: 1050 | | | > {24, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10025 RowCount: 2925 DataSize: 26325 GroupDataSize: 52325 ErasedRowCount: 1105 | | | > {25, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10026 RowCount: 3051 DataSize: 27351 GroupDataSize: 54351 ErasedRowCount: 1161 | | | > {26, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10027 RowCount: 3178 DataSize: 28378 GroupDataSize: 56378 ErasedRowCount: 1218 | | | > {27, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10028 RowCount: 3306 DataSize: 29406 GroupDataSize: 58406 ErasedRowCount: 1276 | | | > {28, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10029 RowCount: 3435 DataSize: 30435 GroupDataSize: 60435 ErasedRowCount: 1335 | | | > {29, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10030 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395 | | > {30, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 3 RowCount: 
4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911, 13 rev 1, 669b} | | | PageId: 10031 RowCount: 3696 DataSize: 32496 GroupDataSize: 64496 ErasedRowCount: 1456 | | | > {31, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10032 RowCount: 3828 DataSize: 33528 GroupDataSize: 66528 ErasedRowCount: 1518 | | | > {32, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10033 RowCount: 3961 DataSize: 34561 GroupDataSize: 68561 ErasedRowCount: 1581 | | | > {33, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10034 RowCount: 4095 DataSize: 35595 GroupDataSize: 70595 ErasedRowCount: 1645 | | | > {34, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10035 RowCount: 4230 DataSize: 36630 GroupDataSize: 72630 ErasedRowCount: 1710 | | | > {35, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10036 RowCount: 4366 DataSize: 37666 GroupDataSize: 74666 ErasedRowCount: 1776 | | | > {36, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10037 RowCount: 4503 DataSize: 38703 GroupDataSize: 76703 ErasedRowCount: 1843 | | | > {37, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10038 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911 | | > {38, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 4 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491, 13 rev 1, 725b} | | | PageId: 10039 RowCount: 4780 DataSize: 40780 GroupDataSize: 80780 ErasedRowCount: 1980 | | | > {39, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10040 RowCount: 4920 DataSize: 41820 GroupDataSize: 82820 ErasedRowCount: 2050 | | | > {40, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10041 RowCount: 5061 DataSize: 42861 GroupDataSize: 84861 ErasedRowCount: 2121 | | | > {41, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10042 RowCount: 5203 DataSize: 43903 GroupDataSize: 86903 ErasedRowCount: 2193 | | | > {42, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10043 RowCount: 5346 DataSize: 44946 GroupDataSize: 88946 ErasedRowCount: 2266 | | | > {43, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10044 RowCount: 5490 DataSize: 45990 GroupDataSize: 90990 ErasedRowCount: 2340 | | | > {44, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10045 RowCount: 5635 DataSize: 47035 GroupDataSize: 93035 ErasedRowCount: 2415 | | | > {45, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10046 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491 | | > {46, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 5 RowCount: 6831 DataSize: 55431 GroupDataSize: 109431 ErasedRowCount: 3051, 13 ... 
7 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } 0.29274 Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | 
ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + 
Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row 
| ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} >> EncryptedBackupParamsValidationTestFeatureDisabled::SrcPrefixAndSrcPathSpecified >> BackupRestore::TestAllPrimitiveTypes-FLOAT [FAIL] >> BackupRestore::TestAllPrimitiveTypes-DOUBLE >> KikimrIcGateway::TestListPath [FAIL] >> KikimrIcGateway::TestDropTable >> KqpOlapScheme::AddColumn >> KqpScheme::CreateTableWithUniqConstraint >> EncryptedBackupParamsValidationTestFeatureDisabled::SrcPrefixAndSrcPathSpecified [GOOD] |72.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> BackupRestoreS3::TestAllPrimitiveTypes-UINT32 [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-INT64 >> TSchemeShardSplitBySizeTest::Make20MergeOperationsWithoutLimit >> TSchemeShardSplitByLoad::IndexTableSplitsUpToMainTableCurrentPartitionCount >> BackupPathTest::CommonPrefixButExplicitImportItems [GOOD] >> KqpAcl::AclRevoke+UseSink+IsOlap >> EncryptedExportTest::EncryptedExportAndImport >> KikimrIcGateway::TestDropTable [FAIL] >> KikimrIcGateway::TestDropResourcePool >> BackupRestore::TestAllPrimitiveTypes-DOUBLE [FAIL] >> BackupRestore::TestAllPrimitiveTypes-DATE >> KqpScheme::AlterTransfer >> BackupPathTest::ExportDirectoryWithEncryption |72.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |72.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |72.6%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel >> KikimrIcGateway::TestDropResourcePool [GOOD] >> KqpScheme::ChangefeedTopicAutoPartitioning >> KqpTypes::QuerySpecialTypes >> KqpOlapScheme::AddColumn [GOOD] >> KqpOlapScheme::AddColumnErrors >> EncryptedExportTest::EncryptedExportAndImport [FAIL] >> KqpLimits::DatashardProgramSize+useSink [FAIL] >> KqpLimits::DatashardProgramSize-useSink |72.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |72.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |72.6%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut >> BackupRestoreS3::TestAllPrimitiveTypes-INT64 [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-FLOAT >> KqpScheme::CreateAndAlterTableWithBloomFilterUncompat >> BackupPathTest::ExportDirectoryWithEncryption [GOOD] >> EncryptedExportTest::EncryptionAndCompression >> TEvLocalSyncDataTests::SqueezeBlocks3 [GOOD] >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] >> TSyncNeighborsTests::SerDes2 [GOOD] >> BackupRestore::TestAllPrimitiveTypes-DATE [FAIL] >> BackupRestore::TestAllPrimitiveTypes-DATETIME >> KqpOlapScheme::AddColumnErrors [GOOD] >> KqpOlapScheme::AddColumnFamily >> KqpScheme::DisableCreateExternalTable |72.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |72.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |72.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs >> KqpScheme::TwoSimilarFamiliesTest >> KqpConstraints::AddColumnWithDefaultForbidden >> KqpLimits::DatashardProgramSize-useSink [FAIL] >> KqpLimits::ComputeNodeMemoryLimit >> BackupPathTest::EncryptedExportWithExplicitDestinationPath |72.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] |72.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> 
TSyncNeighborsTests::SerDes2 [GOOD] >> EncryptedExportTest::EncryptionAndCompression [FAIL] >> KqpOlapScheme::AddColumnFamily [GOOD] >> KqpExplain::MultiUsedStage >> BackupRestoreS3::TestAllPrimitiveTypes-FLOAT [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-DOUBLE ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::AddColumnFamily [GOOD] Test command err: Trying to start YDB, gRPC: 32519, MsgBus: 10144 2025-05-29T15:29:30.418477Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890206087044329:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:30.418510Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010f6/r3tmp/tmp9vWwVU/pdisk_1.dat TServer::EnableGrpc on GrpcPort 32519, node 1 2025-05-29T15:29:30.482304Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:30.488179Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:30.488196Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:30.488198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:30.488245Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10144 2025-05-29T15:29:30.519538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:30.519566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:30.520624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10144 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:30.559755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
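------- [EDITOR NOTE] On ": Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1"
This issue, repeated in every crash dump in this section, is a hard bounds check inside the YQL expression tree, so compilation cannot degrade gracefully: once it fires, the session can only answer INTERNAL_ERROR. A hedged sketch of the kind of accessor that raises such an error is below; the struct, field names, and layout are illustrative assumptions, and only the message text matches the log:

#include <util/generic/vector.h>
#include <util/generic/yexception.h>

struct TExprNodeSketch {
    TVector<TExprNodeSketch*> Children;

    // Hypothetical bounds-checked child accessor; mirrors the logged message
    // ("index out of range") but is NOT the actual code at yql_expr.h:1874.
    TExprNodeSketch* Child(size_t index) const {
        Y_ENSURE(index < Children.size(), "index out of range"); // throws yexception
        return Children[index];
    }
};

The "Resource pool default not found or you don't have access permissions" warnings that follow are a different, apparently benign pattern: the workload service polls for the default pool before the setup transaction that creates it has finished, then retries ("Transaction ... completed, doublechecking").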
2025-05-29T15:29:30.567027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:30.577981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:30.655375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:30.683767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:30.703342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:30.884635Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890206087045925:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:30.884658Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:30.942313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:30.950196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:30.957419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:30.968404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:30.982104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:30.996724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:31.010795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:31.027009Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890210382013874:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:31.027032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890210382013879:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:31.027038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:31.027785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:31.030365Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890210382013881:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:31.112059Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890210382013932:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } VERIFY failed (2025-05-29T15:29:31.221124Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:29:31.220082Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890210382013948:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:31.220273Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjUzMjlhODUtOGQxMzMzNTgtZTJhYmEyNDgtZjcyMzNiNjk=, ActorId: [1:7509890206087045917:2401], ActorState: ExecuteState, TraceId: 01jweakawj94sbwd3n5dk4km3d, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FC6ADE4BAC2 14. ??:0: ?? @ 0x7FC6ADEDD84F Trying to start YDB, gRPC: 28333, MsgBus: 17954 2025-05-29T15:29:35.562173Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890228067109556:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:35.562218Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010f6/r3tmp/tmpquD4oM/pdisk_1.dat 2025-05-29T15:29:35.648010Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:35.648453Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890228067109528:2079] 1748532575561872 != 1748532575561875 TServer::EnableGrpc on GrpcPort 28333, node 1 2025-05-29T15:29:35.6637 ... ); 2025-05-29T15:29:50.229467Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509890293226936329:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:50.229501Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:50.233616Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:50.240622Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[3:7509890293226936375:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:29:50.240650Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[3:7509890293226936375:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:50.240711Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[3:7509890293226936375:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:50.240737Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[3:7509890293226936375:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:50.240766Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[3:7509890293226936375:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:29:50.240791Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[3:7509890293226936375:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:29:50.240808Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[3:7509890293226936375:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:29:50.240827Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[3:7509890293226936375:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:29:50.240845Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[3:7509890293226936375:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:29:50.240862Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[3:7509890293226936375:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:29:50.240879Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[3:7509890293226936375:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:29:50.240894Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[3:7509890293226936375:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:29:50.241325Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:29:50.241340Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:29:50.241354Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:29:50.241358Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:29:50.241375Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:29:50.241383Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:29:50.241395Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:29:50.241403Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:29:50.241414Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:29:50.241422Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:29:50.241429Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:29:50.241433Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:29:50.241454Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:29:50.241464Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:29:50.241483Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:29:50.241492Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:29:50.241503Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:29:50.241509Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:29:50.241520Z node 3 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:29:50.241526Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:29:50.241534Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:29:50.241631Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:29:50.241641Z node 3 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:29:50.288118Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:50.290711Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509890293226936447:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:50.290732Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:50.293920Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:29:50.301162Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; 2025-05-29T15:29:50.305761Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509890293226936481:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:50.305778Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:50.308810Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:50.314939Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; >> EncryptedExportTest::EncryptionAndChecksum ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::IndexedTableAndNotNullColumnAddNotNullColumn Test command err: Trying to start YDB, gRPC: 22560, MsgBus: 30691 2025-05-29T15:29:18.440805Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890153702361693:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:18.440835Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00112e/r3tmp/tmpYXMvnr/pdisk_1.dat 2025-05-29T15:29:18.485691Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22560, node 1 2025-05-29T15:29:18.503221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:18.503240Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:18.503242Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:18.503292Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30691 TClient is connected to server localhost:30691 2025-05-29T15:29:18.541733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:18.541762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:18.542857Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:18.566616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:18.574188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:18.589288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:18.609274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:18.620895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:18.755451Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890153702363309:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.755475Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.788810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.796423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.850943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.906089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.914216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.929073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.942451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.958445Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890153702363964:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.958466Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.958491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890153702363969:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.959208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:18.962289Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890153702363971:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:19.035914Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890157997331318:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:19.124793Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890157997331334:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:19.124921Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDU2NjdmZTAtZDk1NmIyMTgtZTcxMmQ4NmQtZmY0OThiZTU=, ActorId: [1:7509890153702363291:2401], ActorState: ExecuteState, TraceId: 01jweajz3ee5f0rr1mfr8ctdvq, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:19.126515Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F0357437AC2 14. ??:0: ?? @ 0x7F03574C984F Trying to start YDB, gRPC: 7844, MsgBus: 26157 2025-05-29T15:29:22.518648Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890174039760376:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:22.519005Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00112e/r3tmp/tmpmUszc4/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7844, node 1 2025-05-29T15:29:22.578579Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:22.578812Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890174039760339:2079] 1748532562518429 != 1748532562518432 2025-05-29T15:29:22.582225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:22.582228Z node 1 :NET_CLASSIFIER WAR ... qp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F05D3AF1AC2 14. ??:0: ?? 
@ 0x7F05D3B8384F Trying to start YDB, gRPC: 15491, MsgBus: 29807 2025-05-29T15:29:45.135436Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890270121436961:2215];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:45.218709Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00112e/r3tmp/tmp0Z7vWU/pdisk_1.dat 2025-05-29T15:29:45.296866Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15491, node 1 2025-05-29T15:29:45.418960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:45.418975Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:45.418977Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:45.419024Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:45.440122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:45.440151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:45.447305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29807 TClient is connected to server localhost:29807 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:45.558014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:45.562414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:45.567707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:45.676741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:45.703729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:45.719270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:46.096158Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890274416405697:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:46.096208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:46.104161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:46.123313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:46.134676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:46.150018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:46.179720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:46.208287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:46.224266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:46.250867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890274416406358:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:46.250918Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:46.251125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890274416406363:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:46.252284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:46.256748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:29:46.256828Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890274416406365:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:46.328189Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890274416406416:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:46.454238Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890274416406432:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:46.454826Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTAzYzBkMTQtZTk3NDFkMWEtOWI5Mzc3OGEtMWYyYTBjYg==, ActorId: [1:7509890274416405671:2401], ActorState: ExecuteState, TraceId: 01jweaksr3bf88wfhbz1xdfzj7, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:46.459337Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F0E10BB1AC2 14. ??:0: ?? @ 0x7F0E10C4384F >> TSchemeShardSplitBySizeTest::Make20MergeOperationsWithoutLimit [GOOD] >> BackupRestore::TestAllPrimitiveTypes-DATETIME [FAIL] >> BackupRestore::TestAllPrimitiveTypes-INTERVAL >> BackupPathTest::EncryptedExportWithExplicitDestinationPath [GOOD] >> KqpLimits::CancelAfterRwTx+useSink >> KqpLimits::StreamWrite+Allowed [FAIL] >> KqpLimits::StreamWrite-Allowed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::Make20MergeOperationsWithoutLimit [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:29:48.251098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:29:48.251127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:48.251134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:29:48.251140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:29:48.251155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:29:48.251159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:29:48.251169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:48.251184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:29:48.251306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:29:48.251395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:29:48.270276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:29:48.270306Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:48.273118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:29:48.273274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:29:48.273319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:29:48.275034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:29:48.275235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:29:48.275392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:48.275462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:29:48.275955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:48.276009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:29:48.276293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:48.276302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:48.276325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:29:48.276336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:48.276342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:29:48.276381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:29:48.277728Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:29:48.307459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain 
{ Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:29:48.307546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:48.307617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:29:48.307673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:29:48.307687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:48.309349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:48.309388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:29:48.309451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:48.309465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:29:48.309472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:29:48.309478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:29:48.310018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:48.310034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:29:48.310040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:29:48.310427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:48.310440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:48.310449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:48.310466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:29:48.311204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:29:48.311627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:29:48.311670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:29:48.311872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:48.311900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:29:48.311908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:48.311975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:29:48.311981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:48.312014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:29:48.312027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:29:48.312477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:48.312488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:48.312543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
HEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 23 PathOwnerId: 72057594046678944, cookie: 141 2025-05-29T15:29:50.938813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 23 PathOwnerId: 72057594046678944, cookie: 141 2025-05-29T15:29:50.938817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 141 2025-05-29T15:29:50.938823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 141, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 23 2025-05-29T15:29:50.938831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 25 2025-05-29T15:29:50.938849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 141, ready parts: 0/1, is published: true 2025-05-29T15:29:50.939224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 141:0 from tablet: 72057594046678944 to tablet: 72075186233409580 cookie: 72057594046678944:35 msg type: 269553158 2025-05-29T15:29:50.939245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 141:0 from tablet: 72057594046678944 to tablet: 72075186233409581 cookie: 72057594046678944:36 msg type: 269553158 2025-05-29T15:29:50.939955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 141 2025-05-29T15:29:50.940337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 141:0, at schemeshard: 72057594046678944, message: OperationCookie: 141 TabletId: 72075186233409581 2025-05-29T15:29:50.940351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 141:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409581, at schemeshard: 72057594046678944 2025-05-29T15:29:50.940839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 141:0, at schemeshard: 72057594046678944, message: OperationCookie: 141 TabletId: 72075186233409580 2025-05-29T15:29:50.940849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 141:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409580, at schemeshard: 72057594046678944 2025-05-29T15:29:50.940867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#141:0 progress is 1/1 2025-05-29T15:29:50.940873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 141 ready parts: 1/1 2025-05-29T15:29:50.940878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#141:0 progress is 1/1 2025-05-29T15:29:50.940881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: 
TOperation IsReadyToDone TxId: 141 ready parts: 1/1 2025-05-29T15:29:50.940886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 141, ready parts: 1/1, is published: true 2025-05-29T15:29:50.940897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1696:3206] message: TxId: 141 2025-05-29T15:29:50.940905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 141 ready parts: 1/1 2025-05-29T15:29:50.940912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 141:0 2025-05-29T15:29:50.940916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 141:0 2025-05-29T15:29:50.940964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 24 2025-05-29T15:29:50.942209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 141:0, at schemeshard: 72057594046678944 2025-05-29T15:29:50.942634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 949 RawX2: 4294970003 } TabletId: 72075186233409581 State: 4 2025-05-29T15:29:50.942654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409581, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:29:50.942664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__state_changed_reply.cpp:44: TTxShardStateChanged DoExecuteOperation should be restarted in case missing one of shard, txId: 141, tabletId: 72075186233409581 2025-05-29T15:29:50.942827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 141:0, at schemeshard: 72057594046678944 2025-05-29T15:29:50.942835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:260: Unable to activate 141:0 2025-05-29T15:29:50.942900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 141: got EvNotifyTxCompletionResult 2025-05-29T15:29:50.942908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 141: satisfy waiter [1:3238:4463] 2025-05-29T15:29:50.942987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 945 RawX2: 4294970000 } TabletId: 72075186233409580 State: 4 2025-05-29T15:29:50.942997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409580, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:29:50.943459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:36 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:29:50.943763Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 36 TxId_Deprecated: 36 TabletID: 72075186233409581 2025-05-29T15:29:50.943819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 36 ShardOwnerId: 72057594046678944 ShardLocalIdx: 36, at schemeshard: 72057594046678944 2025-05-29T15:29:50.943901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 23 Forgetting tablet 72075186233409581 2025-05-29T15:29:50.945172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:35 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:29:50.945295Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 35 TxId_Deprecated: 35 TabletID: 72075186233409580 Forgetting tablet 72075186233409580 2025-05-29T15:29:50.945409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 35 ShardOwnerId: 72057594046678944 ShardLocalIdx: 35, at schemeshard: 72057594046678944 2025-05-29T15:29:50.945471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 22 2025-05-29T15:29:50.946431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:36 2025-05-29T15:29:50.946443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2025-05-29T15:29:50.946511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:35 2025-05-29T15:29:50.946518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:35 tabletId 72075186233409580 TestWaitNotification: OK eventTxId 141 2025-05-29T15:29:50.946681Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:29:50.946732Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 62us result status StatusSuccess 2025-05-29T15:29:50.946942Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 123 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 21 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 20 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 20 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpTypes::UnsafeTimestampCastV0 >> KqpScheme::DoubleCreateResourcePoolClassifier-UseSink >> BackupPathTest::EncryptedExportWithExplicitObjectList >> EncryptedExportTest::EncryptionAndChecksum [FAIL] >> KqpScheme::CreateBackupCollection >> BackupRestoreS3::TestAllPrimitiveTypes-DOUBLE [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-TIMESTAMP >> KqpExplain::UpdateConditionalKey-UseSink >> KqpScheme::CreateTableWithVectorIndexCoveredPublicApi >> KqpStats::RequestUnitForBadRequestExecute >> EncryptedExportTest::EncryptionChecksumAndCompression ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithUniqConstraint Test command err: Trying to start YDB, gRPC: 65251, MsgBus: 8052 2025-05-29T15:29:20.937852Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890163989860871:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:20.937877Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00112d/r3tmp/tmpshOkcO/pdisk_1.dat 2025-05-29T15:29:21.002718Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65251, node 1 2025-05-29T15:29:21.014326Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:21.014342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:21.014344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:21.014395Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8052 2025-05-29T15:29:21.038938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:21.038971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:21.040099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8052 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:21.063897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:21.073439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:21.135266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:21.156487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:21.168584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:21.345610Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890168284829758:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:21.345631Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:21.392430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:21.400808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:21.413486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:21.468252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:21.476790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:21.490595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:21.504418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:21.520893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890168284830413:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:21.520911Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:21.520925Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890168284830418:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:21.521567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:21.524005Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890168284830420:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:21.577447Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890168284830471:3398] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:21.662221Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890168284830487:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:21.662309Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWVlNDBkYzItMTM4YjRkNDUtNzY3MjgxN2MtNmZjOGExNzQ=, ActorId: [1:7509890168284829740:2401], ActorState: ExecuteState, TraceId: 01jweak1kg13h2kxpba6tz73p9, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:21.662895Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F7CD49B3AC2 14. ??:0: ?? @ 0x7F7CD4A4584F Trying to start YDB, gRPC: 22714, MsgBus: 63278 2025-05-29T15:29:26.105742Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890189578921039:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:26.105800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00112d/r3tmp/tmphj0MPX/pdisk_1.dat 2025-05-29T15:29:26.161772Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22714, node 1 2025-05-29T15:29:26.179507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:26.179521Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:26.179523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 20 ... lPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FE4AF6D8AC2 14. ??:0: ?? 
@ 0x7FE4AF76A84F Trying to start YDB, gRPC: 1966, MsgBus: 31406 2025-05-29T15:29:47.440141Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890280988748548:2143];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:47.441290Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00112d/r3tmp/tmpD1XI3a/pdisk_1.dat 2025-05-29T15:29:47.547809Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:47.548112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:47.548128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 1966, node 1 2025-05-29T15:29:47.549215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:47.559847Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:47.559857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:47.559858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:47.559893Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31406 TClient is connected to server localhost:31406 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:29:47.632447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:47.639675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
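
The stack traces above all bottom out in the same helper, NKikimr::NKqp::AssertSuccessResult at ydb/core/kqp/ut/common/kqp_ut_common.h:375, failing on (result.IsSuccess()). Below is a minimal sketch of that pattern with stand-in types; it is an illustration only, not the actual YDB source, which panics through the unittest registar rather than throwing:

    #include <iostream>
    #include <stdexcept>
    #include <string>

    // Minimal stand-in for NYdb::TStatus; the real type lives in the
    // YDB C++ SDK and differs in detail.
    struct TStatus {
        bool Success;
        std::string Issues;
        bool IsSuccess() const { return Success; }
        const std::string& GetIssues() const { return Issues; }
    };

    // Sketch of the helper named in the traces: any non-success status is a
    // hard failure, which is why a single compile error ("index out of
    // range") aborts CreateSampleTables and with it the whole test binary.
    void AssertSuccessResult(const TStatus& result) {
        if (!result.IsSuccess()) {
            throw std::runtime_error("(result.IsSuccess()) failed:\n" + result.GetIssues());
        }
    }

    int main() {
        AssertSuccessResult({true, ""});  // passes silently
        try {
            AssertSuccessResult({false, ": Fatal: Execution, code: 1060"});
        } catch (const std::runtime_error& e) {
            std::cout << e.what() << '\n';  // mirrors the issue text in the log
        }
    }
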
2025-05-29T15:29:47.664719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:47.701244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:47.718238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:47.919400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890280988750066:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:47.919434Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:47.994575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:48.005982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:48.017288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:48.040291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:48.060181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:48.088998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:48.113791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:48.179784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890285283718017:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:48.179815Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:48.179944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890285283718025:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:48.180920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:48.184888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:29:48.184990Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890285283718027:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:48.252178Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890285283718078:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:48.400762Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890285283718087:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:48.402168Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODA4MWYxOWItYzNjMjcyMTctMzkzMGUzMzktZGI4OGVjNmM=, ActorId: [1:7509890280988750039:2400], ActorState: ExecuteState, TraceId: 01jweakvmkbg359zankfrv7a07, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:48.403714Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FD67F99FAC2 14. ??:0: ?? @ 0x7FD67FA3184F >> BackupRestore::TestAllPrimitiveTypes-INTERVAL [FAIL] >> BackupRestore::TestAllPrimitiveTypes-DATE32 >> KqpQuery::RandomNumber >> BackupPathTest::EncryptedExportWithExplicitObjectList [GOOD] >> KqpLimits::DataShardReplySizeExceeded >> EncryptedExportTest::EncryptionChecksumAndCompression [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpAcl::AclRevoke+UseSink+IsOlap Test command err: Trying to start YDB, gRPC: 11496, MsgBus: 11709 2025-05-29T15:29:18.014701Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890152873625455:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:18.014747Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001134/r3tmp/tmpmw6drQ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11496, node 1 2025-05-29T15:29:18.073887Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:18.076791Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:18.076801Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:18.076803Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:18.076845Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11709 TClient is connected to server localhost:11709 2025-05-29T15:29:18.115855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:18.115888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:18.117037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
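
Since the same VERIFY failed signature recurs across several test binaries in this log, it can help to extract the distinct assertion locations mechanically. The following is a hypothetical stand-alone triage helper, not part of the YDB tree; it relies only on the literal "assertion failed at <file:line>," format seen in the records above:

    #include <iostream>
    #include <string>

    int main() {
        const std::string marker = "assertion failed at ";
        std::string line;
        while (std::getline(std::cin, line)) {
            // A flattened log line may carry several records; scan them all.
            for (size_t pos = line.find(marker); pos != std::string::npos;
                 pos = line.find(marker, pos + marker.size())) {
                size_t begin = pos + marker.size();
                size_t end = line.find(',', begin);  // location ends at the comma
                if (end == std::string::npos) end = line.size();
                std::cout << line.substr(begin, end - begin) << '\n';
            }
        }
    }

Piping this log through the helper and then through sort | uniq -c would show that every VERIFY failure here points at the same location, ydb/core/kqp/ut/common/kqp_ut_common.h:375.
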
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:18.142288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:18.148968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:18.212307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:18.270808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:18.281663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:18.408724Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890152873627059:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.408743Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.459072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.467279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.480608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.494870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.508602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.522198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.536517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:18.551900Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890152873627711:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.551922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890152873627716:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.551928Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:18.552506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:18.556293Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890152873627718:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:18.635506Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890152873627769:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:18.731831Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890152873627785:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:18.731964Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTg3MmE0NzEtYzY3NTU5MGUtNTNiZDM5MGUtNzI4MGMxMWQ=, ActorId: [1:7509890152873627040:2401], ActorState: ExecuteState, TraceId: 01jweajypq71wfyq42x6ct79pn, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:18.732700Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F45FD8FEAC2 14. ??:0: ?? @ 0x7F45FD99084F Trying to start YDB, gRPC: 20029, MsgBus: 4069 2025-05-29T15:29:22.438934Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890170448861905:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:22.438958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001134/r3tmp/tmpVWjHmT/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20029, node 1 2025-05-29T15:29:22.497485Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:22.504504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:22.504517Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:22.504520Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) ... operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FACC69D5AC2 14. ??:0: ?? 
@ 0x7FACC6A6784F Trying to start YDB, gRPC: 29518, MsgBus: 4985 2025-05-29T15:29:48.041610Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890281611447433:2139];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:48.044356Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001134/r3tmp/tmpzqf5MR/pdisk_1.dat 2025-05-29T15:29:48.145154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:48.145184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:48.151035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:48.157902Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890281611447332:2079] 1748532588040393 != 1748532588040396 TServer::EnableGrpc on GrpcPort 29518, node 1 2025-05-29T15:29:48.159103Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:48.182267Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:48.182283Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:48.182286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:48.182327Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4985 TClient is connected to server localhost:4985 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:48.289728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:48.293130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
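
The "Scheduled retry for error: Transaction ... completed, doublechecking" records, followed by the TX_PROXY "path exist, request accepts it" check failures, trace a create/double-check race on /Root/.metadata/workload_manager/pools/default: the creator that loses the race re-reads the path before reporting success. A hedged sketch of that control flow, with illustrative names only and no claim to match YDB's actual actor code:

    #include <functional>

    enum class EStatus { Ok, AlreadyExists, Error };

    EStatus CreateWithDoubleCheck(const std::function<EStatus()>& create,
                                  const std::function<bool()>& exists) {
        const EStatus st = create();
        if (st != EStatus::AlreadyExists) {
            return st;
        }
        // Lost the race: confirm the object is actually visible before
        // reporting success, mirroring the "doublechecking" retries above.
        return exists() ? EStatus::Ok : EStatus::Error;
    }

    int main() {
        const EStatus st = CreateWithDoubleCheck(
            [] { return EStatus::AlreadyExists; },  // simulate losing the race
            [] { return true; });                   // pool is visible afterwards
        return st == EStatus::Ok ? 0 : 1;
    }
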
2025-05-29T15:29:48.294271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:48.320232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:29:48.342603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:48.355573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:48.564661Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890281611448964:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:48.564698Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:48.629215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:48.644931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:48.711593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:48.732636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:48.748929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:48.774064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:48.787310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:48.863609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890281611449620:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:48.863639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:48.863746Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890281611449625:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:48.864656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:48.868399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:29:48.868473Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890281611449627:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:48.940349Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890281611449678:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:49.049378Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890281611449687:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:49.049498Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTc2OWY3MmEtMzBkNjhlMzQtZjgyOWNhM2EtODdjNjVjOGY=, ActorId: [1:7509890281611448953:2401], ActorState: ExecuteState, TraceId: 01jweakw9zaddz914ex7atdxtc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:49.050459Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F3CE3DDEAC2 14. ??:0: ?? @ 0x7F3CE3E7084F >> KqpBatchDelete::Large_1 >> BackupPathTest::ExportCommonSourcePathImportExplicitly >> KqpQuery::UdfTerminate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterTransfer Test command err: Trying to start YDB, gRPC: 24573, MsgBus: 26378 2025-05-29T15:29:22.931702Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890173031202565:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:22.931723Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001128/r3tmp/tmpVm4Oqs/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24573, node 1 2025-05-29T15:29:22.988124Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:22.999448Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:22.999463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:22.999466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:22.999509Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26378 2025-05-29T15:29:23.033111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:23.033138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:23.034222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26378 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:23.060506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.069700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.087627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.108476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.119690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.272849Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890177326171463:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.272880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.324464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.331947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.345492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.400310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.407970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.422791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.436843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.494930Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890177326172119:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.494957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890177326172124:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.494959Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:23.495691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:23.498107Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890177326172126:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:23.597951Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890177326172177:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:23.705061Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890177326172193:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:23.705178Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTVhMjA0OGEtYWM5MjViNzUtOTgxMTE3OTItNzFlY2Y4OTM=, ActorId: [1:7509890177326171445:2401], ActorState: ExecuteState, TraceId: 01jweak3h67vbxfn763wpgd9s4, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:23.706195Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7EFD8E13DAC2 14. ??:0: ?? @ 0x7EFD8E1CF84F Trying to start YDB, gRPC: 6489, MsgBus: 6522 2025-05-29T15:29:27.021401Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890194658713144:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:27.021431Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001128/r3tmp/tmpLirCOj/pdisk_1.dat 2025-05-29T15:29:27.089025Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6489, node 1 2025-05-29T15:29:27.108917Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:27.108928Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:27.108931Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 20 ... lPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F8AA7A67AC2 14. ??:0: ?? 
@ 0x7F8AA7AF984F Trying to start YDB, gRPC: 4139, MsgBus: 17808 2025-05-29T15:29:48.591219Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890284602844520:2205];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:48.591377Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001128/r3tmp/tmpoUbPf3/pdisk_1.dat 2025-05-29T15:29:48.687693Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:48.700060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:48.700091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 4139, node 1 2025-05-29T15:29:48.701805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:48.713666Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:48.713679Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:48.713682Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:48.713733Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17808 TClient is connected to server localhost:17808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:48.897339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:48.900409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:29:48.905716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:48.940315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:48.966153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:48.987186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:49.080299Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890288897813274:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:49.080338Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:49.136583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:49.145295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:49.154978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:49.169767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:49.184006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:49.196859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:49.213293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:49.242049Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890288897813926:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:49.242073Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:49.242165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890288897813931:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:49.243225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:49.245940Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890288897813933:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:49.336393Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890288897813984:3395] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:49.463513Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890288897814000:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:49.463619Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWMwZDUzMDAtNDkzM2Y5NTMtNDM2MTk2YzYtMWFmMTMyM2U=, ActorId: [1:7509890288897813256:2401], ActorState: ExecuteState, TraceId: 01jweakwnsajscmech9z7jeccv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:49.464684Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F1E77D86AC2 14. ??:0: ?? @ 0x7F1E77E1884F >> BackupRestoreS3::TestAllPrimitiveTypes-TIMESTAMP [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-INTERVAL >> ListObjectsInS3Export::ExportWithSchemaMapping >> KqpScheme::ChangefeedAttributes >> BackupRestore::TestAllPrimitiveTypes-DATE32 [FAIL] >> BackupRestore::TestAllPrimitiveTypes-DATETIME64 >> KqpTypes::DyNumberCompare >> KqpOlapScheme::DropThenAddColumnCompaction [GOOD] >> KqpOlapScheme::DropColumnTableStoreErrors >> BackupPathTest::ExportCommonSourcePathImportExplicitly [GOOD] >> KqpLimits::StreamWrite-Allowed [FAIL] >> KqpLimits::ReadsetCountLimit >> KqpScheme::CreateAndAlterTableWithBloomFilterCompat >> KqpQuery::OlapCreateAsSelect_Simple >> KqpScheme::DisableDropExternalTable >> ListObjectsInS3Export::ExportWithSchemaMapping [GOOD] >> KqpExplain::SsaProgramInJsonPlan >> BackupPathTest::ImportFilterByPrefix ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::TwoSimilarFamiliesTest Test command err: Trying to start YDB, gRPC: 30211, MsgBus: 1555 2025-05-29T15:29:23.743772Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890177924927085:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:23.743793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001127/r3tmp/tmpyNqs6F/pdisk_1.dat 2025-05-29T15:29:23.809428Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30211, node 1 2025-05-29T15:29:23.826198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:23.826210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:23.826212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:23.826259Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1555 2025-05-29T15:29:23.845010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-05-29T15:29:23.845030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:23.846167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1555 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:23.885785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.894608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:23.895831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.912274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.933371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:23.945421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:24.123832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890182219895988:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:24.123858Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:24.173665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.181985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.192478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.206333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.261400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.269315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.283839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-05-29T15:29:24.300082Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890182219896641:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:24.300108Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:24.300109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890182219896646:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:24.300788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480 2025-05-29T15:29:24.303788Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890182219896648:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-05-29T15:29:24.357640Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890182219896699:3405] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:24.464418Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890182219896715:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:24.464541Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjZhY2E2ZmMtNzQyMjE5NGItNzNhNTIwOWItYjA1ZDNiZDQ=, ActorId: [1:7509890182219895985:2402], ActorState: ExecuteState, TraceId: 01jweak4ab4vntchtdt9qmata6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:24.465215Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F88B06D0AC2 14. ??:0: ?? @ 0x7F88B076284F Trying to start YDB, gRPC: 6497, MsgBus: 28263 2025-05-29T15:29:28.553713Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890198400292431:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:28.553740Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001127/r3tmp/tmp2baJUW/pdisk_1.dat 2025-05-29T15:29:28.614650Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6497, node 1 2025-05-29T15:29:28.630835Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025 ... anicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F92AB729AC2 14. ??:0: ?? 
@ 0x7F92AB7BB84F Trying to start YDB, gRPC: 18433, MsgBus: 10168 2025-05-29T15:29:49.982309Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890288454315754:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:49.982333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001127/r3tmp/tmp1GUCEP/pdisk_1.dat 2025-05-29T15:29:50.052986Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18433, node 1 2025-05-29T15:29:50.074960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:50.074976Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:50.074979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:50.075035Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10168 2025-05-29T15:29:50.128088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:50.128112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:50.128752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10168 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:50.159091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:50.162614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:50.172769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:50.236275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:50.253637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:50.264958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:50.353244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890292749284662:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:50.353283Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:50.403255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:50.414094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:50.470260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:50.485698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:50.498347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:50.515182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:50.526897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:50.542632Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890292749285316:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:50.542676Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:50.542762Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890292749285321:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:50.543758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:50.546779Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890292749285323:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:50.601497Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890292749285374:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:50.723903Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890292749285390:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:50.724019Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDQyN2I2YTctN2I1YjA0YTQtZGZkN2M3MmItZjY0NDAzNzQ=, ActorId: [1:7509890292749284644:2401], ActorState: ExecuteState, TraceId: 01jweakxye0rra3kjd7rpavt6f, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:50.724731Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FD70FF0CAC2 14. ??:0: ?? @ 0x7FD70FF9E84F >> BackupRestoreS3::TestAllPrimitiveTypes-INTERVAL [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-TZ_DATE [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-TZ_DATETIME [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-TZ_TIMESTAMP [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-TIMESTAMP64 >> ListObjectsInS3Export::ExportWithoutSchemaMapping >> KqpConstraints::AddNonColumnDoesnotReturnInternalError >> KqpOlapScheme::DropColumnTableStoreErrors [GOOD] >> KqpOlapScheme::DropTableAfterInsert >> BackupRestore::TestAllPrimitiveTypes-DATETIME64 [FAIL] >> BackupRestore::TestAllPrimitiveTypes-INTERVAL64 >> KqpExplain::MergeConnection |72.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |72.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |72.7%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut >> BackupPathTest::ImportFilterByPrefix [GOOD] >> ListObjectsInS3Export::ExportWithoutSchemaMapping [GOOD] >> KqpQuery::Now >> BackupRestoreS3::TestAllPrimitiveTypes-TIMESTAMP64 [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-INTERVAL64 >> BackupPathTest::ImportFilterByYdbObjectPath >> KqpLimits::CancelAfterRwTx-useSink >> ListObjectsInS3Export::ExportWithEncryption |72.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |72.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |72.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops >> KqpTypes::UnsafeTimestampCastV1 >> BackupRestore::TestAllPrimitiveTypes-INTERVAL64 [FAIL] >> BackupRestore::TestAllPrimitiveTypes-JSON ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DoubleCreateResourcePoolClassifier-UseSink Test command err: Trying to start YDB, gRPC: 12288, MsgBus: 13327 2025-05-29T15:29:25.023378Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890186182068317:2099];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:25.023603Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/ciyv/001123/r3tmp/tmpHJ4zoU/pdisk_1.dat 2025-05-29T15:29:25.089894Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:25.090073Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890186182068245:2079] 1748532565023047 != 1748532565023050 TServer::EnableGrpc on GrpcPort 12288, node 1 2025-05-29T15:29:25.105698Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:25.105710Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:25.105711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:25.105743Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13327 2025-05-29T15:29:25.124980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:25.125010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:25.126056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13327 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:25.169075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:25.179164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:25.195368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:25.217039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:25.228990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:25.459598Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890186182069878:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.459626Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.499103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.506671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.515211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.522254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.536311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.543136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.557075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:25.613697Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890186182070537:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.613725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890186182070542:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.613725Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:25.614463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:25.619470Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890186182070544:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:25.686123Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890186182070595:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:25.784065Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890186182070611:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:25.784183Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzNlZjNjMy1lYzFkOWZlNC1hMzQ1MTRhNS02NWIyM2E4Nw==, ActorId: [1:7509890186182069860:2401], ActorState: ExecuteState, TraceId: 01jweak5kde4acr3f2gr4d2h6t, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:25.784858Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FE4A069BAC2 14. ??:0: ?? @ 0x7FE4A072D84F Trying to start YDB, gRPC: 30117, MsgBus: 26729 2025-05-29T15:29:29.613131Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890200262149306:2268];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:29.613238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001123/r3tmp/tmp90AMvZ/pdisk_1.dat 2025-05-29T15:29:29.672764Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890200262149066:2079] 1748532569610484 != 1748532569610487 2025-05-29T15:29:29.674648Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30117, node 1 2025-05-29 ... 633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F9CD6772AC2 14. ??:0: ?? 
@ 0x7F9CD680484F Trying to start YDB, gRPC: 10550, MsgBus: 32439 2025-05-29T15:29:51.666027Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890295176535424:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:51.666045Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001123/r3tmp/tmpGXM7ae/pdisk_1.dat 2025-05-29T15:29:51.730307Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:51.730607Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890295176535403:2079] 1748532591665881 != 1748532591665884 TServer::EnableGrpc on GrpcPort 10550, node 1 2025-05-29T15:29:51.750104Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:51.750120Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:51.750123Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:51.750174Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:51.768276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:51.768315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:51.769375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32439 TClient is connected to server localhost:32439 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:29:51.827502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:51.838616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
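Every failure recorded above follows one signature: query compilation aborts inside yql/essentials/ast/yql_expr.h:1874 with "index out of range", the session replies INTERNAL_ERROR, and the harness then panics in NKikimr::NKqp::AssertSuccessResult (kqp_ut_common.h:375) because result.IsSuccess() is false. The trace gives the helper's exact signature and checked condition; a minimal sketch of what it plausibly looks like follows (the body and include path are assumptions, only the namespace, signature, and condition come from the log):

// Sketch of the helper named in the stack traces. The namespace, signature,
// and checked condition are copied from the VERIFY message; the body and the
// SDK include path are assumptions about how such a helper is usually written.
#include <library/cpp/testing/unittest/registar.h>
#include <ydb/public/sdk/cpp/client/ydb_types/status/status.h>

namespace NKikimr::NKqp {

inline void AssertSuccessResult(const NYdb::TStatus& result) {
    // When this fires on a thread-pool thread (the trace shows it reached via
    // NThreading async inside CreateSampleTables), the unittest framework
    // cannot fail the test in place, so RaiseError() escalates to Panic(),
    // producing the "assertion failed in non-unittest thread" lines above.
    UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
}

} // namespace NKikimr::NKqp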
2025-05-29T15:29:51.904641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:29:51.924321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:51.937570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.060474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890299471504354:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.060496Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.099691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.107662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.162622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.171384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.185526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.200060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.213965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.229770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890299471505009:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.229809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.229844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890299471505014:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.230600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:52.233354Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890299471505016:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:52.320166Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890299471505067:3398] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:52.427387Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890299471505083:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:52.427491Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTQ2MDRiMGUtNmU1MDFmODMtZTllOTY2OWItZjJhNTAzOA==, ActorId: [1:7509890299471504336:2401], ActorState: ExecuteState, TraceId: 01jweakzk50kvvgxzrrbazc1mb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:52.428151Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FB3743F4AC2 14. ??:0: ?? @ 0x7FB37448684F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateBackupCollection Test command err: Trying to start YDB, gRPC: 21748, MsgBus: 30205 2025-05-29T15:29:25.887361Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890183642729592:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:25.887378Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00110f/r3tmp/tmpPic5h2/pdisk_1.dat 2025-05-29T15:29:25.943586Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21748, node 1 2025-05-29T15:29:25.964585Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:25.964595Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:25.964596Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:25.964628Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30205 2025-05-29T15:29:25.988666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:25.988693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:25.989817Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30205 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:26.021578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:26.028606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:26.091298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:26.111230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:26.122682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:26.324676Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890187937698481:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:26.324705Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:26.380299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:26.388266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:26.397674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:26.411968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:26.425696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:26.440451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:26.453823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:26.469744Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890187937699133:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:26.469770Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:26.469788Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890187937699138:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:26.470644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:26.473501Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890187937699140:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:26.549495Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890187937699191:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:26.662940Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890187937699207:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:26.663111Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDIxMjU4ZDktZTZmMGM3MzMtNjU2ZDYzYTQtNGU3NGE0ZWE=, ActorId: [1:7509890187937698463:2401], ActorState: ExecuteState, TraceId: 01jweak6e52jx0sk4f9ncz5nmh, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:26.663839Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FAD7BBFCAC2 14. ??:0: ?? @ 0x7FAD7BC8E84F Trying to start YDB, gRPC: 5936, MsgBus: 2619 2025-05-29T15:29:30.291428Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890207084859772:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:30.291459Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00110f/r3tmp/tmpMgA0my/pdisk_1.dat 2025-05-29T15:29:30.349422Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:30.349479Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890207084859742:2079] 1748532570291338 != 1748532570291341 TServer::EnableGrpc on GrpcPort 5936, node 1 2025-05-29T15:29:30.368932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:30.368945Z node 1 :NET_CLASSIFIER WARN ... 633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F67D1C20AC2 14. ??:0: ?? 
@ 0x7F67D1CB284F Trying to start YDB, gRPC: 17680, MsgBus: 62743 2025-05-29T15:29:51.835604Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890297632550203:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:51.835839Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00110f/r3tmp/tmpeXgPQu/pdisk_1.dat 2025-05-29T15:29:51.904381Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890297632550165:2079] 1748532591835061 != 1748532591835064 2025-05-29T15:29:51.905051Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17680, node 1 2025-05-29T15:29:51.920555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:51.920570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:51.920572Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:51.920622Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:51.937269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:51.937302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:62743 2025-05-29T15:29:51.938443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62743 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:51.994005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.000246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
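The "index out of range" issue always cites the same location, yql_expr.h:1874, which points at a bounds check on an expression node's children rather than memory corruption: some compilation stage requests a child the node does not have, the guard throws, and the exception is folded into the INTERNAL_ERROR issue list as "code: 1". A hypothetical sketch of such a guard, assuming Y_ENSURE-style mechanics (only the message and the file/line come from the log; the class shape is invented for illustration):

// Hypothetical bounds guard on an AST node's child list. Only the message
// "index out of range" is taken from the log; the class shape and the
// Y_ENSURE mechanics are illustrative assumptions.
#include <util/generic/vector.h>
#include <util/generic/yexception.h>

class TExprNodeSketch {
public:
    const TExprNodeSketch& Child(size_t index) const {
        // On failure this surfaces as a YQL issue ("Fatal: <file>:<line>:
        // index out of range, code: 1") rather than an immediate crash; the
        // process abort above comes later, from the test's own assertion.
        Y_ENSURE(index < Children_.size(), "index out of range");
        return *Children_[index];
    }

private:
    TVector<const TExprNodeSketch*> Children_;
};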
2025-05-29T15:29:52.019056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.039527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.052210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.187025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890301927519112:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.187058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.227338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.236284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.249439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.304751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.360071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.368390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.382198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.398387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890301927519768:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.398417Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890301927519773:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.398418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.399323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:52.401553Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890301927519775:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:52.495029Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890301927519827:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:52.578788Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890301927519843:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:52.578914Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmVlYWVmZDYtYzk1ZTYzNGQtZDRlYzRjM2EtZDgyMGY3MTA=, ActorId: [1:7509890301927519094:2401], ActorState: ExecuteState, TraceId: 01jweakzrd2seb4csp9b71fx9v, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:52.580617Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F776DBBEAC2 14. ??:0: ?? @ 0x7F776DC5084F >> KqpExplain::UpdateOn+UseSink >> BackupPathTest::ImportFilterByYdbObjectPath [GOOD] >> ListObjectsInS3Export::ExportWithEncryption [GOOD] >> KqpLimits::ReplySizeExceeded ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithVectorIndexCoveredPublicApi Test command err: Trying to start YDB, gRPC: 15746, MsgBus: 12790 2025-05-29T15:29:25.835590Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890183659825670:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:25.835858Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001112/r3tmp/tmpQv0Jib/pdisk_1.dat 2025-05-29T15:29:25.888389Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15746, node 1 2025-05-29T15:29:25.906057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:25.906069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:25.906071Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:25.906118Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12790 2025-05-29T15:29:25.936618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:25.936652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:25.937719Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12790 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:25.970492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:25.976194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:25.993467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:26.011767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:26.022202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:26.206399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890187954794565:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:26.206423Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:26.246239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:26.253865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:26.264119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:26.270641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:26.278218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:26.292570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:26.306588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:26.324357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890187954795218:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:26.324381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890187954795223:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:26.324385Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:26.325008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:26.333386Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890187954795225:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:26.422520Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890187954795276:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:26.512881Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890187954795292:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:26.512983Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTQ3ZDRiODktYmQ1M2QzMWItMTEwZWRmYTItZDhjMzY3MzQ=, ActorId: [1:7509890187954794562:2401], ActorState: ExecuteState, TraceId: 01jweak69mc5564vbg0k88qy7q, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:26.513721Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FDB57DF1AC2 14. ??:0: ?? @ 0x7FDB57E8384F Trying to start YDB, gRPC: 7250, MsgBus: 15147 2025-05-29T15:29:30.294672Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890206803219791:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:30.294703Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001112/r3tmp/tmpY8pnL6/pdisk_1.dat 2025-05-29T15:29:30.360079Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7250, node 1 2025-05-29T15:29:30.374064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:30.374078Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:30.374080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2 ... /ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F557F706AC2 14. ??:0: ?? 
@ 0x7F557F79884F Trying to start YDB, gRPC: 2949, MsgBus: 10690 2025-05-29T15:29:52.159617Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890298836058937:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:52.159649Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001112/r3tmp/tmp7BlGhx/pdisk_1.dat 2025-05-29T15:29:52.213037Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2949, node 1 2025-05-29T15:29:52.234500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:52.234519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:52.234522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:52.234565Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10690 2025-05-29T15:29:52.260671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:52.260698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:52.261793Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10690 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:52.293214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.305907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
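The recurring "Resource pool default not found", "Scheduled retry for error", and "path exist, request accepts it" lines are setup noise rather than part of the failure: the workload service creates /Root/.metadata/workload_manager/pools/default lazily on first use, several fetcher actors race to do so, and the losing creator's scheme operation resolves against the already-existing path. A sketch of that idempotent create-if-missing flow, with invented names and behavior inferred from these log lines:

// Hypothetical create-if-missing flow for the default resource pool.
// Names are invented; the NOT_FOUND -> create -> "path exist, request
// accepts it" behavior is inferred from the WorkloadService lines above.
#include <util/generic/string.h>

enum class EPoolStatus { Success, NotFound, AlreadyExists, Error };

template <typename TFetch, typename TCreate>
EPoolStatus EnsureDefaultPool(const TString& path, TFetch fetch, TCreate create) {
    const EPoolStatus fetched = fetch(path);   // TPoolFetcherActor's probe
    if (fetched != EPoolStatus::NotFound) {
        return fetched;                        // found it, or a hard error
    }
    const EPoolStatus created = create(path);  // ESchemeOpCreateResourcePool
    if (created == EPoolStatus::AlreadyExists) {
        // A concurrent creator won the race; schemeshard replies
        // "path exist, request accepts it", which is treated as success.
        return EPoolStatus::Success;
    }
    return created;  // transient failures get a scheduled retry upstream
}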
2025-05-29T15:29:52.321091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.339458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.351394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.491034Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890298836060535:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.491060Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.526655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.534054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.591819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.607923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.621997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.634554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.648694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.668879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890298836061189:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.668921Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.669596Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890298836061194:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.670522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:52.674372Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890298836061196:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:52.769604Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890298836061247:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:52.882516Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890298836061263:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:52.882822Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2E4NWNiNDctNGE1ZWEzZGItNTQzOWE5NDAtMTI1OWUwMTE=, ActorId: [1:7509890298836060517:2401], ActorState: ExecuteState, TraceId: 01jweam00vfd12x0fcs1ewxp32, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:52.884543Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F93AD808AC2 14. ??:0: ?? @ 0x7F93AD89A84F >> KqpStats::RequestUnitForBadRequestExplicitPrepare >> KqpQuery::RandomUuid >> BackupRestoreS3::TestAllPrimitiveTypes-INTERVAL64 [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-STRING ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::DataShardReplySizeExceeded Test command err: Trying to start YDB, gRPC: 14296, MsgBus: 20338 2025-05-29T15:29:44.713058Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890268025218875:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:44.713107Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013d3/r3tmp/tmpWMoaLI/pdisk_1.dat 2025-05-29T15:29:44.786289Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890268025218849:2079] 1748532584712877 != 1748532584712880 2025-05-29T15:29:44.786983Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14296, node 1 2025-05-29T15:29:44.806926Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:44.806941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:44.806942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:44.807071Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:44.815434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:44.815482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:44.816612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20338 TClient is connected to server localhost:20338 WaitRootIsUp 'Root'... 
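Each "Test command err:" block opens with the same preamble: start a single-node YDB test server on fresh gRPC/MsgBus ports, wait for Root, create the sample tables, then run queries; the VERIFY fires while CreateSampleTables is still executing on a pool thread. A rough sketch of that preamble as KQP unit tests conventionally write it (TKikimrRunner and the SDK calls follow the usual ydb/core/kqp/ut pattern; treat the details as assumptions, not as code lifted from this run):

// Rough sketch of the test preamble behind the "Trying to start YDB,
// gRPC: ..., MsgBus: ..." lines, in the usual ydb/core/kqp/ut style.
// The exact table and test names here are invented.
#include <ydb/core/kqp/ut/common/kqp_ut_common.h>

Y_UNIT_TEST_SUITE(KqpSchemeSketch) {
    Y_UNIT_TEST(PreambleSketch) {
        NKikimr::NKqp::TKikimrRunner kikimr;  // starts the server, waits for
                                              // Root, creates sample tables
        auto db = kikimr.GetTableClient();
        auto session = db.CreateSession().GetValueSync().GetSession();

        auto result = session.ExecuteSchemeQuery(R"(
            CREATE TABLE `/Root/Sketch` (Key Uint64, Value String,
                                         PRIMARY KEY (Key));
        )").GetValueSync();

        // The same check the harness makes; with the compiler returning
        // INTERNAL_ERROR, IsSuccess() is false and the run aborts as above.
        UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
    }
}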
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:29:44.862852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:44.865896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:29:44.879939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:45.754925Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890272320187155:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:45.754996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:45.758918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890272320187167:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:45.760500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-05-29T15:29:45.763680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-05-29T15:29:45.763769Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890272320187169:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-05-29T15:29:45.846061Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890272320187220:2551] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:45.976889Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890272320187236:2362], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:29:45.977898Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjVhM2E5YjktOWYzODJkZGQtODRmZWFjNjktMjRkMmUyZDM=, ActorId: [1:7509890272320187127:2351], ActorState: ExecuteState, TraceId: 01jweaks8he8t21vqbns02dm18, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 assertion failed at ydb/core/kqp/ut/query/kqp_limits_ut.cpp:339, virtual void NKikimr::NKqp::NTestSuiteKqpLimits::TTestCaseDatashardProgramSize::Execute_(NUnitTest::TTestContext &) [useSink = true]: (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS) , with diff: (INT|SUCC)E(RNAL_ERROR|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13DBA79B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13F70AD8 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_limits_ut.cpp:339: Execute_ @ 0x13B0269C 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_limits_ut.cpp:56: operator() @ 0x13AF4736 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13F7298D 5. /tmp//-S/ydb/core/kqp/ut/query/kqp_limits_ut.cpp:56: Execute @ 0x13AF40F7 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13F73102 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F84CAC 8. ??:0: ?? @ 0x7F5E5CD0ED8F 9. ??:0: ?? @ 0x7F5E5CD0EE3F 10. ??:0: ?? @ 0x12A6F028 Trying to start YDB, gRPC: 61579, MsgBus: 32349 2025-05-29T15:29:49.165068Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890289472119823:2216];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:49.165245Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013d3/r3tmp/tmpAk6rzU/pdisk_1.dat 2025-05-29T15:29:49.181513Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:49.181658Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509890289472119644:2079] 1748532589159563 != 1748532589159566 TServer::EnableGrpc on GrpcPort 61579, node 2 2025-05-29T15:29:49.195227Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:49.195241Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:49.195243Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:49.195308Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:32349 TClient is connected to server localhost:32349 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:29:49.270243Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:49.270268Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:49.272412Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:49.272836Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:49.274624Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:49.279213Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:49.804980Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890289472120650:2356], DatabaseId: /Root, PoolId: default, Failed to fetc ... x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F5E5CD79AC2 14. ??:0: ?? 
@ 0x7F5E5CE0B84F Trying to start YDB, gRPC: 3683, MsgBus: 21017 2025-05-29T15:29:52.729938Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890301266934421:2198];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:52.730787Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013d3/r3tmp/tmpMfd6ck/pdisk_1.dat 2025-05-29T15:29:52.787494Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890301266934262:2079] 1748532592729051 != 1748532592729054 2025-05-29T15:29:52.789530Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3683, node 1 2025-05-29T15:29:52.803415Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:52.803438Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:52.803441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:52.803489Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21017 TClient is connected to server localhost:21017 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:29:52.862166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:52.862219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:52.863212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:52.871466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-29T15:29:52.875809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.896274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:29:52.923227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.935259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:53.115017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890305561903188:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:53.115070Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:53.151139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:53.158842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:53.215712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:53.228887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:53.242958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:53.256561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:53.270837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:53.287393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890305561903843:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:53.287430Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890305561903848:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:53.287448Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:53.288425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:53.299136Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890305561903850:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:53.383546Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890305561903901:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:53.490763Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890305561903910:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:29:53.490905Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODQ4NDgxNTktOTA0YWNlYmYtNTJmNTcwZmUtNWQ2NTgxMjI=, ActorId: [1:7509890305561903185:2401], ActorState: ExecuteState, TraceId: 01jweam0m6cp8zb986a0dgzrxp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:29:53.495257Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F5AF8E06AC2 14. ??:0: ?? @ 0x7F5AF8E9884F >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink >> BackupPathTest::EncryptedImportWithoutCommonPrefix >> ListObjectsInS3Export::ExportWithWrongEncryptionKey >> BackupRestore::TestAllPrimitiveTypes-JSON [FAIL] >> BackupRestore::TestAllPrimitiveTypes-JSON_DOCUMENT >> KqpQuery::OlapCreateAsSelect_Simple [FAIL] >> KqpQuery::OltpCreateAsSelect_Simple >> KqpQuery::CurrentUtcTimestamp >> KqpQuery::UdfMemoryLimit >> KqpScheme::ChangefeedOnIndexTable >> BackupPathTest::EncryptedImportWithoutCommonPrefix [GOOD] >> KqpTypes::SelectNull |72.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> ListObjectsInS3Export::ExportWithWrongEncryptionKey [GOOD] >> TSchemeShardSplitBySample::EdgeSelectionWithSameSizeKeys [GOOD] >> TSchemeShardSplitBySample::EdgeSelectionWithDifferentSizeKeys [GOOD] >> BackupRestoreS3::TestAllPrimitiveTypes-STRING [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-JSON >> KqpQuery::OltpCreateAsSelect_Simple [FAIL] >> KqpQuery::OltpCreateAsSelect_Disable >> BackupPathTest::ExplicitDuplicatedItems |72.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySample::EdgeSelectionWithDifferentSizeKeys [GOOD] >> BackupRestore::TestAllPrimitiveTypes-JSON_DOCUMENT [FAIL] >> BackupRestore::TestAllPrimitiveTypes-DYNUMBER >> TTablesWithReboots::LostBorrowAckWithReboots [GOOD] >> ListObjectsInS3Export::PagingParameters ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateAndAlterTableWithBloomFilterCompat Test command err: Trying to start YDB, gRPC: 64872, MsgBus: 20005 2025-05-29T15:29:28.069423Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890197574949145:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:28.069439Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00110a/r3tmp/tmpnUHF6b/pdisk_1.dat 2025-05-29T15:29:28.137313Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890197574949104:2079] 1748532568069231 != 1748532568069234 2025-05-29T15:29:28.140578Z node 
1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64872, node 1 2025-05-29T15:29:28.155696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:28.155712Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:28.155715Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:28.155761Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:28.170501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:28.170526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:28.171611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20005 TClient is connected to server localhost:20005 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:28.216669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:28.225810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:28.289734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:28.311136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:28.323231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:28.399983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890197574950742:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.400008Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.459407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.466529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.476440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.531341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.539158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.553362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.567668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.585418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890197574951395:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.585445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.585460Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890197574951400:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.586341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:28.594522Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890197574951402:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:28.674447Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890197574951453:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:28.806431Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890197574951469:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:29:28.806527Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTUxNWJmM2YtYzBmNjc0ZDgtZjhkNDM1N2ItNzYxNmUzNDQ=, ActorId: [1:7509890197574950723:2401], ActorState: ExecuteState, TraceId: 01jweak8g9csmk50vn4d5fpe7v, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:29:28.807494Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F5F95CCCAC2 14. ??:0: ?? @ 0x7F5F95D5E84F Trying to start YDB, gRPC: 18079, MsgBus: 2814 2025-05-29T15:29:33.089914Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890219582918539:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:33.091079Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00110a/r3tmp/tmpW4u6OP/pdisk_1.dat 2025-05-29T15:29:33.178930Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18079, node 1 2025-05-29T15:29:33.190833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:33.190859Z node 1 :HIVE WARN: node_info.cpp:25: HIV ... /ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F4A51841AC2 14. ??:0: ?? 
@ 0x7F4A518D384F Trying to start YDB, gRPC: 2804, MsgBus: 26409 2025-05-29T15:29:54.168900Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890309166585434:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:54.168919Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00110a/r3tmp/tmpgTbdRE/pdisk_1.dat 2025-05-29T15:29:54.222477Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2804, node 1 2025-05-29T15:29:54.240182Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:54.240195Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:54.240197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:54.240243Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26409 2025-05-29T15:29:54.270009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:54.270044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:54.271107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26409 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:54.298388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:54.309087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:54.327560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:54.346695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:54.358685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:54.515057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890309166587050:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.515088Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.568088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.579239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.590274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.602697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.616700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.632749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.643632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.664367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890309166587702:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.664393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.664532Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890309166587707:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.665535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:54.671409Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890309166587709:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:54.732425Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890309166587760:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } VERIFY failed (2025-05-29T15:29:54.852304Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
2025-05-29T15:29:54.850755Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890309166587769:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:54.851068Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGYyYzhjMWUtY2I0ZGUyM2UtOWEyMzY3ZWEtM2MxYzg0NjY=, ActorId: [1:7509890309166587032:2401], ActorState: ExecuteState, TraceId: 01jweam1z724k6h7qgegxgf3pp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F6EE6365AC2 14. ??:0: ?? @ 0x7F6EE63F784F >> KqpExplain::UpdateConditional+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DisableDropExternalTable Test command err: Trying to start YDB, gRPC: 62549, MsgBus: 29182 2025-05-29T15:29:27.954263Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890193808370024:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:27.954327Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00110c/r3tmp/tmpNZeBd5/pdisk_1.dat 2025-05-29T15:29:28.023979Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890193808369993:2079] 1748532567954099 != 1748532567954102 2025-05-29T15:29:28.024988Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62549, node 1 2025-05-29T15:29:28.042055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:28.042067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:28.042069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:28.042102Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:28.056047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:28.056107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:28.057219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29182 TClient is connected 
to server localhost:29182 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:28.107673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:28.114333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:28.132769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:28.152188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:28.162369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:28.399735Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890198103338923:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.399772Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.457481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.465534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.476288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.531120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.538954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.553829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.567854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.583570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890198103339576:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.583603Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890198103339581:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.583605Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.584383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:28.587109Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890198103339583:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:28.641174Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890198103339634:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:28.759494Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890198103339650:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:29:28.759612Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTU3ZWEzODItNzBjZmE2NTYtYmYxMjc4NWUtNDUwYTBjZmI=, ActorId: [1:7509890198103338905:2401], ActorState: ExecuteState, TraceId: 01jweak8g7b51873vh7q4g1cnr, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:29:28.760516Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F6D5596EAC2 14. ??:0: ?? @ 0x7F6D55A0084F Trying to start YDB, gRPC: 2590, MsgBus: 11270 2025-05-29T15:29:33.009375Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890219669548712:2207];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:33.009458Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00110c/r3tmp/tmpj01kzY/pdisk_1.dat 2025-05-29T15:29:33.084214Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2590, node 1 2025-05-29T15:29:33.111135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:33.111165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE ... als/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FD614362AC2 14. ??:0: ?? 
@ 0x7FD6143F484F Trying to start YDB, gRPC: 3014, MsgBus: 4025 2025-05-29T15:29:54.320761Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890311055325747:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:54.320779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00110c/r3tmp/tmpe7yIvP/pdisk_1.dat 2025-05-29T15:29:54.385818Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3014, node 1 2025-05-29T15:29:54.404874Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:54.404902Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:54.404905Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:54.404959Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:54.421861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:54.421893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:54.423020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4025 TClient is connected to server localhost:4025 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:54.474995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:54.482626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
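The stack traces above all bottom out in void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &) at kqp_ut_common.h:375, tripping on (result.IsSuccess()). Below is a standalone standard-C++ analogue of such a helper; the TStatus stand-in and the abort-with-issues body are illustrative assumptions, not the actual kqp_ut_common.h code (which uses Arcadia's unittest macros):

    #include <cstdlib>
    #include <iostream>
    #include <string>

    struct TStatus {                     // stand-in for NYdb::TStatus
        bool Success;
        std::string Issues;              // stand-in for the attached issue text
        bool IsSuccess() const { return Success; }
    };

    void AssertSuccessResult(const TStatus& result) {
        if (!result.IsSuccess()) {
            // Print the issues, e.g. ": Fatal: yql/essentials/ast/yql_expr.h:1874:
            // index out of range, code: 1", then kill the process, which is what
            // the "VERIFY failed" lines above correspond to.
            std::cerr << "assertion failed: (result.IsSuccess())\n" << result.Issues << '\n';
            std::abort();
        }
    }

Because the helper aborts instead of returning, one compile error during table setup takes down the whole test process, which is why the identical trace repeats for each affected suite in this log.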
2025-05-29T15:29:54.506785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:54.550361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:54.592032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:54.703403Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890311055327335:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.703455Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.773595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.784075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.796819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.810428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.824925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.839264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.854014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.871546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890311055327988:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.871581Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.871647Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890311055327993:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.872504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:54.883962Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890311055327995:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:54.962039Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890311055328046:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:55.077026Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890311055328062:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:55.077179Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzAwOTg0YzYtYTIxZjIxZWQtYmYwNWZhNDktNGY2NGRlNzg=, ActorId: [1:7509890311055327308:2400], ActorState: ExecuteState, TraceId: 01jweam25q4tskt6pq7gedpj5z, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:55.077893Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FA1F1E3AAC2 14. ??:0: ?? @ 0x7FA1F1ECC84F >> TSchemeShardSplitBySizeTest::SplitShardsWithDecimalKey >> KqpQuery::OltpCreateAsSelect_Disable [FAIL] >> KqpQuery::OlapCreateAsSelect_Complex >> BackupPathTest::ExplicitDuplicatedItems [GOOD] >> KqpExplain::IdxFullscan >> TSchemeShardSplitByLoad::IndexTableSplitsUpToMainTableCurrentPartitionCount [GOOD] >> TSchemeShardSplitByLoad::IndexTableDoesNotSplitsIfDisabledByMainTable >> KqpLimits::TooBigQuery+useSink >> BackupRestoreS3::TestAllPrimitiveTypes-JSON [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-JSON_DOCUMENT >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] >> KqpExplain::Explain >> BackupPathTest::ExportUnexistingExplicitPath >> BackupRestore::TestAllPrimitiveTypes-DYNUMBER [FAIL] >> KqpQuery::CreateAsSelectTypes+NotNull-IsOlap >> KqpQuery::OlapCreateAsSelect_Complex [FAIL] >> KqpQuery::GenericQueryNoRowsLimit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:227:2060] recipient: [1:221:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:227:2060] recipient: [1:221:2142] Leader for TabletID 72057594046678944 is [1:238:2153] sender: [1:239:2060] recipient: [1:221:2142] 2025-05-29T15:28:14.128782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:28:14.128802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:14.128807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:28:14.128811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:28:14.128820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
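Frames 5 through 8 of the repeated trace (future-inl.h SetValue, async.h, util/thread/pool.h) show CreateSampleTables running on a worker-pool thread behind a future, so the failed check surfaces as "assertion failed in non-unittest thread" and a process-level VERIFY rather than an ordinary test failure. A minimal standard-C++ analogue follows, using std::async in place of Arcadia's NThreading/async machinery; the function name reuse and the hard-coded failing check are assumptions for illustration:

    #include <cstdlib>
    #include <future>
    #include <iostream>

    // Stands in for the test setup work that asserts on query results.
    void CreateSampleTables() {
        bool resultIsSuccess = false;    // the CREATE TABLE queries failed to compile
        if (!resultIsSuccess) {
            std::cerr << "assertion failed in non-unittest thread\n";
            std::abort();                // aborts the whole process, like VERIFY failed
        }
    }

    int main() {
        // The harness pushes setup onto a worker thread and waits on a future.
        auto done = std::async(std::launch::async, CreateSampleTables);
        done.get();                      // never returns normally here: abort() fires first
    }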
2025-05-29T15:28:14.128823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:28:14.128830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:28:14.128841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:28:14.128933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:28:14.129009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:28:14.138770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:28:14.138790Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:14.141406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:28:14.141585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:28:14.141611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:28:14.143092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:28:14.143142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:28:14.143259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:14.143336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:28:14.144152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:14.144199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:28:14.144432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:14.144441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:28:14.144468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:28:14.144474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:14.144479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:28:14.144496Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.145700Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:238:2153] sender: [1:351:2060] recipient: [1:17:2064] 2025-05-29T15:28:14.161014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:28:14.161092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.161145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:28:14.161177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:28:14.161201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.161908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:14.161927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:28:14.161978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.162000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:28:14.162004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:28:14.162008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:28:14.162295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.162301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:28:14.162305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:28:14.162534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.162543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose 
ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:28:14.162548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:14.162553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:28:14.163174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:28:14.163567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:28:14.163601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:28:14.163814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:28:14.163834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 245 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:28:14.163840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:14.163888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:28:14.163905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:28:14.163930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:28:14.163938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:28:14.164240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:28:14.164245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:28:14.164279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-05-29 ... chemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:56.550498Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [7:238:2153], Recipient [7:238:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:56.550504Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:56.887361Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:238:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:56.887405Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:56.887427Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [7:238:2153], Recipient [7:238:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:56.887432Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:57.256107Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:238:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:57.256150Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:57.256171Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [7:238:2153], Recipient [7:238:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:57.256177Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:57.592789Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:238:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:57.592820Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:57.592836Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [7:238:2153], Recipient [7:238:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:57.592840Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:57.929420Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:238:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:57.929448Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:57.929464Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [7:238:2153], Recipient [7:238:2153]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:57.929468Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:58.286267Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:238:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:58.286295Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:58.286312Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [7:238:2153], Recipient [7:238:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:58.286315Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:58.622596Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:238:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:58.622639Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:58.622661Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [7:238:2153], Recipient [7:238:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:58.622667Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:58.990825Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:238:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:58.990861Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:29:58.990882Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [7:238:2153], Recipient [7:238:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:58.990887Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:29:59.033096Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [7:1084:2836], Recipient [7:238:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-05-29T15:29:59.033133Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:29:59.033177Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:29:59.033288Z node 7 
:SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable" took 88us result status StatusPathDoesNotExist 2025-05-29T15:29:59.033398Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/tmp/TempTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:29:59.033542Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [7:1085:2837], Recipient [7:238:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-05-29T15:29:59.033551Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:29:59.033565Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:29:59.033593Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp" took 26us result status StatusPathDoesNotExist 2025-05-29T15:29:59.033616Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/tmp" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:29:59.033684Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [7:1086:2838], Recipient [7:238:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true } 2025-05-29T15:29:59.033694Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:29:59.033704Z node 7 :SCHEMESHARD_DESCRIBE 
DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:29:59.033722Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable/ValueIndex" took 18us result status StatusPathDoesNotExist 2025-05-29T15:29:59.033742Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/tmp/TempTable/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup >> BackupPathTest::ExportUnexistingExplicitPath [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64 >> BackupRestoreS3::TestAllPrimitiveTypes-JSON_DOCUMENT [FAIL] >> BackupRestoreS3::TestAllPrimitiveTypes-DYNUMBER >> KqpOlapScheme::DropTableAfterInsert [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::LostBorrowAckWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:26:51.195249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:26:51.195275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:51.195281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:26:51.195286Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:26:51.195292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:26:51.195295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:26:51.195304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:26:51.195316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:26:51.195406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:26:51.195471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:26:51.209411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:26:51.209429Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:26:51.209499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:26:51.214028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:26:51.214061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:26:51.214095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:26:51.221683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:26:51.221774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:26:51.221939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:51.222172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:26:51.223003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:51.223034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:26:51.223250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:26:51.223260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:26:51.223290Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:26:51.223296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:26:51.223303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:26:51.223321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:26:51.224743Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:26:51.247073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:26:51.247124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:51.247173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:26:51.247246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:26:51.247259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:51.249675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:51.249702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:26:51.249749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:51.249759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:26:51.249765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:26:51.249770Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:26:51.250445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:51.250459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:26:51.250464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:26:51.251118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:51.251131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:26:51.251136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:51.251141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:26:51.251793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:26:51.252251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:26:51.252281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:26:51.252446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:26:51.252471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:26:51.252478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:26:51.252533Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
d__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:29:57.315603Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:29:57.315609Z node 120 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:29:57.315612Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:29:57.315617Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2025-05-29T15:29:57.315622Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:29:57.315627Z node 120 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:0 2025-05-29T15:29:57.315632Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:0 2025-05-29T15:29:57.315654Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:29:57.315753Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:29:57.315770Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:29:57.316077Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:29:57.316266Z node 120 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:29:57.316506Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:57.316562Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409546 2025-05-29T15:29:57.317225Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:29:57.317235Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:29:57.317250Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:671:2058] recipient: [120:15:2062] 2025-05-29T15:29:57.317953Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:29:57.317970Z node 120 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546
2025-05-29T15:29:57.318383Z node 120 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 1005, wait until txId: 1005
TestWaitNotification wait txId: 1005
2025-05-29T15:29:57.318447Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion
2025-05-29T15:29:57.318453Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005
2025-05-29T15:29:57.318504Z node 120 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944
2025-05-29T15:29:57.318520Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult
2025-05-29T15:29:57.318523Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [120:675:2634]
TestWaitNotification: OK eventTxId 1005
wait until 72075186233409546 is deleted
wait until 72075186233409547 is deleted
2025-05-29T15:29:57.318569Z node 120 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546
2025-05-29T15:29:57.318579Z node 120 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547
Deleted tabletId 72075186233409546
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:681:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:682:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:684:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:685:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:687:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:688:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:691:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:692:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:696:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:697:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:703:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:704:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:705:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:706:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:709:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:710:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:711:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:712:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:714:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:715:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:716:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:717:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:720:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:721:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:722:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:723:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:725:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:726:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:727:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:728:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:730:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:731:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:732:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:733:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:735:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:736:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:737:2058] recipient: [120:15:2062]
Leader for TabletID 72075186233409546 is [120:332:2318] sender: [120:738:2058] recipient: [120:15:2062]
2025-05-29T15:29:58.481649Z node 120 :HIVE INFO: tablet_helpers.cpp:1404: [72057594037968897] TEvRequestHiveInfo, msg: TabletID: 72075186233409546 ReturnFollowers: false
2025-05-29T15:29:58.483587Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 441 RawX2: 515396077930 } TabletId: 72075186233409547 State: 4
2025-05-29T15:29:58.483623Z node 120 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944
2025-05-29T15:29:58.484118Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944
2025-05-29T15:29:58.484201Z node 120 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547
2025-05-29T15:29:58.484789Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944
2025-05-29T15:29:58.484847Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1
Forgetting tablet 72075186233409547
2025-05-29T15:29:58.485029Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-05-29T15:29:58.485034Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944
2025-05-29T15:29:58.485046Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-05-29T15:29:58.485868Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2
2025-05-29T15:29:58.485880Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547
2025-05-29T15:29:58.485982Z node 120 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
Deleted tabletId 72075186233409547
>> BackupPathTest::ExportUnexistingCommonSourcePath
>> KqpParams::ImplicitParameterTypes
>> KqpExplain::UpdateOn-UseSink
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::DropTableAfterInsert [GOOD]
Test command err:
Trying to start YDB, gRPC: 21101, MsgBus: 26037
2025-05-29T15:29:30.565949Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890204230929542:2209];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:30.566028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0010e4/r3tmp/tmplzH86l/pdisk_1.dat
2025-05-29T15:29:30.654193Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 21101, node 1
2025-05-29T15:29:30.665602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:29:30.665633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:29:30.666652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:29:30.674162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:29:30.674174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:29:30.674176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:29:30.674213Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:26037
TClient is connected to server localhost:26037
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:29:30.766619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:30.769472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (id, id_second)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);
2025-05-29T15:29:30.963030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890204230930016:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:30.963072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:31.005501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:31.038048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890208525897529:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:29:31.038175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890208525897529:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:31.038240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890208525897529:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:31.038270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890208525897529:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:31.038308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890208525897529:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:29:31.038345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890208525897529:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:29:31.038366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890208525897529:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:29:31.038373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890208525897524:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:29:31.038388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890208525897529:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:29:31.038392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890208525897524:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:31.038431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890208525897529:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:29:31.038453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037893;self_id=[1:7509890208525897529:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:29:31.038455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890208525897524:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:31.038480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890208525897529:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:29:31.038483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890208525897524:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:31.038504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890208525897529:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:29:31.038512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890208525897524:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:29:31.038538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890208525897524:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:29:31.038571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890208525897524:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:29:31.038592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890208525897524:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:29:31.038623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890208525897524:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:29:31.038648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890208525897524:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:29:31.038673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890208525897524:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:29:31.038697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037891;self_id=[1:7509890208525897524:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:29:31.039063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:29:31.039085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:29:31.039104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:29:31.039115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tab ... 5-29T15:29:55.316555Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509890313916148825:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:55.316604Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:55.319795Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:55.327585Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890313916148871:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:29:55.327613Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890313916148871:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:55.327640Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890313916148871:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:55.327654Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890313916148871:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:55.327666Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890313916148871:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:29:55.327684Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890313916148871:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:29:55.327705Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890313916148871:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:29:55.327723Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890313916148871:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:29:55.327742Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890313916148871:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:29:55.327757Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890313916148871:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:29:55.327774Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890313916148871:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:29:55.327788Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[6:7509890313916148871:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:29:55.328180Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:29:55.328191Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:29:55.328200Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:29:55.328209Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:29:55.328221Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:29:55.328227Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:29:55.328234Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:29:55.328237Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:29:55.328243Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:29:55.328246Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:29:55.328251Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:29:55.328260Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:29:55.328276Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:29:55.328280Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:29:55.328299Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:29:55.328308Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:29:55.328318Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:29:55.328326Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:29:55.328332Z node 6 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:29:55.328338Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:29:55.328346Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:29:55.328423Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:29:55.328433Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1392;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1392;columns=2; 2025-05-29T15:29:55.375031Z node 6 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:29:55.388504Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7509890313916148951:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:55.388526Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:55.391777Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
2025-05-29T15:29:55.394243Z node 6 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659;
WAIT_INDEXATION: 0
2025-05-29T15:29:55.396810Z node 6 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[6:7509890313916148871:2335];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die;
2025-05-29T15:29:55.396983Z node 6 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037888 not found
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
2025-05-29T15:29:59.902699Z node 6 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7509890309621180910:2071];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:59.902736Z node 6 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpQuery::ReadOverloaded+StreamLookup
>> KqpStats::OneShardNonLocalExec+UseSink
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReplySizeExceeded
Test command err:
Trying to start YDB, gRPC: 2701, MsgBus: 29986
2025-05-29T15:29:45.114647Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890272733856847:2214];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013cc/r3tmp/tmpPOk2mv/pdisk_1.dat
2025-05-29T15:29:45.153231Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-05-29T15:29:45.242102Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890272733856653:2079] 1748532585109038 != 1748532585109041
2025-05-29T15:29:45.245538Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:29:45.255826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:29:45.255858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:29:45.256972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 2701, node 1
2025-05-29T15:29:45.270199Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:29:45.270213Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:29:45.270216Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:29:45.270268Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:29986
TClient is connected to server localhost:29986
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:29:45.397475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:45.410448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-05-29T15:29:45.419972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:48.891474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890285618760090:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:48.891523Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:48.891744Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890285618760102:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:48.893021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-05-29T15:29:48.897365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480
2025-05-29T15:29:48.897471Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890285618760104:2442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-05-29T15:29:48.976847Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890285618760155:2944] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:29:49.023049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
2025-05-29T15:29:49.079873Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890289913727810:2479], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:29:49.079997Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTk4NmNhZDUtOTIwOGQ3Ny0zMjcxZDdkYS0zMTRlMmNiMg==, ActorId: [1:7509890289913727803:2475], ActorState: ExecuteState, TraceId: 01jweakwfh6yavve1zzmfa5cm8, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:29:50.113920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7509890272733856847:2214];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:50.113961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
assertion failed at ydb/core/kqp/ut/query/kqp_limits_ut.cpp:142, virtual void NKikimr::NKqp::NTestSuiteKqpLimits::TTestCaseStreamWrite::Execute_(NUnitTest::TTestContext &) [Allowed = true]: (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
, with diff: (INT|SUCC)E(RNAL_ERROR|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13DBA79B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13F70AD8
2. /tmp//-S/ydb/core/kqp/ut/query/kqp_limits_ut.cpp:142: Execute_ @ 0x13AF8E1E
3. /tmp//-S/ydb/core/kqp/ut/query/kqp_limits_ut.cpp:56: operator() @ 0x13AF4736
4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13F7298D
5. /tmp//-S/ydb/core/kqp/ut/query/kqp_limits_ut.cpp:56: Execute @ 0x13AF40F7
6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13F73102
7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F84CAC
8. ??:0: ?? @ 0x7EFE89F5ED8F
9. ??:0: ?? @ 0x7EFE89F5EE3F
10. ??:0: ?? @ 0x12A6F028
Trying to start YDB, gRPC: 25283, MsgBus: 12317
2025-05-29T15:29:51.500939Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890295887423179:2061];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:51.500954Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013cc/r3tmp/tmpUI6Izd/pdisk_1.dat
2025-05-29T15:29:51.512095Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:29:51.512324Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509890295887423160:2079] 1748532591500861 != 1748532591500864
TServer::EnableGrpc on GrpcPort 25283, node 2
2025-05-29T15:29:51.523059Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:29:51.523072Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:29:51.523074Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:29:51.523131Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:12317
TClient is connected to server localhost:12317
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:29:51.605219Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:29:51.605249Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:29:51.605624Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:51.60624 ... x13DD8F65
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C
8. /-S/util/thread/pool.h:71: Process @ 0x2651596C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9
10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C
13. ??:0: ?? @ 0x7EFE89FC9AC2
14. ??:0: ?? @ 0x7EFE8A05B84F
Trying to start YDB, gRPC: 6258, MsgBus: 20104
2025-05-29T15:29:57.003576Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890322395692584:2062];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:57.003994Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013cc/r3tmp/tmpP43n1m/pdisk_1.dat
2025-05-29T15:29:57.057461Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890322395692563:2079] 1748532597003424 != 1748532597003427
2025-05-29T15:29:57.059246Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 6258, node 1
2025-05-29T15:29:57.069922Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:29:57.069933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:29:57.069934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:29:57.069969Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:20104
TClient is connected to server localhost:20104
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:29:57.133622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:29:57.133649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:29:57.134713Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:29:57.135359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:57.150978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:57.167351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:57.189258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:57.201423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:57.334226Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890322395694197:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:57.334253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:57.369034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:29:57.376120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:29:57.385995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:29:57.442081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:29:57.498099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:29:57.506314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:29:57.520151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:29:57.535931Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890322395694853:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:57.535964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:57.535988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890322395694858:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:29:57.536779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:29:57.539387Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890322395694860:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:29:57.592808Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890322395694911:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:29:57.686817Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890322395694927:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:29:57.686909Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2YyOTBiOTEtNThlOGEwZjYtM2M5ZDk2ZmUtZjhjZWNmNDk=, ActorId: [1:7509890322395694194:2401], ActorState: ExecuteState, TraceId: 01jweam4rza50wsqy9xh4c5tq3, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:29:57.687539Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C
8. /-S/util/thread/pool.h:71: Process @ 0x2651596C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9
10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C
13. ??:0: ?? @ 0x7F886FC37AC2
14. ??:0: ?? @ 0x7F886FCC984F
>> BackupPathTest::ExportUnexistingCommonSourcePath [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::OlapCreateAsSelect_Complex [FAIL]
Test command err:
Trying to start YDB, gRPC: 64253, MsgBus: 5898
2025-05-29T15:29:54.194068Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890307473335606:2065];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:54.194086Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001397/r3tmp/tmpdfkEUv/pdisk_1.dat
2025-05-29T15:29:54.249832Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890307473335582:2079] 1748532594193880 != 1748532594193883
2025-05-29T15:29:54.252203Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 64253, node 1
2025-05-29T15:29:54.261488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:29:54.261500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:29:54.261502Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:29:54.261544Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:5898
2025-05-29T15:29:54.296698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:29:54.296730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:29:54.297757Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:5898
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:29:54.326815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:29:54.608967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890307473336217:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.609001Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.609179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890307473336253:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.610365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:29:54.612374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:29:54.612501Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890307473336255:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:29:54.679956Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890307473336306:2324] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:54.732966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.760675Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890307473336468:2339];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:29:54.764880Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890307473336468:2339];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:29:54.764944Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037893 2025-05-29T15:29:54.765713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890307473336468:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:29:54.765773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890307473336468:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:54.765824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890307473336468:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:54.765856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890307473336468:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:54.765877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890307473336468:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:29:54.765905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890307473336468:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:29:54.765922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890307473336468:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:29:54.765947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037893;self_id=[1:7509890307473336468:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:29:54.765974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890307473336468:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:29:54.765998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890307473336468:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:29:54.766021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890307473336468:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:29:54.766043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;self_id=[1:7509890307473336468:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:29:54.767742Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890307473336473:2344];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:29:54.771744Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890307473336471:2342];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:29:54.771816Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890307473336473:2344];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:29:54.771865Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037891 2025-05-29T15:29:54.772690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890307473336473:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:29:54.772735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890307473336473:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:54.772777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890307473336473:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:54.772801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890307473336473:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:54.772820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037891;self_id=[1:7509890307473336473:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:29:54.772842Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:784: tablet_id=72075186224037891;se ... 9.683802Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:29:59.683804Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:29:59.683807Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:29:59.683818Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:29:59.683828Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:29:59.683835Z node 4 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:29:59.683845Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:29:59.683851Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:29:59.683861Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:29:59.683864Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:29:59.683914Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:29:59.683923Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:29:59.683983Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:29:59.683992Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:29:59.684002Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:29:59.684010Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:29:59.684028Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:29:59.684037Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:29:59.684047Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:29:59.684055Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:29:59.684064Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:29:59.684072Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:29:59.684080Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:29:59.684088Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:29:59.684108Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:29:59.684117Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:29:59.684134Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:29:59.684143Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:29:59.684154Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:29:59.684162Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:29:59.684169Z node 4 :TX_COLUMNSHARD CRIT: log.cpp:784: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:29:59.684179Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:29:59.684184Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:29:59.684242Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:29:59.684249Z node 4 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:29:59.706878Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:59.706878Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:59.707848Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:59.707869Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:59.708684Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:59.708780Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:59.709454Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:59.709696Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:59.710171Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:59.710794Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:29:59.721336Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [4:7509890328941011421:2406], status: INTERNAL_ERROR, 
issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:59.721451Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=4&id=ZDM4OTNmOTYtMmQ0YTY4NTYtM2MwMWZlZWEtNDg3Nzg3ODU=, ActorId: [4:7509890328941011414:2402], ActorState: ExecuteState, TraceId: 01jweam6x0d8k9s42tye2s0h4s, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/query/kqp_query_ut.cpp:1753, virtual void NKikimr::NKqp::NTestSuiteKqpQuery::TTestCaseOlapCreateAsSelect_Complex::Execute_(NUnitTest::TTestContext &): (prepareResult.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13DBA79B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13F70AD8 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:1753: Execute_ @ 0x13BE4695 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: operator() @ 0x13BFC896 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13F7298D 5. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: Execute @ 0x13BFC255 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13F73102 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F84CAC 8. ??:0: ?? @ 0x7F8A09225D8F 9. ??:0: ?? @ 0x7F8A09225E3F 10. ??:0: ?? @ 0x12A6F028 >> BackupRestoreS3::TestAllPrimitiveTypes-DYNUMBER [FAIL] >> KqpQuery::PreparedQueryInvalidate >> KqpLimits::BigParameter >> KqpQuery::DdlInDataQuery >> KqpQuery::SelectWhereInSubquery >> KqpQuery::TryToUpdateNonExistentColumn >> BackupPathTest::FilterByPathFailsWhenNoSchemaMapping ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::AddNonColumnDoesnotReturnInternalError Test command err: Trying to start YDB, gRPC: 1132, MsgBus: 26256 2025-05-29T15:29:28.411758Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890196265578937:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:28.411783Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001105/r3tmp/tmpF877TW/pdisk_1.dat 2025-05-29T15:29:28.471849Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1132, node 1 2025-05-29T15:29:28.490074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:28.490090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:28.490093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:28.490153Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26256 2025-05-29T15:29:28.513148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:28.513187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:28.514251Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26256 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:28.549656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:28.560176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:28.579264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:28.600039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:28.611316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:28.753319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890196265580532:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.753347Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.811813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.820795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.876607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.889624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.904685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.918321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.974381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:28.990936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890196265581192:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.990955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890196265581197:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.990965Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:28.991741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:29.000546Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890196265581199:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:29.095585Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890200560548546:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:29.184060Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890200560548562:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:29.184242Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWViZTI5ZmYtNTRkYzc3NGYtYTFjZWFhMzAtNzk1ZjQ2OTQ=, ActorId: [1:7509890196265580514:2401], ActorState: ExecuteState, TraceId: 01jweak8wy8fbkv7hwyre4m7tf, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:29.185054Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FD2E3353AC2 14. ??:0: ?? @ 0x7FD2E33E584F Trying to start YDB, gRPC: 5974, MsgBus: 24905 2025-05-29T15:29:33.276258Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890217631159888:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:33.276557Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001105/r3tmp/tmpxXeoBz/pdisk_1.dat 2025-05-29T15:29:33.349685Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5974, node 1 2025-05-29T15:29:33.370119Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:33.370132Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:33.370134Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 202 ... ibrary/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F716254FAC2 14. ??:0: ?? @ 0x7F71625E184F Trying to start YDB, gRPC: 10986, MsgBus: 62226 2025-05-29T15:29:55.338630Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:29:55.338664Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:29:55.338677Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001105/r3tmp/tmpD43WW1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10986, node 1 TClient is connected to server localhost:62226 2025-05-29T15:29:55.480819Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:55.481660Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:55.481673Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:55.481688Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:55.481841Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:55.482265Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532594893125 != 1748532594893129 TClient is connected to server localhost:62226 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:55.549494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:55.549535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:55.550520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
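[Editor's annotation on the recurring pattern above: every suite in this log repeats the same startup sequence for the workload-manager default resource pool — a fetch fails with NOT_FOUND ("Resource pool default not found"), TPoolCreatorActor issues ESchemeOpCreateResourcePool, a concurrent creator then gets "path exist, request accepts it" from the schemeshard, and the actor schedules a "doublechecking" retry until the pool is visible. A minimal client-side sketch of that idempotent-create pattern follows; the function name, pool settings, YQL text, and include path are illustrative assumptions, not YDB's server-side WorkloadService code.]

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    // Hedged sketch: create the default pool, treating "already exists"
    // (the "path exist, request accepts it" case in the log) as success.
    bool EnsureDefaultPool(NYdb::NTable::TSession& session) {
        auto status = session.ExecuteSchemeQuery(R"(
            CREATE RESOURCE POOL default WITH (CONCURRENT_QUERY_LIMIT = -1);
        )").GetValueSync();
        return status.IsSuccess()
            || status.GetStatus() == NYdb::EStatus::ALREADY_EXISTS;
    }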
2025-05-29T15:29:55.553087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:55.656344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:55.865977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:56.139655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:56.378904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:56.713674Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1721:3315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:56.713723Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:56.717597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:56.900928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:57.117103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:57.320763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:57.536108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:57.764603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:58.032984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:58.243633Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2392:3811], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:58.243665Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:58.243704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2397:3816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:58.244541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:58.403572Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2399:3818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:58.446784Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:2457:3857] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:58.525637Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2467:3866], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:58.527593Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YmIyODlhODgtZjMxMWYwNzEtMzFkMTA5MzItYTM4Y2M1YmY=, ActorId: [1:1709:3304], ActorState: ExecuteState, TraceId: 01jweam5f34jqja7rezd9dcc02, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:58.528763Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:30:00.010370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:30:00.010406Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F4907A1FAC2 14. ??:0: ?? @ 0x7F4907AB184F >> KqpQuery::QueryTimeout >> KqpQuery::CreateAsSelectTypes+NotNull-IsOlap [FAIL] >> KqpQuery::CreateAsSelectTypes-NotNull+IsOlap >> KqpExplain::PrecomputeRange >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata >> KqpTypes::MultipleCurrentUtcTimestamp >> KikimrIcGateway::TestSecretsExistingValidation >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::ChangefeedOnIndexTable Test command err: Trying to start YDB, gRPC: 30226, MsgBus: 6940 2025-05-29T15:29:29.839311Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:29:29.839355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:29:29.839371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001101/r3tmp/tmptADSWk/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30226, node 1 TClient is connected to server localhost:6940 TClient is connected to server localhost:6940 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:30.038288Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:30.038311Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:30.038316Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:30.038393Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:30.038481Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532569361751 != 1748532569361755 2025-05-29T15:29:30.100677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:30.100724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:30.101497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:30.103314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:30.187508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
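[Editor's annotation: the VERIFY messages in these failures all point at the same helper, `void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &)` at ydb/core/kqp/ut/common/kqp_ut_common.h:375, failing on `(result.IsSuccess())`. Below is a minimal reconstruction consistent with that message — an assumption, not the verbatim repo source. It explains why the "Fatal: Execution, code: 1060" issue chain reappears inside the assertion text, and why, when the helper is invoked from a thread-pool thread during CreateSampleTables, the failed assert escalates to the "requirement UnittestThread failed" panic seen in the backtraces.]

    #include <library/cpp/testing/unittest/registar.h>
    #include <ydb/public/sdk/cpp/client/ydb_types/status/status.h>  // assumed include path for NYdb::TStatus

    namespace NKikimr::NKqp {
        // Assert the operation succeeded; on failure attach the full
        // issue chain so it shows up in the test log, as seen above.
        inline void AssertSuccessResult(const NYdb::TStatus& result) {
            UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
        }
    }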
2025-05-29T15:29:30.371339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:30.652485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:30.884322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:31.203837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1722:3316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:31.203885Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:31.208802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:31.389305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:31.637966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:31.867282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.095806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.309674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.573781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:32.848582Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2392:3811], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.848660Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.848739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2397:3816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:32.850201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:33.025621Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2399:3818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:33.076348Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:2457:3857] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:33.225913Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2467:3866], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:33.228096Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2MyMDE0MzAtM2JlMjk2MGItNGY4ZmNkMi1mOTdhYTVhOA==, ActorId: [1:1708:3303], ActorState: ExecuteState, TraceId: 01jweakcnf4hqef0f9vavcn46h, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:33.231075Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:29:34.754377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:29:34.754410Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F1B27E3AAC2 14. ??:0: ?? @ 0x7F1B27ECC84F Trying to start YDB, gRPC: 20680, MsgBus: 8331 2025-05-29T15:29:37.874809Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890235603938179:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:37.874842Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001101/r3tmp/tmp2TOK6X/pdisk_1.dat 2025-05-29T15:29:37.930402Z node 1 :IMPORT WARN: schemeshard_import.cpp ... st/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FD92AFA1AC2 14. ??:0: ?? 
@ 0x7FD92B03384F Trying to start YDB, gRPC: 11593, MsgBus: 13089 2025-05-29T15:29:57.952843Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890320180016986:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:57.952896Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001101/r3tmp/tmpTDWDrL/pdisk_1.dat 2025-05-29T15:29:58.004643Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11593, node 1 2025-05-29T15:29:58.019234Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:58.019244Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:58.019245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:58.019278Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13089 2025-05-29T15:29:58.053790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:58.053817Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:58.054935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13089 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:58.082839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:58.087597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
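[Editor's annotation: the root failure repeated across these suites is the compile-time issue "yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1", which KQP_COMPILE_ACTOR surfaces as INTERNAL_ERROR paired with "Fatal: Execution, code: 1060". The toy sketch below shows the general shape of a bounds-checked AST child accessor that would raise such an issue; it is NOT YQL's real TExprNode code, whose accessor at that line may differ.]

    #include <util/generic/vector.h>
    #include <util/generic/yexception.h>

    // Toy model only: Y_ENSURE throws with file:line plus the message,
    // matching the "yql_expr.h:1874: index out of range" form in the log.
    struct TToyExprNode {
        TVector<TToyExprNode*> Children;

        TToyExprNode& Child(size_t index) const {
            Y_ENSURE(index < Children.size(), "index out of range");
            return *Children[index];
        }
    };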
2025-05-29T15:29:58.151425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:58.173747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:58.185704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:58.374578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890324474985874:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:58.374622Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:58.418757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:58.427462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:58.436714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:58.492205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:58.499901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:58.513686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:58.527737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:58.546034Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890324474986529:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:58.546084Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:58.546128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890324474986534:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:58.547157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:58.555537Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890324474986536:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:58.652851Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890324474986587:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:58.747432Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890324474986603:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:58.748706Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzY3MTJhY2QtZGI2NWRiNzYtNTlkMTA0OGUtNjExYWIwOTg=, ActorId: [1:7509890324474985856:2401], ActorState: ExecuteState, TraceId: 01jweam5rh47e2ek2hckws3x3z, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:58.749741Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7F37CB250AC2 14. ??:0: ?? @ 0x7F37CB2E284F >> KqpTypes::Time64Columns+EnableTableDatetime64 [FAIL] >> KqpTypes::Time64Columns-EnableTableDatetime64 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 24733, MsgBus: 7810 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002484/r3tmp/tmp3VZM2P/pdisk_1.dat 2025-05-29T15:29:43.451409Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890260855321749:2211];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:43.453588Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:29:43.501717Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890260855321563:2079] 1748532583350650 != 1748532583350653 2025-05-29T15:29:43.503554Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24733, node 1 2025-05-29T15:29:43.516748Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:43.516766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:43.516768Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:43.516814Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7810 2025-05-29T15:29:43.566877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:43.566913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:43.567863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7810 WaitRootIsUp 'Root'... 
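The VERIFY message and backtrace above both point at the same test helper, NKikimr::NKqp::AssertSuccessResult (ydb/core/kqp/ut/common/kqp_ut_common.h:375). A minimal sketch of what such a helper could look like, reconstructed only from the signature and the (result.IsSuccess()) condition quoted in the log; the body and the include paths below are assumptions, not the actual source:

#include <library/cpp/testing/unittest/registar.h>              // UNIT_ASSERT_C
#include <ydb/public/sdk/cpp/client/ydb_types/status/status.h>  // assumed path for NYdb::TStatus

namespace NKikimr::NKqp {

// Reconstructed from the assertion text in the log; the real helper at
// kqp_ut_common.h:375 may differ in detail.
inline void AssertSuccessResult(const NYdb::TStatus& result) {
    // The run above fails exactly this check, carrying the compiler issues
    // ("Fatal: Execution" / "index out of range") as the failure message.
    UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
}

} // namespace NKikimr::NKqp
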
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:43.605914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:43.609095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:29:43.621804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-05-29T15:29:43.629945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-05-29T15:29:43.855544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890260855322264:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:43.855573Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:43.919349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:43.992372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-05-29T15:29:44.014372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:44.076559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:44.091198Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890265150289875:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:44.091222Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:44.091347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890265150289880:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:44.092251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-05-29T15:29:44.102120Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890265150289882:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-05-29T15:29:44.194996Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890265150289933:2554] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:44.219094Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890265150289949:2370], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:44.219232Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmIzYTU2YTYtMTQ4NjRiNS0yZmM4NGFjZS1hNWM4MGI0OQ==, ActorId: [1:7509890260855322236:2328], ActorState: ExecuteState, TraceId: 01jweakqmt6v06p3agwwyrahxc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13AC626B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13C7DC88 2. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x1398A682 3. /tmp//-S/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp:55: CreateSampleTables @ 0x1395A198 4. /tmp//-S/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp:282: Execute_ @ 0x13959068 5. /tmp//-S/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp:279: operator() @ 0x13989936 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13C7FB3D 7. /tmp//-S/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp:279: Execute @ 0x13989193 8. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13C802B2 9. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13C91E5C 10. ??:0: ?? @ 0x7EFE6EDBED8F 11. ??:0: ?? @ 0x7EFE6EDBEE3F 12. ??:0: ?? @ 0x129C1028 Trying to start YDB, gRPC: 16251, MsgBus: 22104 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002484/r3tmp/tmpgPXiNo/pdisk_1.dat 2025-05-29T15:29:46.844345Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:29:46.860422Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:46.864011Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509890274781586491:2079] 1748532586804817 != 1748532586804820 TServer::EnableGrpc on GrpcPort 16251, node 2 2025-05-29T15:29:46.903119Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:46.903131Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:46.903133Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:46.903177Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22104 2025-05-29T15:29:46.939342Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:46.939368Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:46.945388Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22104 WaitRootIsUp 'Root'... 
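Every failure in this stretch of the run bottoms out in the same compile-time fatal, "index out of range" raised from yql/essentials/ast/yql_expr.h:1874, which suggests a single query-compiler defect rather than independent test bugs. The message is characteristic of a bounds-checked child access on an expression-tree node; the following self-contained illustration shows that shape (illustrative only, not the YQL source):

#include <util/generic/vector.h>
#include <util/generic/yexception.h>

// Illustrative only: a checked accessor of this shape raises
// "index out of range" when compilation walks an expression node that has
// fewer children than the caller expects.
template <class T>
const T& ChildChecked(const TVector<T>& children, size_t index) {
    Y_ENSURE(index < children.size(), "index out of range");
    return children[index];
}
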
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:47.054940Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:47.063211Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:47.077928Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:29:47.363086Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890279076554490:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:47.363111Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:47.367866Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:47.395308Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:29:47.409750Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:47.431363Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:47.460509Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890279076554801:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:47.460533Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:47.460685Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890279076554806:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:47.461590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-05-29T15:29:47.466420Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2025-05-29T15:29:47.466682Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509890279076554808:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-05-29T15:29:47.551802Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509890279076554859:2554] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:47.579944Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509890279076554875:2370], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:47.581037Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=Y2E4ZmI4ZmQtNmZkNDQzMWUtMTk2NjhkNzgtYTMzZTQzYTU=, ActorId: [2:7509890279076554462:2328], ActorState: ExecuteState, TraceId: 01jweakty3cfcb2m1rgc14r3a3, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13AC626B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13C7DC88 2. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x1398A682 3. /tmp//-S/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp:55: CreateSampleTables @ 0x1395A198 4. /tmp//-S/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp:294: Execute_ @ 0x13963CA0 5. /tmp//-S/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp:279: operator() @ 0x13989936 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13C7FB3D 7. /tmp//-S/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp:279: Execute @ 0x13989193 8. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13C802B2 9. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13C91E5C 10. ??:0: ?? @ 0x7EFE6EDBED8F 11. ??:0: ?? @ 0x7EFE6EDBEE3F 12. ??:0: ?? @ 0x129C1028 Trying to start YDB, gRPC: 3305, MsgBus: 8965 2025-05-29T15:29:48.132210Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509890284794371246:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:48.132225Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002484/r3tmp/tmp61pj66/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3305, node 3 2025-05-29T15:29:48.179903Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:48.180157Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509890284794371205:2079] 1748532588131722 != 1748532588131725 2025-05-29T15:29:48.186380Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:48.186394Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:48.186396Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:48.186455Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8965 2025-05-29T15:29:48.239005Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:48.239043Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:48.243237Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8965 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:29:48.267277Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:48.270982Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:29:48.279770Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 >> BackupPathTest::FilterByPathFailsWhenNoSchemaMapping [GOOD] >> KqpQuery::CreateAsSelectTypes-NotNull+IsOlap [FAIL] >> KqpQuery::CreateAsSelectTypes+NotNull+IsOlap >> KqpExplain::UpdateConditional-UseSink >> KqpParams::CheckQueryCacheForPreparedQuery ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllPrimitiveTypes-DYNUMBER [FAIL] Test command err: 2025-05-29T15:29:36.327784Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890232845784068:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:36.327974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmpSHF5qt/pdisk_1.dat 2025-05-29T15:29:36.424295Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:36.424927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:36.424945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:36.428117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21345, node 1 2025-05-29T15:29:36.463037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:36.463053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:36.463055Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:36.463111Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28575 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:36.499867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:36.752929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890232845784864:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:36.752959Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:36.798224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmp3KEWrh/"Create temporary directory "/Root/~backup_20250529T152936" in databaseProcess "/home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmp3KEWrh/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250529T152936/table" }Describe table "/Root/table"Describe table "/Root/~backup_20250529T152936/table"Backup table "/Root/~backup_20250529T152936/table" to "/home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmp3KEWrh/table"Write scheme into "/home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmp3KEWrh/table/scheme.pb"Write ACL into "/home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmp3KEWrh/table/permissions.pb"Read table "/Root/~backup_20250529T152936/table"Write data into "/home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmp3KEWrh/table/data_00.csv"Drop table "/Root/~backup_20250529T152936/table"Remove temporary directory "/Root/~backup_20250529T152936" in database2025-05-29T15:29:37.067796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:37.070840Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-05-29T15:29:37.070845Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-05-29T15:29:37.070847Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found Backup completed successfully2025-05-29T15:29:37.077896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890237140753128:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:37.077923Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Restore "/home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmp3KEWrh/" to "/Root"Resolved db base path: "/Root"List of entries in the backup: [{"type":"Directory","path":"/home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmp3KEWrh/"},{"type":"Table","path":"/home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmp3KEWrh/table"}]Process "/home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmp3KEWrh/table"Read scheme from "/home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmp3KEWrh/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmp3KEWrh/table" to "/Root/table"2025-05-29T15:29:37.107767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:37.112751Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-05-29T15:29:37.112766Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-05-29T15:29:37.112769Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmp3KEWrh/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-05-29T15:29:37.138477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-05-29T15:29:37.157574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-05-29T15:29:37.185913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715760:0, at schemeshard: 72057594046644480 2025-05-29T15:29:37.227034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715762:0, at schemeshard: 72057594046644480 2025-05-29T15:29:37.243499Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-05-29T15:29:37.243512Z node 1 :HIVE WARN: hive_impl.cpp:491: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found Restore ACL "/home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmp3KEWrh/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmp3KEWrh/table/permissions.pb"2025-05-29T15:29:37.442981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 Restore completed successfully 
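The block above traces the full dump/restore round-trip: each table is copied into a temporary ~backup_<timestamp> directory, described, and serialized as scheme.pb, permissions.pb, and data_00.csv; the temporary copy is dropped, and restore later recreates the table, reloads the CSV, rebuilds the byValue index, and reapplies the ACL. A small sketch that asserts exactly that per-table layout (the three file names come from the log; the checking code itself is illustrative):

#include <util/folder/path.h>
#include <util/generic/string.h>
#include <util/generic/yexception.h>

// Verify the per-table artifacts the backup step above reports writing.
void CheckBackupLayout(const TFsPath& backupDir, const TString& table) {
    for (const char* name : {"scheme.pb", "permissions.pb", "data_00.csv"}) {
        Y_ENSURE((backupDir / table / name).Exists(),
                 "missing backup artifact: " << name);
    }
}
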
2025-05-29T15:29:38.179662Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509890239340112974:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:38.180080Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmpToaLGW/pdisk_1.dat 2025-05-29T15:29:38.200606Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13644, node 4 2025-05-29T15:29:38.212171Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:38.212184Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:38.212186Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:38.212228Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11596 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId ... UND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:57.828077Z node 46 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:57.828857Z node 46 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:29:57.833908Z node 46 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [46:7509890322134057296:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:29:57.904084Z node 46 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [46:7509890322134057369:2652] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:57.908393Z node 46 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [46:7509890322134057402:2665] txid# 281474976715660, issues: { message: "Column Key has wrong key type JsonDocument" severity: 1 } 2025-05-29T15:29:57.908466Z node 46 :KQP_SESSION WARN: kqp_session_actor.cpp:2583: SessionId: ydb://session/3?node_id=46&id=N2NjYzYzMjUtM2EyZGMzYzEtNjQyZDlhNWEtOWMxNDEyMzk=, ActorId: [46:7509890322134057263:2332], ActorState: ExecuteState, TraceId: 01jweam5238sjxq3k2n046xxrz, Create QueryResponse for error on request, msg: 2025-05-29T15:29:57.908621Z node 46 :KQP_EXECUTER ERROR: kqp_planner.cpp:119: TxId: 281474976715661. Ctx: { TraceId: 01jweam5238sjxq3k2n046xxrz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=46&id=N2NjYzYzMjUtM2EyZGMzYzEtNjQyZDlhNWEtOWMxNDEyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-29T15:29:57.913028Z node 46 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:57.980376Z node 46 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [46:7509890322134057571:2359], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:57.980473Z node 46 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=46&id=N2NjYzYzMjUtM2EyZGMzYzEtNjQyZDlhNWEtOWMxNDEyMzk=, ActorId: [46:7509890322134057263:2332], ActorState: ExecuteState, TraceId: 01jweam56n20g8vm302zw9pnpz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:177, NQuery::TExecuteQueryResult (anonymous namespace)::ExecuteQuery(NQuery::TSession &, const TString &, bool): (result.IsSuccess()) query: UPSERT INTO `/Root/JsonDocumentTable` (Key, Value) VALUES (1, JsonDocument("{ \"foo\": \"bar\" }")); issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x139A39CC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B57539) ??+0 (0x1384FC23) ??+0 (0x138603AA) NTestSuiteBackupRestore::TTestCaseTestAllPrimitiveTypes::Execute_(NUnitTest::TTestContext&)+1979 (0x1385EC2B) NTestSuiteBackupRestore::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13875F87) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B593EE) NTestSuiteBackupRestore::TCurrentTest::Execute()+436 (0x13875944) NUnitTest::TTestFactory::Execute()+803 (0x13B59B63) NUnitTest::RunMain(int, char**)+3021 (0x13B6B70D) ??+0 (0x7FB462A24D90) __libc_start_main+128 (0x7FB462A24E40) _start+41 (0x12914029) 2025-05-29T15:29:58.606401Z node 49 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[49:7509890325528724150:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:58.606425Z node 49 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cb2/r3tmp/tmpj61BQi/pdisk_1.dat 2025-05-29T15:29:58.627548Z node 49 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30765, node 49 2025-05-29T15:29:58.649777Z node 49 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:58.649790Z node 49 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:58.649791Z node 49 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:58.649834Z node 49 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30890 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:58.707181Z node 49 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(49, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:58.707217Z node 49 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(49, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:58.709108Z node 49 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(49, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
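The JsonDocument failure above, like the DyNumber one further down, fires inside the test helper whose signature the assertion quotes: NQuery::TExecuteQueryResult ExecuteQuery(NQuery::TSession&, const TString&, bool) at ydb_backup_ut.cpp:177. A hedged reconstruction from that signature and the quoted (result.IsSuccess()) check; the body, including what the bool parameter toggles, is an assumption:

#include <library/cpp/testing/unittest/registar.h>
#include <ydb/public/sdk/cpp/client/ydb_query/client.h>  // assumed path for NYdb::NQuery

// Reconstructed for illustration; the real helper at ydb_backup_ut.cpp:177
// may differ, e.g. in how the bool parameter is interpreted.
NYdb::NQuery::TExecuteQueryResult ExecuteQuery(
        NYdb::NQuery::TSession& session, const TString& query, bool isDdl) {
    auto result = session.ExecuteQuery(
            query,
            isDdl ? NYdb::NQuery::TTxControl::NoTx()
                  : NYdb::NQuery::TTxControl::BeginTx().CommitTx())
        .ExtractValueSync();
    // This is the check that fails here: the UPSERTs quoted in the log never
    // compile, dying with "yql_expr.h:1874: index out of range" instead.
    UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
    return result;
}

The quoted UPSERT statements themselves are ordinary; the INTERNAL_ERROR is raised during compilation, before either the JsonDocument or the DyNumber value is evaluated.
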
2025-05-29T15:29:58.711396Z node 49 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:58.940627Z node 49 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [49:7509890325528725094:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:58.940647Z node 49 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [49:7509890325528725086:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:58.940659Z node 49 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:58.941297Z node 49 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:29:58.946636Z node 49 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-29T15:29:58.946751Z node 49 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [49:7509890325528725100:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:29:59.047415Z node 49 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [49:7509890329823692469:2649] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:59.053249Z node 49 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:59.084028Z node 49 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [49:7509890329823692641:2355], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:59.084141Z node 49 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=49&id=YzU3N2ZlOGYtODM3ODg5MDAtZmY3YzgzNjEtZTU2YzUwZDc=, ActorId: [49:7509890325528725082:2332], ActorState: ExecuteState, TraceId: 01jweam69588ecv4yw0sefjn5n, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:177, NQuery::TExecuteQueryResult (anonymous namespace)::ExecuteQuery(NQuery::TSession &, const TString &, bool): (result.IsSuccess()) query: UPSERT INTO `/Root/DyNumberTable` (Key, Value) VALUES (DyNumber("1"), 1); issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x139A39CC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B57539) ??+0 (0x1384FC23) ??+0 (0x138603AA) NTestSuiteBackupRestore::TTestCaseTestAllPrimitiveTypes::Execute_(NUnitTest::TTestContext&)+1979 (0x1385EC2B) NTestSuiteBackupRestore::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13875F87) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B593EE) NTestSuiteBackupRestore::TCurrentTest::Execute()+436 (0x13875944) NUnitTest::TTestFactory::Execute()+803 (0x13B59B63) NUnitTest::RunMain(int, char**)+3021 (0x13B6B70D) ??+0 (0x7FB462A24D90) __libc_start_main+128 (0x7FB462A24E40) _start+41 (0x12914029) >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] >> BackupPathTest::OnlyOneEmptyDirectory >> KqpQuery::QueryCacheTtl >> KqpExplain::MultiJoinCteLinks >> KqpExplain::ExplainDataQuery >> KqpParams::BadParameterType >> KqpQuery::CreateAsSelectTypes+NotNull+IsOlap [FAIL] >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix >> KqpLimits::TooBigQuery-useSink >> BackupPathTest::OnlyOneEmptyDirectory [GOOD] >> KqpLimits::OutOfSpaceBulkUpsertFail >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix [FAIL] |72.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} >> BackupPathTest::ExportRecursiveWithoutDestinationPrefix |72.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |72.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck >> BackupPathTest::ExportRecursiveWithoutDestinationPrefix [GOOD] >> ColumnStatistics::CountMinSketchServerlessStatistics [FAIL] >> KqpExplain::UpdateOnSecondary+UseSink >> BackupPathTest::ChecksumsForSchemaMappingFiles >> KqpQuery::UpdateThenDelete+UseSink >> KqpLimits::AffectedShardsLimit >> KqpQuery::CreateAsSelect_BadCases >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink >> KqpQuery::QueryCache >> KqpStats::OneShardNonLocalExec-UseSink >> KqpQuery::QueryResultsTruncated >> TSchemeShardSplitBySizeTest::Merge111Shards [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery >> KqpExplain::PureExpr >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata >> KqpExplain::UpdateConditionalKey+UseSink >> KikimrIcGateway::TestLoadDataSourceProperties >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries >> BackupPathTest::ChecksumsForSchemaMappingFiles [GOOD] >> KqpExplain::ExplainDataQueryWithParams >> KqpQuery::ReadOverloaded-StreamLookup >> KqpLimits::WaitCAsStateOnAbort |72.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... 
results_accumulator.log} |72.8%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::Merge111Shards [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:29:41.702238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:29:41.702265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:41.702270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:29:41.702275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:29:41.702289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:29:41.702293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:29:41.702302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:41.702314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:29:41.702428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:29:41.702497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:29:41.714364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:29:41.714389Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:41.716844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:29:41.716966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:29:41.717007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:29:41.718459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:29:41.718611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:29:41.718755Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:41.718812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:29:41.719243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:41.719295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:29:41.719579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:41.719587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:41.719607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:29:41.719614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:41.719620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:29:41.719654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:29:41.720879Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:29:41.738484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:29:41.738582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:41.738657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:29:41.738704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:29:41.738715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:41.739610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:41.739639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:29:41.739695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:41.739707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:29:41.739712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:29:41.739717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:29:41.740085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:41.740096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:29:41.740101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:29:41.740417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:41.740426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:41.740433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:41.740452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:29:41.741042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:29:41.741457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:29:41.741499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:29:41.741700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:41.741722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at 
schemeshard: 72057594046678944 2025-05-29T15:29:41.741732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:41.741783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:29:41.741789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:41.741821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:29:41.741831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:29:41.742259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:41.742268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:41.742321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 28095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710730:0, at schemeshard: 72057594046678944 2025-05-29T15:30:06.528106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:431: TSplitMerge TNotifySrc, operationId: 281474976710730:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:06.528115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:462: Notify src datashard 72075186233409632 on partitioning changed splitOp# 281474976710730 at tablet 72057594046678944 2025-05-29T15:30:06.528123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:462: Notify src datashard 72075186233409697 on partitioning changed splitOp# 281474976710730 at tablet 72057594046678944 2025-05-29T15:30:06.528225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 78 PathOwnerId: 72057594046678944, cookie: 281474976710730 2025-05-29T15:30:06.528235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 78 PathOwnerId: 72057594046678944, cookie: 281474976710730 2025-05-29T15:30:06.528241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710730 2025-05-29T15:30:06.528246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710730, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 78 2025-05-29T15:30:06.528250Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 50 2025-05-29T15:30:06.528270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710730, ready parts: 0/1, is published: true 2025-05-29T15:30:06.528854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710730:0 from tablet: 72057594046678944 to tablet: 72075186233409632 cookie: 72057594046678944:87 msg type: 269553158 2025-05-29T15:30:06.528874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710730:0 from tablet: 72057594046678944 to tablet: 72075186233409697 cookie: 72057594046678944:152 msg type: 269553158 2025-05-29T15:30:06.529544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710730 2025-05-29T15:30:06.530305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976710730:0, at schemeshard: 72057594046678944, message: OperationCookie: 281474976710730 TabletId: 72075186233409632 2025-05-29T15:30:06.530325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 281474976710730:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409632, at schemeshard: 72057594046678944 2025-05-29T15:30:06.530434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976710730:0, at schemeshard: 72057594046678944, message: OperationCookie: 281474976710730 TabletId: 72075186233409697 2025-05-29T15:30:06.530440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 281474976710730:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409697, at schemeshard: 72057594046678944 2025-05-29T15:30:06.530458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710730:0 progress is 1/1 2025-05-29T15:30:06.530478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710730 ready parts: 1/1 2025-05-29T15:30:06.530483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710730:0 progress is 1/1 2025-05-29T15:30:06.530488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710730 ready parts: 1/1 2025-05-29T15:30:06.530494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710730, ready parts: 1/1, is published: true 2025-05-29T15:30:06.530500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710730 ready parts: 1/1 2025-05-29T15:30:06.530506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710730:0 2025-05-29T15:30:06.530515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710730:0 2025-05-29T15:30:06.530565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 49 2025-05-29T15:30:06.531823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976710730:0, at schemeshard: 72057594046678944 2025-05-29T15:30:06.532002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976710730:0, at schemeshard: 72057594046678944 2025-05-29T15:30:06.532011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:260: Unable to activate 281474976710730:0 2025-05-29T15:30:06.532275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 1811 RawX2: 4294970557 } TabletId: 72075186233409632 State: 4 2025-05-29T15:30:06.532294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409632, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:30:06.532475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 9784 RawX2: 4294977242 } TabletId: 72075186233409697 State: 4 2025-05-29T15:30:06.532485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409697, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:30:06.533153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:87 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:30:06.533290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:152 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:30:06.533350Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 87 TxId_Deprecated: 87 TabletID: 72075186233409632 Forgetting tablet 72075186233409632 2025-05-29T15:30:06.533954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 87 ShardOwnerId: 72057594046678944 ShardLocalIdx: 87, at schemeshard: 72057594046678944 2025-05-29T15:30:06.534049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 48 2025-05-29T15:30:06.534120Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 152 TxId_Deprecated: 152 TabletID: 72075186233409697 2025-05-29T15:30:06.534294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 152 ShardOwnerId: 72057594046678944 ShardLocalIdx: 152, at schemeshard: 72057594046678944 2025-05-29T15:30:06.534346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 47 Forgetting tablet 72075186233409697 2025-05-29T15:30:06.535809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:87 
2025-05-29T15:30:06.535823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:87 tabletId 72075186233409632 2025-05-29T15:30:06.535913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:152 2025-05-29T15:30:06.535921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:152 tabletId 72075186233409697 2025-05-29T15:30:06.535963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6518: Transaction 281474976710735 reset current state at schemeshard 72057594046678944 because pipe to tablet 72057594037968897 disconnected 2025-05-29T15:30:06.535973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6532: Pipe attached message is not found, ignore event, opId:281474976710735:0, tableId: 72057594037968897, at schemeshardId: 72057594046678944 2025-05-29T15:30:06.535977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6518: Transaction 281474976710734 reset current state at schemeshard 72057594046678944 because pipe to tablet 72057594037968897 disconnected 2025-05-29T15:30:06.535982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6532: Pipe attached message is not found, ignore event, opId:281474976710734:0, tableId: 72057594037968897, at schemeshardId: 72057594046678944 2025-05-29T15:30:06.535986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6518: Transaction 281474976710733 reset current state at schemeshard 72057594046678944 because pipe to tablet 72057594037968897 disconnected 2025-05-29T15:30:06.535991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6532: Pipe attached message is not found, ignore event, opId:281474976710733:0, tableId: 72057594037968897, at schemeshardId: 72057594046678944 2025-05-29T15:30:06.535995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6518: Transaction 281474976710732 reset current state at schemeshard 72057594046678944 because pipe to tablet 72057594037968897 disconnected 2025-05-29T15:30:06.536001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6532: Pipe attached message is not found, ignore event, opId:281474976710732:0, tableId: 72057594037968897, at schemeshardId: 72057594046678944 2025-05-29T15:30:06.536005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6518: Transaction 281474976710731 reset current state at schemeshard 72057594046678944 because pipe to tablet 72057594037968897 disconnected 2025-05-29T15:30:06.536010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6532: Pipe attached message is not found, ignore event, opId:281474976710731:0, tableId: 72057594037968897, at schemeshardId: 72057594046678944 Deleted tabletId 72075186233409632 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestSecretsExistingValidation Test command err: Trying to start YDB, gRPC: 61132, MsgBus: 61017 2025-05-29T15:29:41.397884Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890252991140570:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:41.397908Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/ciyv/002498/r3tmp/tmpGI1xsI/pdisk_1.dat 2025-05-29T15:29:41.453542Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:41.453755Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890252991140550:2079] 1748532581397763 != 1748532581397766 TServer::EnableGrpc on GrpcPort 61132, node 1 2025-05-29T15:29:41.463597Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:41.463611Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:41.463614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:41.463657Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:61017 2025-05-29T15:29:41.500372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:41.500396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:41.501497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61017 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:29:41.532410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:41.535199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:29:41.546578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-05-29T15:29:41.765390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890252991141254:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:41.765421Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:41.793296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:41.853105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-05-29T15:29:41.861563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:41.874644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:41.906931Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890252991141567:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:41.906963Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:41.907156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890252991141572:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:41.908015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-05-29T15:29:41.910402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710664, at schemeshard: 72057594046644480 2025-05-29T15:29:41.910490Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890252991141574:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-05-29T15:29:41.991772Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890252991141625:2553] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:42.022498Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890252991141641:2370], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:42.023126Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTY1ZDIxOTEtODVmMGMzOTktYmI2OTE5MDAtM2RkZTEyMGQ=, ActorId: [1:7509890252991141251:2330], ActorState: ExecuteState, TraceId: 01jweaknge3jef6s144phpqc8g, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13AC626B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13C7DC88 2. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x1398A682 3. /tmp//-S/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp:55: CreateSampleTables @ 0x1395A198 4. /tmp//-S/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp:288: Execute_ @ 0x1395FA48 5. /tmp//-S/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp:279: operator() @ 0x13989936 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13C7FB3D 7. /tmp//-S/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp:279: Execute @ 0x13989193 8. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13C802B2 9. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13C91E5C 10. ??:0: ?? @ 0x7F9BFFE9AD8F 11. ??:0: ?? @ 0x7F9BFFE9AE3F 12. ??:0: ?? @ 0x129C1028 Trying to start YDB, gRPC: 14932, MsgBus: 9574 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002498/r3tmp/tmp2CRNVG/pdisk_1.dat 2025-05-29T15:29:45.796633Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:29:45.797255Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:45.802875Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509890270912806225:2079] 1748532585743839 != 1748532585743842 TServer::EnableGrpc on GrpcPort 14932, node 2 2025-05-29T15:29:45.834951Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:45.834968Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:45.834970Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:45.835016Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:45.867172Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:45.867204Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:45.875153Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9574 TClient is connected to server localhost:9574 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId ... x13AE4A35 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ADBA36 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C7DD76 3.
/-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x1398A682 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x261085D2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26129F1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26129F1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26129F1C 8. /-S/util/thread/pool.h:71: Process @ 0x26129F1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AEBF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13AEA959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AEA959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AE5DCC 13. ??:0: ?? @ 0x7F9BFFF05AC2 14. ??:0: ?? @ 0x7F9BFFF9784F Trying to start YDB, gRPC: 7211, MsgBus: 28377 2025-05-29T15:30:02.832509Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890345332516777:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:02.832527Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002498/r3tmp/tmpM7g1OJ/pdisk_1.dat 2025-05-29T15:30:02.900771Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890345332516758:2079] 1748532602832381 != 1748532602832384 2025-05-29T15:30:02.901291Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7211, node 1 2025-05-29T15:30:02.916293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:02.916305Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:02.916307Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:02.916357Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28377 TClient is connected to server localhost:28377 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-05-29T15:30:02.975505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:02.975529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:02.976363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.977818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:02.988878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:03.053853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:03.073329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:03.085689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:03.212208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890349627485692:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.212246Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.252082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.260084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.273992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.287815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.301973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.316299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.330026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.345571Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890349627486345:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.345596Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.345616Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890349627486350:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.346412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:03.349497Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890349627486352:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:03.420589Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890349627486403:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:03.493468Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890349627486419:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:03.493578Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWQwMzJhMmItODliZGZkNjUtNmMyNmFjYWYtNGZjNzFkOWU=, ActorId: [1:7509890349627485674:2401], ActorState: ExecuteState, TraceId: 01jweamaehawx18j707p621975, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:03.494266Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AE4A35 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ADBA36 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C7DD76 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x1398A682 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x261085D2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26129F1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26129F1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26129F1C 8. /-S/util/thread/pool.h:71: Process @ 0x26129F1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AEBF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13AEA959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AEA959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AE5DCC 13. ??:0: ?? @ 0x7F3F3C93FAC2 14. ??:0: ?? @ 0x7F3F3C9D184F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] Test command err: Trying to start YDB, gRPC: 17073, MsgBus: 6070 2025-05-29T15:29:51.571727Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890297888238812:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:51.571744Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013ae/r3tmp/tmpOMfYjj/pdisk_1.dat 2025-05-29T15:29:51.614186Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890297888238792:2079] 1748532591571580 != 1748532591571583 2025-05-29T15:29:51.615582Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17073, node 1 2025-05-29T15:29:51.627959Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:51.627974Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:51.627977Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:51.628038Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6070 2025-05-29T15:29:51.674063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:51.674092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:51.675087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6070 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:51.698429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:51.710646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:51.781514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:51.803300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:51.816149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:51.984155Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890297888240427:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:51.984180Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.025209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.032485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.045866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.059517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.073846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.087684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.101739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.118851Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890302183208375:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.118879Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.118886Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890302183208380:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.119885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:52.129045Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890302183208382:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:52.207907Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890302183208433:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:52.308090Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890302183208449:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:52.308236Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWRlNDBlMi1mMzU3YTIzYS00NjRjZTM2Zi0yYTUwNTQwNA==, ActorId: [1:7509890297888240409:2401], ActorState: ExecuteState, TraceId: 01jweakzfp69938tdxreas6mmp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:52.308929Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FF719F57AC2 14. ??:0: ?? @ 0x7FF719FE984F Trying to start YDB, gRPC: 26834, MsgBus: 20902 2025-05-29T15:29:56.199028Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890316384643144:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:56.199052Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013ae/r3tmp/tmph8pNLv/pdisk_1.dat 2025-05-29T15:29:56.259770Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890316384643120:2079] 1748532596198822 != 1748532596198825 2025-05-29T15:29:56.261715Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26834, node 1 2025-05-29T15 ... 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:00.462264Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890335899174420:2079] 1748532600404884 != 1748532600404887 2025-05-29T15:30:00.466102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:00.466114Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:00.466116Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:00.466156Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4608 TClient is connected to server localhost:4608 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:30:00.507672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:00.507705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:00.508789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:00.535986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:00.797508Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890335899175082:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:00.797537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:00.843397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:30:00.883577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890335899175182:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:00.883619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:00.883725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890335899175187:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:00.884759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:30:00.897095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:30:00.897192Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890335899175189:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:30:00.988000Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890335899175240:2383] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:01.019943Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890335899175249:2345], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:01.020268Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTFiNDUwMGEtNTk3MzJhOS0xMWY4Mjk5MC1lODliMzU4NA==, ActorId: [1:7509890335899175056:2326], ActorState: ExecuteState, TraceId: 01jweam81k1a57acdkraznac2j, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/query/kqp_types_ut.cpp:244, virtual void NKikimr::NKqp::NTestSuiteKqpTypes::TTestCaseTime64Columns::Execute_(NUnitTest::TTestContext &) [EnableTableDatetime64 = true]: (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13DBA79B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13F70AD8 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_types_ut.cpp:244: Execute_ @ 0x13CB91B7 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_types_ut.cpp:10: operator() @ 0x13CB7AA6 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13F7298D 5. /tmp//-S/ydb/core/kqp/ut/query/kqp_types_ut.cpp:10: Execute @ 0x13CB7465 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13F73102 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F84CAC 8. ??:0: ?? @ 0x7EFF576C6D8F 9. ??:0: ?? @ 0x7EFF576C6E3F 10. ??:0: ?? @ 0x12A6F028 Trying to start YDB, gRPC: 9220, MsgBus: 30808 2025-05-29T15:30:02.953229Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890344469610058:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:02.953249Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013ae/r3tmp/tmpf4r5BD/pdisk_1.dat 2025-05-29T15:30:02.967285Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:02.967461Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509890344469610039:2079] 1748532602953128 != 1748532602953131 TServer::EnableGrpc on GrpcPort 9220, node 2 2025-05-29T15:30:02.979642Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:02.979668Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:02.979669Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:02.979717Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30808 TClient is connected to server localhost:30808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
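The "(INT|SUCC)E(RNAL_ERROR|SS)" fragment above is the unittest framework's character-level diff between the two status names, INTERNAL_ERROR and SUCCESS. Judging from the printed condition "(result.GetStatus() == EStatus::SUCCESS)", the check at kqp_types_ut.cpp:244 is most likely a value-equality assertion of roughly this form — a sketch, not the verbatim test code, where `result` stands for the status object returned by the query call:

    #include <library/cpp/testing/unittest/registar.h>

    // On mismatch this prints both values plus a character diff, matching
    // the "(INTERNAL_ERROR != SUCCESS) ... with diff" output in this log.
    UNIT_ASSERT_VALUES_EQUAL_C(result.GetStatus(), NYdb::EStatus::SUCCESS,
                               result.GetIssues().ToString());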
2025-05-29T15:30:03.058007Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:03.058035Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:03.058312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:03.058991Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:03.338672Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890348764577993:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.338713Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.340289Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509890348764578012:2293] txid# 281474976715658, issues: { message: "Type \'Datetime64\' specified for column \'DatetimePK\', but support for new date/time 64 types is disabled (EnableTableDatetime64 feature flag is off)" severity: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::MultiJoinCteLinks Test command err: Trying to start YDB, gRPC: 22818, MsgBus: 16743 2025-05-29T15:29:44.744368Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890265078815010:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:44.744428Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013d6/r3tmp/tmpQ27eo7/pdisk_1.dat 2025-05-29T15:29:44.806531Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:44.807853Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890265078814848:2079] 1748532584742451 != 1748532584742454 TServer::EnableGrpc on GrpcPort 22818, node 1 2025-05-29T15:29:44.821150Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:44.821167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:44.821170Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:44.821220Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16743 2025-05-29T15:29:44.846060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:44.846095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:44.849579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16743 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
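The node-2 run above is the flag-off half of the Time64Columns matrix: with EnableTableDatetime64 disabled, the CREATE TABLE is rejected up front ("support for new date/time 64 types is disabled"), which is expected behaviour rather than a failure; only the flag-on variant reaches the compiler crash. In the kqp unit tests such flags are normally flipped in the server settings before start; a hedged sketch of that setup, with the setter and helper names assumed from the feature-flag name in the message rather than taken from this log:

    // Hypothetical test-setup sketch: enable 64-bit date/time column types
    // before starting the test server. The exact settings API may differ.
    NKikimrConfig::TFeatureFlags flags;
    flags.SetEnableTableDatetime64(true);   // assumed proto setter
    TKikimrSettings settings;
    settings.SetFeatureFlags(flags);        // assumed helper on the ut settings
    TKikimrRunner kikimr(settings);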
2025-05-29T15:29:44.903046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:44.919974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:44.941029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:29:44.964291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:44.984762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-05-29T15:29:45.266992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890269373783783:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:45.267064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:45.274924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:45.287215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:45.299297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:45.313102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:45.371100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:45.387491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:45.448930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:45.466546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890269373784442:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:45.466573Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:45.466656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890269373784447:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:45.467378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:45.473821Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890269373784449:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:45.557008Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890269373784500:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:45.746803Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890269373784509:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:45.748497Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NmY5OGI4Yy04YTJjYzA2MS1lMjc0MDRhLWViNzVkNjRk, ActorId: [1:7509890269373783765:2401], ActorState: ExecuteState, TraceId: 01jweakrzt64cavkmf633c7ftq, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:45.759795Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F6A6700AAC2 14. ??:0: ?? @ 0x7F6A6709C84F Trying to start YDB, gRPC: 64691, MsgBus: 28675 2025-05-29T15:29:50.697050Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890290975843040:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:50.697279Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013d6/r3tmp/tmpd9bu74/pdisk_1.dat 2025-05-29T15:29:50.760219Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:50.760312Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890290975843020:2079] 1748532590696874 != 1748532590696877 TServer::EnableGrpc on GrpcPort 64691, node 1 2025-05-29T15: ... 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FA37601CAC2 14. ??:0: ?? 
@ 0x7FA3760AE84F Trying to start YDB, gRPC: 64970, MsgBus: 6732 2025-05-29T15:30:03.946324Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890346777592033:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:03.946364Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013d6/r3tmp/tmpzvaDzN/pdisk_1.dat 2025-05-29T15:30:04.012883Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890346777592003:2079] 1748532603946122 != 1748532603946125 2025-05-29T15:30:04.014962Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64970, node 1 2025-05-29T15:30:04.023088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:04.023098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:04.023100Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:04.023140Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6732 2025-05-29T15:30:04.047841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:04.047878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:04.048977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6732 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:04.090837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.104039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
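The KQP_WORKLOAD_SERVICE warnings repeated throughout ("Resource pool default not found or you don't have access permissions", "Scheduled retry for error: Transaction ... completed, doublechecking") and the TX_PROXY "path exist, request accepts it" errors are startup noise, not part of the test failures: each fresh server lazily creates /Root/.metadata/workload_manager/pools/default, concurrent sessions race to create it, and the loser tolerates the existing path. A minimal sketch of that idempotent-create pattern — hypothetical code, the real logic lives in the TPoolCreatorActor referenced above:

    // Hypothetical sketch: treat "already exists" as success, since another
    // session won the race to create the default resource pool.
    auto status = CreateResourcePool("/Root/.metadata/workload_manager/pools/default");
    if (status == NYdb::EStatus::ALREADY_EXISTS) {
        status = NYdb::EStatus::SUCCESS;  // the pool is present either way
    }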
2025-05-29T15:30:04.168397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.187889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.198795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.314274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890351072560932:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.314309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.363926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.370955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.379349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.393408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.407487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.421330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.428501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.444957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890351072561585:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.444980Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.444986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890351072561590:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.445766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:04.448462Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890351072561592:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:04.507939Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890351072561643:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:04.591042Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890351072561659:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:04.591155Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2E0NDE1MmUtZjc5ZTlkN2YtMjBlM2RmOTYtZmM5NzlhMQ==, ActorId: [1:7509890351072560914:2401], ActorState: ExecuteState, TraceId: 01jweambgw6xb6exawx2jmycpt, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:04.591824Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F780BE83AC2 14. ??:0: ?? @ 0x7F780BF1584F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllPrimitiveTypes-DYNUMBER [FAIL] Test command err: 2025-05-29T15:29:38.722196Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890242710744477:2211];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:38.723113Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001c92/r3tmp/tmpJ2vJqY/pdisk_1.dat 2025-05-29T15:29:38.826824Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27164, node 1 2025-05-29T15:29:38.839449Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:38.839466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:38.839468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:38.839521Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8312 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:29:38.875587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:38.875619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:38.876923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:38.877423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:29:39.132192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890247005712599:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:39.132200Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890247005712588:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:39.132214Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:39.132281Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509890242710744549:2135] Handle TEvProposeTransaction 2025-05-29T15:29:39.132291Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509890242710744549:2135] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:29:39.132308Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509890242710744549:2135] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509890247005712603:2609] 2025-05-29T15:29:39.142211Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509890247005712603:2609] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:29:39.142260Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509890247005712603:2609] txid# 281474976715658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:29:39.142264Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509890247005712603:2609] txid# 281474976715658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:29:39.142612Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:7509890247005712603:2609] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:29:39.142639Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509890247005712603:2609] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:29:39.142704Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509890247005712603:2609] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:29:39.142774Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509890247005712603:2609] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:29:39.142794Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509890247005712603:2609] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:29:39.142867Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509890247005712603:2609] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:29:39.143148Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:29:39.144020Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:7509890247005712603:2609] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-05-29T15:29:39.144040Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:7509890247005712603:2609] txid# 281474976715658 SEND to# [1:7509890247005712602:2341] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-05-29T15:29:39.147894Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890247005712602:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:29:39.221905Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509890242710744549:2135] Handle TEvProposeTransaction 2025-05-29T15:29:39.221926Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509890242710744549:2135] TxId# 281474976715659 ProcessProposeTransaction 2025-05-29T15:29:39.221952Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509890242710744549:2135] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [1:7509890247005712673:2659] 2025-05-29T15:29:39.223153Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509890247005712673:2659] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:29:39.223182Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509890247005712673:2659] txid# 281474976715659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:29:39.223186Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509890247005712673:2659] txid# 281474976715659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:29:39.223375Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:7509890247005712673:2659] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:29:39.223402Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509890247005712673:2659] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:29:39.223465Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509890247005712673:2659] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:29:39.223509Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509890247005712673:2659] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:29:39.223530Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509890247005712673:2659] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-05-29T15:29:39.223591Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509890247005712673:2659] txid# 281474976715659 HANDLE EvClientConnected 2025-05-29T15:29:39.224710Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# 
[1:7509890247005712673:2659] txid# 281474976715 ... r# [46:7509890339369540131:2598] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:30:01.061943Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [46:7509890339369540131:2598] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:30:01.061959Z node 46 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [46:7509890339369540131:2598] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:30:01.061996Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [46:7509890339369540131:2598] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:30:01.062293Z node 46 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:30:01.063124Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [46:7509890339369540131:2598] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-05-29T15:30:01.063141Z node 46 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [46:7509890339369540131:2598] txid# 281474976715658 SEND to# [46:7509890339369540130:2341] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-05-29T15:30:01.066165Z node 46 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [46:7509890339369540130:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:30:01.153007Z node 46 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [46:7509890335074572059:2113] Handle TEvProposeTransaction 2025-05-29T15:30:01.153025Z node 46 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [46:7509890335074572059:2113] TxId# 281474976715659 ProcessProposeTransaction 2025-05-29T15:30:01.153049Z node 46 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [46:7509890335074572059:2113] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [46:7509890339369540201:2648] 2025-05-29T15:30:01.154006Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [46:7509890339369540201:2648] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:30:01.154022Z node 46 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [46:7509890339369540201:2648] txid# 281474976715659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:30:01.154026Z node 46 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [46:7509890339369540201:2648] txid# 281474976715659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:30:01.154258Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [46:7509890339369540201:2648] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:30:01.154272Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [46:7509890339369540201:2648] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:30:01.154300Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [46:7509890339369540201:2648] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:30:01.154337Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [46:7509890339369540201:2648] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:30:01.154351Z node 46 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [46:7509890339369540201:2648] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-05-29T15:30:01.154396Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [46:7509890339369540201:2648] txid# 281474976715659 HANDLE EvClientConnected 2025-05-29T15:30:01.157418Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1374: 
Actor# [46:7509890339369540201:2648] txid# 281474976715659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-05-29T15:30:01.157499Z node 46 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [46:7509890339369540201:2648] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:01.157506Z node 46 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [46:7509890339369540201:2648] txid# 281474976715659 SEND to# [46:7509890339369540130:2341] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48} 2025-05-29T15:30:01.163101Z node 46 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [46:7509890335074572059:2113] Handle TEvProposeTransaction 2025-05-29T15:30:01.163119Z node 46 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [46:7509890335074572059:2113] TxId# 281474976715660 ProcessProposeTransaction 2025-05-29T15:30:01.163141Z node 46 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [46:7509890335074572059:2113] Cookie# 0 userReqId# "" txid# 281474976715660 SEND to# [46:7509890339369540234:2661] 2025-05-29T15:30:01.164180Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [46:7509890339369540234:2661] txid# 281474976715660 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "DyNumberTable" Columns { Name: "Key" Type: "DyNumber" NotNull: false } Columns { Name: "Value" Type: "Int32" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } FailedOnAlreadyExists: true } } UserToken: "" DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:60520" 2025-05-29T15:30:01.164194Z node 46 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [46:7509890339369540234:2661] txid# 281474976715660 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:30:01.164371Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [46:7509890339369540234:2661] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:30:01.164396Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [46:7509890339369540234:2661] txid# 281474976715660 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:30:01.164450Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [46:7509890339369540234:2661] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:30:01.164492Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [46:7509890339369540234:2661] HANDLE EvNavigateKeySetResult, txid# 281474976715660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:30:01.164502Z node 46 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# 
[46:7509890339369540234:2661] txid# 281474976715660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-05-29T15:30:01.164571Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [46:7509890339369540234:2661] txid# 281474976715660 HANDLE EvClientConnected 2025-05-29T15:30:01.165081Z node 46 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:30:01.171673Z node 46 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [46:7509890339369540234:2661] txid# 281474976715660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715660} 2025-05-29T15:30:01.171703Z node 46 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [46:7509890339369540234:2661] txid# 281474976715660 SEND to# [46:7509890339369540233:2334] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 53} 2025-05-29T15:30:01.215530Z node 46 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [46:7509890339369540375:2357], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:01.216186Z node 46 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=46&id=YzViZTEyMi0zMmUyNGUzMC1kMzgwMDJkLWEzMjIxM2Ez, ActorId: [46:7509890339369540112:2334], ActorState: ExecuteState, TraceId: 01jweam8bp3sb58tvkj43r4e8h, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:177, NQuery::TExecuteQueryResult (anonymous namespace)::ExecuteQuery(NQuery::TSession &, const TString &, bool): (result.IsSuccess()) query: UPSERT INTO `/Root/DyNumberTable` (Key, Value) VALUES (DyNumber("1"), 1); issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x139A39CC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B57539) ??+0 (0x1384FC23) ??+0 (0x138603AA) NTestSuiteBackupRestoreS3::TTestCaseTestAllPrimitiveTypes::Execute_(NUnitTest::TTestContext&)+716 (0x1387193C) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1389E0A7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B593EE) NTestSuiteBackupRestoreS3::TCurrentTest::Execute()+436 (0x1389DA64) NUnitTest::TTestFactory::Execute()+803 (0x13B59B63) NUnitTest::RunMain(int, char**)+3021 (0x13B6B70D) ??+0 (0x7FB5689C1D90) __libc_start_main+128 (0x7FB5689C1E40) _start+41 (0x12914029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpTypes::MultipleCurrentUtcTimestamp Test command err: Trying to start YDB, gRPC: 1433, MsgBus: 17257 2025-05-29T15:29:49.064275Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890286060912035:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:49.064309Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013ca/r3tmp/tmpumy5HI/pdisk_1.dat 2025-05-29T15:29:49.131556Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890286060912013:2079] 1748532589064122 != 1748532589064125 2025-05-29T15:29:49.134193Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1433, node 1 2025-05-29T15:29:49.145394Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:49.145407Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:49.145408Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:49.145457Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17257 2025-05-29T15:29:49.168981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:49.169011Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:49.171270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17257 WaitRootIsUp 'Root'... 
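The BackupRestoreS3::TestAllPrimitiveTypes-DYNUMBER failure above is the same compiler crash reached through a different door: the DyNumberTable is created successfully, and it is the UPSERT with a DyNumber literal that dies in compilation with the yql_expr.h:1874 issue. The log prints the exact query; a hedged reconstruction of the failing step around ydb_backup_ut.cpp:177, assuming `session` is an NYdb::NQuery::TSession (the real test wraps this in its own ExecuteQuery helper):

    // Sketch of the failing step; the query text is verbatim from the log.
    auto result = session.ExecuteQuery(R"(
        UPSERT INTO `/Root/DyNumberTable` (Key, Value) VALUES (DyNumber("1"), 1);
    )", NYdb::NQuery::TTxControl::BeginTx().CommitTx()).GetValueSync();
    UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());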
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:29:49.210852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:49.216270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:49.287141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:49.312909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:49.325426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:49.492320Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890286060913650:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:49.492349Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:49.547201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:49.557289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:49.567874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:49.583529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:49.639003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:49.651902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:49.665769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:49.683547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890286060914303:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:49.683583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:49.683588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890286060914308:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:49.684619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:49.693474Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890286060914310:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:49.754701Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890286060914361:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:49.873780Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890286060914377:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:49.873920Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTYyOThmMDgtYzljY2Q5YWYtNDJlM2JjMDktN2FkZjZhN2Q=, ActorId: [1:7509890286060913632:2401], ActorState: ExecuteState, TraceId: 01jweakx3ketr7gvg2d9p3cz8m, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:49.874721Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F6B59C89AC2 14. ??:0: ?? @ 0x7F6B59D1B84F Trying to start YDB, gRPC: 32245, MsgBus: 5058 2025-05-29T15:29:53.824343Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890303808989558:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:53.824378Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013ca/r3tmp/tmpEOKnVD/pdisk_1.dat 2025-05-29T15:29:53.879937Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890303808989534:2079] 1748532593824185 != 1748532593824188 2025-05-29T15:29:53.882656Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32245, node 1 2025-05-29T15 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F062046FAC2 14. ??:0: ?? 
@ 0x7F062050184F Trying to start YDB, gRPC: 30342, MsgBus: 29587 2025-05-29T15:30:02.830020Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890342577943558:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:02.830057Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013ca/r3tmp/tmpKdBQPH/pdisk_1.dat 2025-05-29T15:30:02.892388Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:02.892722Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890342577943536:2079] 1748532602829875 != 1748532602829878 TServer::EnableGrpc on GrpcPort 30342, node 1 2025-05-29T15:30:02.907841Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:02.907866Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:02.907868Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:02.907921Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29587 TClient is connected to server localhost:29587 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:30:02.970166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:02.970211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:02.971225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:02.971985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.982244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:03.043769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:03.061992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:03.071925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:03.245942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890346872912464:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.245986Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.284482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.292816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.301350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.315664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.329479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.343338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.357973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.373601Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890346872913118:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.373627Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890346872913123:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.373629Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.374437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:03.377466Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890346872913125:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:30:03.476347Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890346872913176:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:03.552336Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890346872913192:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:03.552452Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTliZGNjZDctZmY2NGYxNmUtY2ExMTUxZWYtM2JjYTQ2NjI=, ActorId: [1:7509890346872912461:2401], ActorState: ExecuteState, TraceId: 01jweamafd2bhkzpwzm6fa4hy8, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:03.553154Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7EFF81570AC2 14. ??:0: ?? @ 0x7EFF8160284F >> KqpLimits::OutOfSpaceYQLUpsertFail+useSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix [FAIL] Test command err: Trying to start YDB, gRPC: 19303, MsgBus: 2940 2025-05-29T15:29:59.902502Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890329836946904:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:59.902525Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001374/r3tmp/tmptnCJ2Q/pdisk_1.dat 2025-05-29T15:29:59.959414Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890329836946870:2079] 1748532599902339 != 1748532599902342 2025-05-29T15:29:59.962371Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19303, node 1 2025-05-29T15:29:59.970209Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:59.970223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:59.970224Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:59.970262Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2940 2025-05-29T15:30:00.004056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:00.004088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:00.005141Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2940 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:00.034176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:00.229771Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890334131914811:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:00.229788Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890334131914837:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:00.229793Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:00.230377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:30:00.231949Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890334131914840:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:30:00.287551Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890334131914891:2324] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:00.336472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:30:00.442214Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890334131915010:2347], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:00.442315Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzI3Y2U5MmQtMTc4YmY5YjYtYjkwNTFkMmYtMTg4NmEzNTU=, ActorId: [1:7509890334131915003:2343], ActorState: ExecuteState, TraceId: 01jweam7jf6cpc7fbr67m7821e, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/query/kqp_query_ut.cpp:2388, void NKikimr::NKqp::NTestSuiteKqpQuery::CreateAsSelectTypes(NUnitTest::TTestContext &) [NotNull = true, IsOlap = false]: (prepareResult.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13DBA79B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13F70AD8 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:2388: CreateAsSelectTypes @ 0x13C20DEE 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: operator() @ 0x13BFC896 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13F7298D 5. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: Execute @ 0x13BFC255 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13F73102 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F84CAC 8. ??:0: ?? @ 0x7F45C769DD8F 9. ??:0: ?? @ 0x7F45C769DE3F 10. ??:0: ?? @ 0x12A6F028 Trying to start YDB, gRPC: 18731, MsgBus: 18302 2025-05-29T15:30:02.680487Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890341900990873:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:02.680516Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001374/r3tmp/tmpP7WPu1/pdisk_1.dat 2025-05-29T15:30:02.694828Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18731, node 2 2025-05-29T15:30:02.705704Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:02.705712Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:02.705714Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:02.705752Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18302 TClient is connected to server localhost:18302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:30:02.784987Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.785138Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:02.785157Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-29T15:30:02.786267Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:03.045657Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890346195958804:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.045681Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890346195958793:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.045752Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.046355Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:30:03.048087Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509890346195958807:2331], DatabaseId: /Root, PoolId: default, Scheduled ... aiting... 2025-05-29T15:30:03.471336Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:03.669032Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509890348854601137:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.669050Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509890348854601118:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.669102Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.669788Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:30:03.671633Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7509890348854601147:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:30:03.738980Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:7509890348854601198:2323] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:03.745990Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.769936Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:7509890348854601315:2347], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:03.770031Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=OGE0YTE2MDktMzgyNTQwYTAtMjhmOGFkYjAtZmJjZWRiZjU=, ActorId: [3:7509890348854601308:2343], ActorState: ExecuteState, TraceId: 01jweamav9fb664q2a3kt5jjj2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/query/kqp_query_ut.cpp:2388, void NKikimr::NKqp::NTestSuiteKqpQuery::CreateAsSelectTypes(NUnitTest::TTestContext &) [NotNull = true, IsOlap = true]: (prepareResult.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13DBA79B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13F70AD8 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:2388: CreateAsSelectTypes @ 0x13C2780B 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: operator() @ 0x13BFC896 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13F7298D 5. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: Execute @ 0x13BFC255 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13F73102 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F84CAC 8. ??:0: ?? @ 0x7F45C769DD8F 9. ??:0: ?? @ 0x7F45C769DE3F 10. ??:0: ?? @ 0x12A6F028 Trying to start YDB, gRPC: 10201, MsgBus: 29787 2025-05-29T15:30:04.057299Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509890350292822832:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:04.057320Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001374/r3tmp/tmp5Pvgp5/pdisk_1.dat 2025-05-29T15:30:04.071627Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:04.071821Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [4:7509890350292822813:2079] 1748532604057178 != 1748532604057181 TServer::EnableGrpc on GrpcPort 10201, node 4 2025-05-29T15:30:04.083148Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:04.083162Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:04.083177Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:04.083226Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29787 TClient is connected to server localhost:29787 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:30:04.162494Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:04.162516Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:04.162832Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.163481Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:04.171058Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.176516Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.474634Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509890350292823524:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.474650Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509890350292823516:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.474667Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.475293Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-05-29T15:30:04.476884Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7509890350292823530:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-05-29T15:30:04.574794Z node 4 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [4:7509890350292823581:2352] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:04.579718Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.598673Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [4:7509890350292823700:2354], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:04.598796Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=4&id=MmZlYjlhYmEtMzI4MTc4ZDgtNjAxMjBiNzktMjA0ZTNhMDc=, ActorId: [4:7509890350292823693:2350], ActorState: ExecuteState, TraceId: 01jweambnc2jxrp7w9xsx76exx, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/query/kqp_query_ut.cpp:2558, virtual void NKikimr::NKqp::NTestSuiteKqpQuery::TTestCaseCreateAsSelectPath::Execute_(NUnitTest::TTestContext &) [UseTablePathPrefix = false]: (prepareResult.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13DBA79B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13F70AD8 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:2558: Execute_ @ 0x13C406A8 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: operator() @ 0x13BFC896 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13F7298D 5. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: Execute @ 0x13BFC255 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13F73102 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F84CAC 8. ??:0: ?? @ 0x7F45C769DD8F 9. ??:0: ?? @ 0x7F45C769DE3F 10. ??:0: ?? @ 0x12A6F028 >> KqpQuery::QueryCachePermissionsLoss >> KqpQuery::NoEvaluate >> KqpQuery::CreateAsSelect_BadCases [FAIL] >> KqpQuery::CreateAsSelectTypes-NotNull-IsOlap >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink [FAIL] >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead Test command err: Trying to start YDB, gRPC: 4548, MsgBus: 26282 2025-05-29T15:29:51.368079Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890296418412552:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:51.368109Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013b5/r3tmp/tmpAH5TzW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4548, node 1 2025-05-29T15:29:51.427524Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:51.427650Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890296418412530:2079] 1748532591367856 != 1748532591367859 2025-05-29T15:29:51.432547Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:51.432562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:51.432564Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:51.432613Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26282 2025-05-29T15:29:51.470753Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:51.470784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:51.471894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26282 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:51.497242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:51.505398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:51.524230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:51.547704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:51.558569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:51.745350Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890296418414161:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:51.745379Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:51.795697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:51.803779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:51.814916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:51.828832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:51.843252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:51.857496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:51.871146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:51.895590Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890296418414811:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:51.895631Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:51.895691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890296418414816:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:51.896634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:51.899241Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890296418414818:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:51.952605Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890296418414869:3393] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:52.035720Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890296418414885:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:52.035861Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTI4MGYzZGMtNzdjN2I2NmMtOTkwMWZmYmItYTVkOGNjNmU=, ActorId: [1:7509890296418414143:2401], ActorState: ExecuteState, TraceId: 01jweakz8qegmc7e19b505d71m, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:52.036543Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F51C03A8AC2 14. ??:0: ?? @ 0x7F51C043A84F Trying to start YDB, gRPC: 10060, MsgBus: 22518 2025-05-29T15:29:56.000124Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890316669019399:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:56.000147Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013b5/r3tmp/tmpLHhuEs/pdisk_1.dat 2025-05-29T15:29:56.059827Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890312374052079:2079] 1748532595999898 != 1748532595999901 2025-05-29T15:29:56.061763Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10060, node 1 2025-05-29T1 ... l @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FD80260CAC2 14. ??:0: ?? 
@ 0x7FD80269E84F Trying to start YDB, gRPC: 10611, MsgBus: 3627 2025-05-29T15:30:05.053241Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890356218878531:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:05.053319Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013b5/r3tmp/tmppkK7T9/pdisk_1.dat 2025-05-29T15:30:05.114269Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890356218878512:2079] 1748532605053107 != 1748532605053110 2025-05-29T15:30:05.115353Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10611, node 1 2025-05-29T15:30:05.128243Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:05.128261Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:05.128263Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:05.128315Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3627 2025-05-29T15:30:05.155386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:05.155415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:05.156521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3627 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:05.196385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:05.209458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
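The identical VERIFY traces above bottom out in the helper named in frame 3, AssertSuccessResult (kqp_ut_common.h:375). Only the signature and the checked expression (result.IsSuccess()) appear in the log; the following is a minimal sketch of what such a helper plausibly looks like — the body and the SDK include path are assumptions, not YDB's actual code.

// Hedged reconstruction of the helper named in the traces above; only the
// signature and the checked condition come from the log, the body is assumed.
#include <library/cpp/testing/unittest/registar.h>
#include <ydb-cpp-sdk/client/types/status/status.h>  // SDK header path is an assumption

namespace NKikimr::NKqp {

inline void AssertSuccessResult(const NYdb::TStatus& result) {
    // UNIT_ASSERT_C raises through the unittest framework. When the check
    // runs on a thread-pool thread (frames 5-12 in the traces above),
    // RaiseError hits the "requirement UnittestThread failed" path and
    // escalates to Panic/VERIFY instead of a regular test failure.
    UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
}

} // namespace NKikimr::NKqp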
2025-05-29T15:30:05.272690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:05.288463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:05.297724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:05.362469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890356218880143:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:05.362503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:05.405512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:05.413002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:05.467747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:05.477986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:05.484994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:05.500080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:05.513699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:05.529932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890356218880799:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:05.529959Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:05.529978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890356218880804:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:05.530662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:05.533419Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890356218880806:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:30:05.629053Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890356218880857:3394] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:05.725059Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890356218880873:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:05.725188Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2Y0NjE2NWEtM2M0NWUxMC05ODdkNzJlLWRiYTQ5ZTMx, ActorId: [1:7509890356218880140:2401], ActorState: ExecuteState, TraceId: 01jweamcjsf281q4qpw1vqan1m, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:05.725879Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C
8. /-S/util/thread/pool.h:71: Process @ 0x2651596C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9
10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C
13. ??:0: ?? @ 0x7FB52B909AC2
14. ??:0: ?? @ 0x7FB52B99B84F
>> KqpStats::MultiTxStatsFullExpYql
>> KqpExplain::UpdateSecondaryConditional-UseSink
>> KqpStats::RequestUnitForSuccessExplicitPrepare
>> KqpLimits::ComputeActorMemoryAllocationFailure+useSink
>> KqpExplain::ExplainStream
>> KqpParams::MissingParameter
>> KqpStats::JoinNoStatsYql
>> KqpQuery::CreateAsSelectTypes-NotNull-IsOlap [FAIL]
>> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [FAIL]
>> KqpQuery::TableSinkWithSubquery
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupPathTest::ChecksumsForSchemaMappingFiles [GOOD]
Test command err:
2025-05-29T15:29:37.951334Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890234239264798:2274];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:37.951360Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001c9c/r3tmp/tmpO0Ne1v/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24727, node 1 2025-05-29T15:29:38.028878Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:38.029279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:38.029288Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:38.029290Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:38.029323Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24081 WaitRootIsUp 'Root'... 
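The section opened above is the checksum test for backup schema-mapping files; further down in its output the import is cancelled with "Checksum mismatch for Prefix/Table2/data_00.csv expected# f3b0c4..., got# e3b0c4...". The reported got# value is the well-known SHA-256 of empty input. Below is a minimal, self-contained sketch of the kind of file-checksum verification involved, assuming OpenSSL's one-shot SHA256 API; the file name and flow are illustrative, not YDB's actual import code.

// Sketch: compute the SHA-256 of a file and compare it with a stored digest,
// printing a mismatch message in the style seen in the log above.
#include <openssl/sha.h>
#include <cstdio>
#include <fstream>
#include <sstream>
#include <string>

static std::string Sha256Hex(const std::string& data) {
    unsigned char digest[SHA256_DIGEST_LENGTH];
    SHA256(reinterpret_cast<const unsigned char*>(data.data()), data.size(), digest);
    char buf[2 * SHA256_DIGEST_LENGTH + 1];
    for (int i = 0; i < SHA256_DIGEST_LENGTH; ++i)
        std::snprintf(buf + 2 * i, 3, "%02x", digest[i]);
    return std::string(buf, 2 * SHA256_DIGEST_LENGTH);
}

int main() {
    std::ifstream in("data_00.csv", std::ios::binary);  // path is illustrative
    std::ostringstream ss;
    ss << in.rdbuf();  // an empty or missing file hashes to e3b0c442...
    const std::string expected =
        "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
    const std::string got = Sha256Hex(ss.str());
    if (got != expected)
        std::printf("Checksum mismatch: expected# %s, got# %s\n",
                    expected.c_str(), got.c_str());
    return got == expected ? 0 : 1;
}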
TClient::Ls request: Root 2025-05-29T15:29:38.050996Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:38.051023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:38.052700Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:38.087570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:38.384410Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890238534232807:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:38.384452Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890238534232826:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:38.384462Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:38.384554Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509890234239264818:2140] Handle TEvProposeTransaction 2025-05-29T15:29:38.384562Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509890234239264818:2140] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:29:38.384580Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509890234239264818:2140] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509890238534232837:2596] 2025-05-29T15:29:38.397252Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509890238534232837:2596] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:29:38.397300Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509890238534232837:2596] txid# 281474976715658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:29:38.397304Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509890238534232837:2596] txid# 281474976715658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:29:38.397858Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:7509890238534232837:2596] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:29:38.397872Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509890238534232837:2596] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:29:38.397904Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509890238534232837:2596] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:29:38.397944Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509890238534232837:2596] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:29:38.397955Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509890238534232837:2596] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:29:38.397999Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509890238534232837:2596] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:29:38.398032Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:7509890238534232862:2602], Recipient [1:7509890238534232228:2200]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:29:38.398037Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:29:38.398039Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:29:38.398045Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [1:7509890238534232837:2596], Recipient [1:7509890238534232228:2200]: {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:29:38.398048Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:29:38.398714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: ".metadata/workload_manager/pools/default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } TxId: 281474976715658 TabletId: 72057594046644480 Owner: "metadata@system" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:29:38.398821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:38.398851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: .metadata, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-05-29T15:29:38.398863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-05-29T15:29:38.398879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:29:38.398883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2025-05-29T15:29:38.398907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: .metadata, child name: workload_manager, child id: [OwnerId: 72057594046644480, 
LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-29T15:29:38.398911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 0 2025-05-29T15:29:38.398917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:29:38.398920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2025-05-29T15:29:38.398929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 3], parent name: workl ... p_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710765:0 HandleReply TEvSchemaChanged at tablet# 72057594046644480 message# Source { RawX1: 7509890364702610007 RawX2: 4503848735476072 } Origin: 72075186224037894 State: 2 TxId: 281474976710765 Step: 0 Generation: 1 OpResult { Success: false Explain: "Checksum mismatch for Prefix/Table2/data_00.csv expected# f3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855, got# e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-29T15:30:07.985486Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710765:0, shardIdx: 72057594046644480:7, datashard: 72075186224037894, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-05-29T15:30:07.985490Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 281474976710765:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.985492Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 281474976710765:0, datashard: 72075186224037894, at schemeshard: 72057594046644480 2025-05-29T15:30:07.985497Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976710765:0 129 -> 240 2025-05-29T15:30:07.985532Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710765:0, reason# domain is not a serverless db, domain# /Root, domainPathId# [OwnerId: 72057594046644480, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046644480, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-29T15:30:07.985571Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:30:07.985931Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.985938Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:30:07.985940Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 281474976710765:0 
2025-05-29T15:30:07.985950Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [58:7509890364702610007:2408] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710765 at schemeshard: 72057594046644480 2025-05-29T15:30:07.985986Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [58:7509890360407640552:2206], Recipient [58:7509890360407640552:2206]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:30:07.985993Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:30:07.986000Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.986003Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046644480] TDone opId# 281474976710765:0 ProgressState 2025-05-29T15:30:07.986009Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:30:07.986011Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710765:0 progress is 1/1 2025-05-29T15:30:07.986013Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-05-29T15:30:07.986015Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710765:0 progress is 1/1 2025-05-29T15:30:07.986016Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-05-29T15:30:07.986019Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 1/1, is published: true 2025-05-29T15:30:07.986025Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [58:7509890360407640552:2206] message: TxId: 281474976710765 2025-05-29T15:30:07.986028Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-05-29T15:30:07.986031Z node 58 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710765:0 2025-05-29T15:30:07.986032Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710765:0 2025-05-29T15:30:07.986055Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 20] was 3 2025-05-29T15:30:07.986358Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:30:07.986377Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [58:7509890360407640552:2206] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710765 at schemeshard: 72057594046644480 2025-05-29T15:30:07.986402Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124998, Sender 
[58:7509890360407640552:2206], Recipient [58:7509890360407640552:2206]: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710765 2025-05-29T15:30:07.986408Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5035: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletionResult 2025-05-29T15:30:07.986410Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2025-05-29T15:30:07.986412Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710765 2025-05-29T15:30:07.986419Z node 58 :IMPORT DEBUG: schemeshard_import__create.cpp:361: TImport::TTxProgress: DoExecute 2025-05-29T15:30:07.986421Z node 58 :IMPORT DEBUG: schemeshard_import__create.cpp:1472: TImport::TTxProgress: OnNotifyResult: txId# 281474976710765 2025-05-29T15:30:07.986452Z node 58 :IMPORT NOTICE: schemeshard_import__create.cpp:753: TImport::TTxProgress: issues during restore, cancelling, info# { Id: 281474976715672 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/Prefix_6/Table2' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 20] State: Transferring SubState: Subscribed WaitTxId: 0 Issue: 'shard: 72057594046644480:7, error: Checksum mismatch for Prefix/Table2/data_00.csv expected# f3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855, got# e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' } 2025-05-29T15:30:07.986468Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_xxport__tx_base.h:63: SendNotifications: : id# 281474976715672, subscribers count# 0 2025-05-29T15:30:07.986804Z node 58 :IMPORT DEBUG: schemeshard_import__create.cpp:385: TImport::TTxProgress: DoComplete 2025-05-29T15:30:07.987199Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [58:7509890364702610140:3907], Recipient [58:7509890360407640552:2206]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:30:07.987211Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:30:07.987214Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480 2025-05-29T15:30:08.003148Z node 58 :TX_PROXY DEBUG: rpc_operation_request_base.h:50: [GetImport] [58:7509890368997577448:2412] [0] Resolve database: name# /Root 2025-05-29T15:30:08.003342Z node 58 :TX_PROXY DEBUG: rpc_operation_request_base.h:66: [GetImport] [58:7509890368997577448:2412] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:30:08.003356Z node 58 :TX_PROXY DEBUG: rpc_operation_request_base.h:106: [GetImport] [58:7509890368997577448:2412] [0] Send request: schemeShardId# 72057594046644480 2025-05-29T15:30:08.003459Z node 58 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [58:7509890368997577451:3920], Recipient [58:7509890360407640552:2206]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:30:08.003470Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:30:08.003477Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:30:08.003502Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 275251202, Sender [58:7509890368997577448:2412], Recipient [58:7509890360407640552:2206]: NKikimrImport.TEvGetImportRequest Request { Id: 281474976715672 } DatabaseName: "/Root" 2025-05-29T15:30:08.003510Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4990: StateWork, processing event TEvImport::TEvGetImportRequest 2025-05-29T15:30:08.003628Z node 58 :TX_PROXY DEBUG: rpc_get_operation.cpp:220: [GetImport] [58:7509890368997577448:2412] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715672 Status: CANCELLED Issues { message: "shard: 72057594046644480:7, error: Checksum mismatch for Prefix/Table2/data_00.csv expected# f3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855, got# e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" severity: 1 } Progress: PROGRESS_CANCELLED ImportFromS3Settings { endpoint: "localhost:28239" scheme: HTTP bucket: "test_bucket" source_prefix: "Prefix" destination_path: "/Root/Prefix_6" } StartTime { seconds: 1748532607 } EndTime { seconds: 1748532607 } } 2025-05-29T15:30:08.003825Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [58:7509890368997577451:3920], Recipient [58:7509890360407640552:2206]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:30:08.003834Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:30:08.003836Z node 58 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480
>> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateOnSecondary+UseSink
Test command err:
Trying to start YDB, gRPC: 21152, MsgBus: 31737
2025-05-29T15:29:52.159321Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890300965700739:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:52.159608Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013a9/r3tmp/tmpdZxz28/pdisk_1.dat 2025-05-29T15:29:52.216926Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890300965700720:2079] 1748532592159166 != 1748532592159169 2025-05-29T15:29:52.218512Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21152, node 1 2025-05-29T15:29:52.230504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-05-29T15:29:52.230516Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:52.230519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:52.230574Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31737 TClient is connected to server localhost:31737 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:52.291771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.293715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:52.293744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:52.294942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:52.298674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.316278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.335277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.347453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.536127Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890300965702349:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.536164Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.577945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.585157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.598588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.613339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.627010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.640566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.655306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.670943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890300965703002:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.670978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890300965703007:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.670979Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.671786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:52.674725Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890300965703009:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:52.753537Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890300965703060:3395] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:52.879759Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890300965703076:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:52.879888Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzAzMDAyYzAtNTg0ZDgyMDAtNWU2NTY1NDMtMzBlYzdhZTg=, ActorId: [1:7509890300965702346:2401], ActorState: ExecuteState, TraceId: 01jweam00y4yqb12b8pkr2a27s, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:52.880570Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C
8. /-S/util/thread/pool.h:71: Process @ 0x2651596C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9
10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C
13. ??:0: ?? @ 0x7FF2E6388AC2
14. ??:0: ?? @ 0x7FF2E641A84F
Trying to start YDB, gRPC: 3786, MsgBus: 23497
2025-05-29T15:29:56.792921Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890317773570450:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:56.792950Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013a9/r3tmp/tmpS4VvL5/pdisk_1.dat 2025-05-29T15:29:56.844976Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3786, node 1 2025-05-29T15:29:56.859900Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:56.859918Z node 1 :NET_CLASSIFIER WAR ... x13DD8F65
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C
8. /-S/util/thread/pool.h:71: Process @ 0x2651596C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9
10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C
13. ??:0: ?? @ 0x7F9E1130FAC2
14. ??:0: ??
@ 0x7F9E113A184F Trying to start YDB, gRPC: 4490, MsgBus: 20375 2025-05-29T15:30:05.851803Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890356893106625:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:05.852223Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013a9/r3tmp/tmpDlYHAp/pdisk_1.dat 2025-05-29T15:30:05.902583Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:05.902646Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890356893106604:2079] 1748532605851604 != 1748532605851607 TServer::EnableGrpc on GrpcPort 4490, node 1 2025-05-29T15:30:05.917417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:05.917432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:05.917433Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:05.917471Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20375 TClient is connected to server localhost:20375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:05.981711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:05.981744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:05.982792Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:05.983436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:05.991574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
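The WorkloadService lines above repeat a bootstrap pattern: fetching pool "default" returns NOT_FOUND, a creator actor issues the create, a racing create is answered by the schemeshard with "path exist, request accepts it", and the service re-fetches after a "doublechecking" delay. Below is a generic sketch of that idempotent ensure-exists loop; the status codes and callbacks are illustrative stand-ins, not YDB's actor interfaces.

// Generic sketch of the "ensure default pool exists" flow implied by the
// WorkloadService log lines above: fetch -> NOT_FOUND -> create (where
// "already exists" counts as success) -> re-fetch to double-check.
#include <chrono>
#include <functional>
#include <thread>

enum class EStatus { Success, NotFound, AlreadyExists, Error };

bool EnsurePoolExists(const std::function<EStatus()>& fetchPool,
                      const std::function<EStatus()>& createPool,
                      int maxRetries = 5) {
    using namespace std::chrono_literals;
    for (int attempt = 0; attempt < maxRetries; ++attempt) {
        if (fetchPool() == EStatus::Success)
            return true;                      // pool is visible, done
        EStatus st = createPool();
        // A concurrent creator may have won the race; the schemeshard reply
        // "path exist, request accepts it" corresponds to AlreadyExists here.
        if (st != EStatus::Success && st != EStatus::AlreadyExists)
            return false;
        // "doublechecking": wait for the create tx to become visible,
        // then loop back and fetch again with a growing backoff.
        std::this_thread::sleep_for(100ms * (attempt + 1));
    }
    return fetchPool() == EStatus::Success;
}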
2025-05-29T15:30:06.011469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:06.036961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:06.048177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:06.177166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890361188075531:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:06.177202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:06.219758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:06.226678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:06.233717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:06.241329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:06.255507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:06.269340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:06.283991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:06.299671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890361188076185:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:06.299702Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:06.299708Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890361188076190:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:06.300463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:06.303497Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890361188076192:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:06.367018Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890361188076243:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:06.448134Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890361188076259:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:06.448240Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGRkYjlkY2EtYjQ1OTk0Y2YtNGNmMzJiYzQtMmY3MWE5ZmE=, ActorId: [1:7509890361188075528:2401], ActorState: ExecuteState, TraceId: 01jweamdav25m2bqdqa3gnj3g9, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:06.448880Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FCF550ADAC2 14. ??:0: ?? @ 0x7FCF5513F84F >> KqpQuery::TableSinkWithSubquery [FAIL] >> KqpQuery::ExecuteDataQueryCollectMeta >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink >> KqpLimits::CancelAfterRoTx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateThenDelete+UseSink Test command err: Trying to start YDB, gRPC: 27185, MsgBus: 9159 2025-05-29T15:29:53.366944Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890303269679976:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:53.366974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013a3/r3tmp/tmpsYTnuD/pdisk_1.dat 2025-05-29T15:29:53.438891Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890303269679955:2079] 1748532593366768 != 1748532593366771 2025-05-29T15:29:53.441475Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27185, node 1 2025-05-29T15:29:53.452153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:53.452167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:53.452168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:53.452210Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9159 TClient is connected to server localhost:9159 2025-05-29T15:29:53.507384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:53.507421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:53.508403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:53.524567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:53.527773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:53.530122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:53.594251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:53.618221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:53.642363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:53.780196Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890303269681592:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:53.780238Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:53.813661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:53.820940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:53.875414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:53.886346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:53.900463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:53.914037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:53.921015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:53.937304Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890303269682247:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:53.937324Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:53.937333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890303269682252:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:53.938065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:53.941153Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890303269682254:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:54.001927Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890307564649601:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:54.109853Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890307564649617:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:54.109938Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTcwODA5NjMtMzQ2NDFiZGItMjYzNjMzNTktYzIzMzhhNTI=, ActorId: [1:7509890303269681574:2401], ActorState: ExecuteState, TraceId: 01jweam18g8yhq4c5ae6c988x8, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:54.110548Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FB09BA6BAC2 14. ??:0: ?? @ 0x7FB09BAFD84F Trying to start YDB, gRPC: 29814, MsgBus: 25319 2025-05-29T15:29:57.830214Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890321947317576:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:57.830233Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013a3/r3tmp/tmpiHl9kY/pdisk_1.dat 2025-05-29T15:29:57.873159Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890321947317552:2079] 174853259783003 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F6004A86AC2 14. ??:0: ?? 
@ 0x7F6004B1884F Trying to start YDB, gRPC: 22335, MsgBus: 64806 2025-05-29T15:30:06.285696Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890361832339889:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:06.286038Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013a3/r3tmp/tmpW7uKS0/pdisk_1.dat 2025-05-29T15:30:06.331580Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890361832339869:2079] 1748532606285562 != 1748532606285565 2025-05-29T15:30:06.333699Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22335, node 1 2025-05-29T15:30:06.346005Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:06.346019Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:06.346021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:06.346069Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64806 TClient is connected to server localhost:64806 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:06.411286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:06.411320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:06.412382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:06.413033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:06.416754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:06.437756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:06.497681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:06.554668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:06.629581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890361832341526:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:06.629603Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:06.678064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:06.686161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:06.696031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:06.750713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:06.759176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:06.773492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:06.787540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:06.802952Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890361832342179:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:06.802963Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890361832342184:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:06.802973Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:06.803641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:06.807687Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890361832342186:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:06.860582Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890361832342237:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:06.930889Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890361832342253:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:06.931017Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzIyZTI1ZjEtM2VmYzA1NGMtNjJhMTdjYmQtZmMwYjY0YTg=, ActorId: [1:7509890361832341523:2401], ActorState: ExecuteState, TraceId: 01jweamdtj365va4p9f5na7hca, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:06.931763Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FF5E44BAAC2 14. ??:0: ?? @ 0x7FF5E454C84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardNonLocalExec-UseSink Test command err: Trying to start YDB, gRPC: 6268, MsgBus: 27088 2025-05-29T15:29:52.124231Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890302320143519:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:52.124294Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013ab/r3tmp/tmpDCHoEs/pdisk_1.dat 2025-05-29T15:29:52.178520Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:52.178619Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890302320143500:2079] 1748532592124088 != 1748532592124091 TServer::EnableGrpc on GrpcPort 6268, node 1 2025-05-29T15:29:52.188234Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:52.188249Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:52.188251Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:52.188295Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27088 TClient is connected to server localhost:27088 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:29:52.227342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:52.227385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-29T15:29:52.228385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:52.254655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.266648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.285217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.306258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.319144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.462609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890302320145130:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.462645Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.506137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.514238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.569168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.577276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.591829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.605884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.619788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.637414Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890302320145784:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.637446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.637484Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890302320145789:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.638355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:52.646756Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890302320145791:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:52.731492Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890302320145842:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:52.865502Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890302320145858:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:52.865623Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OGRkZWEwNDEtNmQxNTY3NGUtMzM5NWJjZDEtYmVhMGYzMg==, ActorId: [1:7509890302320145127:2401], ActorState: ExecuteState, TraceId: 01jweakzzw25he6cbwjrdwacwb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:52.866807Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F44D4B27AC2 14. ??:0: ?? @ 0x7F44D4BB984F Trying to start YDB, gRPC: 32517, MsgBus: 14842 2025-05-29T15:29:57.154597Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890323288078858:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:57.154655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013ab/r3tmp/tmp5Qx5w3/pdisk_1.dat 2025-05-29T15:29:57.202973Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890323288078835:2079] 1748532597154427 != 1748532597154430 2025-05-29T15:29:57.204687Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32517, node 1 2025-05-29T1 ... 3DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F8E9B7B0AC2 14. ??:0: ?? 
@ 0x7F8E9B84284F Trying to start YDB, gRPC: 11049, MsgBus: 30805 2025-05-29T15:30:06.598419Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890359088093904:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:06.598453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:30:06.602200Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890361024901636:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:06.602237Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013ab/r3tmp/tmpYR5AM6/pdisk_1.dat 2025-05-29T15:30:06.654524Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11049, node 1 2025-05-29T15:30:06.669007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:06.669016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:06.669018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:06.669051Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30805 2025-05-29T15:30:06.699085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:06.699116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:06.700615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:30:06.729482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:06.729508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:06.730657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:06.730693Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:30:06.730934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:06.737740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:06.802905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:06.821701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:06.835155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:06.975546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890359088095838:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:06.975586Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.009888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.021189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.080766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.091390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.103068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.117315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.132022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.147330Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890363383064063:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.147353Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.147356Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890363383064068:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.147900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:07.151031Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890363383064070:2424], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:07.209925Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890363383064156:4276] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:07.290557Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890363383064174:2428], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:07.290650Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWQ1YjQ5MzAtZDQ0NzYwY2MtYjI3MzljMWEtY2NjMDE0OA==, ActorId: [1:7509890359088095820:2370], ActorState: ExecuteState, TraceId: 01jweame5a4gx3trq9fpva0c1b, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:07.291186Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FC40F078AC2 14. ??:0: ?? @ 0x7FC40F10A84F >> KqpQuery::QueryCacheInvalidate >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck >> KqpQuery::QueryStats+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties Test command err: Trying to start YDB, gRPC: 19536, MsgBus: 14945 2025-05-29T15:29:41.649283Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890254477169706:2274];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:41.649403Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00248a/r3tmp/tmp5o9XsA/pdisk_1.dat 2025-05-29T15:29:41.677109Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890254477169457:2079] 1748532581607123 != 1748532581607126 2025-05-29T15:29:41.678231Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19536, node 1 2025-05-29T15:29:41.695947Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:41.695965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:41.695968Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:41.696039Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14945 TClient is connected to server localhost:14945 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:41.753242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:41.753286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:41.754290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:41.754310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:29:41.768528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:41.836395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:41.861883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:41.875511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:42.007030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890254477171091:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:42.007069Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:42.092395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:42.109890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:42.127677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:42.141275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:42.157740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:42.171363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:42.192629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:42.208971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890258772139040:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:42.208999Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:42.209070Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890258772139045:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:42.209869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:42.216955Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890258772139047:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:42.312041Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890258772139098:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:42.405423Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890258772139114:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:42.405526Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWYyOGRmMmEtYTM2NTU3ZmItNzMxZjAzNzctOTM0MDJhYTI=, ActorId: [1:7509890254477171064:2400], ActorState: ExecuteState, TraceId: 01jweaknt03p7qcdr06fnw0d5p, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:42.407099Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AE4A35 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ADBA36 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C7DD76 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x1398A682 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x261085D2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26129F1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26129F1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26129F1C 8. /-S/util/thread/pool.h:71: Process @ 0x26129F1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AEBF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13AEA959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AEA959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AE5DCC 13. ??:0: ?? @ 0x7F6FA9AD2AC2 14. ??:0: ?? @ 0x7F6FA9B6484F Trying to start YDB, gRPC: 6208, MsgBus: 9756 2025-05-29T15:30:02.838354Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890343438107581:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:02.838376Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00248a/r3tmp/tmp91tdvy/pdisk_1.dat 2025-05-29T15:30:02.916298Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:02.916835Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890343438107562:2079] 1748532602838190 != 1748532602838193 TServer::EnableGrpc on GrpcPort 6208, node 1 2025-05-29T15 ... 3AE4A35 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ADBA36 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C7DD76 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x1398A682 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x261085D2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26129F1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26129F1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26129F1C 8. /-S/util/thread/pool.h:71: Process @ 0x26129F1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AEBF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13AEA959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AEA959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AE5DCC 13. ??:0: ?? @ 0x7FBBAF176AC2 14. ??:0: ?? 
@ 0x7FBBAF20884F Trying to start YDB, gRPC: 27535, MsgBus: 21599 2025-05-29T15:30:06.959991Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890362459139701:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:06.960011Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00248a/r3tmp/tmpkAiqr1/pdisk_1.dat 2025-05-29T15:30:07.013917Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:07.014157Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890362459139679:2079] 1748532606959854 != 1748532606959857 TServer::EnableGrpc on GrpcPort 27535, node 1 2025-05-29T15:30:07.026137Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:07.026146Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:07.026148Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:07.026188Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21599 2025-05-29T15:30:07.061592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:07.061628Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:07.062799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:07.091092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:07.097881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
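
The VERIFY failures in the traces above all originate in one test helper: the assertion message prints its signature, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &), and the condition it checks, result.IsSuccess(). A minimal sketch of such a helper follows — the body and the SDK include path are assumptions, not the actual kqp_ut_common.h source:

#include <library/cpp/testing/unittest/registar.h>
#include <ydb-cpp-sdk/client/types/status/status.h> // NYdb::TStatus; exact path assumed

namespace NKikimr::NKqp {

// Fails the test when an SDK call returns a non-success status, attaching the
// status issues as the assertion message. When it fires off the main unittest
// thread -- as in the traces above -- RaiseError hits the "requirement
// UnittestThread failed" path and panics the whole process instead of failing
// a single test.
inline void AssertSuccessResult(const NYdb::TStatus& result) {
    UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
}

} // namespace NKikimr::NKqp

That escalation is why one failing CreateSampleTables call aborts the entire test binary here rather than marking a single test as failed.
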
2025-05-29T15:30:07.115071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:07.134842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:07.146790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:07.259826Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890366754108608:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.259844Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.292532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.298940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.304877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.311609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.318927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.326093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.340271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.355984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890366754109260:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.356014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.356056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890366754109265:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.356748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:07.360382Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890366754109267:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:07.438267Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890366754109318:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:07.509229Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890366754109334:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:07.509343Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWYyMTc4NDgtNWRmMWNjNjQtM2ExYjAwMTEtMjFjYjAwYmU=, ActorId: [1:7509890366754108580:2399], ActorState: ExecuteState, TraceId: 01jweamebv08932rrecf1vtv1d, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:07.509935Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AE4A35 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ADBA36 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C7DD76 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x1398A682 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x261085D2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26129F1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26129F1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26129F1C 8. /-S/util/thread/pool.h:71: Process @ 0x26129F1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AEBF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13AEA959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AEA959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AE5DCC 13. ??:0: ?? @ 0x7F5EAB10EAC2 14. ??:0: ?? @ 0x7F5EAB1A084F >> KqpLimits::TooBigKey+useSink >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink >> KqpLimits::OutOfSpaceYQLUpsertFail+useSink [FAIL] >> KqpLimits::ManyPartitionsSorting ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectTypes-NotNull-IsOlap [FAIL] Test command err: Trying to start YDB, gRPC: 15146, MsgBus: 4782 2025-05-29T15:29:57.598113Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890323822033669:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:57.598135Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00138c/r3tmp/tmp8G2yf7/pdisk_1.dat 2025-05-29T15:29:57.659932Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:57.660435Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890323822033647:2079] 1748532597597979 != 1748532597597982 TServer::EnableGrpc on GrpcPort 15146, node 1 2025-05-29T15:29:57.673951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:57.673961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:57.673962Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:57.673996Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4782 TClient is connected to server localhost:4782 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:29:57.732700Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:57.732732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:57.733783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:57.736382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:57.738906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:29:57.742475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:57.805322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:57.823808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:57.836149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:57.907445Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890323822035278:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:57.907470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:57.949967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:57.957258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:57.967335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:57.973786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:57.988775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:58.002634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:58.017209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:58.032707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890328117003226:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:58.032747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:58.032748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890328117003231:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:58.033449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:58.036432Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890328117003233:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:58.104455Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890328117003284:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:58.218529Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890328117003300:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:58.218655Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjM3M2JjMTUtYzg4ZjE5MjctYzUxZDNlZjAtZmQwMTZjMzU=, ActorId: [1:7509890323822035275:2401], ActorState: ExecuteState, TraceId: 01jweam58g7k5gpb5m33nmnahc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:58.219448Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F0B9DB13AC2 14. ??:0: ?? @ 0x7F0B9DBA584F Trying to start YDB, gRPC: 25626, MsgBus: 28441 2025-05-29T15:30:02.276475Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890342684027198:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:02.276497Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00138c/r3tmp/tmptyFAlJ/pdisk_1.dat 2025-05-29T15:30:02.340587Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890342684027177:2079] 174853260227632 ... 
=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:30:06.794376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:30:06.820325Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:06.820662Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:06.821091Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:06.821354Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:06.821645Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:06.821877Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:06.822408Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:06.822412Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:06.822956Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:06.823248Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:06.824182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:30:06.895875Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890359701673673:2465], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:06.895959Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTk0N2FlYzMtNzhkMzU0NjgtNDk3ZTViMDgtMzAwNzBmZjQ=, ActorId: [1:7509890359701673666:2461], ActorState: ExecuteState, TraceId: 01jweamdx3an3skd40e6bn8ap3, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/query/kqp_query_ut.cpp:1931, virtual void NKikimr::NKqp::NTestSuiteKqpQuery::TTestCaseCreateAsSelect_BadCases::Execute_(NUnitTest::TTestContext &): (prepareResult.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13DBA79B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13F70AD8 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:1931: Execute_ @ 0x13BE7937 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: operator() @ 0x13BFC896 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13F7298D 5. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: Execute @ 0x13BFC255 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13F73102 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F84CAC 8. ??:0: ?? @ 0x7FDDFFF0DD8F 9. ??:0: ?? @ 0x7FDDFFF0DE3F 10. ??:0: ?? @ 0x12A6F028 Trying to start YDB, gRPC: 21230, MsgBus: 11064 2025-05-29T15:30:08.632163Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890368150700696:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:08.632193Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00138c/r3tmp/tmpJpNLS1/pdisk_1.dat 2025-05-29T15:30:08.645215Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:08.645412Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509890368150700665:2079] 1748532608632016 != 1748532608632019 TServer::EnableGrpc on GrpcPort 21230, node 2 2025-05-29T15:30:08.654759Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:08.654772Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:08.654774Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:08.654817Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11064 TClient is connected to server localhost:11064 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
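
Every INTERNAL_ERROR in this report bottoms out in the same issue, yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 — a bounds check tripping inside the expression AST while the setup queries are compiled. An illustrative sketch of that kind of guard; the type and the throw below are assumptions, not the real TExprNode code:

#include <stdexcept>
#include <vector>

// Illustrative AST node: requesting a child position the node does not have
// raises "index out of range" instead of reading past the end of the vector.
struct TAstNodeSketch {
    std::vector<TAstNodeSketch*> Children;

    TAstNodeSketch& Child(size_t index) {
        if (index >= Children.size()) {
            // The thrown message is what the compile actor then attaches to
            // its INTERNAL_ERROR reply in the records above.
            throw std::out_of_range("index out of range");
        }
        return *Children[index];
    }
};

Because the guard trips while compiling the shared table-setup queries, every suite that runs that setup fails identically, regardless of what it actually tests.
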
2025-05-29T15:30:08.736444Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:08.736475Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:08.736781Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:08.737450Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:09.033316Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890372445668610:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.033337Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890372445668626:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.033342Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.033940Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:30:09.035526Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509890372445668631:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:30:09.109062Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509890372445668682:2324] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:09.118622Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.151380Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509890372445668799:2347], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:09.151482Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=NjliNzk2OTItYzMyYmQ2ZTAtNDY5ODYzMzMtNzI1MTViMjI=, ActorId: [2:7509890372445668792:2343], ActorState: ExecuteState, TraceId: 01jweamg35bp6xsz3d4c5bygrc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/query/kqp_query_ut.cpp:2388, void NKikimr::NKqp::NTestSuiteKqpQuery::CreateAsSelectTypes(NUnitTest::TTestContext &) [NotNull = false, IsOlap = false]: (prepareResult.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13DBA79B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13F70AD8 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:2388: CreateAsSelectTypes @ 0x13C1D83E 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: operator() @ 0x13BFC896 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13F7298D 5. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: Execute @ 0x13BFC255 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13F73102 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F84CAC 8. ??:0: ?? @ 0x7FDDFFF0DD8F 9. ??:0: ?? @ 0x7FDDFFF0DE3F 10. ??:0: ?? @ 0x12A6F028 >> KqpStats::DeferredEffects+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateConditionalKey+UseSink Test command err: Trying to start YDB, gRPC: 16303, MsgBus: 27991 2025-05-29T15:29:54.363292Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890310672623624:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:54.363382Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001396/r3tmp/tmpEZDPsp/pdisk_1.dat 2025-05-29T15:29:54.425535Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:54.425654Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890310672623600:2079] 1748532594363076 != 1748532594363079 TServer::EnableGrpc on GrpcPort 16303, node 1 2025-05-29T15:29:54.440399Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:54.440411Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:54.440414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:54.440457Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27991 TClient is connected to server localhost:27991 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
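
The [WorkloadService] sequence that precedes each failure — NOT_FOUND when fetching pool default, a create proposal, a TX_PROXY "path exist, request accepts it" answer, then "Scheduled retry for error: Transaction ... completed, doublechecking" — is an ordinary idempotent-create handshake, not the bug itself. A hedged sketch of that flow; every identifier below is invented for illustration, with stub bodies only so it compiles standalone:

// Possible outcomes of proposing CREATE for .metadata/workload_manager/pools/default.
enum class ECreateOutcome { Created, AlreadyExists, CompletedElsewhere };

ECreateOutcome ProposeCreateDefaultPool() { return ECreateOutcome::AlreadyExists; } // stub
bool DefaultPoolExists() { return true; }                                           // stub

bool EnsureDefaultPool() {
    switch (ProposeCreateDefaultPool()) {
    case ECreateOutcome::Created:
        return true;
    case ECreateOutcome::AlreadyExists:
        // Mirrors "path exist, request accepts it": a concurrent creator won
        // the race, which is an acceptable outcome.
        return DefaultPoolExists();
    case ECreateOutcome::CompletedElsewhere:
        // Mirrors the "doublechecking" retry: re-read the scheme before
        // treating the already-completed transaction as an error.
        return DefaultPoolExists();
    }
    return false;
}

In these runs the handshake completes and the pool exists; the suites then die on the unrelated compile error.
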
2025-05-29T15:29:54.507226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:54.507255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:54.508085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.510707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:54.511118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:54.535487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:54.611155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:54.644183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:54.672296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:54.796829Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890310672625232:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.796858Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.840318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.848922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.860293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.875355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.887980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.904342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.915317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:54.938251Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890310672625884:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.938281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.938410Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890310672625889:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:54.939298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:54.943162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:29:54.943269Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890310672625891:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:55.020827Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890314967593238:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:55.125702Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890314967593254:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:55.125812Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OGJmOTEyNzYtNjBmMTVhYzYtM2VmMmU3MmMtNzM0MzdkY2I=, ActorId: [1:7509890310672625214:2401], ActorState: ExecuteState, TraceId: 01jweam27s0x8y7xmrj7ngqsw0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:55.126477Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F1768761AC2 14. ??:0: ?? @ 0x7F17687F384F Trying to start YDB, gRPC: 27668, MsgBus: 65526 2025-05-29T15:29:58.861265Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890326850120076:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:58.861293Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001396/r3tmp/tmpPhxbsf/p ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F3FA11EAAC2 14. ??:0: ?? 
@ 0x7F3FA127C84F Trying to start YDB, gRPC: 26722, MsgBus: 20973 2025-05-29T15:30:07.303913Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890364545794738:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:07.303960Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001396/r3tmp/tmp8DOxmr/pdisk_1.dat 2025-05-29T15:30:07.357517Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890364545794719:2079] 1748532607303803 != 1748532607303806 2025-05-29T15:30:07.358221Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26722, node 1 2025-05-29T15:30:07.368877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:07.368889Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:07.368891Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:07.368929Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20973 2025-05-29T15:30:07.406026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:07.406050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:20973 2025-05-29T15:30:07.407168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:07.435567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:07.438659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
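
On the client side, the ReplyQueryCompileError records above surface as a failed NYdb::TStatus whose issue chain carries the two Fatal lines. A small sketch of inspecting such a status — the SDK include path is assumed, as in the earlier sketch; only IsSuccess() and GetIssues() from the traces are relied on:

#include <util/stream/output.h>
#include <ydb-cpp-sdk/client/types/status/status.h> // NYdb::TStatus; exact path assumed

// Prints the issue chain of a failed status. For the runs above that chain is:
//   : Fatal: Execution, code: 1060
//   : Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
void DumpIfFailed(const NYdb::TStatus& status) {
    if (!status.IsSuccess()) {
        Cerr << status.GetIssues().ToString() << Endl;
    }
}
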
2025-05-29T15:30:07.456904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:07.475306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:07.486962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:07.622343Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890364545796353:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.622375Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.658699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.665972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.676147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.683054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.690303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.704231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.718388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.733934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890364545797005:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.733971Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.733994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890364545797010:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.734529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:07.737798Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890364545797012:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:07.807690Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890364545797063:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:07.881174Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890364545797079:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:07.881254Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTllY2NkNjYtNjk2NWFkZjAtYjQ3ZDAwMDUtNzFlZjZkMTM=, ActorId: [1:7509890364545796335:2401], ActorState: ExecuteState, TraceId: 01jweameqnagg2737jd0w1wsny, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:07.881861Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F46B40A8AC2 14. ??:0: ?? @ 0x7F46B413A84F >> KqpExplain::ReadTableRanges >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata Test command err: Trying to start YDB, gRPC: 32724, MsgBus: 23554 2025-05-29T15:29:43.213469Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890263782243244:2074];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002472/r3tmp/tmptdsdj8/pdisk_1.dat 2025-05-29T15:29:43.255394Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:29:43.293912Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:43.295346Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890263782243209:2079] 1748532583206011 != 1748532583206014 TServer::EnableGrpc on GrpcPort 32724, node 1 2025-05-29T15:29:43.313003Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:43.313016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:43.313018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:43.313056Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23554 2025-05-29T15:29:43.353366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:43.353396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:43.354511Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server 
localhost:23554 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:43.385283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:43.391110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:43.395407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:43.422920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:43.449241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:29:43.460217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:29:43.604176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890263782244862:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:43.604199Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:43.654869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:43.671874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:43.683536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:43.701332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:43.712572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:43.726532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:43.750463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:43.783092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890263782245515:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:43.783117Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:43.783193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890263782245520:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:43.784047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:43.789278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:29:43.790071Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890263782245522:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:43.847930Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890263782245573:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:44.009000Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890263782245582:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:44.010228Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTlmOWNlMTEtZDNmM2VmNDEtYTYzNTYyYTgtYmJhYWJiN2Q=, ActorId: [1:7509890263782244836:2401], ActorState: ExecuteState, TraceId: 01jweakqb604rga5xqdxs2y76d, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:44.011291Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AE4A35 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ADBA36 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C7DD76 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x1398A682 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x261085D2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26129F1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26129F1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26129F1C 8. /-S/util/thread/pool.h:71: Process @ 0x26129F1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AEBF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13AEA959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AEA959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AE5DCC 13. ??:0: ?? @ 0x7F7C55166AC2 14. ??:0: ?? @ 0x7F7C551F884F Trying to start YDB, gRPC: 15775, MsgBus: 11094 2025-05-29T15:30:02.832234Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890344946912392:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:02.832250Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002472/r3tmp/tmpxai2qb/p ... 3AE4A35 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ADBA36 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C7DD76 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x1398A682 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x261085D2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26129F1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26129F1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26129F1C 8. /-S/util/thread/pool.h:71: Process @ 0x26129F1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AEBF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13AEA959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AEA959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AE5DCC 13. ??:0: ?? @ 0x7F6C16565AC2 14. ??:0: ?? 
@ 0x7F6C165F784F Trying to start YDB, gRPC: 19903, MsgBus: 19010 2025-05-29T15:30:07.138090Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890366487702224:2137];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:07.138227Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002472/r3tmp/tmpJnhEFk/pdisk_1.dat 2025-05-29T15:30:07.194047Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:07.196935Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890366487702126:2079] 1748532607137501 != 1748532607137504 TServer::EnableGrpc on GrpcPort 19903, node 1 2025-05-29T15:30:07.207087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:07.207104Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:07.207106Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:07.207153Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19010 TClient is connected to server localhost:19010 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:07.257396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:07.267618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:07.273085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:07.273112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:07.274251Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:07.328993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:07.346808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:07.357068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:07.515826Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890366487703758:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.515849Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.550010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.557949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.571393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.626631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.681235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.690330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.705198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:07.720974Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890366487704415:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.721001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890366487704420:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.721002Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:07.721840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:07.724489Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890366487704422:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:30:07.795997Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890366487704473:3395] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:07.896643Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890366487704489:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:07.896799Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjkyOTBjMC05NDcwNzk5OC01YWUwZjEzYy1hMTE1NjE3YQ==, ActorId: [1:7509890366487703740:2401], ActorState: ExecuteState, TraceId: 01jweameq83g6zhv445q7wnrpp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:07.897459Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13AE4A35 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13ADBA36 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C7DD76 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x1398A682 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x261085D2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x26129F1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x26129F1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x26129F1C 8. /-S/util/thread/pool.h:71: Process @ 0x26129F1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AEBF69 10. /-S/util/thread/factory.h:15: Execute @ 0x13AEA959 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AEA959 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AE5DCC 13. ??:0: ?? @ 0x7F16AD397AC2 14. ??:0: ?? @ 0x7F16AD42984F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::TableSinkWithSubquery [FAIL] Test command err: Trying to start YDB, gRPC: 1882, MsgBus: 65275 2025-05-29T15:30:02.291290Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890342058099621:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:02.291344Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001311/r3tmp/tmpabi3VH/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1882, node 1 2025-05-29T15:30:02.362535Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:02.362653Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890342058099602:2079] 1748532602291158 != 1748532602291161 2025-05-29T15:30:02.365601Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:02.365609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:02.365611Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:02.365644Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:65275 TClient is connected to server localhost:65275 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:02.433354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:02.433409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:02.434172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.434467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:02.448405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.468457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.493297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.506089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.631407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890342058101236:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:02.631456Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:02.681769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.689793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.744726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.755261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.769551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.783786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.797404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.815605Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890342058101889:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:02.815631Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:02.815633Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890342058101894:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:02.816397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:02.824634Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890342058101896:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:02.900670Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890342058101947:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:03.019180Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890342058101963:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:03.019303Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDc1YWJiOGQtMzM2ODA1ZjktMjBhMWQxYjYtZTJhMjE1M2Q=, ActorId: [1:7509890342058101218:2401], ActorState: ExecuteState, TraceId: 01jweam9xz8xpy7y41r2e1bfsc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:03.020013Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F6079952AC2 14. ??:0: ?? @ 0x7F60799E484F Trying to start YDB, gRPC: 17563, MsgBus: 2674 2025-05-29T15:30:06.352945Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890361113760662:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:06.352965Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001311/r3tmp/tmpr0hHVY/pdisk_1.dat 2025-05-29T15:30:06.411312Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:06.411420Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890361113760643:2079] 1748532606352850 != 1748532606352853 TServer::EnableGrpc on GrpcPort 17563, node 1 2025-05-29T15 ... ateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.041598Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890372126283439:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.041620Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890372126283444:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.041632Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.042392Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:30:09.047580Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509890372126283446:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:30:09.100127Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509890372126283497:3176] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:09.111522Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509890372126283513:2445], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:09.111657Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=NmJhZjQzMGEtNjkyY2QyYWUtMjM5MzRmNGYtZDk0ODkzNDI=, ActorId: [2:7509890367831314804:2326], ActorState: ExecuteState, TraceId: 01jweamg0hd95nw1mfr5ts5kkp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/query/kqp_query_ut.cpp:1850, virtual void NKikimr::NKqp::NTestSuiteKqpQuery::TTestCaseTableSink_ReplaceDataShardDataQuery::Execute_(NUnitTest::TTestContext &) [UseSink = false]: (prepareResult.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13DBA79B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13F70AD8 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:1850: Execute_ @ 0x13C11B77 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: operator() @ 0x13BFC896 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13F7298D 5. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: Execute @ 0x13BFC255 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13F73102 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F84CAC 8. ??:0: ?? @ 0x7F5ECD8F5D8F 9. ??:0: ?? @ 0x7F5ECD8F5E3F 10. ??:0: ?? @ 0x12A6F028 Trying to start YDB, gRPC: 25512, MsgBus: 30750 2025-05-29T15:30:09.417002Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509890373837828611:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:09.417025Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001311/r3tmp/tmp82g3S8/pdisk_1.dat 2025-05-29T15:30:09.428626Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:09.429087Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509890373837828563:2079] 1748532609416775 != 1748532609416778 TServer::EnableGrpc on GrpcPort 25512, node 3 2025-05-29T15:30:09.441044Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:09.441059Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:09.441061Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:09.441114Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30750 TClient is connected to server localhost:30750 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:30:09.519262Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:09.519295Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:09.520113Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.520371Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:09.778642Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509890373837829220:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.778671Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.783118Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.837823Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.847817Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509890373837829393:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.847841Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.847889Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509890373837829398:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.848543Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:30:09.852357Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7509890373837829400:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:30:09.906977Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:7509890373837829451:2427] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:09.912954Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:7509890373837829467:2353], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:09.913048Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=OTMwODJmMmMtNmNiNzY1NzgtNjFlMGFhODUtMzc4MTg3MDE=, ActorId: [3:7509890373837829391:2344], ActorState: ExecuteState, TraceId: 01jweamgsq54x4gh36saprz26m, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/query/kqp_query_ut.cpp:2248, virtual void NKikimr::NKqp::NTestSuiteKqpQuery::TTestCaseTableSinkWithSubquery::Execute_(NUnitTest::TTestContext &): (prepareResult.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13DBA79B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13F70AD8 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:2248: Execute_ @ 0x13BF5C42 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: operator() @ 0x13BFC896 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13F7298D 5. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: Execute @ 0x13BFC255 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13F73102 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F84CAC 8. ??:0: ?? @ 0x7F5ECD8F5D8F 9. ??:0: ?? @ 0x7F5ECD8F5E3F 10. ??:0: ?? @ 0x12A6F028 >> KqpParams::Decimal+QueryService-UseSink >> KqpLimits::ManyPartitionsSorting [GOOD] >> KqpParams::RowsList >> KqpLimits::ManyPartitionsSortingLimit >> KqpExplain::UpdateSecondaryConditional+UseSink >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink [GOOD] >> KqpLimits::KqpMkqlMemoryLimitException |72.9%| [TA] $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpExplain::CreateTableAs+Stats ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 10510, MsgBus: 10956 2025-05-29T15:30:09.272054Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890373137640361:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:09.272089Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012e9/r3tmp/tmp8lL1Vd/pdisk_1.dat 2025-05-29T15:30:09.325784Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:09.325860Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890373137640342:2079] 1748532609271974 != 1748532609271977 TServer::EnableGrpc on GrpcPort 10510, node 1 2025-05-29T15:30:09.336051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:09.336079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:09.336081Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:09.336122Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10956 2025-05-29T15:30:09.374826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:09.374847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:09.377957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10956 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:09.402672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.407916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:30:09.414620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.614479Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890373137641352:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.614482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890373137641343:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.614499Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.615209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:30:09.616767Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890373137641357:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:30:09.672606Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890373137641408:2548] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:09.956133Z node 1 :KQP_COMPUTE WARN: log.cpp:784: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976715661;task_id=1;memory=1048576; 2025-05-29T15:30:09.956148Z node 1 :KQP_COMPUTE WARN: dq_compute_memory_quota.h:152: TxId: 281474976715661, task: 1. [Mem] memory 1048576 NOT granted 2025-05-29T15:30:09.974406Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:678: SelfId: [1:7509890373137641449:2365], TxId: 281474976715661, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jweamgjdfy4jtxpj1g4qybaw. SessionId : ydb://session/3?node_id=1&id=Nzk5MDMyZTMtZGI0MmQwZmUtOTJmZTRiYWMtN2I1ZGI4MDk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 1: 10, host: ghrun-lxxdcki4qu, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976715661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-05-29T15:30:09.931174Z }, code: 2029 }. 2025-05-29T15:30:09.987948Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [1:7509890373137641451:2366], TxId: 281474976715661, task: 2. Ctx: { TraceId : 01jweamgjdfy4jtxpj1g4qybaw. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=Nzk5MDMyZTMtZGI0MmQwZmUtOTJmZTRiYWMtN2I1ZGI4MDk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7509890373137641438:2351], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-05-29T15:30:09.988384Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2583: SessionId: ydb://session/3?node_id=1&id=Nzk5MDMyZTMtZGI0MmQwZmUtOTJmZTRiYWMtN2I1ZGI4MDk=, ActorId: [1:7509890373137641325:2351], ActorState: ExecuteState, TraceId: 01jweamgjdfy4jtxpj1g4qybaw, Create QueryResponse for error on request, msg:
: Error: Mkql memory limit exceeded, allocated by task 1: 10, host: ghrun-lxxdcki4qu, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976715661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-05-29T15:30:09.931174Z } , code: 2029 Trying to start YDB, gRPC: 4197, MsgBus: 64043 2025-05-29T15:30:10.137191Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890377802416447:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:10.137210Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012e9/r3tmp/tmpY6OT9i/pdisk_1.dat 2025-05-29T15:30:10.154278Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:10.154364Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509890377802416424:2079] 1748532610137044 != 1748532610137047 TServer::EnableGrpc on GrpcPort 4197, node 2 2025-05-29T15:30:10.161097Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:10.161118Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:10.161121Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:10.161172Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64043 TClient is connected to server localhost:64043 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
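The sequence above is the out-of-memory failure path this test exercises: the memory manager granted the transaction only 20 bytes up front, task 1 then requested 1048576 bytes (1 MiB), the quota was NOT granted, and the compute actor failed with OVERLOADED / KIKIMR_PRECONDITION_FAILED (issue code 2029), which the session actor turned into the query error response. As a hedged illustration only (this helper is not part of the test suite; the header path and the retry policy are assumptions), a client could classify such a status as retryable using the NYdb::TStatus API that also appears verbatim in the assertion traces later in this report:

    // Sketch: classifying the failure recorded above on the client side.
    // The header path below is an assumption; adjust to the SDK layout in use.
    #include <ydb/public/sdk/cpp/client/ydb_types/status/status.h>

    // OVERLOADED here means the cluster could not grant a resource (the Mkql
    // memory quota); backing off and retrying, or reducing the query's
    // parallelism/memory footprint, is the usual reaction.
    bool IsResourceRetryable(const NYdb::TStatus& result) {
        if (result.IsSuccess()) {
            return false;
        }
        switch (result.GetStatus()) {
        case NYdb::EStatus::OVERLOADED:
        case NYdb::EStatus::PRECONDITION_FAILED: // "Mkql memory limit exceeded", code 2029
            return true; // a retry with lower concurrency may succeed
        default:
            return false;
        }
    }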
2025-05-29T15:30:10.242309Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:10.242345Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:10.242627Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.243259Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:10.244125Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard ... Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:11.627475Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:11.878167Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509890383548190341:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.878188Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509890383548190336:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.878268Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.879029Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:30:11.881248Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7509890383548190350:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:30:11.968748Z node 4 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [4:7509890383548190401:2547] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:11.981264Z node 4 :KQP_COMPUTE WARN: log.cpp:784: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976715661;task_id=2;memory=1048576; 2025-05-29T15:30:11.981280Z node 4 :KQP_COMPUTE WARN: dq_compute_memory_quota.h:152: TxId: 281474976715661, task: 2. [Mem] memory 1048576 NOT granted 2025-05-29T15:30:11.981405Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:678: SelfId: [4:7509890383548190456:2365], TxId: 281474976715661, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTVkMzM0ZWMtNzhmYWY2OTQtYmFjNDMzMTUtMzQ5MjU1ODM=. TraceId : 01jweamjj2aas8jngcjac3cjax. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-lxxdcki4qu, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976715661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 50B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 5, started at: 2025-05-29T15:30:11.980999Z }, code: 2029 }. 2025-05-29T15:30:11.981525Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [4:7509890383548190455:2364], TxId: 281474976715661, task: 1. Ctx: { TraceId : 01jweamjj2aas8jngcjac3cjax. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTVkMzM0ZWMtNzhmYWY2OTQtYmFjNDMzMTUtMzQ5MjU1ODM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7509890383548190431:2351], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-05-29T15:30:11.981546Z node 4 :KQP_COMPUTE WARN: log.cpp:784: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976715661;task_id=3;memory=1048576; 2025-05-29T15:30:11.981555Z node 4 :KQP_COMPUTE WARN: dq_compute_memory_quota.h:152: TxId: 281474976715661, task: 3. [Mem] memory 1048576 NOT granted 2025-05-29T15:30:11.981655Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:678: SelfId: [4:7509890383548190458:2366], TxId: 281474976715661, task: 3. Ctx: { TraceId : 01jweamjj2aas8jngcjac3cjax. SessionId : ydb://session/3?node_id=4&id=YTVkMzM0ZWMtNzhmYWY2OTQtYmFjNDMzMTUtMzQ5MjU1ODM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 3: 10, host: ghrun-lxxdcki4qu, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976715661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 30B, tx total memory allocations: 0B, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 3, started at: 2025-05-29T15:30:11.980999Z }, code: 2029 }. 2025-05-29T15:30:11.981730Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [4:7509890383548190459:2367], TxId: 281474976715661, task: 4. Ctx: { TraceId : 01jweamjj2aas8jngcjac3cjax. SessionId : ydb://session/3?node_id=4&id=YTVkMzM0ZWMtNzhmYWY2OTQtYmFjNDMzMTUtMzQ5MjU1ODM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7509890383548190431:2351], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-05-29T15:30:11.981742Z node 4 :KQP_COMPUTE WARN: log.cpp:784: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976715661;task_id=5;memory=1048576; 2025-05-29T15:30:11.981745Z node 4 :KQP_COMPUTE WARN: dq_compute_memory_quota.h:152: TxId: 281474976715661, task: 5. [Mem] memory 1048576 NOT granted 2025-05-29T15:30:11.981819Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:678: SelfId: [4:7509890383548190460:2368], TxId: 281474976715661, task: 5. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTVkMzM0ZWMtNzhmYWY2OTQtYmFjNDMzMTUtMzQ5MjU1ODM=. TraceId : 01jweamjj2aas8jngcjac3cjax. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 5: 10, host: ghrun-lxxdcki4qu, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976715661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 0B, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2025-05-29T15:30:11.980999Z }, code: 2029 }. 2025-05-29T15:30:11.982083Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2583: SessionId: ydb://session/3?node_id=4&id=YTVkMzM0ZWMtNzhmYWY2OTQtYmFjNDMzMTUtMzQ5MjU1ODM=, ActorId: [4:7509890383548190334:2351], ActorState: ExecuteState, TraceId: 01jweamjj2aas8jngcjac3cjax, Create QueryResponse for error on request, msg:
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-lxxdcki4qu, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976715661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 50B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 5, started at: 2025-05-29T15:30:11.980999Z } , code: 2029 query_phases { duration_us: 1654 table_access { name: "/Root/LargeTable" partitions_count: 3 } cpu_time_us: 1487 affected_shards: 8 } compilation { duration_us: 10826 cpu_time_us: 9916 } process_cpu_time_us: 92 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"LargeTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/LargeTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Data\",\"DataText\",\"Key\",\"KeyText\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Collect\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"4\",\"Push\":{}}],\"MaxMemoryUsage\":{\"Count\":4,\"Sum\":2097192,\"Max\":1048586,\"Min\":10,\"History\":[0,2097192]},\"Tasks\":4,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/LargeTable\"}],\"BaseTimeMs\":1748532611981,\"CpuTimeUs\":{\"Count\":4,\"Sum\":321,\"Max\":233,\"Min\":6,\"History\":[0,321]},\"Ingress\":[{\"Pop\":{},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{}}]}}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Node Type\":\"Collect\",\"Stats\":{\"UseLlvm\":\"undefined\",\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":10,\"Max\":10,\"Min\":10,\"History\":[0,10]},\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"BaseTimeMs\":1748532611981,\"CpuTimeUs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3,\"History\":[0,3]}}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":10826,\"CpuTimeUs\":9916},\"ProcessCpuTimeUs\":92,\"TotalDurationUs\":104256,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":91131},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/LargeTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Data\",\"DataText\",\"Key\",\"KeyText\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/LargeTable\" \'\"72057594046644480:2\" \'\"\" \'1))\n(let $2 \'(\'\"Data\" \'\"DataText\" \'\"Key\" \'\"KeyText\"))\n(let $3 (KqpRowsSourceSettings $1 $2 \'() (Void) \'()))\n(let $4 (lambda \'($10) 
$10))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $3)) $4 \'(\'(\'\"_logical_id\" \'354) \'(\'\"_id\" \'\"509a17bf-a307e879-b720d895-254dd1f9\"))))\n(let $6 (DqCnUnionAll (TDqOutput $5 \'\"0\")))\n(let $7 (DqPhyStage \'($6) $4 \'(\'(\'\"_logical_id\" \'377) \'(\'\"_id\" \'\"83747330-d7425520-ae3f6209-84d2ab08\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'()))\n(let $9 (OptionalType (DataType \'String)))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"Data\" (OptionalType (DataType \'Int64))) \'(\'\"DataText\" $9) \'(\'\"Key\" (OptionalType (DataType \'Uint64))) \'(\'\"KeyText\" $9))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 104256 total_cpu_time_us: 11495 >> KqpLimits::ManyPartitionsSortingLimit [GOOD] >> KqpParams::CheckCacheByAst >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink >> KqpQuery::QueryCancelWrite >> KqpExplain::SortStage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::NoEvaluate Test command err: Trying to start YDB, gRPC: 13388, MsgBus: 9126 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001393/r3tmp/tmpGXkmxk/pdisk_1.dat 2025-05-29T15:29:55.700501Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890313119144681:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:55.701044Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:29:55.769940Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890313119144657:2079] 1748532595699835 != 1748532595699838 2025-05-29T15:29:55.770458Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13388, node 1 2025-05-29T15:29:55.781948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:55.781961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:55.781963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:55.782016Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9126 TClient is connected to server localhost:9126 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:29:55.846023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:55.846050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:55.847103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:55.849855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:55.856454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:55.874660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:55.892629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:55.903032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:56.047757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890317414113587:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:56.047795Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:56.083782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:56.091427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:56.148421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:56.157944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:56.174099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:56.186083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:56.197132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:56.215935Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890317414114239:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:56.215969Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:56.216058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890317414114244:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:56.217116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:56.223831Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890317414114246:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:56.310320Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890317414114297:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:56.409696Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890317414114313:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:56.410833Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTNhMDNlNDUtZDg5YzRjOTctZjNjY2Y5MGYtYjczYzJjZWY=, ActorId: [1:7509890317414113569:2401], ActorState: ExecuteState, TraceId: 01jweam3fqefvk68ck875cvb4m, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:56.411622Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F0074800AC2 14. ??:0: ?? @ 0x7F007489284F Trying to start YDB, gRPC: 21376, MsgBus: 16331 2025-05-29T15:29:59.979651Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890331016895543:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:59.979716Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001393/r3tmp/tmpqUlFTR/pdisk_1.dat 2025-05-29T15:30:00.041464Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890331016895523:2079] 1748532599979462 != 1748532599979465 2025-05-29T15:30:00.046168Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21376, node 1 2025-05-29T15 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F361B970AC2 14. ??:0: ?? 
@ 0x7F361BA0284F Trying to start YDB, gRPC: 26144, MsgBus: 14897 2025-05-29T15:30:08.692249Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890369127969654:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:08.692278Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001393/r3tmp/tmp7cLIEd/pdisk_1.dat 2025-05-29T15:30:08.747985Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890369127969633:2079] 1748532608692116 != 1748532608692119 2025-05-29T15:30:08.751127Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26144, node 1 2025-05-29T15:30:08.760817Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:08.760831Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:08.760833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:08.760879Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14897 2025-05-29T15:30:08.795470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:08.795507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:08.796590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14897 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:08.824124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:08.833266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
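The VERIFY failures in this report (the trace above and the identical ones below) originate in the helper named in frame 3 of each stack trace, NKikimr::NKqp::AssertSuccessResult (kqp_ut_common.h:375). A minimal sketch of that pattern follows; the signature and the "(result.IsSuccess())" condition are taken verbatim from the trace, while the body is an assumption:

    // Sketch of the test helper the traces point at (kqp_ut_common.h:375).
    #include <library/cpp/testing/unittest/registar.h>

    void AssertSuccessResult(const NYdb::TStatus& result) {
        // UNIT_ASSERT_C raises through library/cpp/testing/unittest; when the
        // assertion fires on a non-unittest thread (here: a thread-pool worker
        // running CreateSampleTables), RaiseError() hits its "requirement
        // UnittestThread failed" check and escalates to the VERIFY/abort above.
        UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
    }

This is why a compilation failure ("index out of range" at yql_expr.h:1874) surfaces as a process-level VERIFY and a full stack dump rather than as an ordinary failed test assertion.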
2025-05-29T15:30:08.896948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:08.917347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:08.927908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.001308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890373422938562:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.001329Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.034256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.041412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.055223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.061923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.068568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.076138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.082978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.099454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890373422939216:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.099475Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.099517Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890373422939221:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.100142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:09.103381Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890373422939223:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:30:09.184865Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890373422939274:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:09.255674Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890373422939290:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:09.255786Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmE3ZTAyMDQtYjEwYWFmMjMtN2Y2MTU1MzQtOThlMGYxZWY=, ActorId: [1:7509890369127971263:2401], ActorState: ExecuteState, TraceId: 01jweamg2bf550vpqzszxb583x, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:09.256385Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FD837DF8AC2 14. ??:0: ?? @ 0x7FD837E8A84F >> KqpStats::RequestUnitForExecute >> KqpStats::MultiTxStatsFullYql >> KqpExplain::UpdateSecondaryConditionalPrimaryKey+UseSink >> KqpExplain::ExplainScanQueryWithParams >> KqpParams::MissingOptionalParameter+UseSink >> KqpStats::MultiTxStatsFullExpScan >> KqpStats::JoinNoStatsScan >> KqpParams::InvalidJson >> KqpQuery::DeleteWhereInSubquery ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitionsSortingLimit [GOOD] Test command err: Trying to start YDB, gRPC: 3660, MsgBus: 22046 2025-05-29T15:30:04.503186Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890352451821019:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:04.503205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012fa/r3tmp/tmpYVhXdZ/pdisk_1.dat 2025-05-29T15:30:04.567183Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890352451820998:2079] 1748532604503075 != 1748532604503078 2025-05-29T15:30:04.574064Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3660, node 1 2025-05-29T15:30:04.586621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:04.586634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:04.586635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:04.586679Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22046 TClient is connected to server localhost:22046 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:04.640067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:04.640086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:04.641308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.641618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:04.655326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.721819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.746659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.763577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.869141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890352451823510:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.869165Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.903771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.919101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.932282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.943900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.955901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.968357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.982604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.998726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890352451824635:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.998768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890352451824640:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.998775Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.999400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:05.009969Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890352451824642:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:05.093832Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890356746792018:4746] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:05.174157Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890356746792036:2479], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:05.174271Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTNhMzZmOGItOGJkZGNhMzctMWQyMzNkNi0xMjA2ZDYzZQ==, ActorId: [1:7509890352451823491:2406], ActorState: ExecuteState, TraceId: 01jweamc267z2n3ach840bnkj8, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:05.174964Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FBCEA4BEAC2 14. ??:0: ?? @ 0x7FBCEA55084F Trying to start YDB, gRPC: 26253, MsgBus: 10919 2025-05-29T15:30:08.390223Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890368009796338:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:08.390254Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012fa/r3tmp/tmpWcTXCj/pdisk_1.dat 2025-05-29T15:30:08.455338Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890368009796318:2079] 1748532608390089 != 1748532608390092 2025-05-29T15:30:08.459270Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26253, node 1 2025-05-29T1 ... 
":13,\"Min\":4},\"FirstMessageMs\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"Bytes\":{\"Count\":1,\"Sum\":7696,\"Max\":7696,\"Min\":7696,\"History\":[13,7696]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":9000,\"Max\":9000,\"Min\":9000}},\"Name\":\"RESULT\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":13,\"Max\":13,\"Min\":13},\"Chunks\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":12,\"Max\":12,\"Min\":12},\"FirstMessageMs\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"ActiveMessageMs\":{\"Count\":1,\"Max\":13,\"Min\":4},\"PauseMessageMs\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":9000,\"Max\":9000,\"Min\":9000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":10480,\"Max\":10480,\"Min\":10480,\"History\":[13,10480]},\"WaitPeriods\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"WaitMessageMs\":{\"Count\":1,\"Max\":12,\"Min\":4}}}],\"CpuTimeUs\":{\"Count\":1,\"Sum\":2214,\"Max\":2214,\"Min\":2214,\"History\":[1,110,13,2214]},\"StageDurationUs\":9000,\"ResultRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResultBytes\":{\"Count\":1,\"Sum\":7696,\"Max\":7696,\"Min\":7696},\"OutputBytes\":{\"Count\":1,\"Sum\":7696,\"Max\":7696,\"Min\":7696},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":27,\"Max\":27,\"Min\":27},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":13,\"Max\":13,\"Min\":13},\"ActiveMessageMs\":{\"Count\":1,\"Max\":13,\"Min\":4},\"FirstMessageMs\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168,\"History\":[13,8168]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":9000,\"Max\":9000,\"Min\":9000}},\"Name\":\"2\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":12,\"Max\":12,\"Min\":12},\"Chunks\":{\"Count\":1,\"Sum\":100,\"Max\":100,\"Min\":100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":12,\"Max\":12,\"Min\":12},\"FirstMessageMs\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"ActiveMessageMs\":{\"Count\":1,\"Max\":12,\"Min\":4},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168,\"History\":[13,8168]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":8000,\"Max\":8000,\"Min\":8000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":2558,\"Max\":2558,\"Min\":2558,\"History\":[13,2558]},\"WaitPeriods\":{\"Count\":1,\"Sum\":5,\"Max\":5,\"Min\":5},\"WaitMessageMs\":{\"Count\":1,\"Max\":12,\"Min\":1}}}],\"UpdateTimeMs\":13,\"InputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"Tasks\":1}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":29675,\"CpuTimeUs\":28816},\"ProcessCpuTimeUs\":114,\"TotalDurationUs\":133786,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":87421},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\"],\"Reverse\":false,\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/ManyShardsTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Data\",\"Key\"],\"E-Cost\":\"No estimate\"}],\"Node 
Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/ManyShardsTable\" \'\"72057594046644480:2\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Data\" \'\"Key\") \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $3 (StructType \'(\'\"Data\" (OptionalType (DataType \'Int32))) \'(\'\"Key\" (OptionalType (DataType \'Uint32)))))\n(let $4 \'(\'(\'\"_logical_id\" \'367) \'(\'\"_id\" \'\"18fdbb75-2f78026a-f1467713-ccee057c\") \'(\'\"_wide_channels\" $3)))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($9) (block \'(\n (let $10 (lambda \'($11) (Member $11 \'\"Data\") (Member $11 \'\"Key\")))\n (return (FromFlow (ExpandMap (ToFlow $9) $10)))\n))) $4))\n(let $6 (DqCnMerge (TDqOutput $5 \'\"0\") \'(\'(\'1 \'\"Asc\"))))\n(let $7 (DqPhyStage \'($6) (lambda \'($12) (FromFlow (NarrowMap (ToFlow $12) (lambda \'($13 $14) (AsStruct \'(\'\"Data\" $13) \'(\'\"Key\" $14)))))) \'(\'(\'\"_logical_id\" \'379) \'(\'\"_id\" \'\"7a2e62bf-e5bf4dd3-155646f3-3082d0a1\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'(\'\"Key\" \'\"Data\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType $3) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 133786 total_cpu_time_us: 47211 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/ManyShardsTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":2},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Data\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1748532611\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"a37f8c1f-97e07ea4-c1af04a5-732219a2\",\"version\":\"1.0\"}" Trying to start YDB, gRPC: 16820, MsgBus: 21171 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012fa/r3tmp/tmp1uO54u/pdisk_1.dat 2025-05-29T15:30:11.764046Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:30:11.764377Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:11.764602Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509890384365915659:2079] 1748532611747805 != 1748532611747808 TServer::EnableGrpc on GrpcPort 
16820, node 3 2025-05-29T15:30:11.777240Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:11.777258Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:11.777260Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:11.777317Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21171 TClient is connected to server localhost:21171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:11.852488Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:11.852521Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:11.852829Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:11.853469Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:11.854549Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:30:11.858410Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.139785Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509890388660887805:2633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.139807Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.139820Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509890388660887831:2636], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.140748Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:30:12.143264Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7509890388660887834:2637], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:30:12.204495Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:7509890388660887887:5093] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> KqpQuery::YqlSyntaxV0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTx Test command err: Trying to start YDB, gRPC: 17911, MsgBus: 28196 2025-05-29T15:29:57.374399Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890320132689770:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:57.374428Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00138f/r3tmp/tmpSdT4z3/pdisk_1.dat 2025-05-29T15:29:57.429059Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:57.429124Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890320132689751:2079] 1748532597374312 != 1748532597374315 TServer::EnableGrpc on GrpcPort 17911, node 1 2025-05-29T15:29:57.440979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:57.440994Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:57.440996Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:57.441044Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28196 2025-05-29T15:29:57.477586Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:57.477629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:57.478672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28196 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:57.505459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:57.518702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:57.540578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:57.558600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:57.570912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:57.709956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890320132691385:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:57.709990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:57.743625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:57.751595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:57.764709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:57.819540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:57.827707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:57.841759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:57.855916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:57.871392Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890320132692038:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:57.871412Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:57.871417Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890320132692043:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:57.872050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:57.875017Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890320132692045:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:57.943323Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890320132692096:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:58.032039Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890320132692112:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:58.032137Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWU5ZDJiNmItOTk0Yzk0NjctZDc5MDk0YTgtZTFjZGEwZGE=, ActorId: [1:7509890320132691367:2401], ActorState: ExecuteState, TraceId: 01jweam53f5b414cnb7v7wv9nk, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:58.032655Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FF30451AAC2 14. ??:0: ?? @ 0x7FF3045AC84F Trying to start YDB, gRPC: 31039, MsgBus: 8932 2025-05-29T15:30:02.122954Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890345090728922:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:02.122995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00138f/r3tmp/tmpqAZOch/pdisk_1.dat 2025-05-29T15:30:02.178812Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890345090728902:2079] 1748532602122824 != 1748532602122827 2025-05-29T15:30:02.181411Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31039, node 1 2025-05-29T ... @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FFAA5442AC2 14. ??:0: ?? 
@ 0x7FFAA54D484F Trying to start YDB, gRPC: 6477, MsgBus: 5816 2025-05-29T15:30:10.272035Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890377299116536:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:10.272066Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00138f/r3tmp/tmpx1Lv0F/pdisk_1.dat 2025-05-29T15:30:10.323310Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890377299116514:2079] 1748532610271889 != 1748532610271892 2025-05-29T15:30:10.323725Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6477, node 1 2025-05-29T15:30:10.337792Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:10.337806Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:10.337809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:10.337865Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5816 2025-05-29T15:30:10.374342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:10.374367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:10.375451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5816 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:10.402634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:10.409609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:10.429946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:10.453310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:10.462943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:10.730958Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890377299118146:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:10.730999Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:10.764572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.772699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.784220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.798503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.812543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.826818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.841060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.856855Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890377299118798:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:10.856884Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:10.856892Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890377299118803:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:10.857496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:10.860466Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890377299118805:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:10.920032Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890377299118857:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:11.037966Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890377299118873:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:11.038102Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmMzYzdjYmMtY2IzZWQ2YTEtODU2OGE5YjgtNzdlN2VhNmQ=, ActorId: [1:7509890377299118128:2401], ActorState: ExecuteState, TraceId: 01jweamhs86665ybp4sw81kyfj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:11.038863Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F6CDEA76AC2 14. ??:0: ?? @ 0x7F6CDEB0884F >> KqpParams::DefaultParameterValue >> KqpQuery::Pure >> KqpQuery::QueryStats-UseSink >> KqpLimits::WaitCAsTimeout >> KqpExplain::CreateTableAs+Stats [FAIL] >> KqpExplain::CreateTableAs-Stats >> KqpLimits::TooBigKey-useSink >> KqpStats::DataQueryWithEffects+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::ReadOverloaded-StreamLookup Test command err: Trying to start YDB, gRPC: 27833, MsgBus: 15842 2025-05-29T15:29:52.483685Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890299773272830:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:52.483729Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013a4/r3tmp/tmpmBW8fg/pdisk_1.dat 2025-05-29T15:29:52.540324Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:52.540404Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890299773272810:2079] 1748532592483514 != 1748532592483517 TServer::EnableGrpc on GrpcPort 27833, node 1 2025-05-29T15:29:52.549069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:52.549085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:52.549086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:52.549136Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15842 TClient is connected to server localhost:15842 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:29:52.613934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:52.613967Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:52.615029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:52.625894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.628756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:29:52.631092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.696819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.719523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.731841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:52.855001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890299773274465:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.855030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:52.895642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.904830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.912990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.928282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.942346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:52.998680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:53.012234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:53.028893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890304068242414:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:53.028922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890304068242419:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:53.028926Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:53.029776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:53.039267Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890304068242421:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:53.120028Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890304068242472:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:53.201527Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890304068242488:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 VERIFY failed (2025-05-29T15:29:53.203558Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:29:53.202835Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTAyNzNhZWEtZGI0N2I0MjgtMzQ4NDlmYjQtOWQ1OWQ4Nzk=, ActorId: [1:7509890299773274447:2401], ActorState: ExecuteState, TraceId: 01jweam0c43gkp3wmqfr12pv1d, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FE04FC5FAC2 14. ??:0: ?? @ 0x7FE04FCF184F Trying to start YDB, gRPC: 15292, MsgBus: 4685 2025-05-29T15:29:57.134067Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890322233191043:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:57.134080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013a4/r3tmp/tmpJNwv4A/pdisk_1.dat 2025-05-29T15:29:57.187868Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890322233191021:2079] 1748532597133 ... ibrary/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F850A919AC2 14. ??:0: ?? @ 0x7F850A9AB84F Trying to start YDB, gRPC: 27564, MsgBus: 22336 2025-05-29T15:30:08.516179Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:30:08.516208Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:30:08.516219Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013a4/r3tmp/tmpkOIXVW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27564, node 1 TClient is connected to server localhost:22336 2025-05-29T15:30:08.646184Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:08.647197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:08.647214Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:08.647220Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:08.647341Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:30:08.647752Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532608172850 != 1748532608172854 TClient is connected to server localhost:22336 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:08.717996Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:08.718030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:08.718608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:08.720622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:08.804922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:08.997245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.258189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.467554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.772675Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1718:3314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.772728Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.777307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.960229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.203356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.405890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.632514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.838536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.107385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.329244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2391:3811], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.329289Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.329346Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2396:3816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.330567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:11.479746Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2398:3818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:11.522060Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:2456:3857] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:11.643851Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2466:3866], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:11.646041Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTZiNzkxMGEtYzhlOTc3MjUtZGRkZTk1NGUtMzgxNTRkZTI=, ActorId: [1:1704:3301], ActorState: ExecuteState, TraceId: 01jweamj80c0bpjwrh0ahfxast, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:11.647055Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:30:13.110688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:30:13.110723Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FF5FB9A7AC2 14. ??:0: ?? @ 0x7FF5FBA3984F >> KqpExplain::Predicates >> KqpParams::Decimal+QueryService+UseSink >> KqpScheme::DisableResourcePoolClassifiersOnServerless [FAIL] >> KqpScheme::DisableMetadataObjectsOnServerless >> KqpQuery::CreateAsSelectBadTypes+IsOlap >> KqpExplain::CreateTableAs-Stats [FAIL] >> KqpLimits::QueryReplySize >> KqpQuery::CreateAsSelectBadTypes+IsOlap [GOOD] >> KqpQuery::CreateAsSelectBadTypes-IsOlap >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink >> KqpExplain::UpdateOnSecondary-UseSink >> KqpParams::CheckCacheWithRecompilationQuery >> KqpLimits::LargeParametersAndMkqlFailure >> KqpExplain::SelfJoin3xSameLabels >> KqpQuery::QueryCancelWriteImmediate |73.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |73.0%| [TA] {RESULT} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |73.0%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut >> KqpQuery::CreateAsSelectBadTypes-IsOlap [GOOD] >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix >> KqpExplain::FewEffects+UseSink >> TAsyncIndexTests::OnlineBuild >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] >> KqpStats::MultiTxStatsFullScan |73.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::CreateTableAs-Stats [FAIL] Test command err: Trying to start YDB, gRPC: 8656, MsgBus: 29521 2025-05-29T15:29:59.698726Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890332460636911:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:59.699048Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001376/r3tmp/tmp6xHG61/pdisk_1.dat 2025-05-29T15:29:59.752935Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:59.753038Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890332460636890:2079] 1748532599698300 != 1748532599698303 TServer::EnableGrpc on GrpcPort 8656, node 1 2025-05-29T15:29:59.764205Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:59.764230Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:59.764232Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:59.764278Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29521 TClient is connected to server localhost:29521 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:29:59.801139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:59.801176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:59.802100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:59.829522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:59.841484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:59.861108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:59.920359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:59.931558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:00.011909Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890336755605836:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:00.011936Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:00.060970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:00.074540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:00.082961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:00.140046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:00.159791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:00.177842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:00.186551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:00.202653Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890336755606490:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:00.202672Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:00.202730Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890336755606495:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:00.203341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:00.206325Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890336755606497:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:00.264306Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890336755606548:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:00.362606Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890336755606564:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:00.362718Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzkzZDQ2ZDAtMzIwMDc1NGMtNzNkMTA3YjktZTc3N2YwMDI=, ActorId: [1:7509890336755605818:2401], ActorState: ExecuteState, TraceId: 01jweam7ca0622yh9n4fxnk3gt, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:00.363476Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F614A389AC2 14. ??:0: ?? @ 0x7F614A41B84F Trying to start YDB, gRPC: 9245, MsgBus: 12480 2025-05-29T15:30:04.077979Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890350292847103:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:04.078026Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001376/r3tmp/tmpt9nmN5/pdisk_1.dat 2025-05-29T15:30:04.141074Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:04.143925Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890350292847083:2079] 1748532604077805 != 1748532604077808 TServer::EnableGrpc on GrpcPort 9245, node 1 2025-05-29T15: ... { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:12.591687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.768993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890387342229921:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.769020Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890387342229896:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.769088Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.769923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:30:12.772535Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890387342229925:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:30:12.843942Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890387342229976:2324] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:12.886204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.976283Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890387342230126:2358], status: INTERNAL_ERROR, issues:
: Fatal: ydb/core/kqp/provider/yql_kikimr_provider.cpp:259 ExistingTable(): requirement desc->DoesExist() failed, code: 1 2025-05-29T15:30:12.976378Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTFkNmY5OGMtZDNjMDk4YjMtZmZiOTRlNmYtN2FiMTE2MTQ=, ActorId: [1:7509890387342230115:2353], ActorState: ExecuteState, TraceId: 01jweamkv68qwnfdztnn691m2h, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/query/kqp_explain_ut.cpp:978, virtual void NKikimr::NKqp::NTestSuiteKqpExplain::TTestCaseCreateTableAs::Execute_(NUnitTest::TTestContext &) [Stats = true]: (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: ydb/core/kqp/provider/yql_kikimr_provider.cpp:259 ExistingTable(): requirement desc->DoesExist() failed, code: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13DBA79B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13F70AD8 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_explain_ut.cpp:978: Execute_ @ 0x13A506D2 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_explain_ut.cpp:66: operator() @ 0x13A3C2D6 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13F7298D 5. /tmp//-S/ydb/core/kqp/ut/query/kqp_explain_ut.cpp:66: Execute @ 0x13A3BC9B 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13F73102 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F84CAC 8. ??:0: ?? @ 0x7F352503AD8F 9. ??:0: ?? @ 0x7F352503AE3F 10. ??:0: ?? @ 0x12A6F028 Trying to start YDB, gRPC: 15004, MsgBus: 4058 2025-05-29T15:30:15.383678Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890400232606765:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:15.383730Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001376/r3tmp/tmpmDSKUG/pdisk_1.dat 2025-05-29T15:30:15.396611Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:15.396986Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [2:7509890400232606742:2079] 1748532615383553 != 1748532615383556 TServer::EnableGrpc on GrpcPort 15004, node 2 2025-05-29T15:30:15.409880Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:15.409892Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:15.409894Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:15.409949Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4058 TClient is connected to server localhost:4058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:30:15.486962Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:15.486994Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:15.487028Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:15.488033Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:15.704533Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890400232607383:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:15.704553Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890400232607407:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:15.704561Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:15.705238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:30:15.706832Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509890400232607412:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:30:15.779895Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509890400232607463:2323] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:15.785816Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:30:15.817106Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509890400232607612:2358], status: INTERNAL_ERROR, issues:
: Fatal: ydb/core/kqp/provider/yql_kikimr_provider.cpp:259 ExistingTable(): requirement desc->DoesExist() failed, code: 1 2025-05-29T15:30:15.817218Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=MzBiZmI1ODEtMTQ1MjhmMTktZjJmOTZjNjctNzQ2NWUwN2I=, ActorId: [2:7509890400232607601:2353], ActorState: ExecuteState, TraceId: 01jweampm0ca44gpge9018ht0y, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/query/kqp_explain_ut.cpp:978, virtual void NKikimr::NKqp::NTestSuiteKqpExplain::TTestCaseCreateTableAs::Execute_(NUnitTest::TTestContext &) [Stats = false]: (result.GetStatus() == EStatus::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS)
: Fatal: ydb/core/kqp/provider/yql_kikimr_provider.cpp:259 ExistingTable(): requirement desc->DoesExist() failed, code: 1 , with diff: (INT|SUCC)E(RNAL_ERROR|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13DBA79B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13F70AD8 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_explain_ut.cpp:978: Execute_ @ 0x13A5B132 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_explain_ut.cpp:66: operator() @ 0x13A3C2D6 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13F7298D 5. /tmp//-S/ydb/core/kqp/ut/query/kqp_explain_ut.cpp:66: Execute @ 0x13A3BC9B 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13F73102 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F84CAC 8. ??:0: ?? @ 0x7F352503AD8F 9. ??:0: ?? @ 0x7F352503AE3F 10. ??:0: ?? @ 0x12A6F028 >> KqpStats::StatsProfile >> KqpParams::MissingOptionalParameter-UseSink >> KqpExplain::UpdateSecondaryConditionalPrimaryKey-UseSink >> KqpStats::JoinStatsBasicScan >> TSchemeShardSplitByLoad::IndexTableDoesNotSplitsIfDisabledByMainTable [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin >> KqpQuery::DictJoin ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::InvalidJson Test command err: Trying to start YDB, gRPC: 4879, MsgBus: 6947 2025-05-29T15:30:01.220507Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890338883577641:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:01.220558Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001372/r3tmp/tmpNOnTiX/pdisk_1.dat 2025-05-29T15:30:01.290299Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890338883577621:2079] 1748532601220358 != 1748532601220361 2025-05-29T15:30:01.290856Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4879, node 1 2025-05-29T15:30:01.304474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:01.304487Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:01.304489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:01.304538Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6947 TClient is connected to server localhost:6947 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:30:01.362668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:01.362692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:01.364389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:01.370585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:01.379597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:01.397678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:01.415270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:01.426608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:01.647287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890338883579274:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:01.647317Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:01.687418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:01.695996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:01.705648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:01.719814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:01.775872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:01.789724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:01.803780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:01.820496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890338883579927:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:01.820542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:01.820615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890338883579932:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:01.821488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:01.831567Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890338883579934:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:01.899442Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890338883579985:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:02.008428Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890338883580001:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:02.008576Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmY1NThlYmMtYjYyOTQ5OTQtMjMyZjQyM2YtYzNhYjk3ZjE=, ActorId: [1:7509890338883579256:2401], ActorState: ExecuteState, TraceId: 01jweam8yw63sxdm1rn6wpvdr3, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:02.009501Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FF5989F2AC2 14. ??:0: ?? @ 0x7FF598A8484F Trying to start YDB, gRPC: 7846, MsgBus: 24870 2025-05-29T15:30:05.714107Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890356371784056:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:05.714130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001372/r3tmp/tmpXfo25R/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7846, node 1 2025-05-29T15:30:05.773413Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890356371784037:2079] 1748532605713988 != 1748532605713991 2025-05-29T15:30:05.773549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or out ... :375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F1DB8B6AAC2 14. ??:0: ?? 
@ 0x7F1DB8BFC84F Trying to start YDB, gRPC: 22989, MsgBus: 18072 2025-05-29T15:30:14.024140Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890395721120708:2196];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:14.024877Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001372/r3tmp/tmpJBJbbq/pdisk_1.dat 2025-05-29T15:30:14.080005Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890395721120551:2079] 1748532614023238 != 1748532614023241 2025-05-29T15:30:14.081109Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22989, node 1 2025-05-29T15:30:14.096774Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:14.096790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:14.096792Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:14.096844Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18072 TClient is connected to server localhost:18072 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:30:14.161377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:14.161410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:14.162164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:14.171674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:14.178372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:14.182620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:30:14.214103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:14.247591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:14.308317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:14.420201Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890395721122187:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:14.420230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:14.456886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:14.512103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:14.567388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:14.578727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:14.592694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:14.606596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:14.621241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:14.637143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890395721122841:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:14.637180Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890395721122846:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:14.637182Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:14.637840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:14.640629Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890395721122848:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:14.701664Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890395721122899:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:14.823584Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890395721122915:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:14.823685Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjRiNzJhZjEtNzc3YzNjYWUtNmQ4NTM0ZDAtMjVmZjBhOWM=, ActorId: [1:7509890395721122169:2401], ActorState: ExecuteState, TraceId: 01jweamnfcf8tvgr880pqgxms9, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:14.824484Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FC8ED525AC2 14. ??:0: ?? @ 0x7FC8ED5B784F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitByLoad::IndexTableDoesNotSplitsIfDisabledByMainTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:29:48.230048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:29:48.230080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:48.230086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:29:48.230092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:29:48.230110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:29:48.230115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:29:48.230124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:48.230138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:29:48.230286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:29:48.230368Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:29:48.246699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:29:48.246728Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:48.250349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:29:48.250500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:29:48.250549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:29:48.252570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:29:48.252772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:29:48.252935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:48.253007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:29:48.253573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:48.253634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:29:48.253952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:48.253964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:48.253986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:29:48.253999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:48.254005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:29:48.254048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:29:48.255708Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:29:48.283052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:29:48.283144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, 
at schemeshard: 72057594046678944 2025-05-29T15:29:48.283214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:29:48.283265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:29:48.283278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:48.284431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:48.284465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:29:48.284525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:48.284537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:29:48.284543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:29:48.284550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:29:48.285085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:48.285100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:29:48.285107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:29:48.285545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:48.285558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:48.285568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:48.285587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:29:48.286336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:29:48.286816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:29:48.286862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:29:48.287071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:48.287099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:29:48.287107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:48.287169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:29:48.287178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:48.287213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:29:48.287229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:29:48.287735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:48.287747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:48.287797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
[OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 100 TEST SplitByLoad, splitted 0 times, datashard count 1 2025-05-29T15:30:18.068053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:267: PersistSingleStats for pathId 4 shard idx 72057594046678944:6 data size 0 row count 0 2025-05-29T15:30:18.068073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:6 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-05-29T15:30:18.068139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:485: Do not want to split tablet 72075186233409546 2025-05-29T15:30:18.068260Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/by-value/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:30:18.068900Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/by-value/indexImplTable" took 132us result status StatusSuccess 2025-05-29T15:30:18.069130Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/by-value/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1000000 Memory: 119208 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TEST table final state: Status: StatusSuccess Path: "/MyRoot/Table/by-value/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 
InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1000000 Memory: 119208 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944 >> KqpQuery::YqlTableSample >> TVectorIndexTests::VectorKmeansTreeImplTable [GOOD] >> KqpParams::Decimal-QueryService-UseSink |73.0%| 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink [FAIL] >> KqpLimits::QSReplySize+useSink |73.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::VectorKmeansTreeImplTable [GOOD] >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] >> KqpLimits::TooBigColumn+useSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::Pure Test command err: Trying to start YDB, gRPC: 1347, MsgBus: 3855 2025-05-29T15:30:01.886455Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890338426449756:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:01.886488Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001314/r3tmp/tmpMkUZTC/pdisk_1.dat 2025-05-29T15:30:01.954566Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890338426449733:2079] 1748532601886266 != 1748532601886269 2025-05-29T15:30:01.955282Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1347, node 1 2025-05-29T15:30:01.967570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:01.967586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:01.967588Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:01.967641Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3855 TClient is connected to server localhost:3855 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:30:02.021837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:02.021870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-29T15:30:02.023205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
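The byte values in the CompactionPolicy and PartitioningPolicy dumps above all convert to round power-of-two sizes. The sketch below is plain unit arithmetic over the numbers printed by the test, not YDB code; it also shows why "Do not want to split tablet" is the expected outcome here, assuming the obvious reading that a shard splits once its data size reaches SizeToSplit (2 GiB, against a DataSize of 0):

#include <cstdint>
#include <cstdio>

int main() {
    // Values copied from the DescribePath dump above.
    struct Gen { int id; uint64_t sizeToCompact, forceSizeToCompact; };
    const Gen gens[] = {
        {0, 0,            134217728ull},   // gen0 force-compacts at 128 MiB
        {1, 41943040ull,  536870912ull},   // gen1: 40 MiB, force at 512 MiB
        {2, 419430400ull, 17179869184ull}, // gen2: 400 MiB, force at 16 GiB
    };
    const double MiB = 1024.0 * 1024.0;
    for (const Gen& g : gens)
        std::printf("gen%d: SizeToCompact=%.0f MiB, ForceSizeToCompact=%.0f MiB\n",
                    g.id, g.sizeToCompact / MiB, g.forceSizeToCompact / MiB);

    const uint64_t sizeToSplit = 2147483648ull; // PartitioningPolicy.SizeToSplit
    const uint64_t dataSize = 0;                // TableStats.DataSize in the dump
    std::printf("split wanted: %s (dataSize %llu vs SizeToSplit %.0f MiB)\n",
                dataSize >= sizeToSplit ? "yes" : "no",
                (unsigned long long)dataSize, sizeToSplit / MiB);
    return 0;
}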
2025-05-29T15:30:02.034033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.040542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.062436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:30:02.083076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.095822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.301076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890342721418682:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:02.301107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:02.343504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.351315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.407028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.419638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.434064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.448015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.461617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:02.478164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890342721419336:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:02.478206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:02.478221Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890342721419341:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:02.478986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:02.481045Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890342721419343:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:02.565239Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890342721419394:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:02.654863Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890342721419410:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:02.656429Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTU3Nzc2NS1jNGE3NWFjYi1iMDA5ZmU3YS0xYzZhZTExYw==, ActorId: [1:7509890342721418679:2401], ActorState: ExecuteState, TraceId: 01jweam9kd7wsgd3dwseszazea, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:02.657099Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7EFD4F2F4AC2 14. ??:0: ?? @ 0x7EFD4F38684F Trying to start YDB, gRPC: 63318, MsgBus: 24536 2025-05-29T15:30:06.448753Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890361348583541:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:06.448774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001314/r3tmp/tmpJnid3S/pdisk_1.dat 2025-05-29T15:30:06.503894Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890361348583520:2079] 1748532606448631 != 1748532606448634 2025-05-29T15:30:06.506292Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63318, node 1 2025-05-29T15:3 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FF334045AC2 14. ??:0: ?? 
@ 0x7FF3340D784F Trying to start YDB, gRPC: 18323, MsgBus: 26231 2025-05-29T15:30:14.917614Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890396138339910:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:14.917646Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001314/r3tmp/tmp4XjScE/pdisk_1.dat 2025-05-29T15:30:14.981343Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890396138339888:2079] 1748532614917474 != 1748532614917477 2025-05-29T15:30:14.981506Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18323, node 1 2025-05-29T15:30:14.998138Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:14.998163Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:14.998165Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:14.998210Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26231 TClient is connected to server localhost:26231 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:30:15.053499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:15.053535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-29T15:30:15.054699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:30:15.066792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:15.070885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:30:15.135501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:15.153456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:15.163705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:15.288548Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890400433308820:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:15.288595Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:15.315819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:15.322407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:15.334243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:15.341272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:15.355373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:15.369410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:15.383692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:15.399279Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890400433309472:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:15.399292Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890400433309477:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:15.399309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:15.399966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:15.403465Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890400433309479:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:15.479129Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890400433309530:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:15.585629Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890400433309546:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:15.585764Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzZmMTcwNGEtNGI5M2U0MTktNGU0YTMyNjMtZDVjYzVkNQ==, ActorId: [1:7509890400433308802:2401], ActorState: ExecuteState, TraceId: 01jweamp763yr5d3dx3mw2bwer, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:15.586459Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F91C971DAC2 14. ??:0: ?? @ 0x7F91C97AF84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryStats-UseSink Test command err: Trying to start YDB, gRPC: 27759, MsgBus: 26441 2025-05-29T15:30:02.602393Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890344630772347:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:02.602426Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00130e/r3tmp/tmpLvyCIu/pdisk_1.dat 2025-05-29T15:30:02.661249Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:02.661585Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890344630772326:2079] 1748532602602239 != 1748532602602242 TServer::EnableGrpc on GrpcPort 27759, node 1 2025-05-29T15:30:02.675054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:02.675067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:02.675069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:02.675129Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26441 TClient is connected to server localhost:26441 WaitRootIsUp 'Root'... 
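The test failures above repeat one signature: KQP compilation dies with "yql/essentials/ast/yql_expr.h:1874: index out of range", the session replies INTERNAL_ERROR, and test setup then aborts inside AssertSuccessResult because result.IsSuccess() is false. A minimal sketch of that assertion pattern, with stand-in Status/issue types rather than the real NYdb SDK classes:

#include <cstdio>
#include <cstdlib>
#include <string>

// Stand-in for NYdb::TStatus; only the two members the pattern needs.
struct Status {
    bool success;
    std::string issues;
    bool IsSuccess() const { return success; }
    const std::string& GetIssues() const { return issues; }
};

// Sketch of an AssertSuccessResult-style helper: verify success, otherwise
// dump the issue text and fail hard (the real harness raises a VERIFY /
// unit-test failure instead of calling abort()).
void AssertSuccessResult(const Status& result) {
    if (!result.IsSuccess()) {
        std::fprintf(stderr, "assertion failed: (result.IsSuccess())\n%s\n",
                     result.GetIssues().c_str());
        std::abort();
    }
}

int main() {
    // Reproduces the shape of the failure above: setup treats any compile
    // error as fatal, so one bad query kills the whole test binary.
    AssertSuccessResult({false,
        "Fatal: Execution, code: 1060\n"
        "Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1"});
    return 0;
}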
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:02.737049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:02.737096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:02.738131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:02.738372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.744759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.764022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.783448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.794883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.981409Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890344630773958:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:02.981434Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.025446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.034311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.042651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.056398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.070222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.085188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.098761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.115027Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890348925741907:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.115063Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.115130Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890348925741912:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:03.115868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:03.118030Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890348925741914:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:30:03.191699Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890348925741965:3399] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:03.291314Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890348925741981:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:03.291431Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWVjNDBhMWQtYWI0OWViNGUtZTk4N2E5MzQtNDBjNWQ4NDQ=, ActorId: [1:7509890344630773955:2401], ActorState: ExecuteState, TraceId: 01jweama7abfc98k4qv019xnvt, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:03.292213Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F9104A01AC2 14. ??:0: ?? @ 0x7F9104A9384F Trying to start YDB, gRPC: 30047, MsgBus: 20528 2025-05-29T15:30:06.625909Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890361196173871:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:06.625975Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00130e/r3tmp/tmpkz1C2p/pdisk_1.dat 2025-05-29T15:30:06.673082Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890361196173848:2079] 1748532606625725 != 1748532606625728 2025-05-29T15:30:06.677864Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30047, node 1 2025-05-29 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FC26A6A6AC2 14. ??:0: ?? 
@ 0x7FC26A73884F Trying to start YDB, gRPC: 14978, MsgBus: 24513 2025-05-29T15:30:15.113492Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890398200436923:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:15.113553Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00130e/r3tmp/tmpnda6WO/pdisk_1.dat 2025-05-29T15:30:15.179874Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:15.179956Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890398200436904:2079] 1748532615113355 != 1748532615113358 TServer::EnableGrpc on GrpcPort 14978, node 1 2025-05-29T15:30:15.194262Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:15.194277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:15.194279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:15.194326Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24513 TClient is connected to server localhost:24513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:15.246605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:15.255856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:15.255889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:15.256951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:15.257034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
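The recurring "Scheduled retry for error: Transaction ... completed, doublechecking" warning followed by a TX_PROXY "path exist, request accepts it" message (seen in each run above) is a benign pattern, not a second failure: the default resource-pool creator re-issues the create to confirm the pool, and the repeat is accepted because the path already exists. A sketch of that ensure-created idiom, assuming idempotent-create semantics; the names here are illustrative, not the YDB API:

#include <cstdio>
#include <set>
#include <string>

// Illustrative stand-in for a schema service with idempotent create:
// creating an already-existing path succeeds ("path exist, request accepts it").
struct SchemeService {
    std::set<std::string> paths;
    void CreateResourcePool(const std::string& path) {
        if (!paths.insert(path).second)
            std::printf("path exist, request accepts it: %s\n", path.c_str());
    }
};

// Ensure-created idiom: run the create, and on an ambiguous outcome
// ("transaction completed, doublechecking") simply issue it again;
// idempotency makes the retry safe.
void EnsureDefaultPool(SchemeService& scheme) {
    const std::string path = "/Root/.metadata/workload_manager/pools/default";
    scheme.CreateResourcePool(path);  // first attempt
    std::printf("Scheduled retry: Transaction completed, doublechecking\n");
    scheme.CreateResourcePool(path);  // doublecheck: accepted as already-existing
}

int main() {
    SchemeService scheme;
    EnsureDefaultPool(scheme);
    return 0;
}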
2025-05-29T15:30:15.318638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:15.340455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:15.350230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:15.562412Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890398200438559:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:15.562447Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:15.613748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:15.668643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:15.723358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:15.733293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:15.791442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:15.799461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:15.810473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:15.827182Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890398200439218:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:15.827216Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:15.827219Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890398200439223:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:15.827898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:15.830073Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890398200439225:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:30:15.891499Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890398200439276:3399] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:16.004237Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890398200439292:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:16.004323Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWQzOTY1YmEtYTZlMmYwZWYtZTVmZDRmNDAtYTk0YTQ3NTU=, ActorId: [1:7509890398200438541:2401], ActorState: ExecuteState, TraceId: 01jweampmjcsa75w2rx8yd9nvx, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:16.005146Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F918E1B4AC2 14. ??:0: ?? @ 0x7F918E24684F >> TAsyncIndexTests::OnlineBuild [GOOD] >> KqpStats::DataQueryWithEffects-UseSink >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix [FAIL] |73.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |73.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable >> TUniqueIndexTests::CreateTable |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |73.0%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal+QueryService+UseSink Test command err: Trying to start YDB, gRPC: 13774, MsgBus: 30924 2025-05-29T15:30:03.429969Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890347972326057:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:03.429998Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001308/r3tmp/tmpxJeGSY/pdisk_1.dat 2025-05-29T15:30:03.480714Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:03.480986Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890347972326036:2079] 1748532603429793 != 1748532603429796 TServer::EnableGrpc on GrpcPort 13774, node 1 2025-05-29T15:30:03.492094Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:03.492108Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:03.492111Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:03.492162Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30924 TClient is connected to server localhost:30924 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:03.558557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:03.558583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:03.559168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:03.559607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:03.566636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:03.585898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:03.605016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:03.617345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:03.757415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890347972327666:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:03.757437Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:03.803337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:30:03.811535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:30:03.819536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:30:03.833939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:30:03.848515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:30:03.861665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:30:03.875553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:30:03.892162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890347972328317:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:03.892185Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:03.892191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890347972328322:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:03.892974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:30:03.895278Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890347972328324:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:30:03.947032Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890347972328375:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:30:04.055595Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890347972328391:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:30:04.055718Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2NlNzcwNDctMTRhMzgwMzEtNzdjNWZhZTctZTljYTc1OTg=, ActorId: [1:7509890347972327648:2401], ActorState: ExecuteState, TraceId: 01jweamazk9yb6xvnbk83es3s6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:30:04.056526Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FC5C84D5AC2 14. ??:0: ?? @ 0x7FC5C856784F Trying to start YDB, gRPC: 12275, MsgBus: 17427 2025-05-29T15:30:07.458131Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890366071201157:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:07.458171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001308/r3tmp/tmpAxU88K/pdisk_1.dat 2025-05-29T15:30:07.509810Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890366071201136:2079] 1748532607458015 != 1748532607458018 2025-05-29T15:30:07.511914Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12275, node 1 2025-05-29 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FA29612AAC2 14. ??:0: ?? 
@ 0x7FA2961BC84F Trying to start YDB, gRPC: 63011, MsgBus: 10213 2025-05-29T15:30:15.897842Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890399276343640:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:15.897871Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001308/r3tmp/tmpcVEwBS/pdisk_1.dat 2025-05-29T15:30:15.953019Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890399276343617:2079] 1748532615897674 != 1748532615897677 2025-05-29T15:30:15.955591Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63011, node 1 2025-05-29T15:30:15.965010Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:15.965024Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:15.965026Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:15.965074Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10213 2025-05-29T15:30:16.000487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:16.000523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:16.001503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10213 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:16.033373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:16.038678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:16.111135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:16.139893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:16.152565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:16.334136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890403571312576:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:16.334173Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:16.369335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:30:16.375662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:30:16.430728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:30:16.440491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:30:16.454582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:30:16.468110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:30:16.475143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:30:16.491239Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890403571313232:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:16.491269Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:16.491285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890403571313237:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:16.492223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:30:16.495293Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890403571313239:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:30:16.549471Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890403571313290:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:30:16.635072Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890403571313306:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:30:16.635221Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWM4ZDNmYzAtNTA1ZDdiYjAtOTZkMWJiNmItY2JhMDg1NjM=, ActorId: [1:7509890403571312558:2401], ActorState: ExecuteState, TraceId: 01jweamq9ac5t6k382zj67yf0n, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:30:16.635950Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F2062B2EAC2 14. ??:0: ?? @ 0x7F2062BC084F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::OnlineBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:18.773301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:18.773325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:18.773330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:18.773336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:18.773345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:18.773348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:18.773354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:18.773364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:18.773466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:18.788806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: 
TxInitSchema.Execute 2025-05-29T15:30:18.940231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:18.940278Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:18.943401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:18.943523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:18.943590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:18.952642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:18.952817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:18.958351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:18.958461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:18.964397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:18.968084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:18.969522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:18.969554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:18.969575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:18.969584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:18.969591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:18.971189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:30:18.977575Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:30:19.016645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:19.023704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:30:19.034091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:19.034175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:19.034212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.035697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:19.035724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:19.035768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.035779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:19.035784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:19.035790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:19.036166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.036173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:19.036179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:19.036475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.036483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.036489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:19.036496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:19.037241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:19.037658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:19.038459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:19.040212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:19.040257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:19.040267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:19.040361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:30:19.040372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:19.041111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:19.041140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:19.042140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:19.042151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:19.042198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2025-05-29T15:30:19.422306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.422313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:30: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 ProgressState 2025-05-29T15:30:19.422320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2025-05-29T15:30:19.422344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:19.422898Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2025-05-29T15:30:19.422922Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:386:2357], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:30:19.422968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-05-29T15:30:19.422995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:30:19.423018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-05-29T15:30:19.423023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 
2025-05-29T15:30:19.423029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-05-29T15:30:19.423094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:19.423114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:19.423122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-05-29T15:30:19.423128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976710760:0 128 -> 240 2025-05-29T15:30:19.423458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.423467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-05-29T15:30:19.423479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710760:0 progress is 1/1 2025-05-29T15:30:19.423483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-05-29T15:30:19.423489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710760:0 progress is 1/1 2025-05-29T15:30:19.423494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-05-29T15:30:19.423499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-05-29T15:30:19.423509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:128:2152] message: TxId: 281474976710760 2025-05-29T15:30:19.423516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-05-29T15:30:19.423520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710760:0 2025-05-29T15:30:19.423524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710760:0 2025-05-29T15:30:19.423535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-05-29T15:30:19.423844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: 
TEvNotifyTxCompletionResult: txId# 281474976710760 2025-05-29T15:30:19.423856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710760 2025-05-29T15:30:19.423865Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2338: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-05-29T15:30:19.423878Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2341: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:386:2357], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:30:19.424159Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2025-05-29T15:30:19.424173Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:386:2357], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:30:19.424183Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:25: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-05-29T15:30:19.424453Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2025-05-29T15:30:19.424466Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 
72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:386:2357], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:30:19.424471Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-05-29T15:30:19.424488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:30:19.424494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:475:2435] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::Predicates Test command err: Trying to start YDB, gRPC: 25705, MsgBus: 10259 2025-05-29T15:30:02.753578Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890345627230333:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:02.753601Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00130b/r3tmp/tmphdZgTp/pdisk_1.dat 2025-05-29T15:30:02.806715Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890345627230313:2079] 1748532602753459 != 1748532602753462 2025-05-29T15:30:02.806718Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25705, node 1 2025-05-29T15:30:02.822933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:02.822951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:02.822953Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:02.823008Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10259 TClient is connected to server localhost:10259 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:30:02.885781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:02.885812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:02.886939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:02.888834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.895007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.966267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.986421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:02.996377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:03.164789Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890349922199240:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:03.164825Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:03.210032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:30:03.217632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:30:03.231742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:30:03.245782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:30:03.259727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:30:03.274081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:30:03.287991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:30:03.304204Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890349922199896:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:03.304240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890349922199901:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:03.304252Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:03.305019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:30:03.307462Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890349922199903:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:30:03.377750Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890349922199954:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:30:03.479325Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890349922199970:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:30:03.479452Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWRmZThmMjgtOWYzMWRiOC02MTEyZTE4My0yNjVjZWUyNw==, ActorId: [1:7509890349922199222:2401], ActorState: ExecuteState, TraceId: 01jweamad78ctnx1qthppb0kf5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:30:03.480165Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F6CB02D4AC2 14. ??:0: ?? @ 0x7F6CB036684F Trying to start YDB, gRPC: 19264, MsgBus: 9721 2025-05-29T15:30:07.192616Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890363835411669:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:07.192650Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00130b/r3tmp/tmp8smHGn/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19264, node 1 2025-05-29T15:30:07.243095Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:07.243368Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890363835411646:2079] 1748532607192483 != 1748532607192486 2025-05-29T ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FB8EE814AC2 14. ??:0: ?? 
@ 0x7FB8EE8A684F Trying to start YDB, gRPC: 21132, MsgBus: 25543 2025-05-29T15:30:15.816448Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890401321823054:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:15.816467Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00130b/r3tmp/tmpJRDfCF/pdisk_1.dat 2025-05-29T15:30:15.867562Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890401321823035:2079] 1748532615816332 != 1748532615816335 2025-05-29T15:30:15.869334Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21132, node 1 2025-05-29T15:30:15.880765Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:15.880780Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:15.880781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:15.880821Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25543 2025-05-29T15:30:15.919512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:15.919542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:15.920620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:15.945788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:15.950905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:16.013730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:16.033050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:16.045017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:16.246302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890405616791967:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:16.246330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:16.290326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:16.297412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:16.352699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:16.407548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:16.462050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:16.475174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:16.482512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:16.498825Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890405616792625:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:16.498854Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:16.498860Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890405616792630:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:16.499539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:16.502429Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890405616792632:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:16.564179Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890405616792683:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:16.642415Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890405616792699:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:16.642548Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=N2Q4MTllMDQtYmM1NjMwZjctNGYzYzY1OC1kMTAyNjc4Zg==, ActorId: [1:7509890405616791949:2401], ActorState: ExecuteState, TraceId: 01jweamq9jf23gv8yb9jy9tb1x, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:16.643247Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F6419E79AC2 14. ??:0: ?? @ 0x7F6419F0B84F >> KqpLimits::QueryExecTimeoutCancel |73.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TUniqueIndexTests::CreateTable [GOOD] >> TAsyncIndexTests::Decimal |73.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TUniqueIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:19.934976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:19.935004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:19.935010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:19.935017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:19.935036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:19.935041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:19.935050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:19.935065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:19.935167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:19.935244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:19.946172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:19.946199Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:19.948618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:19.948729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:19.948767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:19.950387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:19.950797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:19.950943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:19.951008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:19.952479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:19.952532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:19.952850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:19.952864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:19.952890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:19.952899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:19.952906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:19.952951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.954669Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:30:19.977564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { 
Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:19.977665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.977740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:19.977791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:19.977803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.978830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:19.978865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:19.978927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.978939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:19.978946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:19.978952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:19.979479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.979494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:19.979501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:19.979935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.979951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.979958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:19.979967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:19.980720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:19.981212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:19.981263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:19.981469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:19.981496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:19.981504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:19.981580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:30:19.981588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:19.981624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:19.981636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:19.982092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:19.982103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:19.982153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
1009: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 333 RawX2: 4294969610 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-05-29T15:30:20.088327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:20.088330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:30:20.088334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-29T15:30:20.088338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 101:0 129 -> 240 2025-05-29T15:30:20.088922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:30:20.088939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:30:20.088946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:30:20.089332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:30:20.089352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-05-29T15:30:20.089367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:30:20.089378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-05-29T15:30:20.089418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:30:20.089430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-05-29T15:30:20.089435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:2 ProgressState 2025-05-29T15:30:20.089445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:2 progress is 2/3 2025-05-29T15:30:20.089448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-05-29T15:30:20.089452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:2 progress is 2/3 2025-05-29T15:30:20.089455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 
2025-05-29T15:30:20.089459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-05-29T15:30:20.089505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:30:20.089510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState 2025-05-29T15:30:20.089516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 3/3 2025-05-29T15:30:20.089519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-05-29T15:30:20.089523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 3/3 2025-05-29T15:30:20.089526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-05-29T15:30:20.089529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-05-29T15:30:20.089541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:384:2351] message: TxId: 101 2025-05-29T15:30:20.089546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-05-29T15:30:20.089553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:30:20.089557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:30:20.089576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:30:20.089580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:1 2025-05-29T15:30:20.089583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:1 2025-05-29T15:30:20.089587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:30:20.089591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:2 2025-05-29T15:30:20.089594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:2 2025-05-29T15:30:20.089600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:30:20.090000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:30:20.090011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:385:2352] TestWaitNotification: OK eventTxId 101 2025-05-29T15:30:20.090114Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:30:20.090150Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 45us result status StatusSuccess 2025-05-29T15:30:20.090349Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSyncBrokerTests::ShouldReturnToken >> TSyncBrokerTests::ShouldReturnToken [GOOD] >> TSyncBrokerTests::ShouldReleaseToken >> TAsyncIndexTests::Decimal [GOOD] >> TSchemeShardSplitBySizeTest::MergeIndexTableShards >> TSyncBrokerTests::ShouldReleaseToken [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCancelWriteImmediate Test command err: Trying to start YDB, gRPC: 6892, MsgBus: 17094 2025-05-29T15:30:03.766129Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890345896394226:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:03.766166Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001304/r3tmp/tmpoHDi2Q/pdisk_1.dat 2025-05-29T15:30:03.833618Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890345896394208:2079] 1748532603765977 != 1748532603765980 2025-05-29T15:30:03.834630Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6892, node 1 2025-05-29T15:30:03.846934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:03.846946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:03.846948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:03.846995Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17094 TClient is connected to server localhost:17094 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:03.909337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:03.909364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:03.910519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:03.910561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:03.918320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:03.980864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.001503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.013025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.113630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890350191363136:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.113664Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.150661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.157730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.169161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.176368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.230937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.239308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.253436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.270341Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890350191363790:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.270382Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.270383Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890350191363795:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.271166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:04.273217Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890350191363797:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:04.371393Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890350191363848:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:04.476283Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890350191363864:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:04.476372Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjU3ZmY1OWEtMjk0ZmYxZWYtOTdlNjY4OTEtZWJlZmMyY2Y=, ActorId: [1:7509890350191363118:2401], ActorState: ExecuteState, TraceId: 01jweambbd6ef2mqewen9vjpmg, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:04.477025Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7EFC3B025AC2 14. ??:0: ?? @ 0x7EFC3B0B784F Trying to start YDB, gRPC: 24265, MsgBus: 9197 2025-05-29T15:30:08.440505Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890370133746362:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:08.440543Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001304/r3tmp/tmpWRG6aF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24265, node 1 2025-05-29T15:30:08.500361Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:08.500453Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890370133746340:2079] 1748532608440387 != 1748532608440390 2025-05-29T15 ... x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F30B50DEAC2 14. ??:0: ?? 
@ 0x7F30B517084F Trying to start YDB, gRPC: 6248, MsgBus: 10662 2025-05-29T15:30:16.992948Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890403130479343:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:16.992980Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001304/r3tmp/tmp6DOVhf/pdisk_1.dat 2025-05-29T15:30:17.046971Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890403130479322:2079] 1748532616992830 != 1748532616992833 2025-05-29T15:30:17.049064Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6248, node 1 2025-05-29T15:30:17.060823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:17.060835Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:17.060838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:17.060880Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10662 2025-05-29T15:30:17.095280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:17.095312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:17.096377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10662 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:17.124163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:17.131740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:17.192612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:17.211734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:17.222165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:17.298978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890407425448254:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:17.298998Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:17.330937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.336927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.350080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.357071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.364359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.378205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.392681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.408089Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890407425448906:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:17.408111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890407425448911:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:17.408120Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:17.408715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:17.412307Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890407425448913:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:17.481661Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890407425448964:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:17.552545Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890407425448980:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:17.552646Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTNjNjA3YWMtMjNmNmFjZjUtNWE0MzdlMjAtY2VjZWI2NzE=, ActorId: [1:7509890407425448236:2401], ActorState: ExecuteState, TraceId: 01jweamr5z7fsnt1v5b7hxqx90, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:17.553279Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F0B7AD5BAC2 14. ??:0: ?? @ 0x7F0B7ADED84F >> KqpExplain::SqlIn >> KqpLimits::DatashardReplySize ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheWithRecompilationQuery Test command err: Trying to start YDB, gRPC: 19796, MsgBus: 27241 2025-05-29T15:30:04.071984Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890353123611035:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:04.072010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012fd/r3tmp/tmp8VDF1Q/pdisk_1.dat 2025-05-29T15:30:04.127746Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:04.128143Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890353123611015:2079] 1748532604071845 != 1748532604071848 TServer::EnableGrpc on GrpcPort 19796, node 1 2025-05-29T15:30:04.142087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:04.142097Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:04.142099Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:04.142129Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27241 2025-05-29T15:30:04.174980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:04.175015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:04.176078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27241 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:04.202367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.209695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.275341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.294096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.350730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:04.414347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890353123612652:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.414373Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.453368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.508476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.563218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.575066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.589420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.603614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.617482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:04.634041Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890353123613308:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.634067Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890353123613313:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.634071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:04.634834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:04.637591Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890353123613315:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:04.726902Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890353123613366:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:04.822894Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890353123613382:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:04.824067Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=N2QyMDg2YmEtMWI0ZjhhZDItZTc0NTkyMmUtYjRkNGM0ODY=, ActorId: [1:7509890353123612634:2401], ActorState: ExecuteState, TraceId: 01jweambpsd5w9h4rgrs1t8cph, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:04.824853Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FC0854EDAC2 14. ??:0: ?? @ 0x7FC08557F84F Trying to start YDB, gRPC: 26076, MsgBus: 15487 2025-05-29T15:30:08.189412Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890370615462846:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:08.189441Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012fd/r3tmp/tmpq33DgF/pdisk_1.dat 2025-05-29T15:30:08.238451Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890370615462827:2079] 1748532608189307 != 1748532608189310 2025-05-29T15:30:08.245113Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26076, node 1 2025-05-29 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FEBA6502AC2 14. ??:0: ?? 
@ 0x7FEBA659484F Trying to start YDB, gRPC: 25748, MsgBus: 13636 2025-05-29T15:30:16.854940Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890404475898273:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:16.854969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012fd/r3tmp/tmpIAE5wC/pdisk_1.dat 2025-05-29T15:30:16.913009Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:16.913073Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890404475898251:2079] 1748532616854813 != 1748532616854816 TServer::EnableGrpc on GrpcPort 25748, node 1 2025-05-29T15:30:16.922923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:16.922936Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:16.922938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:16.922979Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13636 TClient is connected to server localhost:13636 2025-05-29T15:30:16.957375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:16.957419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:30:16.958471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:16.969962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:16.979366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
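Frames 4-8 of the traces above show how the assertion ends up off the unittest thread: CreateSampleTables (kqp_ut_common.cpp:365) is dispatched through NThreading::Async onto a util thread pool. A reduced sketch of that execution path follows, under stated assumptions; the table-setup body is elided because it lives in the test library, not here.

    // Hedged reconstruction of frames 4-8 (async.h:24, pool.h:71, pool.cpp:418).
    #include <library/cpp/threading/future/async.h>
    #include <util/thread/pool.h>

    void RunSetupOffUnittestThread() {
        TThreadPool pool;
        pool.Start(1); // one worker is enough for the sketch
        auto done = NThreading::Async([] {
            // CreateSampleTables() would run here: CREATE TABLE queries with
            // AssertSuccessResult() on each status, so any INTERNAL_ERROR
            // aborts via VERIFY as seen above.
        }, pool);
        done.Wait(); // never returns normally once VERIFY has fired
    }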
2025-05-29T15:30:16.997053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:17.056320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:17.067452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:17.272464Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890408770867194:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:17.272493Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:17.320333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.327029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.335922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.342439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.349818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.357127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.371257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.428174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890408770867850:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:17.428200Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890408770867855:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:17.428205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:17.428866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:17.433651Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890408770867857:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:30:17.530678Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890408770867908:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:17.659584Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890408770867925:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:17.659695Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2RiZDgyODgtOGExZWIwNjYtNjAyMmFiZjgtNjJlNTQwYTA=, ActorId: [1:7509890408770867167:2400], ActorState: ExecuteState, TraceId: 01jweamr6k3k3b5ctbxx3kny62, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:17.660398Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FCEC3DE8AC2 14. ??:0: ?? @ 0x7FCEC3E7A84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseToken [GOOD] Test command err: 2025-05-29T15:30:20.996681Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:64: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-05-29T15:30:21.058479Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:64: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-05-29T15:30:21.058528Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:123: TEvReleaseSyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token released, active: 1, waiting: 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix [FAIL] Test command err: Trying to start YDB, gRPC: 15961, MsgBus: 5460 2025-05-29T15:30:11.808973Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890382708087221:2200];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:11.809118Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012be/r3tmp/tmpOtog74/pdisk_1.dat 2025-05-29T15:30:11.862887Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890382708087060:2079] 1748532611806849 != 1748532611806852 2025-05-29T15:30:11.866356Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15961, node 1 2025-05-29T15:30:11.879168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:11.879184Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:11.879186Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:11.879247Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5460 
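The repeating triple above and throughout this run — "Resource pool default not found", "Scheduled retry for error ... doublechecking", and the TX_PROXY "path exist, request accepts it" — is ordinary first-use bootstrap of the workload manager: the default pool is looked up, found missing, created, and a racing create is accepted because the path already exists. None of these ERROR/WARN lines is the test failure itself. A sketch of that accept-existing interpretation follows; the names are illustrative, not taken from the schemeshard sources.

    // Hedged sketch of the idempotent-create outcome the TX_PROXY line implies.
    enum class EPoolCreateOutcome { Created, AlreadyExists, Failed };

    EPoolCreateOutcome InterpretCreateReply(bool proposedOk, bool pathExists) {
        if (proposedOk) {
            return EPoolCreateOutcome::Created;
        }
        if (pathExists) {
            // "error: path exist, request accepts it" from the log above
            return EPoolCreateOutcome::AlreadyExists;
        }
        return EPoolCreateOutcome::Failed;
    }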
TClient is connected to server localhost:5460 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:11.936905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:11.936932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:11.937618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.937966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:11.949988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.013310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.035540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.057217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.190935Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890387003055992:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.190990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.230945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.239108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.247985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.261521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.275288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.282385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.296384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.313128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890387003056644:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.313147Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890387003056649:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.313153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.313970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:12.316130Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890387003056651:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:12.368496Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890387003056702:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:12.458455Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890387003056718:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:12.458568Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmVlYTBmMDEtOGM5ZGRkNzktZDdkMzY4N2UtYzc1ZjBmZmM=, ActorId: [1:7509890387003055974:2401], ActorState: ExecuteState, TraceId: 01jweamk6r2dsh4ysznxqvyhze, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:12.459409Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F689370DAC2 14. ??:0: ?? @ 0x7F689379F84F Trying to start YDB, gRPC: 18144, MsgBus: 14379 2025-05-29T15:30:15.983577Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890398431552541:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:15.983622Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012be/r3tmp/tmpihRJRT/pdisk_1.dat 2025-05-29T15:30:16.034103Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:16.034157Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890398431552522:2079] 1748532615983466 != 1748532615983469 TServer::EnableGrpc on GrpcPort 18144, node 1 2025-05-29T15 ... 225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890405570901465:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:16.998778Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890405570901440:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:16.998796Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:16.999371Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-29T15:30:17.000776Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509890405570901469:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-29T15:30:17.087603Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509890409865868816:2324] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:17.093799Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509890409865868837:2337], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:62: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2025-05-29T15:30:17.093851Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=MTBhY2U2YjUtMjE4ZWNiYjgtYzkxNDgyZjUtZTYyYjk5Ng==, ActorId: [2:7509890405570901438:2326], ActorState: ExecuteState, TraceId: 01jweamqjj1dh583kqaw2z9kvm, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-05-29T15:30:17.099467Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509890409865868861:2346], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:45: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2025-05-29T15:30:17.099529Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=Y2ZmMWRkMjItOTgwNWIzYTYtN2JiNzFlZTAtNjliNmQ4NjU=, ActorId: [2:7509890409865868850:2340], ActorState: ExecuteState, TraceId: 01jweamqw64jmxfef90j3542hz, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-05-29T15:30:17.104555Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509890409865868880:2352], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:43: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2025-05-29T15:30:17.104607Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=OWQ4NTdiNGEtZDdkNTYzODUtZDRkODk4NTctYjViYWIyYw==, ActorId: [2:7509890409865868874:2349], ActorState: ExecuteState, TraceId: 01jweamqwcfm4hesxaefekvazm, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 11525, MsgBus: 21128 2025-05-29T15:30:17.396839Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509890407240622438:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:17.396858Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012be/r3tmp/tmpPULfIz/pdisk_1.dat 2025-05-29T15:30:17.413126Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:17.413388Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [3:7509890407240622420:2079] 1748532617396755 != 1748532617396758 TServer::EnableGrpc on GrpcPort 11525, node 3 2025-05-29T15:30:17.422412Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:17.422427Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:17.422429Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:17.422479Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21128 TClient is connected to server localhost:21128 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:17.501638Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:17.501664Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:17.501964Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
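The three BAD_REQUEST compile errors above ("Invalid type for column: Value", code 2031) differ from the INTERNAL_ERROR aborts: type annotation correctly rejects a CREATE TABLE whose column type is neither a YQL data type nor a PG type, and the session replies without crashing, which is the expected negative path of these CreateAsSelect variants. A hedged sketch of such a check against the public YDB C++ SDK follows; the exact statement in kqp_query_ut.cpp may differ.

    // Assumes an established NYdb::NTable::TSession; the table path and the
    // type name are illustrative.
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <library/cpp/testing/unittest/registar.h>

    void CheckUnsupportedColumnType(NYdb::NTable::TSession& session) {
        auto result = session.ExecuteSchemeQuery(R"(
            CREATE TABLE `/Root/BadTable` (
                Key Uint64,
                Value SomeUnsupportedType,
                PRIMARY KEY (Key)
            );
        )").GetValueSync();
        // Matches the log: status BAD_REQUEST, "Invalid type for column: Value".
        UNIT_ASSERT_VALUES_EQUAL(result.GetStatus(), NYdb::EStatus::BAD_REQUEST);
    }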
2025-05-29T15:30:17.502601Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:17.507191Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.517862Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.720850Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509890407240623124:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:17.720870Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7509890407240623133:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:17.720876Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:17.721561Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-05-29T15:30:17.723314Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7509890407240623138:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-05-29T15:30:17.787394Z node 3 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [3:7509890407240623189:2352] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:17.793667Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.863329Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [3:7509890407240623310:2354], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:17.863447Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=3&id=Mjk1MTg0NjctYmFjOGU5My0xOGQxODE3OC0yNWExMTk2Yg==, ActorId: [3:7509890407240623303:2350], ActorState: ExecuteState, TraceId: 01jweamrkyd7tyhp1vv1d9qbfr, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/query/kqp_query_ut.cpp:2558, virtual void NKikimr::NKqp::NTestSuiteKqpQuery::TTestCaseCreateAsSelectPath::Execute_(NUnitTest::TTestContext &) [UseTablePathPrefix = true]: (prepareResult.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13DBA79B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13F70AD8 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:2558: Execute_ @ 0x13C3D2C8 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: operator() @ 0x13BFC896 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13F7298D 5. /tmp//-S/ydb/core/kqp/ut/query/kqp_query_ut.cpp:38: Execute @ 0x13BFC255 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13F73102 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F84CAC 8. ??:0: ?? @ 0x7F7FD8B0AD8F 9. ??:0: ?? @ 0x7F7FD8B0AE3F 10. ??:0: ?? @ 0x12A6F028 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:20.608358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:20.608385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:20.608390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:20.608395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:20.608410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:20.608415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:20.608423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:20.608436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:20.608546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:20.608613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:20.623097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:20.623123Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:20.625723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 
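One contrast worth noting in the CqpQuery::CreateAsSelectPath failure above: the assertion at kqp_query_ut.cpp:2558 fires on the unittest thread, so RaiseError() records an ordinary [FAIL] with a backtrace instead of escalating to VERIFY as the CreateSampleTables aborts do. The condition in the trace reduces to a standard success check, sketched here under the same hedging as before and not copied from the test source.

    #include <library/cpp/testing/unittest/registar.h>
    #include <ydb/public/sdk/cpp/client/ydb_types/status/status.h>

    // Hedged: reconstructed from the printed condition (prepareResult.IsSuccess()).
    void AssertPrepared(const NYdb::TStatus& prepareResult) {
        UNIT_ASSERT_C(prepareResult.IsSuccess(), prepareResult.GetIssues().ToString());
    }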
2025-05-29T15:30:20.625832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:20.625867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:20.627863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:20.628059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:20.628189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:20.628239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:20.628744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:20.628784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:20.629033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:20.629046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:20.629068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:20.629076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:20.629083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:20.629117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:30:20.630594Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:30:20.652049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:20.652129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:20.652187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:20.652227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at 
schemeshard: 72057594046678944 2025-05-29T15:30:20.652239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:20.653001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:20.653028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:20.653077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:20.653087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:20.653092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:20.653098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:20.653527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:20.653541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:20.653547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:20.653948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:20.653964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:20.653970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:20.653977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:20.654685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:20.655175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:20.655217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 
State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:20.655407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:20.655435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:20.655443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:20.655508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:30:20.655517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:20.655551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:20.655564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:20.656053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:20.656064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:20.656114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
1009: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 333 RawX2: 4294969610 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-05-29T15:30:20.803438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:20.803442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:30:20.803446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-29T15:30:20.803450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 101:0 129 -> 240 2025-05-29T15:30:20.804360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:30:20.804384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:30:20.804400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:30:20.805205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:30:20.805243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-05-29T15:30:20.805268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:30:20.805317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-05-29T15:30:20.805369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:30:20.805475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-05-29T15:30:20.805483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:2 ProgressState 2025-05-29T15:30:20.805493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:2 progress is 2/3 2025-05-29T15:30:20.805497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-05-29T15:30:20.805501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:2 progress is 2/3 2025-05-29T15:30:20.805505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 
2025-05-29T15:30:20.805509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-05-29T15:30:20.805569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:30:20.805574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState 2025-05-29T15:30:20.805581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 3/3 2025-05-29T15:30:20.805585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-05-29T15:30:20.805589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 3/3 2025-05-29T15:30:20.805593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-05-29T15:30:20.805597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-05-29T15:30:20.805609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:384:2351] message: TxId: 101 2025-05-29T15:30:20.805614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-05-29T15:30:20.805623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:30:20.805626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:30:20.805646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:30:20.805651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:1 2025-05-29T15:30:20.805654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:1 2025-05-29T15:30:20.805659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:30:20.805663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:2 2025-05-29T15:30:20.805666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:2 2025-05-29T15:30:20.805672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:30:20.806384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:30:20.806396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:385:2352] TestWaitNotification: OK eventTxId 101 2025-05-29T15:30:20.806500Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:30:20.806540Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 48us result status StatusSuccess 2025-05-29T15:30:20.806713Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] >> KqpLimits::QSReplySize-useSink |73.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> KqpExplain::FewEffects-UseSink >> TSchemeShardSplitBySizeTest::SplitShardsWithPgKey [GOOD] >> KqpStats::SelfJoin >> KqpStats::OneShardLocalExec+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::WaitCAsTimeout Test command err: Trying to start YDB, gRPC: 63118, MsgBus: 26964 2025-05-29T15:29:59.553458Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890331493312567:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:59.553489Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001385/r3tmp/tmpyB54E2/pdisk_1.dat 2025-05-29T15:29:59.611334Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:59.611418Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890331493312548:2079] 1748532599553347 != 1748532599553350 TServer::EnableGrpc on GrpcPort 63118, node 1 2025-05-29T15:29:59.623583Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:59.623598Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:59.623600Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:59.623644Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26964 2025-05-29T15:29:59.656288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:59.656322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:59.657350Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26964 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:59.685828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:59.695430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:59.711426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:59.735031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:59.746587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:59.895361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890331493314204:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:59.895397Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:59.937125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:59.944563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:59.955651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:59.969338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:00.024958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:00.033815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:00.047084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:00.066929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890335788282157:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:00.066960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:00.067027Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890335788282162:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:00.068093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:00.073552Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890335788282164:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:00.137896Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890335788282215:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:00.235496Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890335788282231:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:00.235598Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjU1MjgwZjUtOTVhN2VmNjYtM2VmMjk4MTgtYzgyZWZiNzA=, ActorId: [1:7509890331493314186:2401], ActorState: ExecuteState, TraceId: 01jweam782bas3y6tfhktcbm0s, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:00.236219Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FFA55EB3AC2 14. ??:0: ?? @ 0x7FFA55F4584F Trying to start YDB, gRPC: 63566, MsgBus: 13415 2025-05-29T15:30:04.109294Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890352360447348:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:04.109319Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001385/r3tmp/tmpLr79aV/pdisk_1.dat 2025-05-29T15:30:04.161641Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890352360447324:2079] 1748532604109090 != 1748532604109093 2025-05-29T15:30:04.162673Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63566, node 1 2025-05-29 ... eTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F0053C29AC2 14. ??:0: ?? @ 0x7F0053CBB84F Trying to start YDB, gRPC: 20894, MsgBus: 5747 2025-05-29T15:30:15.459158Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:30:15.459195Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:30:15.459209Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001385/r3tmp/tmpk9WAVo/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20894, node 1 TClient is connected to server localhost:5747 TClient is connected to server localhost:5747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:15.623791Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:15.623816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:15.623821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:15.623947Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:30:15.624031Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532615106215 != 1748532615106219 2025-05-29T15:30:15.686246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:15.686287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:15.686988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:15.689115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:15.772855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:15.951812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:16.225348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:16.445034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:16.752845Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1721:3316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:16.752910Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:16.756758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:16.939486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.162949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.365690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.581090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:17.787809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:18.048350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:18.279638Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2392:3811], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:18.279684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:18.279745Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2397:3816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:18.280706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:18.428924Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2399:3818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:18.470888Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:2457:3857] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:18.548142Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2467:3866], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:18.549665Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2IyYTJiNGYtMWZmYmQ3ZGUtMTkwMTc5YzQtYzAzMzczYjY=, ActorId: [1:1707:3303], ActorState: ExecuteState, TraceId: 01jweams1711hq958bxhxrn4xp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:18.550337Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:30:19.996118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:30:19.996174Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F8CA363BAC2 14. ??:0: ?? @ 0x7F8CA36CD84F |73.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> KqpStats::DeferredEffects-UseSink >> KqpParams::ParameterTypes |73.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |73.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |73.1%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::SplitShardsWithPgKey [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:29:32.250316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:29:32.250345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:32.250351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:29:32.250356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:29:32.250371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:29:32.250376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:29:32.250384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:32.250398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:29:32.250516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:29:32.250591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:29:32.263985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:29:32.264015Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:32.266707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:29:32.266852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:29:32.266895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:29:32.268438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:29:32.268603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:29:32.268724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:32.268784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:29:32.269448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:32.269511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:29:32.269845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:32.269857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:32.269881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:29:32.269892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:32.269899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:29:32.269937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:29:32.271542Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, 
primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:29:32.294198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:29:32.294285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:32.294350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:29:32.294400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:29:32.294412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:32.295294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:32.295326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:29:32.295381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:32.295392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:29:32.295398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:29:32.295404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:29:32.295900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:32.295913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:29:32.295919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:29:32.296309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:32.296321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:32.296327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:32.296345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:29:32.297026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:29:32.297484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:29:32.297537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:29:32.297730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:32.297756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:29:32.297763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:32.297827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:29:32.297834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:32.297870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:29:32.297881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:29:32.298394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:32.298404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:32.298449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
4 to tablet: 72075186233409583 cookie: 72057594046678944:38 msg type: 269553152 2025-05-29T15:30:21.350963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710675:0 from tablet: 72057594046678944 to tablet: 72075186233409584 cookie: 72057594046678944:39 msg type: 269553152 2025-05-29T15:30:21.350984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710675, partId: 0, tablet: 72075186233409583 2025-05-29T15:30:21.350987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710675, partId: 0, tablet: 72075186233409584 2025-05-29T15:30:21.351114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046678944 to tablet: 72075186233409585 cookie: 72057594046678944:40 msg type: 269553152 2025-05-29T15:30:21.351134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046678944 to tablet: 72075186233409586 cookie: 72057594046678944:41 msg type: 269553152 2025-05-29T15:30:21.351150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710676, partId: 0, tablet: 72075186233409585 2025-05-29T15:30:21.351153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710676, partId: 0, tablet: 72075186233409586 2025-05-29T15:30:21.351219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710677:0 from tablet: 72057594046678944 to tablet: 72075186233409587 cookie: 72057594046678944:42 msg type: 269553152 2025-05-29T15:30:21.351237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710677:0 from tablet: 72057594046678944 to tablet: 72075186233409588 cookie: 72057594046678944:43 msg type: 269553152 2025-05-29T15:30:21.351256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710677, partId: 0, tablet: 72075186233409587 2025-05-29T15:30:21.351259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710677, partId: 0, tablet: 72075186233409588 2025-05-29T15:30:21.351354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710678:0 from tablet: 72057594046678944 to tablet: 72075186233409589 cookie: 72057594046678944:44 msg type: 269553152 2025-05-29T15:30:21.351383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710678:0 from tablet: 72057594046678944 to tablet: 72075186233409590 cookie: 72057594046678944:45 msg type: 269553152 2025-05-29T15:30:21.351406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710678, partId: 0, tablet: 72075186233409589 2025-05-29T15:30:21.351411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710678, partId: 0, tablet: 72075186233409590 
2025-05-29T15:30:21.351629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710679:0 from tablet: 72057594046678944 to tablet: 72075186233409591 cookie: 72057594046678944:46 msg type: 269553152 2025-05-29T15:30:21.351652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710679:0 from tablet: 72057594046678944 to tablet: 72075186233409592 cookie: 72057594046678944:47 msg type: 269553152 2025-05-29T15:30:21.351672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710679, partId: 0, tablet: 72075186233409591 2025-05-29T15:30:21.351678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710679, partId: 0, tablet: 72075186233409592 2025-05-29T15:30:21.351801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046678944 to tablet: 72075186233409593 cookie: 72057594046678944:48 msg type: 269553152 2025-05-29T15:30:21.351823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046678944 to tablet: 72075186233409594 cookie: 72057594046678944:49 msg type: 269553152 2025-05-29T15:30:21.351840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710680, partId: 0, tablet: 72075186233409593 2025-05-29T15:30:21.351846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710680, partId: 0, tablet: 72075186233409594 2025-05-29T15:30:21.351942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710681:0 from tablet: 72057594046678944 to tablet: 72075186233409595 cookie: 72057594046678944:50 msg type: 269553152 2025-05-29T15:30:21.351959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710681:0 from tablet: 72057594046678944 to tablet: 72075186233409596 cookie: 72057594046678944:51 msg type: 269553152 2025-05-29T15:30:21.351974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710681, partId: 0, tablet: 72075186233409595 2025-05-29T15:30:21.351977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710681, partId: 0, tablet: 72075186233409596 2025-05-29T15:30:21.352116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710682:0 from tablet: 72057594046678944 to tablet: 72075186233409597 cookie: 72057594046678944:52 msg type: 269553152 2025-05-29T15:30:21.352135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710682:0 from tablet: 72057594046678944 to tablet: 72075186233409598 cookie: 72057594046678944:53 msg type: 269553152 2025-05-29T15:30:21.352183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710682, partId: 0, tablet: 72075186233409597 
2025-05-29T15:30:21.352186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710682, partId: 0, tablet: 72075186233409598 2025-05-29T15:30:21.352352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710683:0 from tablet: 72057594046678944 to tablet: 72075186233409599 cookie: 72057594046678944:54 msg type: 269553152 2025-05-29T15:30:21.352391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710683:0 from tablet: 72057594046678944 to tablet: 72075186233409600 cookie: 72057594046678944:55 msg type: 269553152 2025-05-29T15:30:21.352415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710683, partId: 0, tablet: 72075186233409599 2025-05-29T15:30:21.352421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710683, partId: 0, tablet: 72075186233409600 2025-05-29T15:30:21.355210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710684:0 from tablet: 72057594046678944 to tablet: 72075186233409601 cookie: 72057594046678944:56 msg type: 269553152 2025-05-29T15:30:21.355269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710684:0 from tablet: 72057594046678944 to tablet: 72075186233409602 cookie: 72057594046678944:57 msg type: 269553152 2025-05-29T15:30:21.355289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710684, partId: 0, tablet: 72075186233409601 2025-05-29T15:30:21.355294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710684, partId: 0, tablet: 72075186233409602 2025-05-29T15:30:21.355344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710685:0 from tablet: 72057594046678944 to tablet: 72075186233409603 cookie: 72057594046678944:58 msg type: 269553152 2025-05-29T15:30:21.355379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710685:0 from tablet: 72057594046678944 to tablet: 72075186233409604 cookie: 72057594046678944:59 msg type: 269553152 2025-05-29T15:30:21.355394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710685, partId: 0, tablet: 72075186233409603 2025-05-29T15:30:21.355397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710685, partId: 0, tablet: 72075186233409604 2025-05-29T15:30:21.355484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710686:0 from tablet: 72057594046678944 to tablet: 72075186233409605 cookie: 72057594046678944:60 msg type: 269553152 2025-05-29T15:30:21.355499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710686:0 from tablet: 72057594046678944 to tablet: 72075186233409606 cookie: 72057594046678944:61 msg type: 269553152 
2025-05-29T15:30:21.355513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710686, partId: 0, tablet: 72075186233409605 2025-05-29T15:30:21.355517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710686, partId: 0, tablet: 72075186233409606 2025-05-29T15:30:21.355547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710687:0 from tablet: 72057594046678944 to tablet: 72075186233409607 cookie: 72057594046678944:62 msg type: 269553152 2025-05-29T15:30:21.355562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710687:0 from tablet: 72057594046678944 to tablet: 72075186233409608 cookie: 72057594046678944:63 msg type: 269553152 2025-05-29T15:30:21.355590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710687, partId: 0, tablet: 72075186233409607 2025-05-29T15:30:21.355594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710687, partId: 0, tablet: 72075186233409608 |73.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut >> KqpStats::JoinStatsBasicYql-StreamLookupJoin |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |73.2%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut >> KqpExplain::UpdateSecondaryConditionalSecondaryKey+UseSink >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> KqpQuery::ExecuteWriteQuery |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CreateTable >> TSchemeShardSplitBySizeTest::Split10Shards >> TVectorIndexTests::CreateTable >> TAsyncIndexTests::CreateTable [GOOD] >> KqpQuery::UpdateWhereInSubquery >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] >> KqpParams::Decimal-QueryService+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:22.798782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:22.798820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:22.798827Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:22.798833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:22.798850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:22.798854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:22.798865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:22.798880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:22.798991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:22.799070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:22.814197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:22.814221Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:22.817008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:22.817131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:22.817175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:22.819200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:22.819380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:22.819516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:22.819562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:22.820099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:22.820141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:22.820402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:22.820415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:22.820434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2025-05-29T15:30:22.820442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:22.820448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:22.820483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.821920Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:30:22.845053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:22.845152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.845228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:22.845277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:22.845289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.846091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:22.846125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:22.846180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.846191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:22.846197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:22.846203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:22.846708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.846723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at 
schemeshard: 72057594046678944 2025-05-29T15:30:22.846728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:22.847164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.847180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.847187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:22.847196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:22.847951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:22.848392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:22.848441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:22.848623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:22.848652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:22.848661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:22.848735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:30:22.848743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:22.848780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:22.848794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:22.849291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:22.849302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:22.849354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 1009: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 333 RawX2: 4294969610 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-05-29T15:30:22.968555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:22.968560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.968564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-29T15:30:22.968569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 101:0 129 -> 240 2025-05-29T15:30:22.969729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:30:22.969757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:30:22.969773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:30:22.970766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:30:22.970835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-05-29T15:30:22.970884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.970904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-05-29T15:30:22.971041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-05-29T15:30:22.971053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:2 ProgressState 2025-05-29T15:30:22.971070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:2 progress is 2/3 2025-05-29T15:30:22.971077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone 
TxId: 101 ready parts: 2/3 2025-05-29T15:30:22.971083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:2 progress is 2/3 2025-05-29T15:30:22.971087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-05-29T15:30:22.971093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-05-29T15:30:22.971180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.971203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.971209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState 2025-05-29T15:30:22.971218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 3/3 2025-05-29T15:30:22.971222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-05-29T15:30:22.971227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 3/3 2025-05-29T15:30:22.971231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-05-29T15:30:22.971235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-05-29T15:30:22.971254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:384:2351] message: TxId: 101 2025-05-29T15:30:22.971261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-05-29T15:30:22.971271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:30:22.971276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:30:22.971321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:30:22.971328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:1 2025-05-29T15:30:22.971332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:1 2025-05-29T15:30:22.971338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:30:22.971341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:2 2025-05-29T15:30:22.971345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:2 2025-05-29T15:30:22.971353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:30:22.975474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:30:22.975498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:385:2352] TestWaitNotification: OK eventTxId 101 2025-05-29T15:30:22.975658Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:30:22.975735Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 93us result status StatusSuccess 2025-05-29T15:30:22.975974Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TVectorIndexTests::CreateTable [GOOD] >> KqpLimits::TooBigColumn-useSink |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> KqpStats::DataQueryMulti ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:23.233043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:23.233080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:23.233086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:23.233093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:23.233109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:23.233113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:23.233123Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:23.233138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:23.233261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:23.233375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:23.248307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:23.248339Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:23.251428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:23.251590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:23.251640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:23.253310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:23.253471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:23.253611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:23.253671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:23.254209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:23.254268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:23.254559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:23.254572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:23.254600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:23.254609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:23.254616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:23.254660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.256344Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:30:23.279672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:23.279771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.279888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:23.279935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:23.279946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.280876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:23.280910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:23.280968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.280979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:23.280985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:23.280992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:23.281488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.281500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:23.281507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:23.281855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.281865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.281872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:23.281880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:23.282664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:23.283205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:23.283254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:23.283453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:23.283481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:23.283490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:23.283584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:30:23.283592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:23.283632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:23.283645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:23.284117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:23.284127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:23.284183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
chemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:30:23.438296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:30:23.438319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:30:23.438329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:30:23.438333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:30:23.438484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:30:23.438495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:30:23.438499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:30:23.438504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:30:23.438509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:30:23.438904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:30:23.438921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:30:23.438926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:30:23.438931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:30:23.438936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 
2025-05-29T15:30:23.438949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/4, is published: true 2025-05-29T15:30:23.439552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-05-29T15:30:23.439564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:23.439625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:30:23.439658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:3 progress is 2/4 2025-05-29T15:30:23.439662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 2/4 2025-05-29T15:30:23.439667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:3 progress is 2/4 2025-05-29T15:30:23.439671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 2/4 2025-05-29T15:30:23.439676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2025-05-29T15:30:23.439836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:23.439847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:23.439852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:23.439864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-05-29T15:30:23.439869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:23.439897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:30:23.439914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:2 progress is 3/4 2025-05-29T15:30:23.439918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-05-29T15:30:23.439925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:2 progress is 3/4 2025-05-29T15:30:23.439929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-05-29T15:30:23.439934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-05-29T15:30:23.439968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:23.439974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:23.439980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:23.439994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.439999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:23.440025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:30:23.440040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 4/4 2025-05-29T15:30:23.440045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-05-29T15:30:23.440050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 4/4 2025-05-29T15:30:23.440054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-05-29T15:30:23.440058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-05-29T15:30:23.440074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:419:2375] message: TxId: 102 2025-05-29T15:30:23.440080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-05-29T15:30:23.440086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:30:23.440091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:30:23.440113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:30:23.440118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:1 2025-05-29T15:30:23.440122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:1 2025-05-29T15:30:23.440128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:30:23.440132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:2 2025-05-29T15:30:23.440136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:2 2025-05-29T15:30:23.440146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:30:23.440152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:3 2025-05-29T15:30:23.440155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:3 2025-05-29T15:30:23.440162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:30:23.440216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:23.440614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:23.441157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:30:23.441169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:538:2487] TestWaitNotification: OK eventTxId 102 >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> KqpLimits::QueryExecTimeout >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> KqpLimits::ManyPartitions |73.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/kqprun/kqprun |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |73.2%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun >> KqpExplain::ReadTableRangesFullScan >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] >> KqpScheme::CreateResourcePoolClassifierOnServerless [FAIL] |73.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize-useSink Test command err: Trying to start YDB, gRPC: 2632, MsgBus: 17679 2025-05-29T15:30:12.611426Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890386763158312:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:12.611446Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012ab/r3tmp/tmpRYhAEm/pdisk_1.dat 2025-05-29T15:30:12.675231Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890386763158294:2079] 1748532612611321 != 1748532612611324 2025-05-29T15:30:12.676539Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2632, node 1 2025-05-29T15:30:12.689645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:12.689659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:12.689662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:12.689707Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17679 TClient is connected to server localhost:17679 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:30:12.753957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:12.754021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:12.755070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:12.759581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.770841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.833939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.851725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.864218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.923190Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890386763159928:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.923218Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.957025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.964038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.974987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.982666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.996901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:13.010815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:13.025093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:13.041222Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890391058127876:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:13.041250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:13.041252Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890391058127881:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:13.042097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:13.044795Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890391058127883:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:13.108266Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890391058127935:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:13.212559Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890391058127951:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:13.212696Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Mjk2ODVhNjEtNTQxNWY2NzctNjQxOGQzODctOTdlOTdhNGU=, ActorId: [1:7509890386763159925:2401], ActorState: ExecuteState, TraceId: 01jweamkxgcgst8kjzgwj30p2n, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:13.213396Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F3C73508AC2 14. ??:0: ?? @ 0x7F3C7359A84F Trying to start YDB, gRPC: 19035, MsgBus: 21778 2025-05-29T15:30:16.668368Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890405555366217:2268];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:16.668398Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012ab/r3tmp/tmpeKBgLK/pdisk_1.dat 2025-05-29T15:30:16.735271Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:16.735330Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890405555365983:2079] 1748532616667317 != 1748532616667320 TServer::EnableGrpc on GrpcPort 19035, node 1 2025-05-29T1 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F7BE72EEAC2 14. ??:0: ?? 
@ 0x7F7BE738084F Trying to start YDB, gRPC: 64299, MsgBus: 20389 2025-05-29T15:30:21.697285Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890425413490502:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:21.697351Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012ab/r3tmp/tmp5cKVic/pdisk_1.dat 2025-05-29T15:30:21.750479Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890425413490482:2079] 1748532621697112 != 1748532621697115 2025-05-29T15:30:21.753291Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64299, node 1 2025-05-29T15:30:21.765854Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:21.765872Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:21.765874Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:21.765917Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20389 TClient is connected to server localhost:20389 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:21.825026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:21.825059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:21.825998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:21.826055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:21.855098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
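The failure above is the same one that recurs throughout this run (see the KqpBatchDelete::Large_1 and KqpStats::SelfJoin blocks below): query compilation inside CreateSampleTables dies with ": Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1", and the test harness then aborts the whole worker through AssertSuccessResult (ydb/core/kqp/ut/common/kqp_ut_common.h:375). Because CreateSampleTables runs on a thread-pool thread via library/cpp/threading/future (the SetValue/async.h frames in the trace), the unittest assertion cannot be handled as an ordinary test failure and escalates to the VERIFY panic recorded here. As a reading aid, here is a minimal sketch of what a helper with the traced signature could look like, assuming it simply wraps UNIT_ASSERT_C around the SDK status; the SDK include path and the error formatting are assumptions, not the actual contents of kqp_ut_common.h:

    // Sketch under assumptions: mirrors the signature shown in the stack trace,
    // void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &).
    #include <library/cpp/testing/unittest/registar.h>   // UNIT_ASSERT_C, RaiseError
    #include <ydb-cpp-sdk/client/types/status/status.h>  // assumed header for NYdb::TStatus

    namespace NKikimr::NKqp {

    inline void AssertSuccessResult(const NYdb::TStatus& result) {
        // On failure this raises through library/cpp/testing/unittest
        // (registar.cpp:36 RaiseError in the trace). When the calling thread is
        // not the unittest thread -- as with CreateSampleTables, dispatched via
        // NThreading::Async -- the framework cannot unwind into the test body
        // and panics instead, producing the "assertion failed in non-unittest
        // thread" VERIFY seen above.
        UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
    }

    } // namespace NKikimr::NKqp

The surrounding "Resource pool default not found or you don't have access permissions" warnings, the "Scheduled retry ... doublechecking" message, and the TX_PROXY "path exist, request accepts it" error look like the ordinary create-on-first-use sequence for the default workload-manager resource pool; they do not by themselves indicate the cause of the compiler crash.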
2025-05-29T15:30:21.871738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:21.894643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:21.907344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.081807Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890429708459409:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.081844Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.134363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.141522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.153607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.208456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.263231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.270846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.278213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.294387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890429708460065:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.294450Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.294577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890429708460070:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.295290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:22.298371Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890429708460072:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:22.353226Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890429708460123:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:22.444264Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890429708460139:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:22.444347Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTEyYTgzZmYtYWIxYWE5MzgtYjhhNGI1ZDAtY2E5YTZlOWU=, ActorId: [1:7509890429708459391:2401], ActorState: ExecuteState, TraceId: 01jweamwyp9p78rgmnyjdec42s, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:22.444889Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F61A237CAC2 14. ??:0: ?? @ 0x7F61A240E84F >> KqpExplain::FullOuterJoin |73.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |73.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:30:18.770647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:18.770683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:18.770689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:18.770695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:18.770710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:18.770728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:18.770754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:18.770769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:18.770893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:18.788805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:18.945871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:18.945912Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:18.946044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:18.955520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:18.955551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:18.955590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:18.958361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:18.958439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:18.958550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:18.958770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:18.964397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:18.968068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:18.969577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:18.969596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:18.971152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:18.971170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:18.971183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:18.971221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:18.977595Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:19.018467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:19.023711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.034092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:19.034199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:19.034220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.035697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:19.035724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:19.035768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.035779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:19.035785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:19.035790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:19.036166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.036174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:19.036179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:19.036508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.036515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.036520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:19.036526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:19.037251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:19.037623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:19.038469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:19.040291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:19.040355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:19.040370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:19.040464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:30:25.792851Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:30:25.792855Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:30:25.792860Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:30:25.792873Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2025-05-29T15:30:25.793045Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:30:25.793052Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:25.793100Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:30:25.793122Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 3/3 2025-05-29T15:30:25.793126Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:30:25.793131Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 3/3 2025-05-29T15:30:25.793134Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:30:25.793138Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2025-05-29T15:30:25.793143Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:30:25.793149Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:30:25.793153Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:30:25.793176Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:30:25.793180Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:30:25.793183Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:30:25.793190Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:30:25.793194Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and 
all the parts is done, operation id: 1003:2 2025-05-29T15:30:25.793197Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:30:25.793205Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:30:25.793363Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:30:25.793647Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:30:25.793664Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:30:25.793671Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:30:25.794521Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:30:25.794559Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:30:25.795062Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 344 RawX2: 111669152023 } TabletId: 72075186233409546 State: 4 2025-05-29T15:30:25.795083Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:30:25.795411Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:30:25.795495Z node 26 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 2025-05-29T15:30:25.795545Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:30:25.795600Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:30:25.795658Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:30:25.795664Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:30:25.795677Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:30:25.795684Z node 26 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:30:25.795689Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409546 2025-05-29T15:30:25.796713Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:30:25.796729Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-05-29T15:30:25.796765Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:30:25.796824Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:30:25.796830Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:30:25.796962Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:30:25.796977Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:30:25.796982Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:627:2552] 2025-05-29T15:30:25.797888Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 347 RawX2: 111669152025 } TabletId: 72075186233409547 State: 4 2025-05-29T15:30:25.797906Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:30:25.798202Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:30:25.798290Z node 26 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 2025-05-29T15:30:25.798333Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:25.798388Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2025-05-29T15:30:25.798939Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:30:25.798949Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: 
TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:30:25.798964Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:30:25.799545Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:30:25.799559Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2025-05-29T15:30:25.799646Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-05-29T15:30:25.799708Z node 26 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:30:25.799718Z node 26 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] >> KqpStats::OneShardLocalExec-UseSink >> KqpLimits::ManyPartitions [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_1 Test command err: Trying to start YDB, gRPC: 21819, MsgBus: 19059 2025-05-29T15:29:53.408260Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890303768184599:2200];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:53.408377Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002791/r3tmp/tmpfiDHlP/pdisk_1.dat 2025-05-29T15:29:53.718950Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890303768184434:2079] 1748532593406082 != 1748532593406085 2025-05-29T15:29:53.719855Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:53.723479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:53.723534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:53.739560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21819, node 1 2025-05-29T15:29:54.088520Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:54.088533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:54.088535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:54.088590Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration TClient is connected to server localhost:19059 TClient is connected to server localhost:19059 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:54.615454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:54.653635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:54.785752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:54.799996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:54.858488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:55.013988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890312358120680:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:55.014019Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:55.859455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:55.924086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:55.932312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:55.944613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:56.018817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:56.027694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:56.035317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:56.071450Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890316653088637:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:56.071486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890316653088642:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:56.071492Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:56.081336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:56.083495Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890316653088644:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:29:56.174852Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890316653088695:3407] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:56.670465Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890316653088711:2478], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:56.670656Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NGYzODBkMTEtYmJlYWY1NzYtMmM1YzhmYy0yNzViYzY4ZA==, ActorId: [1:7509890308063153381:2403], ActorState: ExecuteState, TraceId: 01jweam3b64mp56n9q9fqy6jjd, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:56.671792Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:29:58.408126Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7509890303768184599:2200];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:58.408170Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-05-29T15:30:08.691148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7301: Cannot get console configs 2025-05-29T15:30:08.691199Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7F5C5CE03AC2 14. ??:0: ?? @ 0x7F5C5CE9584F |73.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |73.3%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |73.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |73.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::SelfJoin Test command err: Trying to start YDB, gRPC: 7343, MsgBus: 19290 2025-05-29T15:30:09.253936Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890374481237034:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:09.253963Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012ec/r3tmp/tmp571Egj/pdisk_1.dat 2025-05-29T15:30:09.302546Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:09.302614Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890374481237013:2079] 1748532609253797 != 1748532609253800 TServer::EnableGrpc on GrpcPort 7343, node 1 2025-05-29T15:30:09.314883Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is 
empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:09.314898Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:09.314904Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:09.314950Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19290 TClient is connected to server localhost:19290 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:09.380358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:09.380385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:09.381305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:09.382544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.388292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.453813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.473035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.529630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.615615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890374481238653:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.615652Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.648867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.655409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.664005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.671038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.678084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.692562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.706631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.722866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890374481239307:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.722905Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.722923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890374481239312:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.723635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:09.726140Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890374481239314:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:09.799538Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890374481239365:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:09.910861Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890374481239381:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:09.910962Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmRmOGQ3ZjMtZDMzNjdkZTItOWVkM2M0NjktYzU0MGJjYzU=, ActorId: [1:7509890374481238635:2401], ActorState: ExecuteState, TraceId: 01jweamgnt615nd99tv60gqgsa, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:09.911600Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F58139ADAC2 14. ??:0: ?? @ 0x7F5813A3F84F Trying to start YDB, gRPC: 29710, MsgBus: 64900 2025-05-29T15:30:13.402079Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890390223198120:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:13.402101Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012ec/r3tmp/tmp4XgrQo/pdisk_1.dat 2025-05-29T15:30:13.459607Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890390223198098:2079] 1748532613401907 != 1748532613401910 2025-05-29T15:30:13.461599Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29710, node 1 2025-05-29T1 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F62AA503AC2 14. ??:0: ?? 
@ 0x7F62AA59584F Trying to start YDB, gRPC: 17910, MsgBus: 17603 2025-05-29T15:30:21.926353Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890425646141397:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:21.926374Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012ec/r3tmp/tmpnqhp9N/pdisk_1.dat 2025-05-29T15:30:21.993845Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:21.993903Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890425646141377:2079] 1748532621926194 != 1748532621926197 TServer::EnableGrpc on GrpcPort 17910, node 1 2025-05-29T15:30:22.007668Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:22.007687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:22.007689Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:22.007742Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17603 TClient is connected to server localhost:17603 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:30:22.069197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:22.069231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:22.070327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:22.071549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.075940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:22.141641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.163756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.175870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.314062Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890429941110306:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.314122Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.363047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.369368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.375829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.430575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.438997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.494637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.502446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.518991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890429941110962:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.519025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890429941110967:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.519026Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.519725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:22.522005Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890429941110969:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:22.619634Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890429941111020:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:22.726684Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890429941111036:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:22.726817Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzYxYWI0MDEtODBiNTVhYjgtOTBiNTY2MTYtMTdlMzA1YzM=, ActorId: [1:7509890429941110303:2401], ActorState: ExecuteState, TraceId: 01jweamx5pfjbnwbhvg0ega690, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:22.734929Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FCF180BFAC2 14. ??:0: ?? @ 0x7FCF1815184F >> IcbAsActorTests::TestHttpPostReaction >> IcbAsActorTests::TestHttpPostReaction [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitions [GOOD] Test command err: Trying to start YDB, gRPC: 9253, MsgBus: 1171 2025-05-29T15:30:12.316348Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890387186783314:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:12.316365Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012b4/r3tmp/tmpVsPJWf/pdisk_1.dat 2025-05-29T15:30:12.381703Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890387186783290:2079] 1748532612316175 != 1748532612316178 2025-05-29T15:30:12.381807Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9253, node 1 2025-05-29T15:30:12.395051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:12.395064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:12.395066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:12.395106Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1171 TClient is connected to server localhost:1171 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:12.458041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:12.458077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:12.459243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:12.459982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.472584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.492781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.514609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.527355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.740419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890387186784924:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.740440Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.792521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.800007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.807358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.821508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.835480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.849740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.863321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.879365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890387186785577:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.879393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.879411Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890387186785582:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.880118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:12.883355Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890387186785584:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:12.950908Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890387186785635:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:13.038308Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890387186785651:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:13.038425Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWE1MTg0MGEtNTZlMDVjZmEtYTEzMWMwNDEtZDgwZmJjY2I=, ActorId: [1:7509890387186784897:2400], ActorState: ExecuteState, TraceId: 01jweamkrf3xm0ektm4ntepzbb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:13.039215Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FD0F8ED7AC2 14. ??:0: ?? @ 0x7FD0F8F6984F Trying to start YDB, gRPC: 13819, MsgBus: 18933 2025-05-29T15:30:17.008006Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890406856924390:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:17.008025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012b4/r3tmp/tmprKaF47/pdisk_1.dat 2025-05-29T15:30:17.061049Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890406856924369:2079] 1748532617007849 != 1748532617007852 2025-05-29T15:30:17.061509Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13819, node 1 2025-05-29T15:3 ... kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:21.639877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:21.647834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:21.655399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:21.669722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:21.683855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:21.698278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:21.711932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:21.728191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890423963920185:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:21.728220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890423963920190:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:21.728231Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:21.728992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:21.731364Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890423963920192:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:21.809353Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890423963920243:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:21.922096Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890423963920259:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:21.922215Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDNiNzYzYjctM2YyZDM0OTgtOGY3NTA1ZTAtYzVmZmQ3OTg=, ActorId: [1:7509890423963919515:2401], ActorState: ExecuteState, TraceId: 01jweamwcz6kkp2ybdke365qxr, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:21.922947Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F7F0977AAC2 14. ??:0: ?? @ 0x7F7F0980C84F Trying to start YDB, gRPC: 16820, MsgBus: 21172 2025-05-29T15:30:25.357645Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890444495457824:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:25.357664Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012b4/r3tmp/tmpJbSyIh/pdisk_1.dat 2025-05-29T15:30:25.410641Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:25.410747Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890444495457803:2079] 1748532625357507 != 1748532625357510 TServer::EnableGrpc on GrpcPort 16820, node 1 2025-05-29T15:30:25.424909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:25.424923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:25.424925Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:25.424969Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21172 2025-05-29T15:30:25.460281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:25.460308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:25.461359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21172 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:25.490623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:25.498839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:25.795843Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890444495462595:2629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:25.795876Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:25.796053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890444495462614:2632], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:25.797107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:30:25.800013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:30:25.800661Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890444495462616:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:30:25.886152Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890444495462669:5082] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::DeferredEffects-UseSink Test command err: Trying to start YDB, gRPC: 21031, MsgBus: 62491 2025-05-29T15:30:09.341197Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890372054470994:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:09.341536Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012e8/r3tmp/tmpTWnO9O/pdisk_1.dat 2025-05-29T15:30:09.385495Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890372054470971:2079] 1748532609340951 != 1748532609340954 2025-05-29T15:30:09.387181Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21031, node 1 2025-05-29T15:30:09.400327Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:09.400339Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:09.400342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:09.400394Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62491 2025-05-29T15:30:09.443475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:09.443501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:09.444605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62491 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:09.468056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.479301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.497059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.518611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.531279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.753475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890372054472606:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.753514Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.786018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.792926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.847587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.860098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.915394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.923643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.937622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.953389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890372054473262:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.953416Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.953418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890372054473267:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.954105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:09.957309Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890372054473269:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:10.027031Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890376349440616:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:10.136354Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890376349440632:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:10.136479Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjdhNDhjMWEtMzcwMzFkNDYtNzI4YzcxMDUtZTFhYWEzMTg=, ActorId: [1:7509890372054472603:2401], ActorState: ExecuteState, TraceId: 01jweamgx13ve8g2ee42s5qsk9, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:10.137162Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F76FA67CAC2 14. ??:0: ?? @ 0x7F76FA70E84F Trying to start YDB, gRPC: 61228, MsgBus: 15857 2025-05-29T15:30:14.019813Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890396827004108:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:14.019862Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012e8/r3tmp/tmpAfh8Ks/pdisk_1.dat 2025-05-29T15:30:14.098623Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:14.099052Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890396827003946:2079] 1748532614017884 != 1748532614017887 TServer::EnableGrpc on GrpcPort 61228, node 1 2025-05-29 ... @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F769A9C3AC2 14. ??:0: ?? 
@ 0x7F769AA5584F Trying to start YDB, gRPC: 7962, MsgBus: 8357 2025-05-29T15:30:22.138748Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890429436594087:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:22.138771Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012e8/r3tmp/tmpqWrQqI/pdisk_1.dat 2025-05-29T15:30:22.192843Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:22.192932Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890429436594068:2079] 1748532622138639 != 1748532622138642 TServer::EnableGrpc on GrpcPort 7962, node 1 2025-05-29T15:30:22.204120Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:22.204138Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:22.204141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:22.204195Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8357 2025-05-29T15:30:22.241557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:22.241580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:22.242534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8357 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:22.268925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.277851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:22.294242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.310901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.322224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.497264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890429436595701:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.497292Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.533726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.541257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.551524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.565144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.572170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.588313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.600612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.617579Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890429436596353:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.617611Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.617617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890429436596358:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.618456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:22.627757Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890429436596360:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:22.721406Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890429436596411:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:22.823977Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890429436596427:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:22.824091Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjdkNjEyZGItMThlYzYzNWQtMjNmMTcyMjMtOTA0ODE5NGY=, ActorId: [1:7509890429436595698:2401], ActorState: ExecuteState, TraceId: 01jweamx8sbtn8d24d0k359waj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:22.824801Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FB7D7375AC2 14. ??:0: ?? @ 0x7FB7D740784F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateSecondaryConditionalSecondaryKey+UseSink Test command err: Trying to start YDB, gRPC: 10906, MsgBus: 30236 2025-05-29T15:30:09.316170Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890374402199370:2059];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:09.316201Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012eb/r3tmp/tmpm1cdFO/pdisk_1.dat 2025-05-29T15:30:09.383576Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890374402199352:2079] 1748532609316036 != 1748532609316039 2025-05-29T15:30:09.385439Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10906, node 1 2025-05-29T15:30:09.397779Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:09.397792Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:09.397794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:09.397841Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30236 TClient is connected to server localhost:30236 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:30:09.459766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:09.459801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:09.461176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:09.475587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.480363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.496769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.524268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.535025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.639438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890374402200986:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.639462Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.673589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.681123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.693195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.706357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.720793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.734901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.748578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.764112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890374402201637:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.764136Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.764154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890374402201642:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.764823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:09.768298Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890374402201644:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:09.830611Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890374402201696:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:09.940445Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890374402201712:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:09.940553Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTQ3YTU0MC00YjI2ZDUzNC03YTc4ODIxNi04YjcwNjIxNg==, ActorId: [1:7509890374402200968:2401], ActorState: ExecuteState, TraceId: 01jweamgq3ex3pftfvjeyjgmxp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:09.941205Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FC6C4571AC2 14. ??:0: ?? @ 0x7FC6C460384F Trying to start YDB, gRPC: 8709, MsgBus: 2549 2025-05-29T15:30:13.479489Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890389828549868:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:13.479756Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012eb/r3tmp/tmpRkq9H9/pdisk_1.dat 2025-05-29T15:30:13.550254Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:13.550527Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890389828549848:2079] 1748532613479355 != 1748532613479358 TServer::EnableGrpc on GrpcPort 8709, node 1 2025-05-29T15 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7EFC5384AAC2 14. ??:0: ?? 
@ 0x7EFC538DC84F Trying to start YDB, gRPC: 21326, MsgBus: 13323 2025-05-29T15:30:22.330811Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890430898099315:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:22.330829Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012eb/r3tmp/tmp608mol/pdisk_1.dat 2025-05-29T15:30:22.397686Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890430898099293:2079] 1748532622330636 != 1748532622330639 2025-05-29T15:30:22.399913Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21326, node 1 2025-05-29T15:30:22.408533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:22.408543Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:22.408544Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:22.408590Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13323 2025-05-29T15:30:22.433636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:22.433660Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:22.434734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:22.474874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.478769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
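
Each compilation failure in this run carries the same root issue, yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1, i.e. an expression-AST child accessor was asked for a child index the node does not have, and KQP surfaced it as INTERNAL_ERROR. The sketch below shows only the general shape of such a bounds check; the node layout and method name are illustrative assumptions, not the actual yql_expr.h code.

// Illustrative sketch of an AST child accessor whose bounds check yields
// "index out of range"; assumed shape, not the real YQL TExprNode.
#include <cstddef>
#include <memory>
#include <stdexcept>
#include <vector>

struct TExprNode {
    std::vector<std::shared_ptr<TExprNode>> Children;

    // A compiler pass asking for Child(i) with i >= Children.size() trips
    // this check; in the log that becomes "Compilation failed ...
    // INTERNAL_ERROR" with the fatal issue "index out of range, code: 1".
    const TExprNode& Child(size_t index) const {
        if (index >= Children.size()) {
            throw std::out_of_range("yql_expr.h: index out of range");
        }
        return *Children[index];
    }
};

int main() {
    TExprNode node;                       // node with zero children
    try {
        node.Child(0);                    // reproduces the failure mode
    } catch (const std::out_of_range&) {  // recoverable here, fatal in KQP
    }
}
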
2025-05-29T15:30:22.543606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.564158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.577051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.731717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890430898100938:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.731750Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.780116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.788658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.796862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.810554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.866108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.875694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.888354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.904428Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890430898101594:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.904459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890430898101599:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.904461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.905300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:22.907636Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890430898101601:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:22.989323Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890430898101652:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:23.106414Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890430898101668:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:23.106551Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWVmYzU5NTctZmYyNjQ1NDMtZGE4ODQ2OWMtZWU5NDU3NmE=, ActorId: [1:7509890430898100920:2401], ActorState: ExecuteState, TraceId: 01jweamxhr5kwsz4e7pq8dyq87, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:23.107380Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FC8E5739AC2 14. ??:0: ?? @ 0x7FC8E57CB84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::ParameterTypes Test command err: Trying to start YDB, gRPC: 12796, MsgBus: 16966 2025-05-29T15:30:09.341073Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890371942251185:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:09.341123Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012f9/r3tmp/tmp2xE0La/pdisk_1.dat 2025-05-29T15:30:09.398976Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890371942251164:2079] 1748532609340922 != 1748532609340925 2025-05-29T15:30:09.400814Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12796, node 1 2025-05-29T15:30:09.412155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:09.412174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:09.412176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:09.412224Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16966 2025-05-29T15:30:09.444123Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:09.444149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:09.445157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16966 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:09.478005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.479936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:30:09.485651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.506409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.564799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.574479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.699152Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890371942252802:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.699189Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.748040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.755363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.769563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.783407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.838340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.846022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.853417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.868405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890371942253454:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.868426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890371942253459:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.868429Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.869086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:09.873259Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890371942253461:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:09.959029Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890371942253512:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:10.033404Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890371942253528:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:10.033499Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OGVhZmJjNjctNTE5ZjA2YmItZmRlODE4ZTYtNjQwNTNjZGI=, ActorId: [1:7509890371942252784:2401], ActorState: ExecuteState, TraceId: 01jweamgtceg76txwezy63yrpg, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:10.034035Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F1599B24AC2 14. ??:0: ?? @ 0x7F1599BB684F Trying to start YDB, gRPC: 61762, MsgBus: 20014 2025-05-29T15:30:13.734946Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890390708013204:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:13.735150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012f9/r3tmp/tmpv5sSUF/pdisk_1.dat 2025-05-29T15:30:13.788501Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890390708013182:2079] 174853261373 ... x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F6E2FA7FAC2 14. ??:0: ?? 
@ 0x7F6E2FB1184F Trying to start YDB, gRPC: 1746, MsgBus: 10441 2025-05-29T15:30:22.144805Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890428350662712:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:22.144826Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012f9/r3tmp/tmp75eSXP/pdisk_1.dat 2025-05-29T15:30:22.200440Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:22.200521Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890428350662693:2079] 1748532622144704 != 1748532622144707 TServer::EnableGrpc on GrpcPort 1746, node 1 2025-05-29T15:30:22.211828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:22.211839Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:22.211842Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:22.211881Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10441 2025-05-29T15:30:22.248156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:22.248200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:10441 2025-05-29T15:30:22.249188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:22.276972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.281372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
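
Separately from the compile failures, the repeated "Resource pool default not found" warnings, the "Scheduled retry ... doublechecking" message, and the TX_PROXY error "path exist, request accepts it" describe one benign startup sequence: the default workload-manager pool is fetched before it exists, created on demand, and a racing creator then finds the path already present and accepts that as success. The following is a generic create-if-missing sketch of that sequence; the enum values and function names are placeholders, not YDB's TPoolCreatorActor API.

// Generic idempotent-create sketch matching the log's sequence; names and
// statuses are placeholders, not the actual YDB workload-service code.
#include <iostream>
#include <set>
#include <string>

enum class EStatus { Success, NotFound, AlreadyExists };

std::set<std::string> gScheme;            // stand-in for the schemeshard

EStatus FetchPool(const std::string& path) {
    return gScheme.count(path) ? EStatus::Success : EStatus::NotFound;
}

EStatus CreatePool(const std::string& path) {
    return gScheme.insert(path).second ? EStatus::Success
                                       : EStatus::AlreadyExists;
}

int main() {
    const std::string path = "/Root/.metadata/workload_manager/pools/default";
    if (FetchPool(path) == EStatus::NotFound) {   // the NOT_FOUND warnings
        CreatePool(path);                         // first creator wins
        // A concurrent creator sees AlreadyExists and, like the log's
        // "path exist, request accepts it", treats it as success.
        if (CreatePool(path) == EStatus::AlreadyExists) {
            std::cout << "pool already present, doublecheck ok\n";
        }
    }
}
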
2025-05-29T15:30:22.342878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.360538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.370690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.535884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890428350664327:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.535937Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.587555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.594834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.607567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.621239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.676695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.686518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.700117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.720957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890428350664982:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.720994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890428350664987:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.721009Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.721861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:22.725979Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890428350664989:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:22.816918Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890428350665040:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:22.939234Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890428350665056:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:22.939366Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODhjODhmZWItNzYwNjY2MDgtN2JhMmM1M2EtZmYyYjFjMDY=, ActorId: [1:7509890428350664309:2401], ActorState: ExecuteState, TraceId: 01jweamxc03gaw74py3zvhwmjz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:22.941118Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F5967E70AC2 14. ??:0: ?? @ 0x7F5967F0284F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicYql-StreamLookupJoin Test command err: Trying to start YDB, gRPC: 12096, MsgBus: 12769 2025-05-29T15:30:09.237446Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890375168717541:2088];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:09.237600Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012f4/r3tmp/tmprGM8lw/pdisk_1.dat 2025-05-29T15:30:09.285821Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:09.286167Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890375168717492:2079] 1748532609236678 != 1748532609236681 TServer::EnableGrpc on GrpcPort 12096, node 1 2025-05-29T15:30:09.300455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:09.300486Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:09.300489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:09.300526Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12769 TClient is connected to server localhost:12769 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:09.365060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:09.365086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:09.366089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:09.366261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.371160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.435657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.455419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.466915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.589919Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890375168719126:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.589966Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.636689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.643788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.698551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.753112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.761825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.776423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.790337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.806088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890375168719782:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.806108Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890375168719787:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.806109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.806722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:09.810238Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890375168719789:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:09.888045Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890375168719840:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:09.966608Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890375168719856:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:09.966704Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YmEyNzhkYTctMzRkMDNhOS0yNWYzNjg1My01NDJlNzlk, ActorId: [1:7509890375168719108:2401], ActorState: ExecuteState, TraceId: 01jweamgrd8cva25v94d1dz85h, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:09.967406Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FEE47953AC2 14. ??:0: ?? @ 0x7FEE479E584F Trying to start YDB, gRPC: 3164, MsgBus: 64722 2025-05-29T15:30:13.899563Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890390303296243:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:13.899592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012f4/r3tmp/tmpmt8jPm/pdisk_1.dat 2025-05-29T15:30:13.954586Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3164, node 1 2025-05-29T15:30:13.964901Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:13.964917Z node 1 :NET_CLASSIFIER WARN: n ... x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F4A1104FAC2 14. ??:0: ?? 
@ 0x7F4A110E184F Trying to start YDB, gRPC: 6996, MsgBus: 20581 2025-05-29T15:30:22.227942Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890430930080709:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:22.227980Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012f4/r3tmp/tmpgKqJLM/pdisk_1.dat 2025-05-29T15:30:22.278636Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890430930080688:2079] 1748532622227814 != 1748532622227817 2025-05-29T15:30:22.281313Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6996, node 1 2025-05-29T15:30:22.292400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:22.292418Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:22.292420Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:22.292467Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20581 TClient is connected to server localhost:20581 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:22.356078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:22.356112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:22.357165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:22.357699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.364262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:22.380832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.402405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.413926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.554962Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890430930082319:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.554991Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.597075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.604814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.614491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.628344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.683919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.693797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.749641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.767707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890430930082977:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.767734Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.767833Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890430930082982:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.768841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:22.774938Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890430930082984:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:22.856367Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890430930083035:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:22.961428Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890430930083051:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:22.961536Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGY5MGI2ZjktYTUwZGNhZjItYjVkYWIyYjMtNWM1YmZlM2M=, ActorId: [1:7509890430930082301:2401], ActorState: ExecuteState, TraceId: 01jweamxdfejp499tm3qtkcrez, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:22.962806Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FF15AC17AC2 14. ??:0: ?? @ 0x7FF15ACA984F |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TSchemeShardSplitBySample::FlatList [GOOD] >> TSchemeShardSplitBySample::EntryOrderDoesNotCount [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::ExecuteWriteQuery Test command err: Trying to start YDB, gRPC: 23954, MsgBus: 18727 2025-05-29T15:30:10.103942Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890376079113957:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:10.103962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012dc/r3tmp/tmpBgoy0s/pdisk_1.dat 2025-05-29T15:30:10.150075Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:10.151999Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890376079113936:2079] 1748532610103822 != 1748532610103825 TServer::EnableGrpc on GrpcPort 23954, node 1 2025-05-29T15:30:10.163387Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:10.163401Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:10.163403Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:10.163449Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18727 TClient is connected to server localhost:18727 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:10.229747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:10.229779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:10.230388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.230894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:10.249070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:10.263480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:10.279277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:10.288366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:10.405602Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890376079115569:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:10.405625Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:10.442547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.449819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.462700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.517504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.525795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.539856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.553872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:10.569618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890376079116223:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:10.569646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890376079116228:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:10.569645Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:10.570342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:10.573093Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890376079116230:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:10.661982Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890376079116281:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:10.740206Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890376079116297:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:10.740311Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YmQ0YjRlMWItNWRmYmQxY2MtNjIyYjFhNjQtMjdjMWRmNTc=, ActorId: [1:7509890376079115551:2401], ActorState: ExecuteState, TraceId: 01jweamhg93rt4panmtqwdarr2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:10.740949Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F5E164BEAC2 14. ??:0: ?? @ 0x7F5E1655084F Trying to start YDB, gRPC: 13319, MsgBus: 11311 2025-05-29T15:30:14.159365Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890393703681256:2233];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:14.159428Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012dc/r3tmp/tmpayGGnG/pdisk_1.dat 2025-05-29T15:30:14.226489Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890393703681062:2079] 1748532614158547 != 1748532614158550 2025-05-29T15:30:14.226946Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13319, node 1 2025-05-29 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FD3FD544AC2 14. ??:0: ?? 
@ 0x7FD3FD5D684F Trying to start YDB, gRPC: 29250, MsgBus: 15913 2025-05-29T15:30:22.445954Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890427987172296:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:22.445986Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012dc/r3tmp/tmp0guKNP/pdisk_1.dat 2025-05-29T15:30:22.504183Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:22.505009Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890427987172275:2079] 1748532622445818 != 1748532622445821 TServer::EnableGrpc on GrpcPort 29250, node 1 2025-05-29T15:30:22.519173Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:22.519187Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:22.519189Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:22.519237Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15913 TClient is connected to server localhost:15913 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:30:22.579899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:22.579942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:22.581017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:22.585364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.592817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:22.612778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.635259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.649367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:22.870482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890427987173903:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.870553Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:22.879172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.890184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.907237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.916942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.930780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:22.987364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:23.000814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:23.016883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890432282141854:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:23.016933Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:23.016959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890432282141859:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:23.017638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:23.019623Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890432282141861:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:30:23.109581Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890432282141912:3394] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:23.219498Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890432282141928:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:23.219639Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YThiM2YxMWItMWI0MDg2ZGItNjcwYWM1MGItYWQ5NTg2ZDY=, ActorId: [1:7509890427987173885:2401], ActorState: ExecuteState, TraceId: 01jweamxn893pbktn3z6f78z87, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:23.220445Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F00A54F7AC2 14. ??:0: ?? @ 0x7F00A558984F |73.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |73.4%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> HttpRequest::ProbeServerless >> KqpQuery::UpdateThenDelete-UseSink |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySample::EntryOrderDoesNotCount [GOOD] |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateResourcePoolClassifierOnServerless [FAIL] Test command err: Trying to start YDB, gRPC: 12420, MsgBus: 3056 2025-05-29T15:29:02.603381Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890087390815654:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:02.603442Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001184/r3tmp/tmpbsrxOx/pdisk_1.dat 2025-05-29T15:29:02.680570Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890087390815466:2079] 1748532542602143 != 1748532542602146 2025-05-29T15:29:02.682644Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12420, node 1 2025-05-29T15:29:02.703445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:02.703478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:02.704159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:02.704165Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:02.704167Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:02.704214Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:02.704381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3056 TClient is connected to server localhost:3056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:02.773995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.787602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.805500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.831598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.848010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.981958Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890087390817098:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.981992Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:03.037275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.048400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.066275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.076822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.091619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.102136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.117202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:03.133342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890091685785045:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:03.133393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:03.133504Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890091685785050:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:03.134413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:03.142992Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890091685785052:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:03.200565Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890091685785103:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:03.308906Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890091685785112:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:03.309058Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjNlYTkyMWEtNGRjOGEyYTAtOWRmY2YyZWYtZjUxOTAxMDA=, ActorId: [1:7509890087390817071:2400], ActorState: ExecuteState, TraceId: 01jweajfmw4j65k5y9b26a9xdw, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:29:03.309797Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FB089395AC2 14. ??:0: ?? @ 0x7FB08942784F Trying to start YDB, gRPC: 16060, MsgBus: 15725 2025-05-29T15:29:07.268712Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890108151277967:2145];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:07.269453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001184/r3tmp/tmpgVjw99/pdisk_1.dat 2025-05-29T15:29:07.334261Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16060, node 1 2025-05-29T15:29:07.353285Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:07.353302Z node 1 :NET_CLASSIFIER WARN ... :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=2&id=NzkyMWNjNGItODc4NjRhNTItZDI1ZmZmYzgtMTBhMGYxMGQ=" tx_control { tx_id: "01jweamzb5b80j9v9dbzmarb7c" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/test-shared/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532624 } } } } } ; 2025-05-29T15:30:24.756522Z node 2 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:30:25.007144Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:223: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw== 2025-05-29T15:30:25.007209Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: unknown state, session actor bootstrapped 2025-05-29T15:30:25.007396Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ReadyState, TraceId: 01jweamzkfahqhwsa1xa0cbc27, received request, proxyRequestId: 310 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/initialization/migrations`; rpcActor: [2:7509890441009084874:4394] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2025-05-29T15:30:25.007404Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:264: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ReadyState, TraceId: 01jweamzkfahqhwsa1xa0cbc27, request placed into pool from cache: default 2025-05-29T15:30:25.007427Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:575: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ExecuteState, TraceId: 01jweamzkfahqhwsa1xa0cbc27, Sending CompileQuery request 2025-05-29T15:30:25.024944Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:775: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ExecuteState, TraceId: 01jweamzkfahqhwsa1xa0cbc27, acquire mvcc snapshot 2025-05-29T15:30:25.025758Z node 2 :KQP_SESSION TRACE: kqp_session_actor.cpp:813: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ExecuteState, TraceId: 01jweamzkfahqhwsa1xa0cbc27, read snapshot result: UNAVAILABLE, step: 1748532625000, tx id: 18446744073709551615 2025-05-29T15:30:25.025781Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1303: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ExecuteState, TraceId: 01jweamzkfahqhwsa1xa0cbc27, ExecutePhyTx, tx: 0x000050365A025318 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-05-29T15:30:25.025788Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1454: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ExecuteState, TraceId: 01jweamzkfahqhwsa1xa0cbc27, Sending to Executer TraceId: 0 8 2025-05-29T15:30:25.025812Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1512: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ExecuteState, TraceId: 01jweamzkfahqhwsa1xa0cbc27, Created new KQP executer: [2:7509890441009084888:4393] isRollback: 0 2025-05-29T15:30:25.026843Z node 2 
:KQP_SESSION DEBUG: kqp_session_actor.cpp:1704: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ExecuteState, TraceId: 01jweamzkfahqhwsa1xa0cbc27, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-05-29T15:30:25.026895Z node 2 :KQP_SESSION INFO: kqp_session_actor.cpp:1963: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ExecuteState, TraceId: 01jweamzkfahqhwsa1xa0cbc27, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 1.954 QueriesCount: 2 2025-05-29T15:30:25.026924Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2118: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ExecuteState, TraceId: 01jweamzkfahqhwsa1xa0cbc27, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-05-29T15:30:25.026985Z node 2 :KQP_SESSION INFO: kqp_session_actor.cpp:2478: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ExecuteState, TraceId: 01jweamzkfahqhwsa1xa0cbc27, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-05-29T15:30:25.026993Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2539: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ExecuteState, TraceId: 01jweamzkfahqhwsa1xa0cbc27, EndCleanup, isFinal: 0 2025-05-29T15:30:25.027004Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2275: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ExecuteState, TraceId: 01jweamzkfahqhwsa1xa0cbc27, Sent query response back to proxy, proxyRequestId: 310, proxyId: [2:7509890179016071886:2277] 2025-05-29T15:30:25.027578Z node 2 :KQP_SESSION INFO: kqp_session_actor.cpp:2320: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ReadyState, Session closed due to explicit close event 2025-05-29T15:30:25.027606Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1454: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ReadyState, Sending to Executer TraceId: 0 8 2025-05-29T15:30:25.027621Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1512: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ReadyState, Created new KQP executer: [2:7509890441009084898:4393] isRollback: 1 2025-05-29T15:30:25.027639Z node 2 :KQP_SESSION INFO: kqp_session_actor.cpp:2478: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-05-29T15:30:25.027764Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2539: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: CleanupState, EndCleanup, isFinal: 1 
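Annotation: the KQP_SESSION trace above captures one complete session-actor lifecycle: the actor is created in an unknown state, bootstraps, accepts a request in ReadyState, compiles and executes in ExecuteState (CompileQuery, mvcc snapshot, ExecutePhyTx, a new KQP executer, TEvTxResponse), answers the proxy, runs a non-final cleanup, and, after an explicit close, runs a rollback executer and a final cleanup before being destroyed. A minimal sketch of that ordering; only the state and event names are taken from the log, everything else is an illustrative assumption, not the actual kqp_session_actor.cpp logic:

    #include <cstdio>

    // Hypothetical reconstruction of the session-actor steps visible in the
    // surrounding trace, in the order they are logged.
    int main() {
        const char* steps[] = {
            "unknown state: Create session actor",
            "unknown state: session actor bootstrapped",
            "ReadyState: received request, placed into pool 'default'",
            "ExecuteState: Sending CompileQuery request",
            "ExecuteState: acquire mvcc snapshot",
            "ExecuteState: ExecutePhyTx -> Created new KQP executer",
            "ExecuteState: TEvTxResponse SUCCESS -> Create QueryResponse",
            "ExecuteState: Cleanup start / EndCleanup, isFinal: 0",
            "ReadyState: Session closed due to explicit close event",
            "ReadyState: rollback executer, Cleanup start, isFinal: 1",
            "CleanupState: EndCleanup, isFinal: 1",
            "unknown state: Cleanup temp tables, Session actor destroyed",
        };
        for (const char* s : steps) std::puts(s);
        return 0;
    }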
2025-05-29T15:30:25.027773Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2551: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: unknown state, Cleanup temp tables: 0 2025-05-29T15:30:25.027802Z node 2 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2642: SessionId: ydb://session/3?node_id=2&id=MjMzOTJhNy1iYjM5NTA2Ni0yNzE4NzJhMC1mY2JiM2E3Mw==, ActorId: [2:7509890441009084873:4393], ActorState: unknown state, Session actor destroyed 2025-05-29T15:30:25.030625Z node 1 :HIVE WARN: tx__status.cpp:57: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-05-29T15:30:25.030654Z node 1 :KQP_SESSION INFO: kqp_session_actor.cpp:2320: SessionId: ydb://session/3?node_id=1&id=YWI1ZDhkYjItNzQyYmRkYjEtNDNlNGE1ZmQtYmU3MWZjOGM=, ActorId: [1:7509890179818137680:2331], ActorState: ReadyState, Session closed due to explicit close event 2025-05-29T15:30:25.030673Z node 1 :KQP_SESSION INFO: kqp_session_actor.cpp:2478: SessionId: ydb://session/3?node_id=1&id=YWI1ZDhkYjItNzQyYmRkYjEtNDNlNGE1ZmQtYmU3MWZjOGM=, ActorId: [1:7509890179818137680:2331], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-05-29T15:30:25.030678Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2539: SessionId: ydb://session/3?node_id=1&id=YWI1ZDhkYjItNzQyYmRkYjEtNDNlNGE1ZmQtYmU3MWZjOGM=, ActorId: [1:7509890179818137680:2331], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-05-29T15:30:25.030681Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2551: SessionId: ydb://session/3?node_id=1&id=YWI1ZDhkYjItNzQyYmRkYjEtNDNlNGE1ZmQtYmU3MWZjOGM=, ActorId: [1:7509890179818137680:2331], ActorState: unknown state, Cleanup temp tables: 0 2025-05-29T15:30:25.030704Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2642: SessionId: ydb://session/3?node_id=1&id=YWI1ZDhkYjItNzQyYmRkYjEtNDNlNGE1ZmQtYmU3MWZjOGM=, ActorId: [1:7509890179818137680:2331], ActorState: unknown state, Session actor destroyed 2025-05-29T15:30:25.030860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-29T15:30:25.030905Z node 1 :HIVE WARN: tx__status.cpp:57: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 2 2025-05-29T15:30:25.030933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-29T15:30:25.034295Z node 2 :SYSTEM_VIEWS WARN: sysview_service.cpp:811: Summary delivery problem: service id# [2:7509890179016071722:2118], processor id# 72075186224038891, database# /Root/test-shared 2025-05-29T15:30:25.293363Z node 2 :SYSTEM_VIEWS WARN: sysview_service.cpp:811: Summary delivery problem: service id# [2:7509890179016071722:2118], processor id# 72075186224038891, database# /Root/test-shared (NThreading::TFutureException) library/cpp/threading/future/core/future-inl.h:58: wait timeout >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] >> TVectorIndexTests::CreateTableMultiColumn |73.4%| [TA] $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal-QueryService+UseSink Test command err: Trying to start YDB, gRPC: 27717, MsgBus: 27716 2025-05-29T15:30:10.676629Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890378995355470:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:10.677034Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012d6/r3tmp/tmpIEeRtC/pdisk_1.dat 2025-05-29T15:30:10.727617Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890378995355449:2079] 1748532610676421 != 1748532610676424 2025-05-29T15:30:10.729175Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27717, node 1 2025-05-29T15:30:10.740197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:10.740207Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:10.740209Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:10.740242Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27716 TClient is connected to server localhost:27716 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:10.806962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:10.806996Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:10.808123Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:10.812931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
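Annotation: every test failure in this run bottoms out in the same pair of issues, ": Fatal: Execution, code: 1060" followed by ": Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1". In other words, a bounds check inside the YQL expression AST fires during query compilation and the compile actor surfaces it as INTERNAL_ERROR. A sketch of the kind of guard that produces such a message (hypothetical: the real accessor at yql_expr.h:1874 is not reproduced here, only the failure mode it reports):

    #include <stdexcept>
    #include <vector>

    // Hypothetical bounds-checked child accessor for an expression node.
    struct TExprNodeSketch {
        std::vector<const TExprNodeSketch*> Children;

        const TExprNodeSketch* Child(size_t index) const {
            if (index >= Children.size()) {
                // corresponds to "yql_expr.h:1874: index out of range, code: 1"
                throw std::out_of_range("index out of range");
            }
            return Children[index];
        }
    };

A plausible trigger, not confirmed by this log, would be a compilation pass indexing a child that an earlier transform removed; the log only establishes that the check fires while compiling the test queries.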
2025-05-29T15:30:10.826995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:10.843869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:10.864525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:10.876728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:11.024964Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890383290324399:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.024988Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.061713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.069087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.078768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.092990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.106245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.120835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.134558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.149946Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890383290325050:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.149970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890383290325055:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.149974Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.150611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:11.154202Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890383290325057:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:11.216938Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890383290325108:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:11.318193Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890383290325124:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:11.318313Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzdlMWNlZjAtMjI2ZTI4MDItYWM3ZDM1OGItNDhhNWJiYjQ=, ActorId: [1:7509890383290324381:2401], ActorState: ExecuteState, TraceId: 01jweamj2dczphp8h21tyb8g77, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:11.319073Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FB27F75BAC2 14. ??:0: ?? @ 0x7FB27F7ED84F Trying to start YDB, gRPC: 12979, MsgBus: 25238 2025-05-29T15:30:14.833858Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890394715266137:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:14.833897Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012d6/r3tmp/tmpMY9U1n/pdisk_1.dat 2025-05-29T15:30:14.890169Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:14.890272Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890394715266117:2079] 1748532614833738 != 1748532614833741 TServer::EnableGrpc on GrpcPort 12979, node 1 2025-05-29 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F306F64EAC2 14. ??:0: ?? 
@ 0x7F306F6E084F Trying to start YDB, gRPC: 26807, MsgBus: 15355 2025-05-29T15:30:23.451871Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890435003732820:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:23.451887Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012d6/r3tmp/tmp6qnWQh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26807, node 1 2025-05-29T15:30:23.529180Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:23.529763Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890435003732798:2079] 1748532623451688 != 1748532623451691 2025-05-29T15:30:23.534226Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:23.534246Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:23.534248Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:23.534287Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15355 TClient is connected to server localhost:15355 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:30:23.597990Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:23.598016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:23.598816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:30:23.608954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:23.617021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
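Annotation: the WorkloadService warnings that precede each test body form a benign bootstrap sequence rather than a separate failure. The pool fetcher reports NOT_FOUND for resource pool "default", a creator actor is scheduled, the create races with a concurrent one and comes back with "path exist, request accepts it", and the service retries ("doublechecking") until the pool is visible. A generic create-if-missing-then-recheck sketch of that pattern, using hypothetical stand-ins rather than the actual workload-service code:

    #include <cstdio>

    // Stand-ins for the scheme operations; in the log these are the
    // TPoolFetcherActor / TPoolCreatorActor talking to the schemeshard.
    static bool g_poolExists = false;

    bool FetchPool() { return g_poolExists; }   // NOT_FOUND -> false

    bool CreatePool() {                         // false if it lost the race
        bool won = !g_poolExists;
        g_poolExists = true;
        return won;
    }

    int main() {
        if (!FetchPool()) {
            std::puts("Failed to fetch pool default: NOT_FOUND");
            if (!CreatePool())
                std::puts("path exist, request accepts it"); // concurrent create
            // "Scheduled retry ... doublechecking": fetch again after creating.
            std::printf("retry fetch, pool present: %s\n",
                        FetchPool() ? "yes" : "no");
        }
        return 0;
    }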
2025-05-29T15:30:23.685590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:30:23.711521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:23.725816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:23.839114Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890435003734449:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:23.839142Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:23.879521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:23.886905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:23.943401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:23.953297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:23.960594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:23.972157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:23.979170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:23.994841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890435003735104:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:23.994858Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:23.994866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890435003735109:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:23.995560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:23.999615Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890435003735111:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:24.095286Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890439298702458:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:24.178516Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890439298702474:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:24.178648Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Yzc0YzdiMmQtZjVlZjM3M2QtZjRiYWI5ZjEtZWQzYmI4YWI=, ActorId: [1:7509890435003734431:2401], ActorState: ExecuteState, TraceId: 01jweamykt3zbz5d4akkgg9gn9, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:24.179714Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FEF00A17AC2 14. ??:0: ?? @ 0x7FEF00AA984F |73.4%| [TA] $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn-useSink Test command err: Trying to start YDB, gRPC: 12168, MsgBus: 61125 2025-05-29T15:30:10.842228Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890377688767270:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:10.842250Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012d3/r3tmp/tmpVs0fo7/pdisk_1.dat 2025-05-29T15:30:10.901516Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:10.901579Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890377688767250:2079] 1748532610842076 != 1748532610842079 TServer::EnableGrpc on GrpcPort 12168, node 1 2025-05-29T15:30:10.914140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:10.914152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:10.914154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:10.914191Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:61125 2025-05-29T15:30:10.944889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:10.944913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:10.945969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61125 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:10.978429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:10.983702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:11.045960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:11.063117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:11.074413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:11.179799Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890381983736181:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.179826Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.212552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.219537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.232748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.246382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.261651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.274622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.288381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.304583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890381983736833:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.304608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890381983736838:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.304618Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.305217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:11.308086Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890381983736840:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:30:11.401282Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890381983736891:3398] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:11.488054Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890381983736907:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:11.488166Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTliOGRjMjQtZTIyYWE4M2MtYzI3Nzk4OTktY2VkZmIxZjA=, ActorId: [1:7509890381983736163:2401], ActorState: ExecuteState, TraceId: 01jweamj78fz6jv7jtg80p8hrh, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:11.488781Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F8A6646DAC2 14. ??:0: ?? @ 0x7F8A664FF84F Trying to start YDB, gRPC: 32618, MsgBus: 7193 2025-05-29T15:30:15.363633Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890399584807196:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:15.363653Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012d3/r3tmp/tmpPDUP6l/pdisk_1.dat 2025-05-29T15:30:15.426181Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:15.426266Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890399584807174:2079] 1748532615363417 != 1748532615363420 TServer::EnableGrpc on GrpcPort 32618, node 1 2025-05-29T ... x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F2D5B6B6AC2 14. ??:0: ?? 
@ 0x7F2D5B74884F Trying to start YDB, gRPC: 9596, MsgBus: 22057 2025-05-29T15:30:23.624631Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890435094996594:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:23.624667Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012d3/r3tmp/tmpWfRkTH/pdisk_1.dat 2025-05-29T15:30:23.694711Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890435094996574:2079] 1748532623624489 != 1748532623624492 2025-05-29T15:30:23.697213Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9596, node 1 2025-05-29T15:30:23.719637Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:23.719649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:23.719651Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:23.719696Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22057 2025-05-29T15:30:23.767680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:23.767711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:23.768770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22057 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:23.794904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:23.802056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
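Annotation: each VERIFY block above names the same helper, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &) at kqp_ut_common.h:375, failing on (result.IsSuccess()) while CreateSampleTables runs on a thread-pool thread (note the util/thread/pool.h frames), which is why the panic is reported as "assertion failed in non-unittest thread" before the process aborts. A minimal sketch of what such a helper does, assuming it only checks the status and aborts with the issues; the signature shape and the checked expression come from the stack trace, the body is an assumption:

    #include <cstdio>
    #include <cstdlib>
    #include <string>

    // Stand-in for NYdb::TStatus, reduced to what the assertion needs.
    struct TStatusSketch {
        bool Success = false;
        std::string Issues;
        bool IsSuccess() const { return Success; }
    };

    // Hypothetical body; only "(result.IsSuccess())" is taken from the trace.
    void AssertSuccessResult(const TStatusSketch& result) {
        if (!result.IsSuccess()) {
            std::fprintf(stderr,
                         "assertion failed: (result.IsSuccess())\n%s\n",
                         result.Issues.c_str());
            std::abort();   // mirrors the VERIFY/abort visible in the log
        }
    }

    int main() {
        AssertSuccessResult(TStatusSketch{true, ""});   // passes silently
        return 0;
    }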
2025-05-29T15:30:23.822908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:23.840176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:23.850111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:24.014313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890439389965508:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:24.014343Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:24.058580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:24.066064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:24.077135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:24.091565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:24.105495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:24.119488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:24.133731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:24.149892Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890439389966162:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:24.149926Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:24.150026Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890439389966167:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:24.150866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:24.153510Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890439389966169:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:24.251269Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890439389966220:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:24.374668Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890439389966236:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:24.374825Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWRjY2QzMjEtMmRmYTM3MDctZTJiM2ZiNTEtYWQ5ZjczZjQ=, ActorId: [1:7509890439389965490:2401], ActorState: ExecuteState, TraceId: 01jweamyrn612h4sh7t9g2x9h8, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:24.375578Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F0FBD595AC2 14. ??:0: ?? @ 0x7F0FBD62784F |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |73.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication >> TVectorIndexTests::CreateTableMultiColumn [GOOD] >> TSchemeShardSplitByLoad::TableSplitsUpToMaxPartitionsCount >> TEvLocalSyncDataTests::SqueezeBlocks1 [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |73.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryMulti Test command err: Trying to start YDB, gRPC: 1204, MsgBus: 13177 2025-05-29T15:30:11.249561Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890380738310158:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:11.249838Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012c8/r3tmp/tmpmLWxWK/pdisk_1.dat 2025-05-29T15:30:11.305422Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:11.305505Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890380738310135:2079] 1748532611249412 != 1748532611249415 TServer::EnableGrpc on GrpcPort 1204, node 1 2025-05-29T15:30:11.315038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:11.315049Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:11.315051Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:11.315094Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13177 2025-05-29T15:30:11.352112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:11.352144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:11.353185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13177 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:11.380960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:11.394249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:11.411881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:11.434406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:11.446830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:11.591828Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890380738311788:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.591861Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.645165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.653514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.709183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.722765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.736874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.750965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.764846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.787835Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890380738312443:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.787872Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.787986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890380738312448:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.788984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:11.792123Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890380738312450:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:11.850649Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890380738312501:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:11.965617Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890380738312517:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:11.965717Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=N2YwYWM5NWQtMTRhZGFjYWQtOWY4N2Q2NDktMzI3Y2IyY2M=, ActorId: [1:7509890380738311770:2401], ActorState: ExecuteState, TraceId: 01jweamjpb2y221tq7sxsn1112, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:11.966394Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F76653C0AC2 14. ??:0: ?? @ 0x7F766545284F Trying to start YDB, gRPC: 12207, MsgBus: 15125 2025-05-29T15:30:15.654583Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890400711985554:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:15.654910Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012c8/r3tmp/tmppqZCMV/pdisk_1.dat 2025-05-29T15:30:15.707527Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890400711985534:2079] 1748532615654419 != 1748532615654422 2025-05-29T15:30:15.709881Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12207, node 1 2025-05-29T1 ... x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F1B862A6AC2 14. ??:0: ?? 
@ 0x7F1B8633884F Trying to start YDB, gRPC: 5161, MsgBus: 18495 2025-05-29T15:30:23.958143Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890433766630437:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:23.958180Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012c8/r3tmp/tmpF5mO4i/pdisk_1.dat 2025-05-29T15:30:24.013781Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890433766630417:2079] 1748532623958023 != 1748532623958026 2025-05-29T15:30:24.015334Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5161, node 1 2025-05-29T15:30:24.027196Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:24.027213Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:24.027214Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:24.027251Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18495 2025-05-29T15:30:24.060469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:24.060497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:24.061575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18495 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:24.089581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:24.095814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
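Every failure above follows one pattern: query compilation aborts with ": Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1", and the test helper NKikimr::NKqp::AssertSuccessResult (kqp_ut_common.h:375 in each stack trace) then panics on (result.IsSuccess()), killing the unittest thread via RaiseError. Below is a minimal C++ sketch of that assert-or-panic shape; TStatus here is a simplified stand-in for NYdb::TStatus, and the real helper in kqp_ut_common.h differs in detail.

#include <stdexcept>
#include <string>

// Simplified stand-in for NYdb::TStatus (assumption: illustrative only).
struct TStatus {
    bool Success;
    std::string Issues;
    bool IsSuccess() const { return Success; }
    const std::string& GetIssues() const { return Issues; }
};

// Sketch of the assert-or-panic pattern seen in the traces: a non-success
// status becomes an immediate hard failure that carries the compiler issues
// ("Fatal: Execution, code: 1060", "index out of range, code: 1") with it.
inline void AssertSuccessResult(const TStatus& result) {
    if (!result.IsSuccess()) {
        throw std::runtime_error(
            "assertion failed: (result.IsSuccess())\n" + result.GetIssues());
    }
}

For example, AssertSuccessResult({false, ": Fatal: Execution, code: 1060"}) takes the panic path, which is why a single compile error in CreateSampleTables surfaces as a VERIFY failure in a non-unittest thread rather than an ordinary test assertion.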
2025-05-29T15:30:24.160466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:24.181704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:24.193849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:24.320431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890438061599369:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:24.320458Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:24.375404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:24.384289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:24.392347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:24.449097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:24.462663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:24.477004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:24.490126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:24.505846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890438061600022:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:24.505864Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890438061600027:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:24.505870Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:24.506484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:24.510344Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890438061600029:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:24.584748Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890438061600080:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:24.700251Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890438061600096:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:24.701755Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YmVmYzI1NWItN2M0ZTg4NTktMWE2MTc4YzUtNTQ1MTlkNWE=, ActorId: [1:7509890438061599350:2401], ActorState: ExecuteState, TraceId: 01jweamz3saeme52navnzyhws5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:24.702417Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F8679AF2AC2 14. ??:0: ?? @ 0x7F8679B8484F >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable |73.5%| [TA] {RESULT} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |73.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |73.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableMultiColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:28.231909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:28.231932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:28.231939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:28.231944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:28.231959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:28.231964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:28.231973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:28.231987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:28.232113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:28.232181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:28.249954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:28.249982Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:28.252573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:28.252680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:28.252718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:28.254272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:28.254415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:28.254505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:28.254534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:28.254934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:28.254965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:28.255145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:28.255151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:28.255165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:28.255171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:28.255175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:28.255198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.256201Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:30:28.278350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:28.278429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.278485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:28.278526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:28.278538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.279320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:28.279350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:28.279386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.279396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:28.279401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:28.279407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:28.279920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.279934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:28.279940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:28.280385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.280398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.280404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:28.280411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 
1/1 2025-05-29T15:30:28.281086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:28.281554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:28.281597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:28.281795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:28.281824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:28.281831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:28.281898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:30:28.281906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:28.281937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:28.281949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:28.282423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:28.282433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:28.282473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
on: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "covered1" DataColumnNames: "covered2" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 
SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:28.416120Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:30:28.416148Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 30us result status StatusSuccess 2025-05-29T15:30:28.416225Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: 
"indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_centroid" Type: "String" TypeId: 4097 Id: 3 NotNull: true IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:28.416297Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:30:28.416316Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 21us result status StatusSuccess 2025-05-29T15:30:28.416376Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "id1" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "id2" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "covered1" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "covered2" Type: "String" TypeId: 4097 Id: 5 NotNull: false IsBuildInProgress: false } 
KeyColumnNames: "__ydb_parent" KeyColumnNames: "id1" KeyColumnNames: "id2" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TxUsage::WriteToTopic_Invalid_Session_Table >> TSchemeShardSplitBySizeTest::ConcurrentSplitOneShard |73.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::QueryExecTimeout Test command err: Trying to start YDB, gRPC: 12154, MsgBus: 6641 2025-05-29T15:30:11.593951Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890384219392425:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:11.593970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012c7/r3tmp/tmpxHuy5x/pdisk_1.dat 2025-05-29T15:30:11.654178Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890384219392406:2079] 1748532611593831 != 1748532611593834 2025-05-29T15:30:11.654452Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12154, node 1 2025-05-29T15:30:11.669124Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:11.669141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:11.669143Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:11.669179Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6641 TClient is connected to server localhost:6641 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:30:11.730149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:11.730191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:11.731305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:11.734349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:11.737371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:30:11.738668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:11.762986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:30:11.823439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-29T15:30:11.837821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:11.981635Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890384219394054:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:11.981661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.024434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.031468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.044722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.058110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.072957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.086897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.101709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:12.117578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890388514362003:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.117613Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.117622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890388514362008:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:12.118335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:12.120429Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890388514362010:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:30:12.181242Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890388514362061:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:12.299716Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890388514362077:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:12.299842Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODk5YmZhYzEtZGQ3NTkzNjctODhmN2Q5MDctOTk3MWNmNzM=, ActorId: [1:7509890384219394036:2401], ActorState: ExecuteState, TraceId: 01jweamk0n084bgzjn8mxm2ydw, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:12.300535Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F5040E1CAC2 14. ??:0: ?? @ 0x7F5040EAE84F Trying to start YDB, gRPC: 29894, MsgBus: 28385 2025-05-29T15:30:16.036081Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890402777563128:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:16.036110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012c7/r3tmp/tmpUTIRCV/pdisk_1.dat 2025-05-29T15:30:16.093279Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890402777563107:2079] 174853261603586 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F83E8814AC2 14. ??:0: ?? 
@ 0x7F83E88A684F Trying to start YDB, gRPC: 11859, MsgBus: 14075 2025-05-29T15:30:24.559933Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890437004055126:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:24.560196Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012c7/r3tmp/tmpQPz28N/pdisk_1.dat 2025-05-29T15:30:24.614065Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:24.614180Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890437004055106:2079] 1748532624559762 != 1748532624559765 TServer::EnableGrpc on GrpcPort 11859, node 1 2025-05-29T15:30:24.627325Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:24.627347Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:24.627349Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:24.627394Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14075 TClient is connected to server localhost:14075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:30:24.691502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:24.691537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:24.692572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:24.692908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:24.702142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
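The "index out of range" issue itself points at yql_expr.h:1874, i.e. a bounds check inside the expression AST that the KQP compile actor reports as a Fatal issue with code 1. A hedged sketch of that failure mode, assuming a simplified node type (the real accessor in yql_expr.h differs in detail):

#include <cstddef>
#include <stdexcept>
#include <vector>

// Simplified expression-AST node (assumption: illustrative stand-in).
struct TNode {
    std::vector<TNode*> Children;

    // A child accessor with the kind of bounds check whose violation the log
    // reports as "yql_expr.h:1874: index out of range, code: 1".
    TNode* Child(size_t index) const {
        if (index >= Children.size()) {
            throw std::out_of_range("index out of range");
        }
        return Children[index];
    }
};

Under this reading, every affected test fails at the same point because CreateSampleTables sends the same query to the compiler, so the out-of-range child access reproduces deterministically across the KqpStats and KqpLimits runs shown here.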
2025-05-29T15:30:24.728107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
2025-05-29T15:30:24.751983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:30:24.762587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:30:24.910159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890437004056738:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:24.910190Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:24.948735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:30:24.959782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:30:24.974012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:30:24.987686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:30:25.002144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:30:25.015941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:30:25.029835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:30:25.046908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890441299024689:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:25.046935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:25.046979Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890441299024694:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:25.047869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:30:25.056683Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890441299024696:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:30:25.113519Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890441299024747:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:30:25.226562Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890441299024763:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:30:25.226697Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Nzk4OWJlMmEtNTcyMDZkNTAtNTE1MDZjMDEtY2VkZmJjNjk=, ActorId: [1:7509890437004056735:2401], ActorState: ExecuteState, TraceId: 01jweamzmp5rn1kztdhjafdjgj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:30:25.227448Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F3A5C526AC2 14. ??:0: ?? @ 0x7F3A5C5B884F |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats >> TxUsage::WriteToTopic_Demo_3_Table >> TSchemeShardSplitBySizeTest::ConcurrentSplitOneShard [GOOD] |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |73.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |73.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |73.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |73.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink Test command err: Trying to start YDB, gRPC: 64590, MsgBus: 28893 2025-05-29T15:30:12.129664Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890386977713209:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:12.129713Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012b5/r3tmp/tmpRarHvZ/pdisk_1.dat 2025-05-29T15:30:12.189171Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890386977713190:2079] 1748532612129541 != 1748532612129544 2025-05-29T15:30:12.190296Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64590, node 1 2025-05-29T15:30:12.202556Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:12.202570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:12.202571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:12.202612Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28893 TClient is connected 
to server localhost:28893 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:30:12.260475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:12.260501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:12.261676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:12.268638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.275035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.341107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.362179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.373249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.485311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890386977714826:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:12.485336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:12.533446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:30:12.541522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:30:12.555140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:30:12.562057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:30:12.576350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:30:12.590550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:30:12.604449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:30:12.620183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890386977715477:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:12.620212Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890386977715482:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:12.620217Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:12.620869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:30:12.624391Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890386977715484:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:30:12.691814Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890386977715535:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:30:12.777037Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890386977715551:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:30:12.777188Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OGM0YmI1NjktN2RlNzRmNzUtZWUyZDliNDUtNzkzMTYyN2I=, ActorId: [1:7509890386977714807:2401], ActorState: ExecuteState, TraceId: 01jweamkgbeg7hedb6tqcn5qa6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:30:12.778280Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F25682D8AC2 14. ??:0: ?? @ 0x7F256836A84F Trying to start YDB, gRPC: 25619, MsgBus: 6570 2025-05-29T15:30:16.681626Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890402785013318:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:16.681702Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012b5/r3tmp/tmpqwEdSO/pdisk_1.dat 2025-05-29T15:30:16.732085Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890402785013296:2079] 1748532616681446 != 1748532616681449 2025-05-29T15:30:16.732462Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25619, node 1 2025-05-29T ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F93BBDA6AC2 14. ??:0: ?? 
@ 0x7F93BBE3884F Trying to start YDB, gRPC: 18471, MsgBus: 29197 2025-05-29T15:30:24.829495Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890440243990891:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:24.829755Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012b5/r3tmp/tmpXJzfUC/pdisk_1.dat 2025-05-29T15:30:24.880876Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890440243990871:2079] 1748532624829369 != 1748532624829372 2025-05-29T15:30:24.881715Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18471, node 1 2025-05-29T15:30:24.892116Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:24.892129Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:24.892130Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:24.892173Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29197 TClient is connected to server localhost:29197 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:24.957567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:24.957614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:24.958648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:24.958878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:24.972723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:25.039131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:30:25.058029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:30:25.068768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:30:25.138996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890444538959822:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:25.139029Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:25.185391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:30:25.192880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:30:25.247672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:30:25.260470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:30:25.274260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:30:25.281061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:30:25.288445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:30:25.304242Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890444538960474:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:25.304266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890444538960479:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:25.304271Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:30:25.304966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:30:25.308659Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890444538960481:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:30:25.393689Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890444538960532:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:30:25.485595Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890444538960548:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:30:25.486823Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2E1MGRmN2EtMWE3ZDU4ODktMWE0NGE5NGItMWY3N2JlNjY=, ActorId: [1:7509890444538959803:2401], ActorState: ExecuteState, TraceId: 01jweamzwq2kcvc2ef80efyy55, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:30:25.488115Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F9DAE283AC2 14. ??:0: ?? @ 0x7F9DAE31584F >> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::ConcurrentSplitOneShard [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:29.131707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:29.131742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:29.131748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:29.131754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:29.131771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:29.131775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:29.131784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:29.131799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:29.131931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources 
configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:29.132016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:29.147216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:29.147238Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:29.150786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:29.150894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:29.150928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:29.152321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:29.152493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:29.152632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:29.152695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:29.153168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:29.153225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:29.153527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:29.153536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:29.153560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:29.153568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:29.153573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:29.153613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:30:29.154997Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:30:29.171847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:29.171941Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:29.172013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:29.172063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:29.172077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:29.172867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:29.172892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:29.172935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:29.172944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:29.172948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:29.172952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:29.173241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:29.173249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:29.173252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:29.173522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:29.173532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:29.173540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:29.173556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:29.174193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-05-29T15:30:29.174579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:29.174625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:29.174835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:29.174855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:29.174860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:29.174909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:30:29.174914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:29.174939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:29.174948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:29.175241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:29.175247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:29.175290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
n remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-05-29T15:30:29.416807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-05-29T15:30:29.417301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269553158 2025-05-29T15:30:29.417617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:29.417869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: OperationCookie: 102 TabletId: 72075186233409546 2025-05-29T15:30:29.417878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 102:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:30:29.417892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:30:29.417896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:30:29.417899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:30:29.417902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:30:29.417905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-29T15:30:29.417911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:30:29.417914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:30:29.417918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:30:29.417944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-29T15:30:29.418588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:30:29.418602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:260: Unable to activate 102:0 Leader for TabletID 72057594046678944 is [1:471:2419] sender: [1:689:2058] recipient: [1:15:2062] 2025-05-29T15:30:29.419011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 4294969592 } TabletId: 72075186233409546 State: 4 2025-05-29T15:30:29.419027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:30:29.419429Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:30:29.419519Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-05-29T15:30:29.431501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:29.431638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:30:29.432603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:30:29.432629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 TestWaitNotification wait txId: 102 2025-05-29T15:30:29.453267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-29T15:30:29.453300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-05-29T15:30:29.453328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-29T15:30:29.453332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-29T15:30:29.453487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-29T15:30:29.453539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-29T15:30:29.453546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:699:2606] 2025-05-29T15:30:29.453571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-29T15:30:29.453588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:30:29.453592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:699:2606] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 102 wait until 72075186233409546 is deleted 2025-05-29T15:30:29.453665Z node 1 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 Deleted tabletId 72075186233409546 2025-05-29T15:30:29.453828Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:30:29.453913Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: 
Tablet 72057594046678944 describe path "/MyRoot/Table" took 116us result status StatusSuccess 2025-05-29T15:30:29.454187Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnNames: "Value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\001\000\000\000A\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: 
false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |73.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TStorageTenantTest::Empty [GOOD] >> HttpRequest::Analyze |73.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TSchemeShardSplitBySizeTest::Make11MergeOperationsWithInflyLimit10 |73.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |73.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::ReadTableRangesFullScan Test command err: Trying to start YDB, gRPC: 22673, MsgBus: 28161 2025-05-29T15:30:12.750852Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890385296448628:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:12.750884Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012a9/r3tmp/tmpVTkLBA/pdisk_1.dat 2025-05-29T15:30:12.811933Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890385296448605:2079] 1748532612750630 != 1748532612750633 2025-05-29T15:30:12.813919Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22673, node 1 2025-05-29T15:30:12.828899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:12.828913Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:12.828915Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:12.828966Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to 
server localhost:28161 2025-05-29T15:30:12.853188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:12.853212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:12.854333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28161 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:12.898146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.902940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.966868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.984441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:12.997038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:13.170542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890389591417535:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:13.170571Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:13.218044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:13.226116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:13.234413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:13.248658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:13.262180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:13.276726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:13.290909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:13.307199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890389591418188:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:13.307234Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:13.307238Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890389591418193:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:13.308117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:13.310127Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890389591418195:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:30:13.393775Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890389591418246:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:13.491383Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890389591418262:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:13.491508Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=N2U3MGNkMjEtZDA3ZGUzMzMtMzEyZDZiYzAtMTkyN2MwN2U=, ActorId: [1:7509890389591417517:2401], ActorState: ExecuteState, TraceId: 01jweamm5t8n1gfw02gyc496e4, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:13.492213Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F8500E8EAC2 14. ??:0: ?? @ 0x7F8500F2084F Trying to start YDB, gRPC: 22282, MsgBus: 15778 2025-05-29T15:30:17.045631Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890407143245041:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:17.045664Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012a9/r3tmp/tmpEL0NT4/pdisk_1.dat 2025-05-29T15:30:17.103097Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:17.103159Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890407143245020:2079] 1748532617045480 != 1748532617045483 TServer::EnableGrpc on GrpcPort 22282, node 1 2025-05-29 ... operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F389A2CBAC2 14. ??:0: ?? 
@ 0x7F389A35D84F Trying to start YDB, gRPC: 16875, MsgBus: 4728 2025-05-29T15:30:25.556398Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890440948948878:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:25.556424Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012a9/r3tmp/tmpmWgERl/pdisk_1.dat 2025-05-29T15:30:25.622455Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:25.622821Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890440948948859:2079] 1748532625556258 != 1748532625556261 TServer::EnableGrpc on GrpcPort 16875, node 1 2025-05-29T15:30:25.635044Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:25.635058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:25.635059Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:25.635106Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4728 TClient is connected to server localhost:4728 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:30:25.693603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:25.693633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:25.694775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:25.709965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:25.715461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:25.717721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:30:25.789577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:25.819186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:25.835665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:25.955459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890440948950492:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:25.955490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:25.998563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.011560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.025984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.038047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.052738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.067452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.080628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.111233Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890445243918439:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:26.111266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:26.111283Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890445243918444:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:26.112455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:26.116521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-05-29T15:30:26.116879Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890445243918446:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:30:26.207160Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890445243918497:3395] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:26.326559Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890445243918513:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:26.326730Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDhlNDY0YjItNmZlMTQzNjktZjM3NzQ4N2QtMzU3MGQ5MWY=, ActorId: [1:7509890440948950475:2401], ActorState: ExecuteState, TraceId: 01jwean0nybx89h0jv23ghwkzx, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:26.331063Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F9477A86AC2 14. ??:0: ?? @ 0x7F9477B1884F |73.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::Empty [GOOD] >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId [GOOD] >> TSyncNeighborsTests::SerDes1 [GOOD] >> HttpRequest::AnalyzeServerless >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] |73.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |73.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |73.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |73.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |73.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |73.6%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |73.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes1 [GOOD] Test command err: 2025-05-29T15:30:30.716012Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:64: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-05-29T15:30:30.716067Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:50: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:6:2053], token sent, active: 1, waiting: 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:29.911860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2025-05-29T15:30:29.911935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:29.911942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:29.911951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:29.911966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:29.911970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:29.911981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:29.911997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:29.912115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:29.912236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:29.947583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:29.947639Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:29.954592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:29.954833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:29.954909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:29.968924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:29.970312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:29.974895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:29.975757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:29.977483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:29.977608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:29.983516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:29.983554Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:29.983584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:29.983600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:29.983609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:29.983662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:30:29.989863Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:30:30.054676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:30.059819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.059955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:30.060059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:30.060089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.061355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:30.061398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:30.061494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.061509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:30.061515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:30.061522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:30.063812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.063840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:30.063851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:30.064602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.064618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.064627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:30.064638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:30.065563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:30.066395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:30.066466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:30.066719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:30.066784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:30.066794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:30.070385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:30:30.070441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:30.070494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:30.070516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:30.075434Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:30.075461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:30.075530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... chemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-29T15:30:30.372598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:30.372606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-29T15:30:30.372624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.372634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1036: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-05-29T15:30:30.372961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:30:30.372979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:30:30.372985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:30:30.372992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-05-29T15:30:30.373001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:30:30.373024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-05-29T15:30:30.375259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-05-29T15:30:30.387214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6285: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 
ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 391 } } CommitVersion { Step: 5000003 TxId: 102 } 2025-05-29T15:30:30.387243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-29T15:30:30.387282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 391 } } CommitVersion { Step: 5000003 TxId: 102 } 2025-05-29T15:30:30.387301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 391 } } CommitVersion { Step: 5000003 TxId: 102 } 2025-05-29T15:30:30.387627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-05-29T15:30:30.387642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-29T15:30:30.387663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-05-29T15:30:30.387671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:30:30.387681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 310 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-05-29T15:30:30.387698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:30.387703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.387709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:30:30.387717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 102:0 129 -> 240 
2025-05-29T15:30:30.389194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.390374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.390444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.390456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-29T15:30:30.390479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:30:30.390485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:30:30.390492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 1/1 2025-05-29T15:30:30.390496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:30:30.390502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-29T15:30:30.390527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2316] message: TxId: 102 2025-05-29T15:30:30.390554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-29T15:30:30.390561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:30:30.390566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:30:30.390616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:30:30.391434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:30:30.391449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:396:2367] TestWaitNotification: OK eventTxId 102 2025-05-29T15:30:30.391596Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:30:30.391662Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 79us result status StatusSuccess 2025-05-29T15:30:30.391837Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 
CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardLocalExec-UseSink Test command err: Trying to start YDB, gRPC: 2094, MsgBus: 24397 2025-05-29T15:30:13.487556Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890391202747690:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:13.487595Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001295/r3tmp/tmptm9cqO/pdisk_1.dat 2025-05-29T15:30:13.535739Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:13.535788Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890391202747669:2079] 1748532613487430 != 1748532613487433 TServer::EnableGrpc on GrpcPort 2094, node 1 2025-05-29T15:30:13.550480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:13.550494Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:13.550497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:13.550536Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration TClient is connected to server localhost:24397 2025-05-29T15:30:13.590287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:13.590323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:13.591455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:13.618408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:13.623899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:13.648278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:13.669995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:13.684526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:13.835611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890391202749326:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:13.835640Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:13.874034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:13.881640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:13.892778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:13.908952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:13.963771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:13.977626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:13.990386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:14.051394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890395497717285:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:14.051427Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:14.051453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890395497717290:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:14.052237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:14.054175Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890395497717292:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:14.111489Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890395497717343:3400] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:14.244093Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890395497717352:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:14.246922Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDlhMjFhNjItNDkwYzVjYjAtZjZiMDRhZmEtZTk2ZTkxNDE=, ActorId: [1:7509890391202749308:2401], ActorState: ExecuteState, TraceId: 01jweammx3cy10fsnxbyjsyn80, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:14.248772Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F505C863AC2 14. ??:0: ?? @ 0x7F505C8F584F Trying to start YDB, gRPC: 19031, MsgBus: 61468 2025-05-29T15:30:17.787915Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890406560229259:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:17.787983Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001295/r3tmp/tmp4qZKpu/pdisk_1.dat 2025-05-29T15:30:17.835916Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890406560229239:2079] 1748532617787763 != 1748532617787766 2025-05-29T15:30:17.837162Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19031, node 1 2025-05-29T1 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7FB0F441EAC2 14. ??:0: ?? 
@ 0x7FB0F44B084F Trying to start YDB, gRPC: 10219, MsgBus: 19846 2025-05-29T15:30:26.441664Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890446242862753:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001295/r3tmp/tmpCYlmbR/pdisk_1.dat 2025-05-29T15:30:26.448171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:30:26.476478Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890446242862568:2079] 1748532626382402 != 1748532626382405 2025-05-29T15:30:26.479179Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10219, node 1 2025-05-29T15:30:26.488396Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:26.488408Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:26.488409Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:26.488443Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19846 TClient is connected to server localhost:19846 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:26.542790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:26.542836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:26.543371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.543770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:26.546381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:26.609734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:26.631229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:26.642345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:26.778607Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890446242864200:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:26.778627Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:26.827646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.836171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.849191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.904547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.960354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.970193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.983721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:27.005824Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890450537832152:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:27.005856Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:27.005859Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890450537832157:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:27.007028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:27.009609Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890450537832159:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:27.077095Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890450537832210:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:27.183252Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890450537832226:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:27.183381Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzVhYzA1MWItMTYyZWEzZmYtY2I4ODgzZmYtOTNiOGI5N2M=, ActorId: [1:7509890446242864182:2401], ActorState: ExecuteState, TraceId: 01jwean1hx4amzv0zee970tnpk, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:27.190811Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F94F8B9CAC2 14. ??:0: ?? @ 0x7F94F8C2E84F >> TPersQueueTest::BadTopic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::FullOuterJoin Test command err: Trying to start YDB, gRPC: 15536, MsgBus: 17242 2025-05-29T15:30:09.271886Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890372196952969:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:09.271905Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012e1/r3tmp/tmp2j5qTJ/pdisk_1.dat 2025-05-29T15:30:09.321196Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890372196952950:2079] 1748532609271801 != 1748532609271804 2025-05-29T15:30:09.321645Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15536, node 1 2025-05-29T15:30:09.332711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:09.332721Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:09.332722Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:09.332753Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17242 TClient is connected to server localhost:17242 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:30:09.398812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:09.398842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:09.399937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:09.401471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.409061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.429246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.454348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.466926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:09.640633Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890372196954594:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.640669Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.676284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.682936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.692525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.747529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.755370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.769626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.783276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:09.799469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890372196955248:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.799496Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.799498Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890372196955253:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:09.800101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:09.803494Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890372196955255:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:30:09.902313Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890372196955306:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:10.003688Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890372196955322:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:10.003803Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjM2MmE2ZTEtYjY4MDE4OGYtODJiOTlmMy02ODVhYWU5MQ==, ActorId: [1:7509890372196954591:2401], ActorState: ExecuteState, TraceId: 01jweamgr739sn28ddmssttn8h, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:10.004467Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F442ED9EAC2 14. ??:0: ?? @ 0x7F442EE3084F Trying to start YDB, gRPC: 7088, MsgBus: 62026 2025-05-29T15:30:13.627092Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890393023005881:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:13.627159Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012e1/r3tmp/tmpgcvm1D/pdisk_1.dat 2025-05-29T15:30:13.681259Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890393023005861:2079] 1748532613626938 != 1748532613626941 2025-05-29T15:30:13.682571Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7088, node 1 2025-05-29T1 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F6E7CF2BAC2 14. ??:0: ?? 
@ 0x7F6E7CFBD84F Trying to start YDB, gRPC: 13235, MsgBus: 32089 2025-05-29T15:30:26.383022Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890445194679706:2088];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:26.383304Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0012e1/r3tmp/tmpEBNEzW/pdisk_1.dat 2025-05-29T15:30:26.475609Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890445194679633:2079] 1748532626374352 != 1748532626374355 2025-05-29T15:30:26.475617Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13235, node 1 2025-05-29T15:30:26.488797Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:26.488809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:26.488811Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:26.488848Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:32089 TClient is connected to server localhost:32089 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:26.542775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:26.542805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:26.543473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.543722Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:26.548899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:26.567359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:26.585971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:26.642295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:26.810634Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890445194681265:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:26.810665Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:26.852516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.859560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.870432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.925505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.933287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.947874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.956027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:26.971449Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890445194681920:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:26.971484Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890445194681925:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:26.971491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:26.972244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:26.974797Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890445194681927:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:27.076034Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890449489649274:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:27.183573Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890449489649290:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:27.183690Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWY1NGJhNDgtNTVjMWZjZTUtOGI2YWVjY2EtZjRiOWI2ZGU=, ActorId: [1:7509890445194681247:2401], ActorState: ExecuteState, TraceId: 01jwean1gtfxgsp26bsegveej5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:27.186812Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F30EAEB9AC2 14. ??:0: ?? @ 0x7F30EAF4B84F |73.7%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true >> TSyncNeighborsTests::SerDes3 [GOOD] |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |73.7%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |73.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes3 [GOOD] |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |73.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export >> TSchemeShardSplitBySizeTest::Make11MergeOperationsWithInflyLimit10 [GOOD] >> TxUsage::WriteToTopic_Invalid_Session_Table [FAIL] >> TxUsage::WriteToTopic_Invalid_Session_Query >> TxUsage::WriteToTopic_Demo_3_Table [FAIL] >> TxUsage::WriteToTopic_Demo_3_Query >> DemoTx::Scenario_1 >> TPersQueueTest::SetupLockSession2 >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateThenDelete-UseSink Test command err: Trying to start YDB, gRPC: 28570, MsgBus: 6474 2025-05-29T15:30:14.527845Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890393892848000:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:14.528228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001262/r3tmp/tmpWEMn31/pdisk_1.dat 2025-05-29T15:30:14.582332Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:14.582400Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890393892847977:2079] 1748532614527669 != 1748532614527672 TServer::EnableGrpc on GrpcPort 28570, node 1 
2025-05-29T15:30:14.596879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:14.596892Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:14.596894Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:14.596952Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6474 TClient is connected to server localhost:6474 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:14.657983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:14.658005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:14.659074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:14.659741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:14.664527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:14.684124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:14.703517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:14.714590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:14.859237Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890393892849630:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:14.859261Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:14.908831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:14.917392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:14.927792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:14.935078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:14.942144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:14.956387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:14.970544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:14.990644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890393892850285:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:14.990669Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:14.990719Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890393892850290:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:14.991494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:14.997816Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890393892850292:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:15.054651Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890398187817639:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:15.170370Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890398187817655:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:15.170480Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzRlODM0N2EtYzk5MTRlYTItMTcwN2RkOWMtNzllMWIxMDk=, ActorId: [1:7509890393892849612:2401], ActorState: ExecuteState, TraceId: 01jweamnte1e9az2f7dkw67e1w, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:15.171417Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F9B08E5CAC2 14. ??:0: ?? @ 0x7F9B08EEE84F Trying to start YDB, gRPC: 7118, MsgBus: 62880 2025-05-29T15:30:18.840955Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890412343192594:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:18.840992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001262/r3tmp/tmp0KYsSM/pdisk_1.dat 2025-05-29T15:30:18.905973Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890412343192574:2079] 1748532618840821 != 1748532618840824 2025-05-29T15:30:18.909255Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7118, node 1 2025-05-29T15:3 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F54DFC33AC2 14. ??:0: ?? 
@ 0x7F54DFCC584F Trying to start YDB, gRPC: 19368, MsgBus: 14846 2025-05-29T15:30:27.932723Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890452979813774:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:27.932749Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001262/r3tmp/tmpcyxxkW/pdisk_1.dat 2025-05-29T15:30:27.997993Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890452979813755:2079] 1748532627932541 != 1748532627932544 2025-05-29T15:30:28.005256Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19368, node 1 2025-05-29T15:30:28.018673Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:28.018693Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:28.018695Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:28.018761Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14846 TClient is connected to server localhost:14846 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:28.076548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:28.076580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:28.077260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:28.077711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:28.080804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:30:28.098163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:28.119097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:28.131147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:28.259965Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890457274782682:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:28.259989Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:28.308022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:28.316485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:28.328350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:28.387441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:28.397441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:28.411329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:30:28.466887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:30:28.483527Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890457274783343:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:28.483556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:28.483562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890457274783348:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:28.484271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:30:28.486823Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890457274783350:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:30:28.541466Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890457274783401:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:28.627985Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890457274783417:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:28.628124Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDVjNmM5MDItNzAzMzhkMzAtMWZlYTAwZDctNzMzOTlkODE=, ActorId: [1:7509890457274782664:2401], ActorState: ExecuteState, TraceId: 01jwean30329snsy7rxm0nf87a, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:30:28.628940Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F9A51A43AC2 14. ??:0: ?? @ 0x7F9A51AD584F |73.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> BasicStatistics::ServerlessGlobalIndex >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] |73.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |73.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::Make11MergeOperationsWithInflyLimit10 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:30.500883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:30.500913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:30.500920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:30.500926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:30.500941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:30.500946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:30.500956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:30.500971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:30.501099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:30.501178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:30.514714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:30.514749Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:30.517537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:30.517656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:30.517696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:30.519143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:30.519310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:30.519444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:30.519495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:30.519918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:30.519970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:30.520254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:30.520264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:30.520287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:30.520295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:30.520302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:30.520338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.521697Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:30:30.539784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: 
ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:30.539876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.539945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:30.539990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:30.540001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.540868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:30.540902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:30.540963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.540974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:30.540980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:30.540987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:30.541373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.541385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:30.541390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:30.541686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.541696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:30.541702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:30.541719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:30.542327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation 
DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:30.542677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:30.542720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:30.542903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:30.542921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:30.542926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:30.542970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:30:30.542975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:30.543005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:30.543012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:30.543299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:30.543304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:30.543346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
33Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:462: Notify src datashard 72075186233409567 on partitioning changed splitOp# 135 at tablet 72057594046678944 2025-05-29T15:30:32.103562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 135 2025-05-29T15:30:32.103572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 135 2025-05-29T15:30:32.103576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 135 2025-05-29T15:30:32.103585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 135, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 14 2025-05-29T15:30:32.103592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 16 2025-05-29T15:30:32.103609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 135, ready parts: 0/1, is published: true 2025-05-29T15:30:32.103923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 135:0 from tablet: 72057594046678944 to tablet: 72075186233409566 cookie: 72057594046678944:21 msg type: 269553158 2025-05-29T15:30:32.103943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 135:0 from tablet: 72057594046678944 to tablet: 72075186233409567 cookie: 72057594046678944:22 msg type: 269553158 2025-05-29T15:30:32.104517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 135 2025-05-29T15:30:32.104952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 135:0, at schemeshard: 72057594046678944, message: OperationCookie: 135 TabletId: 72075186233409566 2025-05-29T15:30:32.104963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 135:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409566, at schemeshard: 72057594046678944 2025-05-29T15:30:32.105012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 135:0, at schemeshard: 72057594046678944, message: OperationCookie: 135 TabletId: 72075186233409567 2025-05-29T15:30:32.105017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 135:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409567, at schemeshard: 72057594046678944 2025-05-29T15:30:32.105032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#135:0 progress is 1/1 2025-05-29T15:30:32.105037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 135 ready parts: 1/1 
2025-05-29T15:30:32.105042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#135:0 progress is 1/1 2025-05-29T15:30:32.105046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 135 ready parts: 1/1 2025-05-29T15:30:32.105051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 135, ready parts: 1/1, is published: true 2025-05-29T15:30:32.105060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1119:2827] message: TxId: 135 2025-05-29T15:30:32.105067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 135 ready parts: 1/1 2025-05-29T15:30:32.105072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 135:0 2025-05-29T15:30:32.105076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 135:0 2025-05-29T15:30:32.105100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 15 2025-05-29T15:30:32.105782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 135:0, at schemeshard: 72057594046678944 2025-05-29T15:30:32.105809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 135:0, at schemeshard: 72057594046678944 2025-05-29T15:30:32.105813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:260: Unable to activate 135:0 2025-05-29T15:30:32.105883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 135: got EvNotifyTxCompletionResult 2025-05-29T15:30:32.105889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 135: satisfy waiter [1:3158:4613] 2025-05-29T15:30:32.106522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 697 RawX2: 4294969838 } TabletId: 72075186233409566 State: 4 2025-05-29T15:30:32.106552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409566, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:30:32.106630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 704 RawX2: 4294969842 } TabletId: 72075186233409567 State: 4 2025-05-29T15:30:32.106637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409567, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:30:32.107451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:21 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:30:32.107577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:22 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:30:32.107662Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] 
TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 21 TxId_Deprecated: 21 TabletID: 72075186233409566 2025-05-29T15:30:32.107763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 21 ShardOwnerId: 72057594046678944 ShardLocalIdx: 21, at schemeshard: 72057594046678944 2025-05-29T15:30:32.107831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 14 Forgetting tablet 72075186233409566 2025-05-29T15:30:32.108298Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 22 TxId_Deprecated: 22 TabletID: 72075186233409567 Forgetting tablet 72075186233409567 2025-05-29T15:30:32.108487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 22 ShardOwnerId: 72057594046678944 ShardLocalIdx: 22, at schemeshard: 72057594046678944 2025-05-29T15:30:32.108541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 13 2025-05-29T15:30:32.109344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:21 2025-05-29T15:30:32.109357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2025-05-29T15:30:32.109591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:22 2025-05-29T15:30:32.109601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:22 tabletId 72075186233409567 TestWaitNotification: OK eventTxId 135 2025-05-29T15:30:32.109750Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:30:32.109809Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 67us result status StatusSuccess 2025-05-29T15:30:32.109912Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 123 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 14 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 14 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 12 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 
IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 11 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPersQueueTest::BadTopic [FAIL] >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |73.7%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut >> TVectorIndexTests::CreateTablePrefix |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TSchemeShardSplitBySizeTest::MergeIndexTableShards [GOOD] >> TxUsage::WriteToTopic_Invalid_Session_Query [FAIL] >> TxUsage::WriteToTopic_Two_WriteSession_Table >> TxUsage::WriteToTopic_Demo_3_Query [FAIL] >> TxUsage::WriteToTopic_Demo_4_Query >> TVectorIndexTests::CreateTablePrefixCovering |73.8%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... 
results_accumulator.log} |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefix [GOOD] >> TPersQueueTest::SetupLockSession2 [FAIL] >> TPersQueueTest::SetupLockSession ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::MergeIndexTableShards [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:21.188823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:21.188851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:21.188858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:21.188863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:21.188877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:21.188881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:21.188890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:21.188904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:21.189020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:21.189087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:21.202727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:21.202766Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:21.205059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:21.205181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:21.205222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:21.206573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:21.206778Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:21.206901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:21.206956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:21.207402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:21.207451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:21.207723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:21.207732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:21.207755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:21.207763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:21.207768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:21.207802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:30:21.209040Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:30:21.227881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:21.227953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:21.228011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:21.228057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:21.228071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:21.228655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:21.228680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:21.228724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:21.228734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:21.228739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:21.228744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:21.229054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:21.229063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:21.229068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:21.229589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:21.229621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:21.229632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:21.229660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:21.230456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:21.231158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:21.231227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:21.231529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:21.231568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 
AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:21.231581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:21.231659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:30:21.231671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:21.231714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:21.231732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:21.232529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:21.232545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:21.232622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 72057594046678944, cookie: 281474976710659 2025-05-29T15:30:33.070228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976710659:0, at schemeshard: 72057594046678944, message: OperationCookie: 281474976710659 TabletId: 72075186233409551 2025-05-29T15:30:33.070240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 281474976710659:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409551, at schemeshard: 72057594046678944 2025-05-29T15:30:33.070463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976710659:0, at schemeshard: 72057594046678944, message: OperationCookie: 281474976710659 TabletId: 72075186233409552 2025-05-29T15:30:33.070471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 281474976710659:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-05-29T15:30:33.070487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710659:0 progress is 1/1 2025-05-29T15:30:33.070492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 2025-05-29T15:30:33.070497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710659:0 progress is 1/1 2025-05-29T15:30:33.070500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 
2025-05-29T15:30:33.070505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710659, ready parts: 1/1, is published: true 2025-05-29T15:30:33.070512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 2025-05-29T15:30:33.070518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710659:0 2025-05-29T15:30:33.070526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710659:0 2025-05-29T15:30:33.070558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 2025-05-29T15:30:33.071216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976710659:0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.071250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976710659:0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.071254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:260: Unable to activate 281474976710659:0 2025-05-29T15:30:33.071344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 754 RawX2: 4294969959 } TabletId: 72075186233409551 State: 4 2025-05-29T15:30:33.071359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409551, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:30:33.071406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 802 RawX2: 4294969996 } TabletId: 72075186233409552 State: 4 2025-05-29T15:30:33.071412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:30:33.071909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:30:33.072019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:7 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:30:33.072049Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 Forgetting tablet 72075186233409551 2025-05-29T15:30:33.072435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-05-29T15:30:33.072482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:30:33.072596Z node 1 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 
72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186233409552 Forgetting tablet 72075186233409552 2025-05-29T15:30:33.072894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-05-29T15:30:33.072928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:30:33.073310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:6 2025-05-29T15:30:33.073320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-05-29T15:30:33.073368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:7 2025-05-29T15:30:33.073373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 2025-05-29T15:30:33.073465Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/ByValue/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:30:33.073525Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/ByValue/indexImplTable" took 66us result status StatusSuccess 2025-05-29T15:30:33.073678Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/ByValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409553 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 4 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DemoTx::Scenario_1 [FAIL] >> DemoTx::Scenario_2 >> TPersQueueTest::UpdatePartitionLocation |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable [FAIL] >> TPersQueueTest::Cache >> TVectorIndexTests::CreateTablePrefixCovering [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefix [GOOD] Test command err: Leader for TabletID 72057594046678944 is 
[0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:33.176420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:33.176446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:33.176451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:33.176456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:33.176469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:33.176474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:33.176482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:33.176496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:33.176604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:33.176675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:33.190608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:33.190633Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:33.193249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:33.193363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:33.193402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:33.194886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:33.195056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:33.195172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:33.195216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-05-29T15:30:33.195658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:33.195693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:33.195939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:33.195951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:33.195973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:33.195979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:33.195985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:33.196017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.197233Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:30:33.219532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:33.219611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.219669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:33.219710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:33.219721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.220547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:33.220573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:33.220618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.220628Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:33.220633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:33.220639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:33.221088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.221098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:33.221103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:33.221505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.221515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.221520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:33.221528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:33.222199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:33.222654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:33.222691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:33.222883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:33.222907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:33.222914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:33.222975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change 
state for txid 1:0 128 -> 240 2025-05-29T15:30:33.222981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:33.223012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:33.223024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:33.223454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:33.223463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:33.223506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... ementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:30:33.416753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:30:33.416771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:30:33.416776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:30:33.416781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-05-29T15:30:33.416786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-05-29T15:30:33.416799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/5, is published: true 2025-05-29T15:30:33.417458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-05-29T15:30:33.417473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:33.417530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:30:33.417556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:3 progress is 2/5 
2025-05-29T15:30:33.417561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-05-29T15:30:33.417566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:3 progress is 2/5 2025-05-29T15:30:33.417569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-05-29T15:30:33.417573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/5, is published: true 2025-05-29T15:30:33.417767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.417827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.417834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.417842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.417858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:4, at schemeshard: 72057594046678944 2025-05-29T15:30:33.417863Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 102:4 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:33.417891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-29T15:30:33.417908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:4 progress is 3/5 2025-05-29T15:30:33.417911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-05-29T15:30:33.417915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:4 progress is 3/5 2025-05-29T15:30:33.417918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-05-29T15:30:33.417922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/5, is published: true 2025-05-29T15:30:33.417979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-05-29T15:30:33.417984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:33.418004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:30:33.418017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:2 progress is 4/5 2025-05-29T15:30:33.418020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-05-29T15:30:33.418024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:2 progress is 4/5 2025-05-29T15:30:33.418027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-05-29T15:30:33.418030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/5, is published: true 2025-05-29T15:30:33.418058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.418062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:33.418081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:30:33.418092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 5/5 2025-05-29T15:30:33.418095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-05-29T15:30:33.418099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 5/5 2025-05-29T15:30:33.418104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-05-29T15:30:33.418108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 5/5, is published: true 2025-05-29T15:30:33.418121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:455:2400] message: TxId: 102 2025-05-29T15:30:33.418126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-05-29T15:30:33.418132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:30:33.418136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:30:33.418150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:30:33.418155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:1 2025-05-29T15:30:33.418158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:1 2025-05-29T15:30:33.418163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:30:33.418166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:2 2025-05-29T15:30:33.418169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:2 2025-05-29T15:30:33.418176Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:30:33.418180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:3 2025-05-29T15:30:33.418183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:3 2025-05-29T15:30:33.418189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:30:33.418193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:4 2025-05-29T15:30:33.418196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:4 2025-05-29T15:30:33.418201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-29T15:30:33.418246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.418319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.418325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.418337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.418355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.418692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.419293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:30:33.419308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:601:2539] TestWaitNotification: OK eventTxId 102 >> TxUsage::WriteToTopic_Two_WriteSession_Table [FAIL] >> TxUsage::WriteToTopic_Two_WriteSession_Query >> TxUsage::WriteToTopic_Demo_4_Query [FAIL] >> TxUsage::WriteToTopic_Demo_40_Table |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefixCovering [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:33.684055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:33.684090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:33.684097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:33.684103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:33.684120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:33.684124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:33.684135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:33.684150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:33.684263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:33.684362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:33.699345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:33.699366Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:33.702358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:33.702489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:33.702530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:33.706625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:33.706852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:33.706999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:33.707054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:33.707616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:33.707765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:33.708079Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:33.708090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:33.708118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:33.708126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:33.708133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:33.708174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.709973Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:30:33.734073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:33.734163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.734229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:33.734296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:33.734309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.735772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:33.735809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:33.735871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.735896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:33.735902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:33.735910Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:33.736699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.736720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:33.736728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:33.737187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.737199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.737206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:33.737214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:33.738059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:33.738559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:33.738611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:33.738813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:33.738844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:33.738853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:33.738932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:30:33.738941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:33.738979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:33.738993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:33.740147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:33.740159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:33.740200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... ementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:30:33.962553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:30:33.962562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-05-29T15:30:33.962566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-29T15:30:33.962571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-05-29T15:30:33.962575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-05-29T15:30:33.962583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/5, is published: true 2025-05-29T15:30:33.963393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-05-29T15:30:33.963413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:33.963483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:30:33.963517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:3 progress is 2/5 2025-05-29T15:30:33.963522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-05-29T15:30:33.963527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:3 progress is 2/5 2025-05-29T15:30:33.963530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-05-29T15:30:33.963534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/5, is published: true 2025-05-29T15:30:33.964068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:4, at schemeshard: 72057594046678944 2025-05-29T15:30:33.964087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 102:4 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:33.964131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-29T15:30:33.964154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:4 progress is 3/5 2025-05-29T15:30:33.964159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-05-29T15:30:33.964163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:4 progress is 3/5 2025-05-29T15:30:33.964167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-05-29T15:30:33.964172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/5, is published: true 2025-05-29T15:30:33.964254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-05-29T15:30:33.964259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:33.964284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:30:33.964298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:2 progress is 4/5 2025-05-29T15:30:33.964302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-05-29T15:30:33.964306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:2 progress is 4/5 2025-05-29T15:30:33.964309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-05-29T15:30:33.964312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/5, is published: true 2025-05-29T15:30:33.964352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.964387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.964394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 
2025-05-29T15:30:33.964399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.964433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-29T15:30:33.964439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:33.964466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:30:33.964480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 5/5 2025-05-29T15:30:33.964484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-05-29T15:30:33.964489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#102:0 progress is 5/5 2025-05-29T15:30:33.964492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-05-29T15:30:33.964496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 102, ready parts: 5/5, is published: true 2025-05-29T15:30:33.964515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:452:2397] message: TxId: 102 2025-05-29T15:30:33.964521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-05-29T15:30:33.964528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:0 2025-05-29T15:30:33.964533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:0 2025-05-29T15:30:33.964556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:30:33.964563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:1 2025-05-29T15:30:33.964566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:1 2025-05-29T15:30:33.964572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:30:33.964576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:2 2025-05-29T15:30:33.964579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:2 2025-05-29T15:30:33.964587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:30:33.964592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:3 2025-05-29T15:30:33.964597Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:3 2025-05-29T15:30:33.964604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:30:33.964608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 102:4 2025-05-29T15:30:33.964611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 102:4 2025-05-29T15:30:33.964617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-29T15:30:33.964739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.964755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.964764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.964821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.964835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.964854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-29T15:30:33.965646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:30:33.965661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:594:2532] TestWaitNotification: OK eventTxId 102 >> ListObjectsInS3Export::PagingParameters [GOOD] >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] >> TPersQueueTest::SetupLockSession [FAIL] >> TPersQueueTest::StreamReadCreateAndDestroyMsgs |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TxUsage::WriteToTopic_Two_WriteSession_Query [FAIL] >> TxUsage::WriteToTopic_Demo_4_Table >> TxUsage::WriteToTopic_Demo_40_Table [FAIL] >> TxUsage::WriteToTopic_Demo_40_Query |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> DemoTx::Scenario_2 [FAIL] >> DemoTx::Scenario_3 >> ListObjectsInS3Export::ParametersValidation >> TPersQueueTest::UpdatePartitionLocation [FAIL] >> TPersQueueTest::TopicServiceCommitOffset >> TPersQueueTest::Cache [FAIL] >> TPersQueueTest::CacheHead |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TPersQueueTest::DirectReadPreCached |73.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |73.8%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |73.8%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TxUsage::WriteToTopic_Demo_4_Table [FAIL] >> TxUsage::WriteToTopic_Demo_5_Table >> TxUsage::WriteToTopic_Demo_40_Query [FAIL] >> TxUsage::WriteToTopic_Demo_41_Table >> TPersQueueTest::StreamReadCreateAndDestroyMsgs [FAIL] >> TPersQueueTest::StreamReadCommitAndStatusMsgs >> ListObjectsInS3Export::ParametersValidation [GOOD] |73.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] >> DemoTx::Scenario_3 [FAIL] >> DemoTx::Scenario_4 |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |73.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut >> TPersQueueTest::TopicServiceCommitOffset [FAIL] >> TPersQueueTest::TopicServiceCommitOffsetBadOffsets >> TPersQueueTest::CacheHead [FAIL] >> TPersQueueTest::CheckACLForGrpcWrite >> TPersQueueTest::DirectReadPreCached [FAIL] >> TPersQueueTest::DirectReadNotCached |73.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TxUsage::WriteToTopic_Demo_5_Table [FAIL] >> TxUsage::WriteToTopic_Demo_5_Query |73.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TxUsage::WriteToTopic_Demo_41_Table [FAIL] >> TxUsage::WriteToTopic_Demo_41_Query |73.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TPersQueueTest::StreamReadCommitAndStatusMsgs [FAIL] >> TPersQueueTest::StreamReadManyUpdateTokenAndRead >> TPersQueueTest::DirectReadNotCached [FAIL] >> TPersQueueTest::DirectReadBudgetOnRestart >> TPersQueueTest::CheckACLForGrpcWrite [FAIL] >> TPersQueueTest::CheckACLForGrpcRead |73.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> EvWrite::WriteInTransaction >> DemoTx::Scenario_4 [FAIL] >> DemoTx::Scenario_5 >> TPersQueueTest::TopicServiceCommitOffsetBadOffsets [FAIL] >> TPersQueueTest::TopicServiceReadBudget |73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |73.9%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |73.9%| [TA] $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] >> TxUsage::WriteToTopic_Demo_5_Query [FAIL] >> TxUsage::WriteToTopic_Demo_6_Table >> TVectorIndexTests::CreateTablePrefixInvalidKeyType >> TxUsage::WriteToTopic_Demo_41_Query [FAIL] >> TxUsage::WriteToTopic_Demo_42_Table >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] >> TVectorIndexTests::CreateTablePrefixInvalidKeyType [GOOD] >> BasicStatistics::ServerlessGlobalIndex [FAIL] >> TPersQueueTest::DirectReadBudgetOnRestart [FAIL] >> TPersQueueTest::DirectReadCorrectOffsetsOnRestart >> TPersQueueTest::CheckACLForGrpcRead [FAIL] >> TPersQueueTest::CheckKillBalancer ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchServerlessStatistics [FAIL] Test command err: 2025-05-29T15:27:07.206358Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:609:2415], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:27:07.206432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:27:07.206459Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001564/r3tmp/tmpAZPn7B/pdisk_1.dat 2025-05-29T15:27:07.331489Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2455, node 1 2025-05-29T15:27:07.453353Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:27:07.453373Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:27:07.453378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:27:07.453463Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:27:07.454089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:27:07.567447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:07.567484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:07.584192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5364 2025-05-29T15:27:07.953115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:27:08.811304Z node 4 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 4 2025-05-29T15:27:08.829482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:08.829526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:08.868532Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-05-29T15:27:08.869700Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:09.037214Z node 4 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:09.037576Z node 4 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:09.037661Z node 4 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-05-29T15:27:09.037701Z node 4 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:09.037762Z node 4 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:09.037779Z node 4 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:09.037800Z node 4 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:09.037874Z node 4 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:09.037897Z node 4 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:09.195379Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:09.195446Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:09.207353Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:09.254179Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:09.271044Z node 4 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:27:09.271079Z node 4 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:27:09.277601Z node 4 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:27:09.277876Z node 4 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:27:09.277900Z node 4 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:27:09.277904Z node 4 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:27:09.277908Z node 4 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:27:09.277914Z node 4 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:27:09.277918Z node 4 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:27:09.277923Z node 4 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:27:09.278061Z node 4 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:27:09.293278Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:27:09.293308Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [4:2026:2600], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:27:09.298316Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [4:2039:2610] 2025-05-29T15:27:09.300368Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [4:2061:2621] 
2025-05-29T15:27:09.300552Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [4:2061:2621], schemeshard id = 72075186224037897 2025-05-29T15:27:09.302777Z node 4 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-05-29T15:27:09.333505Z node 4 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:27:09.333540Z node 4 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:27:09.333557Z node 4 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-05-29T15:27:09.342558Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:27:09.345694Z node 4 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:27:09.345746Z node 4 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:27:09.478459Z node 4 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:27:09.560005Z node 4 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:27:09.623711Z node 4 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. 
Column diff is empty, finishing 2025-05-29T15:27:10.379116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:27:11.388361Z node 3 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 3 2025-05-29T15:27:11.412934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:11.412977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:11.413097Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:11.413120Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:11.459452Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-05-29T15:27:11.459862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:11.460301Z node 4 :HIVE WARN: hive_impl.cpp:771: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-05-29T15:27:11.460901Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:11.485295Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:11.613100Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:7809: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-05-29T15:27:11.613125Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:7825: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-05-29T15:27:11.613134Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [4:3070:2940], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-05-29T15:27:11.613815Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [4:3080:2946] 2025-05-29T15:27:11.613876Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [4:3080:2946], schemeshard id = 72075186224037899 2025-05-29T15:27:12.651821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:27:13.556122Z node 2 :STA ... ZTQtYWUxMTQyODUtY2MxM2EyOTQtOWEwY2EzZjU=, TxId: 2025-05-29T15:29:59.395007Z node 4 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=4&id=ODg2ZGIwZTQtYWUxMTQyODUtY2MxM2EyOTQtOWEwY2EzZjU=, TxId: 2025-05-29T15:29:59.416344Z node 4 :STATISTICS DEBUG: service_impl.cpp:252: Event round 2 is different from the current 0 2025-05-29T15:29:59.416379Z node 4 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-05-29T15:30:00.314307Z node 4 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-05-29T15:30:00.315340Z node 4 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:30:00.320440Z node 4 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 15 ], ReplyToActorId[ [4:12565:7737]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:30:00.320528Z node 4 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 15 ] 2025-05-29T15:30:00.320538Z node 4 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 15, ReplyToActorId = [4:12565:7737], StatRequests.size() = 1 2025-05-29T15:30:00.328686Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [4:12561:7733], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:00.329415Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=4&id=MzI3Mjg4ODQtYWU4ZThjOGQtODMyYWZhMzItNmRiNzIyOTE=, ActorId: [4:12558:7730], ActorState: ExecuteState, TraceId: 01jweam7fvextpr33xmdh3k654, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:30:00.329637Z node 4 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=4&id=MzI3Mjg4ODQtYWU4ZThjOGQtODMyYWZhMzItNmRiNzIyOTE=, TxId: 2025-05-29T15:30:00.329645Z node 4 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=4&id=MzI3Mjg4ODQtYWU4ZThjOGQtODMyYWZhMzItNmRiNzIyOTE=, TxId: 2025-05-29T15:30:00.329948Z node 4 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-05-29T15:30:00.341743Z node 4 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-05-29T15:30:00.341768Z node 4 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-05-29T15:30:01.588310Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:330: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-05-29T15:30:01.588397Z node 4 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 4 2025-05-29T15:30:02.945670Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:30:02.945711Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:30:02.945723Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:802: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is column table. 2025-05-29T15:30:02.945728Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:732: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037905, LocalPathId: 2] 2025-05-29T15:30:02.946982Z node 4 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-05-29T15:30:02.960021Z node 4 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-05-29T15:30:02.960313Z node 4 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-05-29T15:30:02.960333Z node 4 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-05-29T15:30:02.960545Z node 4 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-05-29T15:30:02.998048Z node 4 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-05-29T15:30:02.998129Z node 4 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 4, Round: 3, current Round: 0 2025-05-29T15:30:02.998784Z node 4 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 4, client id = [4:12691:7806], server id = [4:12692:7807], tablet id = 72075186224037912, status = OK 2025-05-29T15:30:02.998838Z node 4 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [4:12691:7806], path = { OwnerId: 72075186224037905 LocalId: 2 } 2025-05-29T15:30:03.000084Z node 4 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-05-29T15:30:03.000104Z node 4 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 4 2025-05-29T15:30:03.000165Z node 4 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-05-29T15:30:03.000199Z node 4 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-05-29T15:30:03.000287Z node 4 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-05-29T15:30:03.000865Z node 4 :STATISTICS DEBUG: service_impl.cpp:1121: EvClientDestroyed, node id = 4, client id = [4:12691:7806], server id = [4:12692:7807], tablet id = 72075186224037912 2025-05-29T15:30:03.000874Z node 4 :STATISTICS DEBUG: service_impl.cpp:1139: Skip EvClientDestroyed 2025-05-29T15:30:03.001066Z node 4 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:30:03.006026Z node 4 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 16 ], ReplyToActorId[ [4:12707:7821]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:30:03.006109Z node 4 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 16 ] 2025-05-29T15:30:03.006118Z node 4 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 16, ReplyToActorId = [4:12707:7821], StatRequests.size() = 1 2025-05-29T15:30:03.013587Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [4:12703:7817], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:03.014268Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=4&id=Nzk0YjNlMTQtMjVlNTU2MDEtZmMzNDE1YjgtOTdmNmU4ZmU=, ActorId: [4:12700:7814], ActorState: ExecuteState, TraceId: 01jweama3se8nm7pzxc5pa8jdh, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:30:03.014474Z node 4 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=4&id=Nzk0YjNlMTQtMjVlNTU2MDEtZmMzNDE1YjgtOTdmNmU4ZmU=, TxId: 2025-05-29T15:30:03.014483Z node 4 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=4&id=Nzk0YjNlMTQtMjVlNTU2MDEtZmMzNDE1YjgtOTdmNmU4ZmU=, TxId: 2025-05-29T15:30:03.026132Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:12711:6245]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-05-29T15:30:03.026311Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-05-29T15:30:03.026324Z node 1 :STATISTICS DEBUG: service_impl.cpp:812: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-05-29T15:30:03.027025Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-05-29T15:30:03.027043Z node 1 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-05-29T15:30:03.027050Z node 1 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-05-29T15:30:03.065466Z node 1 :STATISTICS ERROR: service_impl.cpp:691: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-05-29T15:30:03.065578Z node 1 :STATISTICS DEBUG: service_impl.cpp:1152: TEvLoadStatisticsQueryResponse, request id = 1 assertion failed at ydb/core/statistics/service/ut/ut_column_statistics.cpp:50, void NKikimr::NStat::CheckColumnStatistics(TTestActorRuntime &, const TPathId &, const TActorId &, const std::vector &): (stat.Success) TBackTrace::Capture()+28 (0x137F904C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x139AC3D9) NKikimr::NStat::CheckColumnStatistics(NActors::TTestActorRuntime&, NKikimr::TPathId const&, NActors::TActorId const&, std::__y1::vector> const&)+2379 (0x136EE4CB) NKikimr::NStat::NTestSuiteColumnStatistics::TTestCaseCountMinSketchServerlessStatistics::Execute_(NUnitTest::TTestContext&)+2233 (0x136F04C9) NKikimr::NStat::NTestSuiteColumnStatistics::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136F2A77) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x139AE28E) NKikimr::NStat::NTestSuiteColumnStatistics::TCurrentTest::Execute()+419 (0x136F2433) NUnitTest::TTestFactory::Execute()+803 (0x139AEA03) NUnitTest::RunMain(int, char**)+3021 (0x139BCD1D) ??+0 (0x7F919188BD90) __libc_start_main+128 (0x7F919188BE40) _start+41 (0x1283B029) >> TPersQueueTest::StreamReadManyUpdateTokenAndRead [FAIL] >> TPersQueueTest::SetupWriteSession ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefixInvalidKeyType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:38.461575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:38.461608Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:38.461614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:38.461620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:38.461638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:38.461642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:38.461652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:38.461666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:38.461784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:38.461864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:38.474439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:38.474463Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:38.477722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:38.477859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:38.477930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:38.480344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:38.480570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:38.480726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:38.480785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:38.481419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:38.481469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:38.481774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:38.481787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:38.481814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:38.481822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:38.481828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:38.481869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.483803Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:30:38.503985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:38.504065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.504130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:38.504173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:38.504184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.505002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:38.505030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:38.505077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.505087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:38.505092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:38.505097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:38.505574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-05-29T15:30:38.505586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:38.505591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:38.505977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.505988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.505995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:38.506002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:38.506711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:38.507174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:38.507214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:38.507397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:38.507422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:38.507429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:38.507503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:30:38.507511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:38.507547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:38.507560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:38.508018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:38.508027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:38.508071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:38.508076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-29T15:30:38.508157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.508164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-29T15:30:38.508178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:30:38.508182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:30:38.508189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:30:38.508192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:30:38.508196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-29T15:30:38.508202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:30:38.508207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1:0 2025-05-29T15:30:38.508211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1:0 2025-05-29T15:30:38.508222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:30:38.508229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-29T15:30:38.508233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-29T15:30:38.508595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:30:38.508612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, 
cookie: 1 2025-05-29T15:30:38.508617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-05-29T15:30:38.508623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-05-29T15:30:38.508628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:38.508643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-05-29T15:30:38.510003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-05-29T15:30:38.510123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-05-29T15:30:38.510561Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [1:272:2262] Bootstrap 2025-05-29T15:30:38.512658Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [1:272:2262] Become StateWork (SchemeCache [1:277:2267]) 2025-05-29T15:30:38.513448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "embedding" Type: "String" } Columns { Name: "covered" Type: "String" } Columns { Name: "prefix" Type: "Float" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "prefix" KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "covered" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:38.513549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:101: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-05-29T15:30:38.513591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: Column 'prefix' has wrong key type Float for being key, at schemeshard: 72057594046678944 2025-05-29T15:30:38.513597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Column 'prefix' has wrong key type Float for being key, at schemeshard: 72057594046678944 2025-05-29T15:30:38.513755Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:272:2262] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:30:38.514710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Column \'prefix\' has wrong key type Float for being 
key" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:38.514762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'prefix' has wrong key type Float for being key, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2025-05-29T15:30:38.514883Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-05-29T15:30:38.514935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-05-29T15:30:38.514943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-05-29T15:30:38.515002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-05-29T15:30:38.515023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:30:38.515028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:287:2277] TestWaitNotification: OK eventTxId 101 2025-05-29T15:30:38.515108Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:30:38.515143Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector" took 42us result status StatusPathDoesNotExist 2025-05-29T15:30:38.515186Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/vectors/idx_vector\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/vectors/idx_vector" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TPersQueueTest::TopicServiceReadBudget [FAIL] >> TPersQueueTest::TopicServiceSimpleHappyWrites >> TxUsage::WriteToTopic_Demo_6_Table [FAIL] >> TxUsage::WriteToTopic_Demo_6_Query >> DemoTx::Scenario_5 [FAIL] >> TFstClassSrcIdPQTest::TestTableCreated |73.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TxUsage::WriteToTopic_Demo_42_Table [FAIL] >> TxUsage::WriteToTopic_Demo_42_Query >> EvWrite::WriteInTransaction [GOOD] >> EvWrite::WriteWithLock |73.9%| [LD] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys >> TPersQueueTest::DirectReadCorrectOffsetsOnRestart [FAIL] >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] >> TxUsage::WriteToTopic_Demo_6_Query [FAIL] >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] >> TPersQueueTest::CheckKillBalancer [FAIL] >> TVectorIndexTests::CreateTableCoveredEmbedding >> TPersQueueTest::SetupWriteSession [FAIL] >> KqpOlapIndexes::IndexesInLocalMetadata >> TxUsage::WriteToTopic_Demo_42_Query [FAIL] >> TFstClassSrcIdPQTest::TestTableCreated [FAIL] >> TPersQueueTest::TopicServiceSimpleHappyWrites [FAIL] >> EvWrite::WriteWithLock [GOOD] >> TSchemeShardSplitByLoad::TableSplitsUpToMaxPartitionsCount [GOOD] >> TPersQueueTest::DirectReadBadCases >> TFstClassSrcIdPQTest::NoMapping >> TPersQueueTest::DirectReadBadCases [FAIL] >> TPersQueueTest::DirectReadWrongGeneration >> TxUsage::WriteToTopic_Demo_7_Table >> TPersQueueTest::CheckDeleteTopic >> TxUsage::WriteToTopic_Demo_43_Table >> TPersQueueTest::StoreNoMoreThanXSourceIDs >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] >> TSchemeShardSplitBySample::DifferentSizeKeysWorks [GOOD] >> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode |73.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |73.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TxUsage::WriteToTopic_Demo_7_Table [FAIL] >> TxUsage::WriteToTopic_Demo_7_Query >> TxUsage::WriteToTopic_Demo_43_Table [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:40.467818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:40.467841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:40.467863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:40.467868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:40.467882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:40.467885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:40.467893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
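The INTERNAL_ERROR loop in the ColumnStatistics output above always fails on the same statement: the data query that the statistics service submits through [TQueryBase] RunDataQuery to persist count-min sketches. For readability, that query is reproduced below as standalone YQL. Note that this capture strips angle-bracketed type parameters (the log shows bare "AS List;"), so the List element types here are an assumption inferred from the .metadata/_statistics columns, not text taken verbatim from the log:

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;  -- assumed element type; the capture shows only "AS List;"
    DECLARE $data AS List<String>;         -- assumed element type; serialized sketch blobs
    UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
           ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);

Every attempt dies in compilation with yql/essentials/ast/yql_expr.h:1874: index out of range (code 1060), so no sketch rows are ever written. That is consistent with the later ReadRowsResponse reporting RowsCount[ 0 ] and with CheckColumnStatistics failing its (stat.Success) assertion at ut_column_statistics.cpp:50.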
2025-05-29T15:30:40.467905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:40.468014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:40.468083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:40.477961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:40.477997Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:40.480203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:40.480307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:40.480348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:40.481940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:40.482094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:40.482211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:40.482242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:40.482761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:40.482803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:40.483092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:40.483104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:40.483145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:40.483154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:40.483160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:40.483192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:30:40.484326Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 
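For contrast with the CreateTableCoveredEmbedding run bootstrapping here, the CreateTablePrefixInvalidKeyType output above shows schemeshard rejecting the proposal with StatusInvalidParameter: "Column 'prefix' has wrong key type Float for being key". The ESchemeOpCreateIndexedTable transaction in that log corresponds roughly to the following YQL DDL sketch; the SQL surface syntax and the WITH option names are an approximation reconstructed from the internal proto description, not a statement taken from the test itself:

    CREATE TABLE `/MyRoot/vectors` (
        id Uint64,
        embedding String,
        covered String,
        prefix Float,   -- Float is not a valid key column type, so using it as the index prefix is rejected
        PRIMARY KEY (id),
        INDEX idx_vector GLOBAL USING vector_kmeans_tree
            ON (prefix, embedding) COVER (covered)
            WITH (distance=cosine, vector_type="float", vector_dimension=1024, clusters=4, levels=5)
    );

Because the proposal is rejected outright, nothing is created, and the follow-up describe of /MyRoot/vectors/idx_vector returns StatusPathDoesNotExist, which is exactly what the [GOOD] verdict for that test asserts.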
2025-05-29T15:30:40.501794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:40.501885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:40.501953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:40.502008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:40.502021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:40.503293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:40.503331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:40.503400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:40.503411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:40.503417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:40.503424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:40.505100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:40.505137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:40.505145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:40.507421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:40.507447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:40.507456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:40.507466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:40.508442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:40.509055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:40.509111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:40.509313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:40.509347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:40.509356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:40.509449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:30:40.509459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:40.509501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:40.509515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:40.510076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:40.510086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:40.510141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "embedding" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { 
InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:40.675860Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:30:40.675892Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 34us result status StatusSuccess 2025-05-29T15:30:40.676005Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 
PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_centroid" Type: "String" TypeId: 4097 Id: 3 NotNull: true IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:40.676165Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:30:40.676192Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 33us result status StatusSuccess 2025-05-29T15:30:40.676259Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "embedding" Type: "String" TypeId: 4097 Id: 3 NotNull: 
false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySample::DifferentSizeKeysWorks [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:28.710783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:28.710812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:28.710817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:28.710823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:28.710837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:28.710841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:28.710852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:28.710865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
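A note on the index layout dumped above: the k-means tree keeps two private tables, indexImplLevelTable (__ydb_parent, __ydb_id, __ydb_centroid) for the tree of centroids and indexImplPostingTable (__ydb_parent, id, embedding) for the row postings, built with settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5. As a minimal sketch only, not YDB's implementation, the DISTANCE_COSINE metric over such float embeddings amounts to:

#include <cmath>
#include <cstddef>

// Sketch of the DISTANCE_COSINE metric named in the settings above:
// returns 1 - cos(a, b), so smaller values mean closer embeddings.
static float CosineDistance(const float* a, const float* b, std::size_t dim) {
    float dot = 0.0f, na = 0.0f, nb = 0.0f;
    for (std::size_t i = 0; i < dim; ++i) {
        dot += a[i] * b[i];
        na += a[i] * a[i];
        nb += b[i] * b[i];
    }
    if (na == 0.0f || nb == 0.0f) {
        return 1.0f; // convention chosen for zero vectors in this sketch only
    }
    return 1.0f - dot / (std::sqrt(na) * std::sqrt(nb));
}

Per the dump, the tree has levels: 5 with clusters: 4 centroids per node compared by this distance; the String-typed __ydb_centroid and embedding cells carry the serialized vectors (the serialization format itself is not shown in this log).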
2025-05-29T15:30:28.710983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:28.711056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:28.721230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:28.721254Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:28.723948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:28.724087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:28.724125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:28.725654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:28.725828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:28.725963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:28.726016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:28.726525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:28.726577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:28.726897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:28.726909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:28.726934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:28.726944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:28.726951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:28.726986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.728270Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:30:28.748647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:28.748731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.748794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:28.748841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:28.748852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.749453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:28.749481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:28.749526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.749538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:28.749543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:28.749549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:28.749921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.749931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:28.749937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:28.750257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.750268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.750274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:28.750291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:28.750976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:28.751396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:28.751433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:28.751614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:28.751636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:28.751643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:28.751697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:30:28.751704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:28.751736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:28.751749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:28.752177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:28.752186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:28.752235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
ldInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 5 SplitByLoadSettings { Enabled: true CpuPercentageThreshold: 1 } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 62500 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 125000 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 250000 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 500000 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000$\364\000\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409553 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000H\350\001\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409554 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\220\320\003\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409552 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000 \241\007\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 
72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 5 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 5000000 Memory: 430152 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TEST table final state: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 5 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 5 SplitByLoadSettings { Enabled: true CpuPercentageThreshold: 1 } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 62500 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 125000 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 250000 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 500000 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000$\364\000\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409553 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000H\350\001\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409554 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\220\320\003\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409552 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000 \241\007\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 5 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 5000000 Memory: 430152 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 >> TPersQueueTest::StoreNoMoreThanXSourceIDs [FAIL] >> TPersQueueTest::SetupWriteSessionOnDisabledCluster >> TPersQueueTest::DirectReadWrongGeneration [FAIL] >> TPersQueueTest::CheckDeleteTopic [FAIL] >> TxUsage::WriteToTopic_Demo_43_Query >> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode [FAIL] >> TFstClassSrcIdPQTest::NoMapping [FAIL] >> TxUsage::WriteToTopic_Demo_7_Query [FAIL] >> HttpRequest::AnalyzeServerless [GOOD] >> HttpRequest::Analyze [GOOD] >> 
TSchemeShardSplitBySizeTest::SplitShardsWithDecimalKey [GOOD]
>> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup [GOOD]
>> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD]
>> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD]
>> TPersQueueTest::SetupWriteSessionOnDisabledCluster [FAIL]
>> TPersQueueTest::DirectReadStop
>> KqpAnalyze::AnalyzeTable+ColumnStore [FAIL]
>> KqpAnalyze::AnalyzeTable-ColumnStore
|73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut
|73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3
|74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut
|74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3
|74.0%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut
|74.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys
|74.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3
>> TSchemeShardSplitBySizeTest::Merge1KShards [GOOD]
|74.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest
>> TConsoleConfigTests::TestAddConfigItem
>> TPersQueueTest::CheckDecompressionTasksWithoutSession
>> TPersQueueTest::SetupReadSession
>> TPersQueueTest::DirectReadStop [FAIL]
>> TPersQueueTest::DirectReadCleanCache
>> TxUsage::WriteToTopic_Demo_43_Query [FAIL]
>> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed
>> TFstClassSrcIdPQTest::ProperPartitionSelected
>> TPersQueueTest::SetupReadSession [FAIL]
>> KqpAnalyze::AnalyzeTable-ColumnStore [GOOD]
>> TxUsage::WriteToTopic_Demo_8_Table
>> TConsoleConfigTests::TestAddConfigItem [GOOD]
>> KqpExplain::AggGroupLimit
>> TPersQueueTest::DirectReadCleanCache [FAIL]
>> TPersQueueTest::CheckDecompressionTasksWithoutSession [FAIL]
>> TxUsage::WriteToTopic_Demo_44_Table
>> TPersQueueTest::TestBigMessage
>> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed [FAIL]
>> TConsoleConfigTests::TestAutoKind
>> TPersQueueTest::DirectReadRestartPQRB
>> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse
>> TFstClassSrcIdPQTest::ProperPartitionSelected [FAIL]
>> TPersQueueTest::TestBigMessage [FAIL]
>> TxUsage::WriteToTopic_Demo_8_Table [FAIL]
>> TxUsage::WriteToTopic_Demo_8_Query
>> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed
>> TConsoleConfigTests::TestAutoKind [GOOD]
>> TxUsage::WriteToTopic_Demo_44_Table [FAIL]
>> TPersQueueTest::SetMeteringMode
>> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse [FAIL]
>> TPQCompatTest::DiscoverTopics
>> TPersQueueTest::DirectReadRestartPQRB [FAIL]
>> TxUsage::WriteToTopic_Demo_8_Query [FAIL]
>> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed [FAIL]
>> TConsoleConfigTests::TestAllowedScopes
>> TxUsage::WriteToTopic_Demo_44_Query
>> TPersQueueTest::SetMeteringMode [FAIL]
>> TPersQueueTest::DirectReadRestartTablet
>> TPQCompatTest::DiscoverTopics [FAIL]
>> TxUsage::WriteToTopic_Demo_9_Table
>> TPersQueueTest::TestWriteStat
>> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged
>> TPQCompatTest::SetupLockSession
>> TConsoleConfigTests::TestAllowedScopes [GOOD]
>> TxUsage::WriteToTopic_Demo_44_Query [FAIL]
>> TPersQueueTest::TClusterTrackerTest
>> TConsoleConfigTests::TestAffectedConfigs
>> TxUsage::WriteToTopic_Demo_9_Table [FAIL]
>> TPersQueueTest::TestWriteStat [FAIL]
>> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged [FAIL]
>> TPersQueueTest::DirectReadRestartTablet [FAIL]
>> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError
>> TxUsage::WriteToTopic_Demo_45_Table
>> TPQCompatTest::SetupLockSession [FAIL]
>> TConsoleConfigTests::TestAffectedConfigs [GOOD]
>> TPersQueueTest::TClusterTrackerTest [FAIL]
>> TxUsage::WriteToTopic_Demo_9_Query
>> TPersQueueTest::TestWriteSessionsConflicts
>> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder
>> TxUsage::WriteToTopic_Demo_45_Table [FAIL]
>> TPQCompatTest::BadTopics
>> TPersQueueTest::TestReadPartitionByGroupId
>> TxUsage::WriteToTopic_Demo_9_Query [FAIL]
>> TPersQueueTest::TestWriteSessionsConflicts [FAIL]
>> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::AnalyzeServerless [GOOD]
>> TPersQueueTest::CreateTopicWithMeteringMode
Test command err: 2025-05-29T15:30:31.240266Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:30:31.240326Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:30:31.240353Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001527/r3tmp/tmpwqQDjK/pdisk_1.dat 2025-05-29T15:30:31.355127Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23388, node 1 2025-05-29T15:30:31.451713Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:31.451737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:31.451740Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:31.451834Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:30:31.452356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:30:31.531856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:31.531903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:31.548078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64013 2025-05-29T15:30:31.965344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:32.789699Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:30:32.799832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:32.799874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:32.843609Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:30:32.844255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:32.990281Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:32.990501Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:32.990662Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
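An aside on the TablePartitions entries in the split-test state further above: each EndOfRangeKeyPrefix payload carries the same Uint64 as the matching SplitBoundary, in little-endian form ("$\364" is 0x24 0xF4, i.e. 62500; "H\350\001" gives 125000, "\220\320\003" gives 250000, " \241\007" gives 500000). A small decoding sketch, assuming (not verified against the actual serializer) that the first six bytes are a header and the next eight the key cell:

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstring>

// Assumed layout, checked only against the four boundaries printed above:
// 6 header bytes, then one little-endian Uint64 key cell.
static std::uint64_t DecodeBoundary(const unsigned char* buf, std::size_t len) {
    std::uint64_t value = 0;
    if (len >= 6 + sizeof(value)) {
        std::memcpy(&value, buf + 6, sizeof(value)); // little-endian host assumed
    }
    return value;
}

int main() {
    // "\001\000\010\000\000\000$\364\000\000\000\000\000\000" from the dump
    const unsigned char key[] = {0x01, 0x00, 0x08, 0x00, 0x00, 0x00,
                                 0x24, 0xF4, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
    std::printf("%llu\n",
                static_cast<unsigned long long>(DecodeBoundary(key, sizeof(key)))); // prints 62500
    return 0;
}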
2025-05-29T15:30:32.990698Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:32.990776Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:32.990796Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:32.990815Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:32.990832Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:32.990855Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:33.144361Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:33.144410Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:33.159553Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:33.200910Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:33.210367Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:30:33.210416Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:30:33.219001Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:30:33.219303Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:30:33.219329Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:30:33.219335Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:30:33.219342Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:30:33.219349Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:30:33.219355Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:30:33.219363Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:30:33.219750Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:30:33.237421Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:30:33.237460Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:1864:2600], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:30:33.239763Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1874:2608] 2025-05-29T15:30:33.241165Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2623] 
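Similarly, the CompactionPolicy blocks repeated throughout this log describe three LSM-style generations with regular and force thresholds (generation 1, for example: SizeToCompact: 41943040, CountToCompact: 5, ForceCountToCompact: 16, ForceSizeToCompact: 536870912). The following is only the obvious reading of those fields as a trigger predicate; the real scheduling goes through the resource broker tasks named in the dump and may combine the limits differently:

#include <cstdint>

// Field names mirror the CompactionPolicy dump above; how the thresholds
// combine is an assumption for illustration, not YDB's actual scheduler.
struct TGenerationPolicy {
    std::uint64_t SizeToCompact;
    std::uint32_t CountToCompact;
    std::uint32_t ForceCountToCompact;
    std::uint64_t ForceSizeToCompact;
};

// Force limits fire on their own; the regular pair is read here as both-required.
static bool ShouldCompact(const TGenerationPolicy& p,
                          std::uint64_t bytes, std::uint32_t parts) {
    if (parts >= p.ForceCountToCompact || bytes >= p.ForceSizeToCompact) {
        return true;
    }
    return parts >= p.CountToCompact && bytes >= p.SizeToCompact;
}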
2025-05-29T15:30:33.241270Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1907:2623], schemeshard id = 72075186224037897 2025-05-29T15:30:33.243703Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-05-29T15:30:33.248008Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:30:33.248031Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:30:33.248044Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-05-29T15:30:33.250786Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:30:33.252684Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:30:33.252720Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:30:33.355184Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:30:33.481058Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:30:33.579075Z node 2 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:30:34.331917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:30:34.958451Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:35.069256Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7809: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-05-29T15:30:35.069282Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7825: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-05-29T15:30:35.069295Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:2570:2940], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-05-29T15:30:35.069671Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:2572:2941] 2025-05-29T15:30:35.069756Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2572:2941], schemeshard id = 72075186224037899 2025-05-29T15:30:36.136488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2698:3232], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:36.136543Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:36.142186Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-05-29T15:30:36.251701Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037905;self_id=[2:2840:3074];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:30:36.251829Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037905;self_id=[2:2840:3074];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:30:36.251946Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037905;self_id=[2:2840:3074];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register ... HARD_TX WARN: log.cpp:784: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:37.942297Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:37.942345Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:37.942389Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:37.942430Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:37.942619Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:37.942662Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:37.942701Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:39.126593Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4309:3403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:39.126633Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:39.129829Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-05-29T15:30:39.172322Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:30:39.172438Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:30:39.172525Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:30:39.173094Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:30:39.173192Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:30:39.173271Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:30:39.173349Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:30:39.173425Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:30:39.173502Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:30:39.173581Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715663; 2025-05-29T15:30:39.830143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4445:3445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:39.832038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:39.833274Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715664:0, at schemeshard: 72075186224037899 2025-05-29T15:30:39.861482Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:30:39.861589Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:30:39.861676Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:30:39.861857Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:30:39.861929Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:30:39.862007Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:30:39.862104Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:30:39.862298Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:30:39.862374Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; 2025-05-29T15:30:39.862441Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715664; waiting actualization: 0/0.000013s 2025-05-29T15:30:45.038355Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:6345:5626] 2025-05-29T15:30:45.038990Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:22: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:6341:4042] , Record { OperationId: "\000\000\000\000\"\017ur-\262\251\241\374R\343p" Tables { PathId { OwnerId: 72057594046644480 LocalId: 2 } } } 2025-05-29T15:30:45.039002Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:47: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId="ur-Rp 2025-05-29T15:30:45.039006Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:65: [72075186224037894] TTxAnalyze::Execute. 
Create new force traversal table, OperationId="ur-Rp , PathId [OwnerId: 72057594046644480, LocalPathId: 2] Answer: 'Analyze sent. OperationId: 00000008gfens2vcn9m7y55rvg'
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> TxUsage::WriteToTopic_Demo_50_Table
>> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder [FAIL]
>> TPQCompatTest::BadTopics [FAIL]
>> TPersQueueTest::TestReadPartitionByGroupId [FAIL]
>> TxUsage::WriteToTopic_Demo_45_Query
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithLock [GOOD]
>> TPQCompatTest::CommitOffsets
>> TPersQueueTest::TestReadRuleServiceTypePassword
Test command err: 2025-05-29T15:30:38.424671Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784:
tablet_id=9437184;self_id=[1:138:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:30:38.429142Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:30:38.430060Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:30:38.433887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:30:38.433972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:30:38.434038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:30:38.434082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:30:38.434105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:30:38.434139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:30:38.434162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:30:38.434186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:30:38.434209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:30:38.434232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:30:38.434270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:30:38.434300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:30:38.441498Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:30:38.441556Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:30:38.441570Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:30:38.441607Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:30:38.443311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:30:38.443351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:30:38.443361Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:30:38.443384Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:30:38.443394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:30:38.443402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:30:38.443406Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:30:38.443428Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:30:38.443436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:30:38.443443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:30:38.443448Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:30:38.443458Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:30:38.443466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:30:38.443475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 
2025-05-29T15:30:38.443479Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:30:38.443493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:30:38.443501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:30:38.443505Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:30:38.443514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:30:38.443527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:30:38.443531Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:30:38.443560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:30:38.443569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:30:38.443573Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:30:38.443595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:30:38.443602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:30:38.443607Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:30:38.443621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:30:38.443628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:30:38.443632Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 
2025-05-29T15:30:38.443641Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:30:38.443649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:30:38.443657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:30:38.443661Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:30:38.443748Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=13; 2025-05-29T15:30:38.443777Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=24; 2025-05-29T15:30:38.443786Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execut ... dexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=2048; 2025-05-29T15:30:40.379987Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:250;stage=data_format;batch_size=229376;num_rows=2048;batch_columns=key,field; 2025-05-29T15:30:40.380016Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[2:203:2216];bytes=229376;rows=2048;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-05-29T15:30:40.380027Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:1;records_count:2048;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-05-29T15:30:40.380037Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:1;records_count:2048;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-05-29T15:30:40.380040Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-05-29T15:30:40.380043Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: 
log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:199;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-05-29T15:30:40.380116Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:30:40.380123Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:1;records_count:2048;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-05-29T15:30:40.380125Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-05-29T15:30:40.380131Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:230;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=2048; 2025-05-29T15:30:40.380135Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:250;stage=data_format;batch_size=229376;num_rows=2048;batch_columns=key,field; 2025-05-29T15:30:40.380149Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[2:203:2216];bytes=229376;rows=2048;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-05-29T15:30:40.380155Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-05-29T15:30:40.380163Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-05-29T15:30:40.380169Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-05-29T15:30:40.380229Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:30:40.380235Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-05-29T15:30:40.380240Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-05-29T15:30:40.380244Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:410: Scan [2:204:2217] finished for tablet 9437184 2025-05-29T15:30:40.380302Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[2:203:2216];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":1748532640370215,"name":"_full_task","f":1748532640370215,"d_finished":0,"c":0,"l":1748532640380253,"d":10038},"events":[{"name":"bootstrap","f":1748532640370292,"d_finished":550,"c":1,"l":1748532640370842,"d":550},{"a":1748532640380228,"name":"ack","f":1748532640379951,"d_finished":151,"c":2,"l":1748532640380171,"d":176},{"a":1748532640380227,"name":"processing","f":1748532640370856,"d_finished":5599,"c":18,"l":1748532640380171,"d":5625},{"name":"ProduceResults","f":1748532640370585,"d_finished":543,"c":22,"l":1748532640380242,"d":543},{"a":1748532640380242,"name":"Finish","f":1748532640380242,"d_finished":0,"c":0,"l":1748532640380253,"d":11},{"name":"task_result","f":1748532640370861,"d_finished":5402,"c":16,"l":1748532640379923,"d":5402}],"id":"9437184::5"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-05-29T15:30:40.380328Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[2:203:2216];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-05-29T15:30:40.380355Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[2:203:2216];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":1748532640370215,"name":"_full_task","f":1748532640370215,"d_finished":0,"c":0,"l":1748532640380333,"d":10118},"events":[{"name":"bootstrap","f":1748532640370292,"d_finished":550,"c":1,"l":1748532640370842,"d":550},{"a":1748532640380228,"name":"ack","f":1748532640379951,"d_finished":151,"c":2,"l":1748532640380171,"d":256},{"a":1748532640380227,"name":"processing","f":1748532640370856,"d_finished":5599,"c":18,"l":1748532640380171,"d":5705},{"name":"ProduceResults","f":1748532640370585,"d_finished":543,"c":22,"l":1748532640380242,"d":543},{"a":1748532640380242,"name":"Finish","f":1748532640380242,"d_finished":0,"c":0,"l":1748532640380333,"d":91},{"name":"task_result","f":1748532640370861,"d_finished":5402,"c":16,"l":1748532640379923,"d":5402}],"id":"9437184::5"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-05-29T15:30:40.380365Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-05-29T15:30:40.370068Z;index_granules=0;index_portions=2;index_batches=88;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=474480;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=474480;selected_rows=0; 2025-05-29T15:30:40.380369Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:190;event=scan_aborted;reason=unexpected on destructor; 2025-05-29T15:30:40.380412Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[2:204:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessGlobalIndex [FAIL] Test command err: 2025-05-29T15:30:32.979293Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:30:32.979335Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:30:32.979353Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00151f/r3tmp/tmpVuigYs/pdisk_1.dat 2025-05-29T15:30:33.072887Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27050, node 1 2025-05-29T15:30:33.178718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:33.178823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:33.178837Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:33.178923Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:30:33.179491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:30:33.257585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:33.257619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:33.269923Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20302 2025-05-29T15:30:33.617785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:34.527906Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:30:34.539650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:34.539692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:34.599460Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:30:34.603228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:34.755677Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:34.755875Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:34.756046Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-05-29T15:30:34.756079Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:34.756096Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:34.756142Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:34.756160Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:34.756178Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:34.756214Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:34.916629Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:34.916670Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:34.929053Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:34.973161Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:34.995154Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:30:34.995187Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:30:35.005782Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:30:35.006052Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:30:35.006086Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:30:35.006092Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:30:35.006099Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:30:35.006104Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:30:35.006108Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:30:35.006115Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:30:35.006304Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:30:35.023842Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:30:35.023881Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:1863:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:30:35.025094Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1874:2605] 2025-05-29T15:30:35.026405Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2622] 
2025-05-29T15:30:35.026656Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1905:2622], schemeshard id = 72075186224037897 2025-05-29T15:30:35.027301Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-05-29T15:30:35.030753Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:30:35.030766Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:30:35.030776Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-05-29T15:30:35.033149Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:30:35.034503Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:30:35.034535Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:30:35.128697Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:30:35.212576Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:30:35.266183Z node 2 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:30:35.798423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:30:36.301361Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:36.438496Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7809: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-05-29T15:30:36.438528Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7825: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-05-29T15:30:36.438548Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:2566:2937], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-05-29T15:30:36.439074Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:2567:2938] 2025-05-29T15:30:36.439160Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2567:2938], schemeshard id = 72075186224037899 2025-05-29T15:30:37.282789Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2689:3228], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:37.282848Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:37.294306Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-05-29T15:30:37.423597Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2920:3273], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:37.423648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:37.439123Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2925:3277]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:30:37.439195Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-05-29T15:30:37.439234Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-05-29T15:30:37.439246Z node 1 :STATISTICS DEBUG: service_impl.cpp:1219: ConnectToSA(), pipe client id = [1:2928:3280] 2025-05-29T15:30:37.439261Z node 1 :STATISTICS DEBUG: service_impl.cpp:1248: SyncNode(), pipe client id = [1:2928:3280] 2025-05-29T15:30:37.439522Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:2929:3120] 2025-05-29T15:30:37.439623Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:2928:3280], server id = [2:2929:3120], tablet id = 72075186224037894, status = OK 2025-05-29T15:30:37.439700Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:133: [72075186224037894] EvConnectNode, pipe server id = [2:2929:3120], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-05-29T15:30:37.439717Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:314: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-05-29T15:30:37.439988Z node 1 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 1 2025-05-29T15:30:37.440006Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 1, ReplyToActorId = [1:2925:3277], StatRequests.size() = 1 2025-05-29T15:30:37.442734Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2962:3289]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:30:37.442799Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-05-29T15:30:37.442806Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 2, ReplyToActorId = [1:2962:3289], StatRequests.size() = 1 2025-05-29T15:30:37.442848Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2964:3291]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:30:37.442867Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-05-29T15:30:37.442870Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 3, ReplyToActorId = [1:2964:3291], StatRequests.size() = 1 2025-05-29T15:30:37.446432Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2969:3296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:37.446474Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:37.446570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2974:3301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:37.448551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:30:37.545796Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:217: [72075186224037894] EvFastPropagateCheck 2025-05-29T15:30:37.545841Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:357: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-05-29T15:30:37.624893Z node 1 :STATISTICS DEBUG: service_impl.cpp:1189: EvRequestTimeout, pipe client id = [1:2928:3280], schemeshard count = 1 2025-05-29T15:30:37.846556Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2976:3303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:30:37.993055Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:3087:3372] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:37.995108Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3110:3388]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:30:37.995145Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-05-29T15:30:37.995150Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 4, ReplyToActorId = [1:3110:3388], StatRequests.size() = 1 2025-05-29T15:30:37.995606Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [1:3116:3394]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:30:37.995629Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2025-05-29T15:30:37.995632Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 5, ReplyToActorId = [1:3116:3394], StatRequests.size() = 1 2025-05-29T15:30:37.995652Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 6 ], ReplyToActorId[ [1:3118:3396]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:30:37.995672Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2025-05-29T15:30:37.995675Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 6, ReplyToActorId = [1:3118:3396], StatRequests.size() = 1 2025-05-29T15:30:38.157446Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:3099:3383], status: INTERNAL_ERROR, issues:
: Fatal: Default error
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:38.158285Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmZiM2QxYjktOWY5ODM5NWItNGQyMGZkNWUtNzgyN2U1OQ==, ActorId: [1:2967:3294], ActorState: ExecuteState, TraceId: 01jweanbqf1x9rm33wb7psfj3e, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/statistics/ut_common/ut_common.cpp:237, Ydb::StatusIds::StatusCode NKikimr::NStat::ExecuteYqlScript(TTestEnv &, const TString &, bool): (response.operation().status() == Ydb::StatusIds::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS) , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x137F904C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x139AC3D9) NKikimr::NStat::ExecuteYqlScript(NKikimr::NStat::TTestEnv&, TBasicString> const&, bool)+1783 (0x25EAECD7) ??+0 (0x136E9066) ??+0 (0x136E4AA9) NKikimr::NStat::NTestSuiteBasicStatistics::TTestCaseServerlessGlobalIndex::Execute_(NUnitTest::TTestContext&)+717 (0x136E4DDD) NKikimr::NStat::NTestSuiteBasicStatistics::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136E84F7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x139AE28E) NKikimr::NStat::NTestSuiteBasicStatistics::TCurrentTest::Execute()+436 (0x136E7D54) NUnitTest::TTestFactory::Execute()+803 (0x139AEA03) NUnitTest::RunMain(int, char**)+3021 (0x139BCD1D) ??+0 (0x7F52A857ED90) __libc_start_main+128 (0x7F52A857EE40) _start+41 (0x1283B029) >> TPersQueueTest::Delete >> TxUsage::WriteToTopic_Demo_45_Query [FAIL] >> TPQCompatTest::CommitOffsets [FAIL] >> TPersQueueTest::CreateTopicWithMeteringMode [FAIL] >> TxUsage::WriteToTopic_Demo_50_Table [FAIL] >> TPersQueueTest::TestReadRuleServiceTypePassword [FAIL] >> TPQCompatTest::LongProducerAndLongMessageGroupId >> TPersQueueTest::SrcIdCompatibility >> TPersQueueTest::DefaultMeteringMode >> TxUsage::WriteToTopic_Demo_50_Query >> TPersQueueTest::Delete [FAIL] >> TxUsage::WriteToTopic_Demo_46_Table >> TPersQueueTest::TestReadPartitionStatus >> TPQCompatTest::LongProducerAndLongMessageGroupId [FAIL] >> TPersQueueTest::DefaultMeteringMode [FAIL] >> TPersQueueTest::SrcIdCompatibility [FAIL] >> TPersQueueTest::DisableWrongSettings >> TxUsage::WriteToTopic_Demo_46_Table [FAIL] >> TPersQueueTest::TestReadPartitionStatus [FAIL] >> TPQCompatTest::ReadWriteSessions >> TxUsage::WriteToTopic_Demo_50_Query [FAIL] >> TxUsage::WriteToTopic_Demo_46_Query >> TPersQueueTest::TxCounters >> TPersQueueTest::DisableWrongSettings [FAIL] >> TPQCompatTest::ReadWriteSessions [FAIL] >> TxUsage::Write_Random_Sized_Messages_In_Wide_Transactions_Table >> TxUsage::WriteToTopic_Demo_46_Query [FAIL] >> TPersQueueTest::DisableDeduplication >> TPersQueueTest::TxCounters [FAIL] >> TxUsage::Write_Random_Sized_Messages_In_Wide_Transactions_Table [FAIL] >> TxUsage::Write_Random_Sized_Messages_In_Wide_Transactions_Query >> TxUsage::WriteToTopic_Demo_47_Table >> TPersQueueTest::DisableDeduplication [FAIL] >> TxUsage::Write_Random_Sized_Messages_In_Wide_Transactions_Query [FAIL] >> TxUsage::WriteToTopic_Demo_47_Table [FAIL] >> TxUsage::Write_Only_Big_Messages_In_Wide_Transactions_Table >> TxUsage::WriteToTopic_Demo_47_Query >> TxUsage::Write_Only_Big_Messages_In_Wide_Transactions_Table [FAIL] >> TxUsage::WriteToTopic_Demo_47_Query [FAIL] >> TxUsage::WriteToTopic_Demo_48_Table >> TxUsage::Write_Only_Big_Messages_In_Wide_Transactions_Query ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::SplitShardsWithDecimalKey [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:29:59.102363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:29:59.102390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:59.102396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:29:59.102402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:29:59.102416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:29:59.102421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:29:59.102431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:59.102445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:29:59.102563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:29:59.102650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:29:59.116780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:29:59.116808Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:59.122787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:29:59.122961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:29:59.123013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:29:59.129814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:29:59.130079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:29:59.130267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2025-05-29T15:29:59.130345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:29:59.131276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:59.131347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:29:59.131711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:59.131727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:59.131752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:29:59.131763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:59.131770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:29:59.131818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:29:59.134505Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:29:59.160508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:29:59.160610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:59.160689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:29:59.160741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:29:59.160754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:59.161785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:59.161823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-05-29T15:29:59.161884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:59.161896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:29:59.161901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:29:59.161906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:29:59.162372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:59.162383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:29:59.162389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:29:59.162755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:59.162765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:59.162771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:59.162789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:29:59.163430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:29:59.163852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:29:59.163896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:29:59.164107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:59.164132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:29:59.164140Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:59.164207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:29:59.164213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:59.164250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:29:59.164264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:29:59.164834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:59.164843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:59.164900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... nerIdx: 45 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.331211Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 45, type DataShard, boot OK, tablet id 72075186233409590 2025-05-29T15:30:46.331375Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 46 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.331397Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 46, type DataShard, boot OK, tablet id 72075186233409591 2025-05-29T15:30:46.331491Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 47 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.331508Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 47, type DataShard, boot OK, tablet id 72075186233409592 2025-05-29T15:30:46.331607Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 48 TabletType: DataShard ObjectDomain { SchemeShard: 
72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.331621Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 48, type DataShard, boot OK, tablet id 72075186233409593 2025-05-29T15:30:46.331720Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 49 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.331736Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 49, type DataShard, boot OK, tablet id 72075186233409594 2025-05-29T15:30:46.331810Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 50 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.331837Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 50, type DataShard, boot OK, tablet id 72075186233409595 2025-05-29T15:30:46.331913Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 51 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.331928Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 51, type DataShard, boot OK, tablet id 72075186233409596 2025-05-29T15:30:46.332013Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 52 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.332028Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 52, type DataShard, boot OK, tablet id 72075186233409597 2025-05-29T15:30:46.332082Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 53 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.332096Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 
72057594046678944, OwnerIdx 53, type DataShard, boot OK, tablet id 72075186233409598 2025-05-29T15:30:46.332241Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 54 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.332258Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 54, type DataShard, boot OK, tablet id 72075186233409599 2025-05-29T15:30:46.332431Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 55 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.332454Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 55, type DataShard, boot OK, tablet id 72075186233409600 2025-05-29T15:30:46.333341Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 56 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.333369Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 56, type DataShard, boot OK, tablet id 72075186233409601 2025-05-29T15:30:46.333484Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 57 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.333503Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 57, type DataShard, boot OK, tablet id 72075186233409602 2025-05-29T15:30:46.333597Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 58 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.333611Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 58, type DataShard, boot OK, tablet id 72075186233409603 2025-05-29T15:30:46.333679Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 59 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { 
StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.333691Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 59, type DataShard, boot OK, tablet id 72075186233409604 2025-05-29T15:30:46.333783Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 60 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.333798Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 60, type DataShard, boot OK, tablet id 72075186233409605 2025-05-29T15:30:46.333895Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 61 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.333919Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 61, type DataShard, boot OK, tablet id 72075186233409606 2025-05-29T15:30:46.333974Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 62 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.333990Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 62, type DataShard, boot OK, tablet id 72075186233409607 2025-05-29T15:30:46.334044Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 63 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:30:46.334057Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 63, type DataShard, boot OK, tablet id 72075186233409608 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:31.733517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: 
Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:31.733551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:31.733557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:31.733563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:31.733578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:31.733583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:31.733593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:31.733607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:31.733727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:31.733844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:31.752163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:31.752191Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:31.756338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:31.756488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:31.756533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:31.758222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:31.758409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:31.758520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:31.758581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:31.759076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:31.759143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:31.759426Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:31.759439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:31.759460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:31.759468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:31.759475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:31.759511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:30:31.760874Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:30:31.782008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:31.782116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:31.782186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:31.782237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:31.782266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:31.783987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:31.784040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:31.784133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:31.784148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:31.784155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:31.784163Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:31.784971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:31.784987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:31.784994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:31.785439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:31.785451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:31.785458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:31.785467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:31.786320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:31.786804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:31.786860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:31.787097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:31.787129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:31.787139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:31.787237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:30:31.787247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:31.787285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:31.787299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:31.787783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:31.787792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:31.787858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:30:39.992383Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:30:39.992393Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:30:39.992602Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:30:39.992614Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:30:39.992619Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:30:39.992624Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-29T15:30:39.992628Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:30:39.992638Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-05-29T15:30:39.992848Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6285: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 433 } } CommitVersion { Step: 5000002 TxId: 101 } 2025-05-29T15:30:39.992857Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-05-29T15:30:39.992879Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: 
TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 433 } } CommitVersion { Step: 5000002 TxId: 101 } 2025-05-29T15:30:39.992895Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 433 } } CommitVersion { Step: 5000002 TxId: 101 } 2025-05-29T15:30:39.993055Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 120259086581 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-05-29T15:30:39.993061Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-05-29T15:30:39.993074Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 120259086581 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-05-29T15:30:39.993083Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:30:39.993090Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 120259086581 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-05-29T15:30:39.993104Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:39.993108Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:30:39.993113Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:30:39.993120Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 101:0 129 -> 240 2025-05-29T15:30:39.995531Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:30:40.000602Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:30:40.000690Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:30:40.000729Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:30:40.000776Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:30:40.000789Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState 2025-05-29T15:30:40.000815Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:30:40.000822Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:30:40.000828Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:30:40.000831Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:30:40.000837Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-05-29T15:30:40.000865Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [28:333:2311] message: TxId: 101 2025-05-29T15:30:40.000876Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:30:40.000884Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:30:40.000893Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:30:40.000954Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:30:40.012579Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:30:40.012613Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [28:334:2312] TestWaitNotification: OK eventTxId 101 2025-05-29T15:30:40.012754Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:30:40.012842Z node 28 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" took 96us result status StatusSuccess 2025-05-29T15:30:40.012996Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: 
EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "pgint8" TypeId: 12288 Id: 2 NotNull: false TypeInfo { PgTypeId: 20 } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TxUsage::WriteToTopic_Demo_48_Table [FAIL]
>> TxUsage::WriteToTopic_Demo_48_Query
>> TxUsage::Write_Only_Big_Messages_In_Wide_Transactions_Query [FAIL]
>> TxUsage::WriteToTopic_Demo_48_Query [FAIL]
|74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut
|74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD]
Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender:
[1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:30:34.998147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:34.998164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:34.998168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:34.998172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:34.998182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:34.998185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:34.998190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:34.998200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:34.998287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:34.998339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:35.007320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:35.007336Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:35.007427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:35.009635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:35.009659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:35.009687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:35.012299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:35.012364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:35.012471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2025-05-29T15:30:35.012637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:35.013205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:35.013235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:35.013427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:35.013438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:35.013466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:35.013472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:35.013479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:35.013497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:35.014783Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:35.029927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:35.029993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:35.030037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:35.030077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:35.030087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:35.030585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 
1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:35.030604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:35.030635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:35.030642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:35.030645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:35.030649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:35.031005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:35.031016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:35.031021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:35.031322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:35.031328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:35.031331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:35.031336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:35.031752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:35.032047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:35.032073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:35.032215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:35.032236Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:35.032243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:35.032288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { 
CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, 
at schemeshard: 72057594046678944 2025-05-29T15:30:55.921843Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:30:55.921873Z node 22 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 35us result status StatusSuccess 2025-05-29T15:30:55.921965Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } 
InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/stress_tool/ydb_stress_tool ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 
2025-05-29T15:30:21.710576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:21.710598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:21.710603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:21.710607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:21.710621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:21.710624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:21.710630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:21.710641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:21.710714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:21.710805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:21.721263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:21.721286Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:21.721365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:21.723953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:21.723986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:21.724025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:21.726709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:21.726804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:21.726935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:21.727130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:21.727820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:21.727860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:21.728090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:21.728097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:21.728135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:21.728144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:21.728149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:21.728165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:21.729393Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:21.745131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:21.745216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:21.745282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:21.745325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:21.745336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:21.746241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:21.746266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:21.746315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:21.746323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:21.746327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:21.746332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:21.746787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:21.746800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:21.746806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:21.747234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:21.747246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:21.747253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:21.747262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:21.747983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:21.748458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:21.748500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:21.748723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:21.748751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { 
RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:21.748758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:21.748836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:30:53.340278Z node 114 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:30:53.340281Z node 114 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:30:53.340285Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:30:53.340297Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2025-05-29T15:30:53.340455Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:30:53.340461Z node 114 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:53.340491Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:30:53.340507Z node 114 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 3/3 2025-05-29T15:30:53.340510Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:30:53.340513Z node 114 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 3/3 2025-05-29T15:30:53.340515Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:30:53.340518Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2025-05-29T15:30:53.340520Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:30:53.340526Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:30:53.340529Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:30:53.340540Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:30:53.340543Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:30:53.340546Z node 114 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:30:53.340549Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:30:53.340551Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:30:53.340553Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:30:53.340557Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:30:53.340630Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:30:53.340961Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:30:53.341135Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:30:53.341141Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:30:53.341475Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:30:53.341489Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:30:53.341743Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 346 RawX2: 489626274073 } TabletId: 72075186233409546 State: 4 2025-05-29T15:30:53.341753Z node 114 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:30:53.341974Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:30:53.342028Z node 114 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 2025-05-29T15:30:53.342063Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:30:53.342097Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:30:53.342132Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:30:53.342136Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:30:53.342142Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:30:53.342145Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:30:53.342148Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409546 2025-05-29T15:30:53.342857Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:30:53.342867Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-05-29T15:30:53.342889Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:30:53.342924Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:30:53.342928Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:30:53.343004Z node 114 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:30:53.343014Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:30:53.343018Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [114:633:2558] 2025-05-29T15:30:53.343675Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 350 RawX2: 489626274076 } TabletId: 72075186233409547 State: 4 2025-05-29T15:30:53.343686Z node 114 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:30:53.343907Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:30:53.343951Z node 114 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 2025-05-29T15:30:53.343978Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:53.344005Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: 
DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2025-05-29T15:30:53.344316Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:30:53.344322Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:30:53.344328Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:30:53.344745Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:30:53.344753Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2025-05-29T15:30:53.344799Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-05-29T15:30:53.344836Z node 114 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:30:53.344843Z node 114 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 |74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots >> KqpExplain::ComplexJoin >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD] >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |74.0%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::SrcIdCompatibility [FAIL] Test command err: === Start server === Server->StartServer(false); 2025-05-29T15:30:32.520476Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890473814925863:2218];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:32.520539Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:30:32.540339Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890474601711095:2221];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:32.547513Z node 2 
:METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001eb8/r3tmp/tmphUiDac/pdisk_1.dat 2025-05-29T15:30:32.595359Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:30:32.596389Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:30:32.631405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:32.631439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:32.632101Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:32.634646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16871, node 1 2025-05-29T15:30:32.658800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001eb8/r3tmp/yandex8d4U1C.tmp 2025-05-29T15:30:32.658815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001eb8/r3tmp/yandex8d4U1C.tmp 2025-05-29T15:30:32.658887Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001eb8/r3tmp/yandex8d4U1C.tmp 2025-05-29T15:30:32.658928Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:30:32.665423Z INFO: TTestServer started on Port 25150 GrpcPort 16871 TClient is connected to server localhost:25150 PQClient connected to localhost:16871 === TenantModeEnabled() = 0 === Init PQ - start server on port 16871 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:30:32.695426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:32.695454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:32.699161Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:30:32.699489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:32.730466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:30:32.730530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:32.730597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-05-29T15:30:32.730655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:30:32.730662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:32.731565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-05-29T15:30:32.731605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-05-29T15:30:32.731659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:32.731672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-05-29T15:30:32.731674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-05-29T15:30:32.731678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976720657:0 2 -> 3 2025-05-29T15:30:32.732274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:32.732288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:30:32.732293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976720657:0 3 -> 128 waiting... 
2025-05-29T15:30:32.732929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:32.732941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:32.732945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-05-29T15:30:32.732951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-05-29T15:30:32.733837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:32.733926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-05-29T15:30:32.733929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-05-29T15:30:32.733934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-05-29T15:30:32.734537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-05-29T15:30:32.734597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:30:32.735611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 1748532632784, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-29T15:30:32.735671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1748532632784 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-05-29T15:30:32.735681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-05-29T15:30:32.735763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976720657:0 128 -> 240 2025-05-29T15:30:32.735775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-05-29T15:30:32.735828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 
2025-05-29T15:30:32.735844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-05-29T15:30:32.737099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-29T15:30:32.737109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-29T15:30:32.737171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-29T15:30:32.737175Z node 1 :FLAT_TX_SCHEME ... node 27 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-29T15:30:48.201642Z node 27 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 28 Cookie 28 2025-05-29T15:30:48.201819Z node 27 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:48.206323Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:30:48.381410Z node 28 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [28:7509890540086289870:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.381427Z node 28 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [28:7509890540086289845:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.381444Z node 28 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.382049Z node 27 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [27:7509890539516322304:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.382064Z node 27 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [27:7509890539516322282:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.382073Z node 27 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.382290Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-05-29T15:30:48.383080Z node 27 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [27:7509890539516322313:2669] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:48.385704Z node 27 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [27:7509890539516322312:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-05-29T15:30:48.385743Z node 28 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [28:7509890540086289874:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-05-29T15:30:48.386461Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:48.443845Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:48.450936Z node 27 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [27:7509890539516322604:2875] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:48.454266Z node 27 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [27:7509890539516322628:2358], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:30:48.454331Z node 27 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=27&id=ZTYyZjczMjAtYTY3YzQ4OTQtM2MzOThmNWMtOTAzYTgyYzI=, ActorId: [27:7509890539516322279:2334], ActorState: ExecuteState, TraceId: 01jweanpdx6xkve2gjr84b4j1p, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:30:48.454415Z node 27 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:30:48.458203Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:30:48.466598Z node 28 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [28:7509890540086289943:2195] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:30:48.477387Z node 27 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [27:7509890539516322812:2381], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:48.477462Z node 27 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=27&id=YWYyMmQyYzMtOGJkNDI3MWYtOTZhMTViMi02NDQ4NDM4MA==, ActorId: [27:7509890539516322809:2379], ActorState: ExecuteState, TraceId: 01jweanpgn2jev1xgekrzbwg0x, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:30:48.490399Z node 28 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 14 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13B9173C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13D44119) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139C81C4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139F6148) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x13995592) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139C2D47) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x138D5468) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x138D474B) NPersQueue::TTestServer::TTestServer(bool)+134 (0x138D0986) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TTestCaseSrcIdCompatibility::Execute_(NUnitTest::TTestContext&)+38 (0x13996AC6) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139BF897) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13D45FCE) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute()+436 (0x139BF1B4) NUnitTest::TTestFactory::Execute()+803 (0x13D46743) NUnitTest::RunMain(int, char**)+3021 (0x13D582DD) ??+0 (0x7F19673A0D90) __libc_start_main+128 (0x7F19673A0E40) _start+41 (0x129C9029) |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |74.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |74.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |74.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |74.1%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPQCompatTest::ReadWriteSessions [FAIL] Test command err: 2025-05-29T15:30:32.509457Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890470776400559:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:32.510042Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:30:32.534764Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890473591050568:2221];send_to=[0:7307199536658146131:7762515]; 
2025-05-29T15:30:32.578193Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001eb3/r3tmp/tmpDkjyiv/pdisk_1.dat 2025-05-29T15:30:32.587230Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:30:32.587340Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:30:32.623183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:32.623213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:32.628072Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:32.635381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22513, node 1 2025-05-29T15:30:32.650338Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001eb3/r3tmp/yandexL3dFPr.tmp 2025-05-29T15:30:32.650350Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001eb3/r3tmp/yandexL3dFPr.tmp 2025-05-29T15:30:32.650423Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001eb3/r3tmp/yandexL3dFPr.tmp 2025-05-29T15:30:32.650457Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:30:32.657326Z INFO: TTestServer started on Port 26930 GrpcPort 22513 TClient is connected to server localhost:26930 PQClient connected to localhost:22513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:30:32.685864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:32.685899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:32.687260Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:30:32.687544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:32.688532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:30:32.709622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:30:32.948482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890470776401444:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:32.948505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890470776401427:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:32.948552Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:32.949381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890470776401479:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:32.949403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:30:32.949415Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:32.954946Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890470776401454:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:30:32.984752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:32.985089Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509890473591050797:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:30:32.985200Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=MjlkMjYwMjMtMjY0ZDAzNjgtZWI5MGI5NmYtYWE3ZDVkNTE=, ActorId: [2:7509890473591050757:2309], ActorState: ExecuteState, TraceId: 01jwean7btagezk8sebjvmxsyc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:30:32.985771Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:30:33.021284Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890475071368933:2789] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:33.026022Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890475071368952:2354], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:30:33.026117Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTE2NzgzODctOTRjMzg0MDUtZjljYTZjNTUtODA0OTc5ZDM=, ActorId: [1:7509890470776401422:2334], ActorState: ExecuteState, TraceId: 01jwean7bkaa5jmeyp3bw6eb5v, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:30:33.026289Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:30:33.044063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:33.065241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:30:33.096609Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890475071369241:2381], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:33.096726Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzFjNWFmYTEtMzE4MGYwMGUt ... ... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:48.590667Z node 27 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:48.590691Z node 27 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:48.592349Z node 27 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:48.593460Z node 27 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:48.593477Z node 27 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:48.593899Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:48.594387Z node 27 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 28 Cookie 28 waiting... 2025-05-29T15:30:48.594580Z node 27 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:48.605799Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:30:48.769287Z node 28 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [28:7509890541490807810:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.769300Z node 28 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [28:7509890541490807785:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.769307Z node 28 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.770117Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-05-29T15:30:48.773117Z node 28 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [28:7509890541490807814:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-05-29T15:30:48.792761Z node 27 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [27:7509890539887062939:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:30:48.793065Z node 27 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=27&id=ODFkODNhYjMtYTljZmYyYy1mZGNlNDkwYi0zNDViYWY3MQ==, ActorId: [27:7509890539887062913:2334], ActorState: ExecuteState, TraceId: 01jweanptn9qvmh32g4r54h2vx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:30:48.793166Z node 27 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:30:48.793737Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:48.850398Z node 28 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [28:7509890541490807842:2166] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:48.851594Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:48.853737Z node 28 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [28:7509890541490807856:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:30:48.853790Z node 28 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=28&id=NmExZTZkNGEtM2RiYzc3NzctOTcyYWFhYzQtZjkwNWQ2YjM=, ActorId: [28:7509890541490807783:2309], ActorState: ExecuteState, TraceId: 01jweanpt12e2f8srdq38m6cmg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:30:48.853890Z node 28 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:30:48.915858Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:30:48.936252Z node 27 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [27:7509890539887063336:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:48.936341Z node 27 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=27&id=ZmUxMzlkYzgtNTlhZDYwNWItYjI5NGQ3MTItOWZmMGM0ZmU=, ActorId: [27:7509890539887063333:2374], ActorState: ExecuteState, TraceId: 01jweanpyzfg7vgaxa5b4fdgpy, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13B9173C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13D44119) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139C81C4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139F6148) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x13995592) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139C2D47) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x138D5468) NKikimr::NPersQueueTests::TPQv1CompatTestBase::TPQv1CompatTestBase()+385 (0x13A1EF71) NKikimr::NPersQueueTests::NTestSuiteTPQCompatTest::TTestCaseReadWriteSessions::Execute_(NUnitTest::TTestContext&)+32 (0x13A295A0) NKikimr::NPersQueueTests::NTestSuiteTPQCompatTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13A31567) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13D45FCE) NKikimr::NPersQueueTests::NTestSuiteTPQCompatTest::TCurrentTest::Execute()+436 (0x13A30F24) NUnitTest::TTestFactory::Execute()+803 (0x13D46743) NUnitTest::RunMain(int, char**)+3021 (0x13D582DD) ??+0 (0x7FC5214A6D90) __libc_start_main+128 (0x7FC5214A6E40) _start+41 (0x129C9029) |74.1%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |74.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |74.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |74.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |74.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |74.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |74.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze [GOOD] Test command err: 2025-05-29T15:30:30.688646Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:30:30.688701Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:30:30.688728Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00152a/r3tmp/tmpdX7Hu2/pdisk_1.dat 2025-05-29T15:30:30.811750Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18838, node 1 2025-05-29T15:30:30.916627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:30.916645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:30.916649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:30.916692Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:30:30.917166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:30:30.993026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:30.993065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:31.004841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61924 2025-05-29T15:30:31.346480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:32.214895Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:30:32.221981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:32.222012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:32.276312Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:30:32.276963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:32.427947Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:32.428132Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:32.428297Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-05-29T15:30:32.428330Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:32.428346Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:32.428392Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:32.428412Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:32.428431Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:32.428469Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:32.593632Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:32.593682Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:32.605998Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:32.658542Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:32.666656Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:30:32.666688Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:30:32.674642Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:30:32.674937Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:30:32.674964Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:30:32.674970Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:30:32.674977Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:30:32.674984Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:30:32.674990Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:30:32.674997Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:30:32.675186Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:30:32.696512Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:30:32.696554Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:1863:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:30:32.698199Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1874:2605] 2025-05-29T15:30:32.699654Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2622] 
2025-05-29T15:30:32.699909Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1905:2622], schemeshard id = 72075186224037897 2025-05-29T15:30:32.700550Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-05-29T15:30:32.704747Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:30:32.704771Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:30:32.704786Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-05-29T15:30:32.707824Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:30:32.710312Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:30:32.710355Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:30:32.816460Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:30:32.900422Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:30:32.975056Z node 2 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:30:33.488325Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2212:3057], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:33.488363Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:33.492686Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-05-29T15:30:33.556362Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2357:2872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:30:33.556442Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2357:2872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:30:33.556509Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2357:2872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:30:33.556542Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2357:2872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:30:33.556567Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2357:2872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:30:33.556596Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2357:2872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:30:33.556619Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2357:2872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:30:33.556643Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2357:2872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_re ... 
_TX WARN: log.cpp:784: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:35.115365Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:35.115439Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:35.115508Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:35.115579Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:35.115995Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:35.116075Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:35.116143Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:30:35.950972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3773:3212], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:35.951025Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:35.959998Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-05-29T15:30:36.044476Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:30:36.044574Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:30:36.044637Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:30:36.044703Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:30:36.044766Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:30:36.045162Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:30:36.045228Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:30:36.045286Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:30:36.045344Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:30:36.051045Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:30:36.744650Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3914:3259], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:36.744715Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:36.749819Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715662:0, at schemeshard: 72075186224037897 2025-05-29T15:30:36.787168Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:36.787324Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:36.787436Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:36.787495Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:36.787579Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:36.787888Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:36.787952Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:36.788028Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:36.788085Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:30:36.788361Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; waiting actualization: 0/0.000018s 2025-05-29T15:30:42.409132Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:5828:5406] 2025-05-29T15:30:42.410024Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:22: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:5825:3867] , Record { OperationId: "\000\000\000\000\034-k\370\322\025\220\344\206,\324\322" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } } 2025-05-29T15:30:42.410042Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:47: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId= -k, 2025-05-29T15:30:42.410051Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:65: [72075186224037894] TTxAnalyze::Execute. 
Create new force traversal table, OperationId= -k, , PathId [OwnerId: 72075186224037897, LocalPathId: 4] Answer: 'Analyze sent. OperationId: 000000071ddfwd45cgwj32sn6j' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD] Test command err: 2025-05-29T15:24:15.354888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:24:15.354917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:15.354923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:24:15.354928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:24:15.354942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:24:15.354947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:24:15.354962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:24:15.354986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:24:15.355125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:24:15.355195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:24:15.359559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:24:15.359583Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:24:15.362269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:24:15.362317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:24:15.362334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2025-05-29T15:24:15.363832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:24:15.363951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:24:15.364056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:15.364152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:24:15.364752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:15.364792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:24:15.365043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:24:15.365055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:15.365084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:24:15.365095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-05-29T15:24:15.365102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:24:15.365127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2025-05-29T15:24:15.406198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2025-05-29T15:24:15.406292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:15.406359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2025-05-29T15:24:15.406415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2025-05-29T15:24:15.406427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:15.407264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:15.407300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2025-05-29T15:24:15.407353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:15.407364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046578944 2025-05-29T15:24:15.407370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:24:15.407375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:24:15.407820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:15.407835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046578944 2025-05-29T15:24:15.407841Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:24:15.408262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:15.408278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:24:15.408296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:15.408304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:24:15.408810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:24:15.409139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:24:15.409181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:24:15.409337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:15.409343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-05-29T15:24:15.409346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:15.645862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2025-05-29T15:24:15.645933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2025-05-29T15:24:15.645945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:15.646038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:24:15.646050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-05-29T15:24:15.646090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2025-05-29T15:24:15.646110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-05-29T15:24:15.647049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-05-29T15:24:15.647067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-05-29T15:24:15.647114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-05-29T15:24:15.647120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:261:2249], at schemeshard: 72057594046578944, txId: 1, path id: 1 202 ... __operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-05-29T15:30:52.499325Z node 29 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046578944] TDone opId# 1:0 ProgressState 2025-05-29T15:30:52.499336Z node 29 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:30:52.499340Z node 29 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:30:52.499344Z node 29 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:30:52.499346Z node 29 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:30:52.499350Z node 29 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-29T15:30:52.499354Z node 29 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:30:52.499357Z node 29 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1:0 2025-05-29T15:30:52.499360Z node 29 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1:0 2025-05-29T15:30:52.499367Z node 29 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2025-05-29T15:30:52.499372Z node 29 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-05-29T15:30:52.499379Z node 29 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2025-05-29T15:30:52.499469Z node 29 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2025-05-29T15:30:52.499478Z node 29 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2025-05-29T15:30:52.499481Z node 29 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2025-05-29T15:30:52.499485Z node 29 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2025-05-29T15:30:52.499488Z node 29 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2025-05-29T15:30:52.499500Z node 29 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2025-05-29T15:30:52.499505Z node 29 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [29:115:2149] 2025-05-29T15:30:52.500077Z node 29 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046578944, cookie: 1 2025-05-29T15:30:52.500138Z node 29 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [29:311:2289] Bootstrap 2025-05-29T15:30:52.501384Z node 29 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [29:311:2289] Become StateWork (SchemeCache [29:316:2294]) 2025-05-29T15:30:52.501550Z node 29 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [29:311:2289] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:30:52.501927Z node 29 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:30:52.503091Z node 29 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:30:52.503607Z node 29 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:30:52.503639Z node 29 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:30:52.503823Z node 29 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:30:52.503879Z node 29 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:30:52.503929Z node 29 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:30:52.503935Z node 29 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:30:52.503958Z node 29 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:30:52.505342Z node 29 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:30:52.505362Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:30:52.505379Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:30:52.505408Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:30:52.505419Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:30:52.505439Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:30:52.526440Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:30:52.526488Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:30:52.537117Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:30:52.537154Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:30:52.537164Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:30:52.537173Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:30:52.537191Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:30:52.537198Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:30:52.537203Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:30:52.537209Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:30:52.547817Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:30:52.547847Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:30:52.558468Z node 29 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:30:52.558501Z node 29 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:30:52.558616Z node 29 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:30:52.558621Z node 29 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:30:52.558680Z node 29 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:30:52.558685Z node 29 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:30:52.558830Z node 29 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest 
Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ciyv/000dcb/r3tmp/tmpgjwFEH/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2025-05-29T15:30:52.558871Z node 29 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:340} Create new pdisk PDiskId# 29:1 Path# /home/runner/.ya/build/build_root/ciyv/000dcb/r3tmp/tmpgjwFEH/pdisk_1.dat 2025-05-29T15:30:52.569708Z node 29 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-05-29T15:30:52.569769Z node 29 :LOCAL DEBUG: local.cpp:1491: TLocal::Bootstrap 2025-05-29T15:30:52.569779Z node 29 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-05-29T15:30:52.569800Z node 29 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-05-29T15:30:52.569817Z node 29 :LOCAL DEBUG: local.cpp:1441: TDomainLocal(dc-1): Bootstrap 2025-05-29T15:30:52.569882Z node 29 :LOCAL DEBUG: local.cpp:1149: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-05-29T15:30:52.569887Z node 29 :LOCAL DEBUG: local.cpp:975: TLocalNodeRegistrar::Bootstrap 2025-05-29T15:30:52.569890Z node 29 :LOCAL DEBUG: local.cpp:181: TLocalNodeRegistrar::TryToRegister 2025-05-29T15:30:52.569903Z node 29 :LOCAL DEBUG: local.cpp:213: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[29:408:2362] 2025-05-29T15:30:52.570209Z node 29 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-05-29T15:30:52.570214Z node 29 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [29:402:2358] 2025-05-29T15:30:52.570317Z node 29 :LOCAL DEBUG: local.cpp:260: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[29:408:2362]} 2025-05-29T15:30:52.570323Z node 29 :LOCAL DEBUG: local.cpp:324: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-05-29T15:30:52.570328Z node 29 :LOCAL DEBUG: local.cpp:380: TLocalNodeRegistrar TEvPing - CONNECTED 2025-05-29T15:30:52.570330Z node 29 :LOCAL DEBUG: local.cpp:297: TLocalNodeRegistrar SendStatusOk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:30:38.281810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, 
WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:38.281839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:38.281846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:38.281851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:38.281867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:38.281872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:38.281882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:38.281896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:38.281987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:38.282073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:38.294949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:38.294969Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:38.295043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:38.297280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:38.297303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:38.297331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:38.300128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:38.300206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:38.300337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:38.300509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:38.301139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, 
at schemeshard: 72057594046678944 2025-05-29T15:30:38.301180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:38.301428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:38.301440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:38.301474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:38.301482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:38.301488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:38.301507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:38.302984Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:38.321582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:38.321679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.321755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:38.321799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:38.321810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.322450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:38.322476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:38.322519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.322529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:38.322534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:38.322539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:38.322926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.322937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:38.322943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:38.323588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.323604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.323610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:38.323618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:38.324327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:38.327054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:38.327104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:38.327301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:38.327340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:38.327349Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:38.327428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... rceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 
72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:04.290315Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:31:04.290344Z node 30 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 45us result status StatusSuccess 2025-05-29T15:31:04.290424Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 
0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:04.300632Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:835:2678] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-05-29T15:31:04.300657Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409549:2][30:788:2678] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-05-29T15:31:04.300694Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:835:2678] 
Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1748532664288968 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1748532664288968 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1748532664288968 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:31:04.301338Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:835:2678] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-05-29T15:31:04.301359Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409549:2][30:788:2678] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:30:36.616208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:36.616227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:36.616232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:36.616235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default 
configuration 2025-05-29T15:30:36.616247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:36.616250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:36.616256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:36.616267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:36.616339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:36.616396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:36.626509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:36.626528Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:36.626596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:36.628858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:36.628881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:36.628908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:36.631707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:36.631800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:36.631926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:36.632113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:36.632763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:36.632806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:36.633060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:36.633072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:36.633104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2025-05-29T15:30:36.633112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:36.633119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:36.633137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:36.634707Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:36.654470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:36.654547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:36.654607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:36.654650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:36.654661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:36.655413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:36.655439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:36.655485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:36.655496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:36.655501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:36.655506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:36.655931Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:36.655943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:36.655948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:36.656965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:36.656979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:36.656985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:36.656992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:36.657678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:36.658061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:36.658098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:36.658293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:36.658319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:36.658326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:36.658393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
lt: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 
SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:05.270806Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false 
ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:31:05.270842Z node 26 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 42us result status StatusSuccess 2025-05-29T15:31:05.270931Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 
} } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TVectorIndexTests::CreateTableWithError >> TSchemeShardSplitBySizeTest::Split10Shards [GOOD] >> KqpExplain::CompoundKeyRange >> TVectorIndexTests::CreateTableWithError [GOOD] |74.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |74.2%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |74.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |74.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |74.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |74.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DisableDeduplication [FAIL] Test command err: === Server->StartServer(false); 2025-05-29T15:30:35.594981Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890484019676651:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:35.596149Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:30:35.659870Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e9d/r3tmp/tmpgTAU7h/pdisk_1.dat 2025-05-29T15:30:35.702283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:35.702315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:35.706202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27914, node 1 2025-05-29T15:30:35.734442Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:35.751008Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001e9d/r3tmp/yandexranhtk.tmp 2025-05-29T15:30:35.751020Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001e9d/r3tmp/yandexranhtk.tmp 2025-05-29T15:30:35.751102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001e9d/r3tmp/yandexranhtk.tmp 2025-05-29T15:30:35.751152Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:30:35.757807Z INFO: TTestServer started on Port 65143 GrpcPort 27914 TClient is connected to server localhost:65143 PQClient connected to localhost:27914 === TenantModeEnabled() = 0 === Init PQ - start server on port 27914 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:30:35.821530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:30:35.821617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:35.821695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-05-29T15:30:35.821766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:30:35.821777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:35.822013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-05-29T15:30:35.822034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-05-29T15:30:35.822080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:35.822087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-05-29T15:30:35.822089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-05-29T15:30:35.822092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976715657:0 2 -> 3 2025-05-29T15:30:35.822211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:35.822214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:30:35.822216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976715657:0 3 -> 128 2025-05-29T15:30:35.822249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:35.822251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:35.822272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: 
NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-05-29T15:30:35.822277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 waiting... 2025-05-29T15:30:35.823442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:35.823566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-05-29T15:30:35.823603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:30:35.823752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:30:35.823755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-05-29T15:30:35.823758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:30:35.823991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 1748532635871, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-29T15:30:35.824012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1748532635871 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-05-29T15:30:35.824019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-05-29T15:30:35.824093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976715657:0 128 -> 240 2025-05-29T15:30:35.824100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-05-29T15:30:35.824132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-29T15:30:35.824152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-05-29T15:30:35.824253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-29T15:30:35.824255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-29T15:30:35.824305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-29T15:30:35.824310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7509890484019677141:2239], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-05-29T15:30:35.824317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:35.824323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-05-29T15:30:35.824335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715657:0 progress is 1/1 2025-05-29T15:30:35.824337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-05-29T15:30:35.824341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715657:0 progress is 1/1 2025-05-29T15:30:35.824342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDo ... NoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:47.907451Z node 17 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:47.907485Z node 17 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:47.909026Z node 17 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:47.910220Z node 17 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:47.910240Z node 17 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:47.910659Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:47.911161Z node 17 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 18 Cookie 18 2025-05-29T15:30:47.911296Z node 17 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 
2025-05-29T15:30:47.920016Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:30:48.103955Z node 18 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [18:7509890542044470826:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.103970Z node 18 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [18:7509890542044470837:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.103978Z node 18 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.104876Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-05-29T15:30:48.108090Z node 18 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [18:7509890542044470841:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-05-29T15:30:48.121928Z node 17 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [17:7509890540227690981:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:30:48.122297Z node 17 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=17&id=NjEzMzU1MS1kZGNhMTkzMS00YzVhNWNmOC1mNGZlYjU5NQ==, ActorId: [17:7509890540227690940:2334], ActorState: ExecuteState, TraceId: 01jweanp5p34z45zk7ag9t2mw0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:30:48.122392Z node 17 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:30:48.122956Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:30:48.180954Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:48.189941Z node 18 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [18:7509890542044470899:2185] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:48.193405Z node 18 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [18:7509890542044470913:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:30:48.193470Z node 18 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=18&id=YjhlNmY3ZDMtY2IzZjczMTUtYjVlNmYyYjMtNDQyZWYxYjI=, ActorId: [18:7509890542044470810:2309], ActorState: ExecuteState, TraceId: 01jweanp57b51wkxd52g1cc5eq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:30:48.193577Z node 18 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:30:48.244570Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:30:48.266881Z node 17 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [17:7509890540227691377:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:48.266992Z node 17 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=17&id=YzFjYjRlMzAtYzQ1NWVkNDctOTMwNmM5OGEtNDc4NmUwNzU=, ActorId: [17:7509890540227691374:2374], ActorState: ExecuteState, TraceId: 01jweanpa01ttxmf4etdvetn7v, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13B9173C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13D44119) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139C81C4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139F6148) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x13995592) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139C2D47) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x138D5468) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x138D474B) NPersQueue::TTestServer::TTestServer(bool)+134 (0x138D0986) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TTestCaseDisableDeduplication::Execute_(NUnitTest::TTestContext&)+37 (0x139A3795) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139BF897) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13D45FCE) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute()+436 (0x139BF1B4) NUnitTest::TTestFactory::Execute()+803 (0x13D46743) NUnitTest::RunMain(int, char**)+3021 (0x13D582DD) ??+0 (0x7FEDE5046D90) __libc_start_main+128 (0x7FEDE5046E40) _start+41 (0x129C9029) |74.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |74.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TExternalTableTestReboots::CreateExternalTableWithReboots |74.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::TxCounters [FAIL] Test command err: === Server->StartServer(false); 2025-05-29T15:30:33.987559Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890476477832305:2076];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:33.987580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:30:33.991755Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890478877007994:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:33.991920Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ea8/r3tmp/tmpRGihk7/pdisk_1.dat 2025-05-29T15:30:34.033646Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:30:34.043158Z node 2 
:PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:30:34.075326Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4838, node 1 2025-05-29T15:30:34.088811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:34.088842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:34.090895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:34.098671Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001ea8/r3tmp/yandexLtE8xU.tmp 2025-05-29T15:30:34.098688Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001ea8/r3tmp/yandexLtE8xU.tmp 2025-05-29T15:30:34.098780Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001ea8/r3tmp/yandexLtE8xU.tmp 2025-05-29T15:30:34.098810Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:30:34.104089Z INFO: TTestServer started on Port 4376 GrpcPort 4838 TClient is connected to server localhost:4376 PQClient connected to localhost:4838 === TenantModeEnabled() = 0 === Init PQ - start server on port 4838 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:30:34.132252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:34.132285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:34.137423Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:30:34.138050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:30:34.156331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:30:34.156422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:34.156485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-05-29T15:30:34.156560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:30:34.156569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:34.159387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-05-29T15:30:34.159422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-05-29T15:30:34.159503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:34.159515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-05-29T15:30:34.159527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-05-29T15:30:34.159532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976720657:0 2 -> 3 2025-05-29T15:30:34.160619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-05-29T15:30:34.160626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-05-29T15:30:34.160632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-05-29T15:30:34.165858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:34.165872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:30:34.165877Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2495: Change state for txid 281474976720657:0 3 -> 128 2025-05-29T15:30:34.166407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:34.166410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:34.166414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-05-29T15:30:34.166419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-05-29T15:30:34.167288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:34.171569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-05-29T15:30:34.171628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:30:34.174901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 1748532634219, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-29T15:30:34.174970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1748532634219 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-05-29T15:30:34.174981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-05-29T15:30:34.175071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976720657:0 128 -> 240 2025-05-29T15:30:34.175080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-05-29T15:30:34.175115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-29T15:30:34.175126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-05-29T15:30:34.175798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-29T15:30:34.175802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-29T15:30:34.175862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-29T15:30:34.175866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshar ... ;self_id=[24:7509890541840117625:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:48.174550Z node 24 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ea8/r3tmp/tmpO6dpSw/pdisk_1.dat 2025-05-29T15:30:48.178715Z node 24 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:30:48.185042Z node 24 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:48.185214Z node 24 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [24:7509890541840117605:2079] 1748532648174435 != 1748532648174438 TServer::EnableGrpc on GrpcPort 22250, node 24 2025-05-29T15:30:48.194702Z node 24 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001ea8/r3tmp/yandexkBIAyv.tmp 2025-05-29T15:30:48.194714Z node 24 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001ea8/r3tmp/yandexkBIAyv.tmp 2025-05-29T15:30:48.194778Z node 24 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001ea8/r3tmp/yandexkBIAyv.tmp 2025-05-29T15:30:48.194811Z node 24 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:30:48.199055Z INFO: TTestServer started on Port 26372 GrpcPort 22250 TClient is connected to server localhost:26372 PQClient connected to localhost:22250 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:30:48.277901Z node 24 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(24, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:48.277927Z node 24 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(24, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:48.278172Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:48.278872Z node 24 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(24, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:48.282418Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:30:48.423556Z node 24 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [24:7509890541840118423:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.423570Z node 24 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [24:7509890541840118412:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.423623Z node 24 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.424029Z node 24 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [24:7509890541840118454:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.424036Z node 24 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:48.424238Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:30:48.425918Z node 24 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [24:7509890541840118426:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:30:48.427474Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:48.436920Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:48.449613Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:30:48.468553Z node 24 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [24:7509890541840118729:2378], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:48.468625Z node 24 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=24&id=NmM1YjZhYWMtZGE5ZWE2NjgtMzMyMTRiMmMtYTRjYmY0YzI=, ActorId: [24:7509890541840118726:2376], ActorState: ExecuteState, TraceId: 01jweanpgb42jja053xy6nw70d, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:30:48.481717Z node 24 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [24:7509890541840118741:2582] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:48.521641Z node 24 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 25 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13B9173C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13D44119) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139C81C4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139F6148) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x13995592) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139C2D47) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x138D5468) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x138D474B) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TTestCaseTxCounters::Execute_(NUnitTest::TTestContext&)+636 (0x139B13BC) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139BF897) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13D45FCE) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute()+436 (0x139BF1B4) NUnitTest::TTestFactory::Execute()+803 (0x13D46743) NUnitTest::RunMain(int, char**)+3021 (0x13D582DD) ??+0 (0x7FE330713D90) __libc_start_main+128 (0x7FE330713E40) _start+41 (0x129C9029) |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |74.2%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::Split10Shards [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:30:23.124115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:23.124147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:23.124153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:23.124159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:23.124175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:23.124180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:23.124191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:23.124207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:23.124341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:23.124415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:23.138490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:30:23.138519Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:23.141028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:23.141169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:23.141201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:23.142378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:23.142519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:23.142617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:23.142658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:23.143140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:23.143198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:23.143505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:23.143516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:23.143543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:23.143552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:23.143559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:23.143597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at 
schemeshard: 72057594046678944 2025-05-29T15:30:23.144962Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:30:23.158085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:23.158178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.158271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:23.158323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:23.158335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.159083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:23.159119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:23.159177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.159191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:23.159197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:23.159203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:23.159632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.159646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:23.159652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:23.160061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.160073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.160080Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:23.160099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:23.160787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:23.161273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:23.161321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:23.161479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:23.161505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:23.161513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:23.161560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:30:23.161564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:23.161590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:30:23.161598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:30:23.161957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:23.161965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:23.162017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
9565 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\200\234\031\025\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409566 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000@\341\235\030\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409567 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\000&\"\034\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409568 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\300j\246\037\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409569 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\200\257*#\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409570 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000@\364\256&\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409571 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\00093*\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409572 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\300}\267-\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409573 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\201\302;1\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409574 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\201I\3174\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409575 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409576 } TableStats { DataSize: 119380 RowCount: 1000 IndexSize: 306 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 16 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 119380 IndexSize: 306 } } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 791 Memory: 1468688 Network: 0 Storage: 121185 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 119686 DataSize: 119380 IndexSize: 306 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 119686 DataSize: 119380 IndexSize: 306 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:07.131188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2025-05-29T15:31:07.131197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 281474976710672:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046678944 2025-05-29T15:31:07.131227Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:357: TCreateParts opId# 281474976710672:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 32 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:31:07.131240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:357: TCreateParts opId# 281474976710672:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 33 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:31:07.131878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710672:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:32 msg type: 268697601 2025-05-29T15:31:07.131903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710672:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:33 msg type: 268697601 2025-05-29T15:31:07.131913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710672, partId: 0, tablet: 72057594037968897 2025-05-29T15:31:07.131917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1779: TOperation RegisterRelationByShardIdx, TxId: 281474976710672, shardIdx: 72057594046678944:32, partId: 0 2025-05-29T15:31:07.131921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1779: TOperation RegisterRelationByShardIdx, TxId: 281474976710672, shardIdx: 72057594046678944:33, partId: 0 2025-05-29T15:31:07.131984Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 32 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:31:07.133214Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 32, type DataShard, boot OK, tablet id 72075186233409577 2025-05-29T15:31:07.133303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5867: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 32 TabletID: 72075186233409577 Origin: 72057594037968897 2025-05-29T15:31:07.133309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1793: TOperation FindRelatedPartByShardIdx, TxId: 281474976710672, shardIdx: 72057594046678944:32, partId: 0 2025-05-29T15:31:07.133322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 32 TabletID: 72075186233409577 Origin: 72057594037968897 
2025-05-29T15:31:07.133327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:175: TCreateParts opId# 281474976710672:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-05-29T15:31:07.133332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:178: TCreateParts opId# 281474976710672:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 32 TabletID: 72075186233409577 Origin: 72057594037968897 2025-05-29T15:31:07.133413Z node 1 :HIVE INFO: tablet_helpers.cpp:1181: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 33 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-05-29T15:31:07.133435Z node 1 :HIVE INFO: tablet_helpers.cpp:1245: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 33, type DataShard, boot OK, tablet id 72075186233409578 2025-05-29T15:31:07.133473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5867: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 33 TabletID: 72075186233409578 Origin: 72057594037968897 2025-05-29T15:31:07.133477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1793: TOperation FindRelatedPartByShardIdx, TxId: 281474976710672, shardIdx: 72057594046678944:33, partId: 0 2025-05-29T15:31:07.133485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 33 TabletID: 72075186233409578 Origin: 72057594037968897 2025-05-29T15:31:07.133488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:175: TCreateParts opId# 281474976710672:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-05-29T15:31:07.133492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:178: TCreateParts opId# 281474976710672:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 33 TabletID: 72075186233409578 Origin: 72057594037968897 2025-05-29T15:31:07.133499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976710672:0 2 -> 3 2025-05-29T15:31:07.134516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2025-05-29T15:31:07.134660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2025-05-29T15:31:07.134710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2025-05-29T15:31:07.134714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:83: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2025-05-29T15:31:07.134733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:140: Initializing scheme on dst datashard: 72075186233409577 splitOp: 281474976710672:0 alterVersion: 2 at tablet: 72057594046678944 
2025-05-29T15:31:07.134778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:140: Initializing scheme on dst datashard: 72075186233409578 splitOp: 281474976710672:0 alterVersion: 2 at tablet: 72057594046678944 2025-05-29T15:31:07.135902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710672:0 from tablet: 72057594046678944 to tablet: 72075186233409577 cookie: 72057594046678944:32 msg type: 269553152 2025-05-29T15:31:07.135959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 281474976710672:0 from tablet: 72057594046678944 to tablet: 72075186233409578 cookie: 72057594046678944:33 msg type: 269553152 2025-05-29T15:31:07.135984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710672, partId: 0, tablet: 72075186233409577 2025-05-29T15:31:07.135990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 281474976710672, partId: 0, tablet: 72075186233409578
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD]
Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:55:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:58:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:57:2097] sender: [1:75:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:55:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:58:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:75:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:77:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:57:2097] sender: [2:81:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:83:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:57:2097]) rebooted! !Reboot 72057594037927937 (actor [2:57:2097]) tablet resolver refreshed! new actor is[2:82:2111] Leader for TabletID 72057594037927937 is [2:82:2111] sender: [2:168:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:55:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:58:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:75:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:77:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:57:2097] sender: [3:81:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:83:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:57:2097]) rebooted! !Reboot 72057594037927937 (actor [3:57:2097]) tablet resolver refreshed! new actor is[3:82:2111] Leader for TabletID 72057594037927937 is [3:82:2111] sender: [3:168:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:55:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:58:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:75:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:78:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:57:2097] sender: [4:82:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:84:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:57:2097]) rebooted! !Reboot 72057594037927937 (actor [4:57:2097]) tablet resolver refreshed! new actor is[4:83:2111] Leader for TabletID 72057594037927937 is [4:83:2111] sender: [4:169:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:55:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:58:2057] recipient: [5:52:2095] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:75:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:81:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:84:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:57:2097] sender: [5:85:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:87:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:57:2097]) rebooted! !Reboot 72057594037927937 (actor [5:57:2097]) tablet resolver refreshed! new actor is[5:86:2114] Leader for TabletID 72057594037927937 is [5:86:2114] sender: [5:172:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:55:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:58:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:75:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:81:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:57:2097] sender: [6:85:2057] recipient: [6:83:2113] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:87:2057] recipient: [6:83:2113] !Reboot 72057594037927937 (actor [6:57:2097]) rebooted! !Reboot 72057594037927937 (actor [6:57:2097]) tablet resolver refreshed! new actor is[6:86:2114] Leader for TabletID 72057594037927937 is [6:86:2114] sender: [6:172:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:55:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:58:2057] recipient: [7:52:2095] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:75:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:57:2097] sender: [7:86:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:88:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:57:2097]) rebooted! !Reboot 72057594037927937 (actor [7:57:2097]) tablet resolver refreshed! new actor is[7:87:2114] Leader for TabletID 72057594037927937 is [7:87:2114] sender: [7:173:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:55:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:58:2057] recipient: [8:51:2095] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:75:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:57:2097] sender: [8:88:2057] recipient: [8:86:2115] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:90:2057] recipient: [8:86:2115] !Reboot 72057594037927937 (actor [8:57:2097]) rebooted! !Reboot 72057594037927937 (actor [8:57:2097]) tablet resolver refreshed! new actor is[8:89:2116] Leader for TabletID 72057594037927937 is [8:89:2116] sender: [8:175:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:55:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:58:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:75:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:57:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:57:2097] sender: [9:88:2057] recipient: [9:86:2115] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:90:2057] recipient: [9:86:2115] !Reboot 72057594037927937 (actor [9:57:2097]) rebooted! !Reboot 72057594037927937 (actor [9:57:2097]) tablet resolver refreshed! new actor is[9:89:2116] Leader for TabletID 72057594037927937 is [9:89:2116] sender: [9:175:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:55:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:58:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:75:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:57:2097] sender: [10:89:2057] recipient: [10:87:2115] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:91:2057] recipient: [10:87:2115] !Reboot 72057594037927937 (actor [10:57:2097]) rebooted! !Reboot 72057594037927937 (actor [10:57:2097]) tablet resolver refreshed! new actor is[10:90:2116] Leader for TabletID 72057594037927937 is [10:90:2116] sender: [10:176:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:55:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:58:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:75:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:57:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:57:2097] sender: [11:90:2057] recipient: [11:88:2116] Leader for TabletID 72057594037927937 is [11:91:2117] sender: [11:92:2057] recipient: [11:88:2116] !Reboot 72057594037927937 (actor [11:57:2097]) rebooted! !Reboot 72057594037927937 (actor [11:57:2097]) tablet resolver refreshed! new actor is[11:91:2117] Leader for TabletID 72057594037927937 is [11:91:2117] sender: [11:111:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:55:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:58:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:57:2097] sender: [12:75:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:57:2 ... 
2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [106:55:2057] recipient: [106:52:2095] Leader for TabletID 72057594037927937 is [106:57:2097] sender: [106:58:2057] recipient: [106:52:2095] Leader for TabletID 72057594037927937 is [106:57:2097] sender: [106:75:2057] recipient: [106:14:2061] !Reboot 72057594037927937 (actor [106:57:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [106:57:2097] sender: [106:128:2057] recipient: [106:36:2083] Leader for TabletID 72057594037927937 is [106:57:2097] sender: [106:131:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [106:57:2097] sender: [106:132:2057] recipient: [106:130:2148] Leader for TabletID 72057594037927937 is [106:133:2149] sender: [106:134:2057] recipient: [106:130:2148] !Reboot 72057594037927937 (actor [106:57:2097]) rebooted! !Reboot 72057594037927937 (actor [106:57:2097]) tablet resolver refreshed! new actor is[106:133:2149] Leader for TabletID 72057594037927937 is [106:133:2149] sender: [106:153:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:55:2057] recipient: [107:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:55:2057] recipient: [107:50:2095] Leader for TabletID 72057594037927937 is [107:57:2097] sender: [107:58:2057] recipient: [107:50:2095] Leader for TabletID 72057594037927937 is [107:57:2097] sender: [107:75:2057] recipient: [107:14:2061] !Reboot 72057594037927937 (actor [107:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [107:57:2097] sender: [107:131:2057] recipient: [107:36:2083] Leader for TabletID 72057594037927937 is [107:57:2097] sender: [107:134:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [107:57:2097] sender: [107:135:2057] recipient: [107:133:2151] Leader for TabletID 72057594037927937 is [107:136:2152] sender: [107:137:2057] recipient: [107:133:2151] !Reboot 72057594037927937 (actor [107:57:2097]) rebooted! !Reboot 72057594037927937 (actor [107:57:2097]) tablet resolver refreshed! new actor is[107:136:2152] Leader for TabletID 72057594037927937 is [107:136:2152] sender: [107:222:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:55:2057] recipient: [108:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:55:2057] recipient: [108:52:2095] Leader for TabletID 72057594037927937 is [108:57:2097] sender: [108:58:2057] recipient: [108:52:2095] Leader for TabletID 72057594037927937 is [108:57:2097] sender: [108:75:2057] recipient: [108:14:2061] !Reboot 72057594037927937 (actor [108:57:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [108:57:2097] sender: [108:131:2057] recipient: [108:36:2083] Leader for TabletID 72057594037927937 is [108:57:2097] sender: [108:133:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [108:57:2097] sender: [108:135:2057] recipient: [108:134:2151] Leader for TabletID 72057594037927937 is [108:136:2152] sender: [108:137:2057] recipient: [108:134:2151] !Reboot 72057594037927937 (actor [108:57:2097]) rebooted! !Reboot 72057594037927937 (actor [108:57:2097]) tablet resolver refreshed! 
new actor is[108:136:2152] Leader for TabletID 72057594037927937 is [108:136:2152] sender: [108:222:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:55:2057] recipient: [109:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:55:2057] recipient: [109:51:2095] Leader for TabletID 72057594037927937 is [109:57:2097] sender: [109:58:2057] recipient: [109:51:2095] Leader for TabletID 72057594037927937 is [109:57:2097] sender: [109:75:2057] recipient: [109:14:2061] !Reboot 72057594037927937 (actor [109:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [109:57:2097] sender: [109:132:2057] recipient: [109:36:2083] Leader for TabletID 72057594037927937 is [109:57:2097] sender: [109:135:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [109:57:2097] sender: [109:136:2057] recipient: [109:134:2151] Leader for TabletID 72057594037927937 is [109:137:2152] sender: [109:138:2057] recipient: [109:134:2151] !Reboot 72057594037927937 (actor [109:57:2097]) rebooted! !Reboot 72057594037927937 (actor [109:57:2097]) tablet resolver refreshed! new actor is[109:137:2152] Leader for TabletID 72057594037927937 is [109:137:2152] sender: [109:155:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:55:2057] recipient: [110:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:55:2057] recipient: [110:51:2095] Leader for TabletID 72057594037927937 is [110:57:2097] sender: [110:58:2057] recipient: [110:51:2095] Leader for TabletID 72057594037927937 is [110:57:2097] sender: [110:75:2057] recipient: [110:14:2061] !Reboot 72057594037927937 (actor [110:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [110:57:2097] sender: [110:134:2057] recipient: [110:36:2083] Leader for TabletID 72057594037927937 is [110:57:2097] sender: [110:137:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [110:57:2097] sender: [110:138:2057] recipient: [110:136:2153] Leader for TabletID 72057594037927937 is [110:139:2154] sender: [110:140:2057] recipient: [110:136:2153] !Reboot 72057594037927937 (actor [110:57:2097]) rebooted! !Reboot 72057594037927937 (actor [110:57:2097]) tablet resolver refreshed! new actor is[110:139:2154] Leader for TabletID 72057594037927937 is [110:139:2154] sender: [110:225:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:55:2057] recipient: [111:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:55:2057] recipient: [111:51:2095] Leader for TabletID 72057594037927937 is [111:57:2097] sender: [111:58:2057] recipient: [111:51:2095] Leader for TabletID 72057594037927937 is [111:57:2097] sender: [111:75:2057] recipient: [111:14:2061] !Reboot 72057594037927937 (actor [111:57:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [111:57:2097] sender: [111:134:2057] recipient: [111:36:2083] Leader for TabletID 72057594037927937 is [111:57:2097] sender: [111:137:2057] recipient: [111:136:2153] Leader for TabletID 72057594037927937 is [111:57:2097] sender: [111:138:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [111:139:2154] sender: [111:140:2057] recipient: [111:136:2153] !Reboot 72057594037927937 (actor [111:57:2097]) rebooted! !Reboot 72057594037927937 (actor [111:57:2097]) tablet resolver refreshed! 
new actor is[111:139:2154] Leader for TabletID 72057594037927937 is [111:139:2154] sender: [111:225:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:55:2057] recipient: [112:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:55:2057] recipient: [112:51:2095] Leader for TabletID 72057594037927937 is [112:57:2097] sender: [112:58:2057] recipient: [112:51:2095] Leader for TabletID 72057594037927937 is [112:57:2097] sender: [112:75:2057] recipient: [112:14:2061] !Reboot 72057594037927937 (actor [112:57:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [112:57:2097] sender: [112:135:2057] recipient: [112:36:2083] Leader for TabletID 72057594037927937 is [112:57:2097] sender: [112:137:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [112:57:2097] sender: [112:139:2057] recipient: [112:138:2153] Leader for TabletID 72057594037927937 is [112:140:2154] sender: [112:141:2057] recipient: [112:138:2153] !Reboot 72057594037927937 (actor [112:57:2097]) rebooted! !Reboot 72057594037927937 (actor [112:57:2097]) tablet resolver refreshed! new actor is[112:140:2154] Leader for TabletID 72057594037927937 is [112:140:2154] sender: [112:226:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:55:2057] recipient: [113:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:55:2057] recipient: [113:51:2095] Leader for TabletID 72057594037927937 is [113:57:2097] sender: [113:58:2057] recipient: [113:51:2095] Leader for TabletID 72057594037927937 is [113:57:2097] sender: [113:75:2057] recipient: [113:14:2061] !Reboot 72057594037927937 (actor [113:57:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [113:57:2097] sender: [113:138:2057] recipient: [113:36:2083] Leader for TabletID 72057594037927937 is [113:57:2097] sender: [113:141:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [113:57:2097] sender: [113:142:2057] recipient: [113:140:2156] Leader for TabletID 72057594037927937 is [113:143:2157] sender: [113:144:2057] recipient: [113:140:2156] !Reboot 72057594037927937 (actor [113:57:2097]) rebooted! !Reboot 72057594037927937 (actor [113:57:2097]) tablet resolver refreshed! new actor is[113:143:2157] Leader for TabletID 72057594037927937 is [113:143:2157] sender: [113:229:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:55:2057] recipient: [114:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:55:2057] recipient: [114:51:2095] Leader for TabletID 72057594037927937 is [114:57:2097] sender: [114:58:2057] recipient: [114:51:2095] Leader for TabletID 72057594037927937 is [114:57:2097] sender: [114:75:2057] recipient: [114:14:2061] !Reboot 72057594037927937 (actor [114:57:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [114:57:2097] sender: [114:138:2057] recipient: [114:36:2083] Leader for TabletID 72057594037927937 is [114:57:2097] sender: [114:140:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [114:57:2097] sender: [114:142:2057] recipient: [114:141:2156] Leader for TabletID 72057594037927937 is [114:143:2157] sender: [114:144:2057] recipient: [114:141:2156] !Reboot 72057594037927937 (actor [114:57:2097]) rebooted! !Reboot 72057594037927937 (actor [114:57:2097]) tablet resolver refreshed! 
new actor is[114:143:2157] Leader for TabletID 72057594037927937 is [114:143:2157] sender: [114:229:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:55:2057] recipient: [115:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:55:2057] recipient: [115:51:2095] Leader for TabletID 72057594037927937 is [115:57:2097] sender: [115:58:2057] recipient: [115:51:2095] Leader for TabletID 72057594037927937 is [115:57:2097] sender: [115:75:2057] recipient: [115:14:2061] !Reboot 72057594037927937 (actor [115:57:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [115:57:2097] sender: [115:139:2057] recipient: [115:36:2083] Leader for TabletID 72057594037927937 is [115:57:2097] sender: [115:142:2057] recipient: [115:14:2061] Leader for TabletID 72057594037927937 is [115:57:2097] sender: [115:143:2057] recipient: [115:141:2156] Leader for TabletID 72057594037927937 is [115:144:2157] sender: [115:145:2057] recipient: [115:141:2156] !Reboot 72057594037927937 (actor [115:57:2097]) rebooted! !Reboot 72057594037927937 (actor [115:57:2097]) tablet resolver refreshed! new actor is[115:144:2157] Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:55:2057] recipient: [116:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:55:2057] recipient: [116:51:2095] Leader for TabletID 72057594037927937 is [116:57:2097] sender: [116:58:2057] recipient: [116:51:2095] Leader for TabletID 72057594037927937 is [116:57:2097] sender: [116:75:2057] recipient: [116:14:2061]
>> TConsistentOpsWithReboots::DropNotNullColumnTableWithReboots
|74.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableWithError [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:31:07.397077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:07.397099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:07.397104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:07.397107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:07.397119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:07.397122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:31:07.397129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:07.397140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:07.397222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:07.397280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:07.406127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:31:07.406145Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:07.407996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:07.408084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:07.408114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:07.409512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:07.409641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:07.409746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:07.409786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:07.410179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:07.410211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:07.410439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:07.410448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:07.410464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:07.410473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:07.410477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:07.410504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:31:07.411513Z node 1 :HIVE 
INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:31:07.424594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:07.424668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:07.424719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:07.424759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:07.424768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:07.425386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:07.425410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:07.425448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:07.425456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:07.425460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:07.425464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:07.425735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:07.425743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:07.425748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:07.426000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:07.426009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:07.426018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:07.426026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:07.426491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:07.426827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:07.426858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:07.426986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:07.427005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:07.427010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:07.427066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:31:07.427072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:07.427098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:31:07.427107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:31:07.427423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:07.427429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:07.427462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:07.427465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: 
TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-29T15:31:07.427518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:07.427526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-29T15:31:07.427537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:31:07.427542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:31:07.427548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:31:07.427551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:31:07.427556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-29T15:31:07.427569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:31:07.427573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1:0 2025-05-29T15:31:07.427576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1:0 2025-05-29T15:31:07.427584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:31:07.427589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-29T15:31:07.427592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-29T15:31:07.427843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:31:07.427856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-29T15:31:07.427859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-05-29T15:31:07.427863Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-05-29T15:31:07.427866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:31:07.427877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 
1, subscribers: 0 2025-05-29T15:31:07.428383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-05-29T15:31:07.428462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-05-29T15:31:07.428761Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:434: actor# [1:272:2262] Bootstrap 2025-05-29T15:31:07.430087Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:453: actor# [1:272:2262] Become StateWork (SchemeCache [1:277:2267]) 2025-05-29T15:31:07.430643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "__ydb_parent" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "__ydb_parent" Type: EIndexTypeGlobalVectorKmeansTree VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:07.430710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:101: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-05-29T15:31:07.430753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: index key column shouldn't have a reserved name, at schemeshard: 72057594046678944 2025-05-29T15:31:07.430762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: index key column shouldn't have a reserved name, at schemeshard: 72057594046678944 2025-05-29T15:31:07.430894Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:272:2262] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:31:07.431383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "index key column shouldn\'t have a reserved name" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:07.431413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: index key column shouldn't have a reserved name, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2025-05-29T15:31:07.431482Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-05-29T15:31:07.431974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: 
"embedding" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "id" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:07.432007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:101: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-05-29T15:31:07.432020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 102:0, explain: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2025-05-29T15:31:07.432024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2025-05-29T15:31:07.432328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "the same column can\'t be used as key and data column for one index, for example id" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:07.432357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors TestModificationResult got TxId: 102, wait until txId: 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DefaultMeteringMode [FAIL] Test command err: 2025-05-29T15:30:31.527470Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890469668273190:2207];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:31.527514Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ebf/r3tmp/tmplfqSQx/pdisk_1.dat 2025-05-29T15:30:31.564617Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890469538269934:2222];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:31.564754Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:30:31.565192Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-05-29T15:30:31.566948Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:30:31.627364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-05-29T15:30:31.627395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:31.629157Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:31.634040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62856, node 1 2025-05-29T15:30:31.653632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001ebf/r3tmp/yandexw2lYZn.tmp 2025-05-29T15:30:31.653643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001ebf/r3tmp/yandexw2lYZn.tmp 2025-05-29T15:30:31.653723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001ebf/r3tmp/yandexw2lYZn.tmp 2025-05-29T15:30:31.653748Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:30:31.656722Z INFO: TTestServer started on Port 23750 GrpcPort 62856 2025-05-29T15:30:31.670593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:31.670617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:31.671778Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:30:31.672274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23750 PQClient connected to localhost:62856 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:31.719877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:30:31.739239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-29T15:30:31.839739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:31.995544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890469668274068:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:31.995583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:31.995774Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890469668274104:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:31.996567Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890469668274116:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:31.996593Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:31.996710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:30:32.003130Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890469668274106:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:30:32.030595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:32.044703Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509890473833237456:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:30:32.044826Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=ODdhOTQzNzAtNWEwZDU5YmUtOWVkMmQ3ZmYtODBkNTFiYTg=, ActorId: [2:7509890473833237415:2309], ActorState: ExecuteState, TraceId: 01jwean6exc15cfx6pzv40wt9e, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:30:32.045279Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:30:32.074221Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890473963241587:2805] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:32.078782Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890473963241606:2354], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:30:32.078893Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OGI4Y2Q1MGEtODcxZmExZGQtNGRmY2YxNzMtMWNjYmNlMGI=, ActorId: [1:7509890469668274065:2333], ActorState: ExecuteState, TraceId: 01jwean6dv78d04f514ks1kg0v, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:30:32.079052Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:30:32.090470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:32.157035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:30:32.188537Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890473963241895:2381], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yq ... ild_root/ciyv/001ebf/r3tmp/yandexI9zQQs.tmp 2025-05-29T15:30:47.317652Z node 25 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001ebf/r3tmp/yandexI9zQQs.tmp 2025-05-29T15:30:47.317684Z node 25 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:30:47.321591Z INFO: TTestServer started on Port 22697 GrpcPort 18060 TClient is connected to server localhost:22697 PQClient connected to localhost:18060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:47.394587Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:47.394610Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:47.396218Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:47.397152Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:47.397907Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:47.397924Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:47.398882Z node 25 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 26 Cookie 26 2025-05-29T15:30:47.399134Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:47.408285Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:30:47.615166Z node 25 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [25:7509890535865056580:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:47.615207Z node 25 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:47.615316Z node 25 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [25:7509890535865056600:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:47.615883Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2025-05-29T15:30:47.616091Z node 25 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [25:7509890535865056631:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:47.616120Z node 25 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:47.618821Z node 25 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [25:7509890535865056602:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2025-05-29T15:30:47.620119Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:47.670145Z node 25 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [25:7509890535865056794:2803] txid# 281474976720664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:47.673237Z node 25 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [25:7509890535865056813:2354], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:30:47.673311Z node 25 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=25&id=Njc1YTIzZGUtNGU4MjZkOTMtNzI2Yjg4NTAtZDEzNGU2NjU=, ActorId: [25:7509890535865056562:2334], ActorState: ExecuteState, TraceId: 01jweannnyfxdcf69v3weab6sb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:30:47.673430Z node 25 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:30:47.677668Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:47.695577Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:30:47.717081Z node 25 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [25:7509890535865057098:2381], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:47.717187Z node 25 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=25&id=ZWFjNmViOS04YmRkNTJhYS01N2U0ZjRkLTRkYTVkNzg1, ActorId: [25:7509890535865057095:2379], ActorState: ExecuteState, TraceId: 01jweannrvd8xfpzd0a7gcfwd6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13B9173C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13D44119) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139C81C4) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139F6148) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x13995592) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139C2D47) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x138D5468) NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x138D474B) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::DefaultMeteringMode(bool)+387 (0x13992183) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TTestCaseDefaultMeteringMode::Execute_(NUnitTest::TTestContext&)+11 (0x1399365B) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139BF897) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13D45FCE) NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute()+436 (0x139BF1B4) NUnitTest::TTestFactory::Execute()+803 (0x13D46743) NUnitTest::RunMain(int, char**)+3021 (0x13D582DD) ??+0 (0x7FC3B4E63D90) __libc_start_main+128 (0x7FC3B4E63E40) _start+41 (0x129C9029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::Merge1KShards [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:29:42.716754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:29:42.716785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:42.716790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:29:42.716796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:29:42.716811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:29:42.716815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-05-29T15:29:42.716826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:42.716840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:29:42.716982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:29:42.717068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:29:42.735196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:29:42.735227Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:42.738799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:29:42.738953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:29:42.738994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:29:42.740612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:29:42.740786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:29:42.740929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:42.740982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:29:42.741484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:42.741535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:29:42.741814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:42.741825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:42.741849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:29:42.741858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:42.741864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:29:42.741901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:29:42.743359Z node 1 :HIVE 
INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:29:42.767078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:29:42.767174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:42.767246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:29:42.767299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:29:42.767312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:42.768657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:42.768693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:29:42.768750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:42.768764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:29:42.768769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:29:42.768775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:29:42.769357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:42.769371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:29:42.769378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:29:42.769770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:42.769781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:42.769788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:42.769807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:29:42.770593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:29:42.771096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:29:42.771147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:29:42.771349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:42.771377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:29:42.771386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:42.771451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:29:42.771460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:42.771495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:29:42.771507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:29:42.771963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:42.771974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:42.772021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T1 ... 
d tabletId 72075186233410289 Deleted tabletId 72075186233410290 Deleted tabletId 72075186233410291 Deleted tabletId 72075186233410292 Deleted tabletId 72075186233410293 Deleted tabletId 72075186233410294 Deleted tabletId 72075186233410295 Deleted tabletId 72075186233410296 Deleted tabletId 72075186233410297 Deleted tabletId 72075186233410298 Deleted tabletId 72075186233410299 Deleted tabletId 72075186233410300 Deleted tabletId 72075186233410301 Deleted tabletId 72075186233410302 Deleted tabletId 72075186233410303 Deleted tabletId 72075186233410304 Deleted tabletId 72075186233410305 Deleted tabletId 72075186233410306 Deleted tabletId 72075186233410307 Deleted tabletId 72075186233410308 Deleted tabletId 72075186233410309 Deleted tabletId 72075186233410310 Deleted tabletId 72075186233410311 Deleted tabletId 72075186233410312 Deleted tabletId 72075186233410313 Deleted tabletId 72075186233410314 Deleted tabletId 72075186233410315 Deleted tabletId 72075186233410316 Deleted tabletId 72075186233410317 Deleted tabletId 72075186233410318 Deleted tabletId 72075186233410319 Deleted tabletId 72075186233410320 Deleted tabletId 72075186233410321 Deleted tabletId 72075186233410322 Deleted tabletId 72075186233410323 Deleted tabletId 72075186233410324 Deleted tabletId 72075186233410325 Deleted tabletId 72075186233410326 Deleted tabletId 72075186233410327 Deleted tabletId 72075186233410328 Deleted tabletId 72075186233410329 Deleted tabletId 72075186233410330 Deleted tabletId 72075186233410331 Deleted tabletId 72075186233410332 Deleted tabletId 72075186233410333 Deleted tabletId 72075186233410334 Deleted tabletId 72075186233410335 Deleted tabletId 72075186233410336 Deleted tabletId 72075186233410337 Deleted tabletId 72075186233410338 Deleted tabletId 72075186233410339 Deleted tabletId 72075186233410340 Deleted tabletId 72075186233410341 Deleted tabletId 72075186233410342 Deleted tabletId 72075186233410343 Deleted tabletId 72075186233410344 Deleted tabletId 72075186233410345 Deleted tabletId 72075186233410346 Deleted tabletId 72075186233410347 Deleted tabletId 72075186233410348 Deleted tabletId 72075186233410349 Deleted tabletId 72075186233410350 Deleted tabletId 72075186233410351 Deleted tabletId 72075186233410352 Deleted tabletId 72075186233410353 Deleted tabletId 72075186233410354 Deleted tabletId 72075186233410355 Deleted tabletId 72075186233410356 Deleted tabletId 72075186233410357 Deleted tabletId 72075186233410358 Deleted tabletId 72075186233410359 Deleted tabletId 72075186233410360 Deleted tabletId 72075186233410361 Deleted tabletId 72075186233410363 Deleted tabletId 72075186233410364 Deleted tabletId 72075186233410362 Deleted tabletId 72075186233410365 Deleted tabletId 72075186233410366 Deleted tabletId 72075186233410367 Deleted tabletId 72075186233410368 Deleted tabletId 72075186233410369 Deleted tabletId 72075186233410370 Deleted tabletId 72075186233410371 Deleted tabletId 72075186233410372 Deleted tabletId 72075186233410373 Deleted tabletId 72075186233410374 Deleted tabletId 72075186233410375 Deleted tabletId 72075186233410376 Deleted tabletId 72075186233410377 Deleted tabletId 72075186233410378 Deleted tabletId 72075186233410379 Deleted tabletId 72075186233410380 Deleted tabletId 72075186233410381 Deleted tabletId 72075186233410382 Deleted tabletId 72075186233410383 Deleted tabletId 72075186233410384 Deleted tabletId 72075186233410385 Deleted tabletId 72075186233410386 Deleted tabletId 72075186233410387 Deleted tabletId 72075186233410388 Deleted tabletId 72075186233410389 Deleted tabletId 
72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233410390 Deleted tabletId 72075186233409817 Deleted tabletId 72075186233409819 Deleted tabletId 72075186233409820 Deleted tabletId 72075186233409821 Deleted tabletId 72075186233409824 Deleted tabletId 72075186233409937 Deleted tabletId 72075186233409938 Deleted tabletId 72075186233409951 Deleted tabletId 72075186233410065 Deleted tabletId 72075186233410066 Deleted tabletId 72075186233410067 Deleted tabletId 72075186233410537 Deleted tabletId 72075186233410538 Deleted tabletId 72075186233410070 Deleted tabletId 72075186233410071 Deleted tabletId 72075186233410072 Deleted tabletId 72075186233410539 Deleted tabletId 72075186233410540 Deleted tabletId 72075186233410541 Deleted tabletId 72075186233410542 Deleted tabletId 72075186233410543 Deleted tabletId 72075186233410544 Deleted tabletId 72075186233409701 Deleted tabletId 72075186233409702 Deleted tabletId 72075186233409703 Deleted tabletId 72075186233410545 Deleted tabletId 72075186233409704 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 Deleted tabletId 72075186233409718 Deleted tabletId 72075186233409556 Deleted tabletId 72075186233409557 Deleted tabletId 72075186233409558 Deleted tabletId 72075186233409554 Deleted tabletId 72075186233409555 Deleted tabletId 72075186233409559 Deleted tabletId 72075186233409560 Deleted tabletId 72075186233409561 Deleted tabletId 72075186233409562 Deleted tabletId 72075186233409563 Deleted tabletId 72075186233409564 Deleted tabletId 72075186233409565 Deleted tabletId 72075186233409566 Deleted tabletId 72075186233409567 Deleted tabletId 72075186233409568 Deleted tabletId 72075186233409569 Deleted tabletId 72075186233409570 Deleted tabletId 72075186233409571 Deleted tabletId 72075186233409572 Deleted tabletId 72075186233409573 Deleted tabletId 72075186233409574 Deleted tabletId 72075186233409576 Deleted tabletId 72075186233409577 Deleted tabletId 72075186233409575 Deleted tabletId 72075186233409578 Deleted tabletId 72075186233409579 Deleted tabletId 72075186233409580 Deleted tabletId 72075186233409581 Deleted tabletId 72075186233409582 Deleted tabletId 72075186233409583 Deleted tabletId 72075186233409584 Deleted tabletId 72075186233409585 Deleted tabletId 72075186233409586 Deleted tabletId 72075186233409587 Deleted tabletId 72075186233409588 Deleted tabletId 72075186233409589 Deleted tabletId 72075186233409590 Deleted tabletId 72075186233409591 Deleted tabletId 72075186233409592 Deleted tabletId 72075186233409593 Deleted tabletId 72075186233409594 Deleted tabletId 72075186233409595 Deleted tabletId 72075186233409596 Deleted tabletId 72075186233409597 Deleted tabletId 72075186233409609 Deleted tabletId 72075186233409610 Deleted tabletId 72075186233409611 Deleted tabletId 72075186233409612 Deleted tabletId 72075186233409613 Deleted tabletId 72075186233409614 Deleted tabletId 72075186233409617 Deleted tabletId 72075186233409618 Deleted tabletId 72075186233409619 Deleted tabletId 72075186233409620 Deleted tabletId 72075186233409621 Deleted tabletId 72075186233409622 Deleted tabletId 72075186233409624 Deleted tabletId 72075186233409627 Deleted tabletId 72075186233409628 Deleted tabletId 72075186233409625 Deleted tabletId 72075186233409630 Deleted tabletId 72075186233409641 2025-05-29T15:31:03.050412Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: 
Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:03.050510Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 129us result status StatusSuccess 2025-05-29T15:31:03.050758Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 } } 
TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233410546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2000 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> ListObjectsInS3Export::ParametersValidation [GOOD] Test command err: 2025-05-29T15:29:36.184494Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890233802208012:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:36.184549Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cb7/r3tmp/tmpFkp8E1/pdisk_1.dat 2025-05-29T15:29:36.422959Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11976, node 1 2025-05-29T15:29:36.484936Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:36.484954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:36.484956Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:36.485012Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:36.517806Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:36.517846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:36.523783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16103 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:36.546641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:36.676129Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890233802208840:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:36.676158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:36.676242Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890233802208861:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:36.676327Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509890233802208094:2115] Handle TEvProposeTransaction 2025-05-29T15:29:36.676336Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509890233802208094:2115] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:29:36.676343Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509890233802208094:2115] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509890233802208864:2599] 2025-05-29T15:29:36.688366Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509890233802208864:2599] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:29:36.688416Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509890233802208864:2599] txid# 281474976715658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:29:36.688421Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509890233802208864:2599] txid# 281474976715658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:29:36.688842Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:7509890233802208864:2599] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:29:36.688867Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509890233802208864:2599] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:29:36.688898Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509890233802208864:2599] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:29:36.688932Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509890233802208864:2599] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:29:36.688950Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509890233802208864:2599] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:29:36.688991Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509890233802208864:2599] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:29:36.689026Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:7509890233802208889:2605], Recipient [1:7509890233802208336:2267]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:29:36.689035Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:29:36.689038Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:29:36.689043Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [1:7509890233802208864:2599], Recipient [1:7509890233802208336:2267]: {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:29:36.689045Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:29:36.689663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: ".metadata/workload_manager/pools/default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } TxId: 281474976715658 TabletId: 72057594046644480 Owner: "metadata@system" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:29:36.689761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:36.689790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: .metadata, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-05-29T15:29:36.689800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-05-29T15:29:36.689813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:29:36.689818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2025-05-29T15:29:36.689841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: .metadata, child name: workload_manager, child id: [OwnerId: 72057594046644480, 
LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-29T15:29:36.689844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 0 2025-05-29T15:29:36.689850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:29:36.689852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2025-05-29T15:29:36.689861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 3], parent name: workl ... 3698, Sender [55:7509890488515596739:2383], Recipient [55:7509890484220627730:2193]: NKikimrExport.TEvGetExportRequest Request { Id: 281474976715664 } DatabaseName: "/Root" 2025-05-29T15:30:36.336080Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4979: StateWork, processing event TEvExport::TEvGetExportRequest 2025-05-29T15:30:36.336296Z node 55 :TX_PROXY DEBUG: rpc_get_operation.cpp:209: [GetExport] [55:7509890488515596739:2383] [0] Handle TEvExport::TEvGetExportResponse: record# Entry { Id: 281474976715664 Status: SUCCESS Progress: PROGRESS_DONE ExportToS3Settings { endpoint: "localhost:1502" scheme: HTTP bucket: "test_bucket" items { source_path: "dir1/Table1" destination_prefix: "Prefix/dir1/Table1" } items { source_path: "Table0" destination_prefix: "Prefix/Table0" } items { source_path: "dir1/dir2/Table2" destination_prefix: "Prefix/dir1/dir2/Table2" } source_path: "/Root" destination_prefix: "Prefix//" } ItemsProgress { parts_total: 1 parts_completed: 1 start_time { seconds: 1748532636 } end_time { seconds: 1748532636 } } ItemsProgress { parts_total: 1 parts_completed: 1 start_time { seconds: 1748532636 } end_time { seconds: 1748532636 } } ItemsProgress { parts_total: 1 parts_completed: 1 start_time { seconds: 1748532636 } end_time { seconds: 1748532636 } } StartTime { seconds: 1748532636 } EndTime { seconds: 1748532636 } } 2025-05-29T15:30:36.336891Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [55:7509890488515596742:3472], Recipient [55:7509890484220627730:2193]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:30:36.336908Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:30:36.336911Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480 2025-05-29T15:30:36.339842Z node 55 :TX_PROXY DEBUG: rpc_list_objects_in_s3_export.cpp:57: [ListObjectsInS3Export] [55:7509890488515596743:2384] Resolve database: name# /Root 2025-05-29T15:30:36.340080Z node 55 :TX_PROXY DEBUG: rpc_list_objects_in_s3_export.cpp:73: [ListObjectsInS3Export] [55:7509890488515596743:2384] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] 
ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:30:36.340088Z node 55 :TX_PROXY DEBUG: rpc_list_objects_in_s3_export.cpp:134: [ListObjectsInS3Export] [55:7509890488515596743:2384] Send request: schemeShardId# 72057594046644480 2025-05-29T15:30:36.340298Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [55:7509890488515596746:3474], Recipient [55:7509890484220627730:2193]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:30:36.340304Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:30:36.340308Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:30:36.340450Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 275251210, Sender [55:7509890488515596743:2384], Recipient [55:7509890484220627730:2193]: NKikimrImport.TEvListObjectsInS3ExportRequest OperationParams { } Settings { endpoint: "localhost:1502" scheme: HTTP bucket: "test_bucket" access_key: "test_key" secret_key: "test_secret" } PageSize: 0 PageToken: "" 2025-05-29T15:30:36.340456Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4994: StateWork, processing event TEvImport::TEvListObjectsInS3ExportRequest 2025-05-29T15:30:36.340500Z node 55 :IMPORT INFO: schemeshard_import_getters.cpp:1308: Reply: self# [55:7509890488515596747:3475], status# 400010, error# Empty S3 prefix specified 2025-05-29T15:30:36.340640Z node 55 :TX_PROXY DEBUG: rpc_list_objects_in_s3_export.cpp:156: [ListObjectsInS3Export] [55:7509890488515596743:2384] Handle TListObjectsInS3ExportRPC::TEvListObjectsInS3ExportResponse: record# Status: BAD_REQUEST Issues { message: "Empty S3 prefix specified" } 2025-05-29T15:30:36.340944Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [55:7509890488515596746:3474], Recipient [55:7509890484220627730:2193]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:30:36.340952Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:30:36.340955Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480 2025-05-29T15:30:36.344430Z node 55 :TX_PROXY DEBUG: rpc_list_objects_in_s3_export.cpp:57: [ListObjectsInS3Export] [55:7509890488515596748:2385] Resolve database: name# /Root 2025-05-29T15:30:36.344648Z node 55 :TX_PROXY DEBUG: rpc_list_objects_in_s3_export.cpp:73: [ListObjectsInS3Export] [55:7509890488515596748:2385] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: 
(empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:30:36.344655Z node 55 :TX_PROXY DEBUG: rpc_list_objects_in_s3_export.cpp:134: [ListObjectsInS3Export] [55:7509890488515596748:2385] Send request: schemeShardId# 72057594046644480 2025-05-29T15:30:36.344818Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [55:7509890488515596751:3477], Recipient [55:7509890484220627730:2193]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:30:36.344824Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:30:36.344828Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:30:36.344944Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 275251210, Sender [55:7509890488515596748:2385], Recipient [55:7509890484220627730:2193]: NKikimrImport.TEvListObjectsInS3ExportRequest OperationParams { } Settings { endpoint: "localhost:1502" scheme: HTTP bucket: "test_bucket" access_key: "test_key" secret_key: "test_secret" prefix: "Prefix" } PageSize: -42 PageToken: "" 2025-05-29T15:30:36.344948Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4994: StateWork, processing event TEvImport::TEvListObjectsInS3ExportRequest 2025-05-29T15:30:36.344992Z node 55 :IMPORT INFO: schemeshard_import_getters.cpp:1308: Reply: self# [55:7509890488515596752:3478], status# 400010, error# Page size should be greater than or equal to 0 2025-05-29T15:30:36.345062Z node 55 :TX_PROXY DEBUG: rpc_list_objects_in_s3_export.cpp:156: [ListObjectsInS3Export] [55:7509890488515596748:2385] Handle TListObjectsInS3ExportRPC::TEvListObjectsInS3ExportResponse: record# Status: BAD_REQUEST Issues { message: "Page size should be greater than or equal to 0" } 2025-05-29T15:30:36.345517Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [55:7509890488515596751:3477], Recipient [55:7509890484220627730:2193]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:30:36.345523Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:30:36.345526Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480 2025-05-29T15:30:36.346195Z node 55 :TX_PROXY DEBUG: rpc_list_objects_in_s3_export.cpp:57: [ListObjectsInS3Export] [55:7509890488515596756:2386] Resolve database: name# /Root 2025-05-29T15:30:36.346333Z node 55 :TX_PROXY DEBUG: rpc_list_objects_in_s3_export.cpp:73: [ListObjectsInS3Export] [55:7509890488515596756:2386] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:30:36.346337Z node 55 :TX_PROXY DEBUG: rpc_list_objects_in_s3_export.cpp:134: [ListObjectsInS3Export] 
[55:7509890488515596756:2386] Send request: schemeShardId# 72057594046644480
2025-05-29T15:30:36.346481Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [55:7509890488515596759:3483], Recipient [55:7509890484220627730:2193]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-05-29T15:30:36.346485Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-05-29T15:30:36.346488Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480
2025-05-29T15:30:36.346582Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 275251210, Sender [55:7509890488515596756:2386], Recipient [55:7509890484220627730:2193]: NKikimrImport.TEvListObjectsInS3ExportRequest OperationParams { } Settings { endpoint: "localhost:1502" scheme: HTTP bucket: "test_bucket" access_key: "test_key" secret_key: "test_secret" prefix: "Prefix" } PageSize: 42 PageToken: "incorrect page token"
2025-05-29T15:30:36.346587Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4994: StateWork, processing event TEvImport::TEvListObjectsInS3ExportRequest
2025-05-29T15:30:36.346613Z node 55 :IMPORT INFO: schemeshard_import_getters.cpp:1308: Reply: self# [55:7509890488515596760:3484], status# 400010, error# Failed to parse page token
2025-05-29T15:30:36.346698Z node 55 :TX_PROXY DEBUG: rpc_list_objects_in_s3_export.cpp:156: [ListObjectsInS3Export] [55:7509890488515596756:2386] Handle TListObjectsInS3ExportRPC::TEvListObjectsInS3ExportResponse: record# Status: BAD_REQUEST Issues { message: "Failed to parse page token" }
2025-05-29T15:30:36.347766Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [55:7509890488515596759:3483], Recipient [55:7509890484220627730:2193]: NKikimr::TEvTabletPipe::TEvServerDisconnected
2025-05-29T15:30:36.347776Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected
2025-05-29T15:30:36.347779Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480
>> TExternalTableTestReboots::CreateDroppedExternalTableAndDropWithReboots
>> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD]
|74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:30:23.501343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:30:23.501372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:30:23.501378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:30:23.501384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:30:23.501400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:30:23.501404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:30:23.501414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:30:23.501434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:30:23.501544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:30:23.501637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:30:23.515044Z node
1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:23.515072Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:23.515180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:23.518080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:23.518119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:23.518150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:23.521502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:23.521628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:23.521780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:23.521970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:23.522773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:23.522820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:23.523109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:23.523120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:23.523170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:23.523179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:23.523185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:23.523208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:23.524785Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:23.548735Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:23.548825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.548902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:23.548951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:23.548962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.549604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:23.549637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:23.549686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.549696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:23.549703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:23.549709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:23.550108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.550119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:23.550125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:23.550467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.550476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.550482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:23.550490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready 
parts: 1/1 2025-05-29T15:30:23.551248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:23.551608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:23.551645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:23.551860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:23.551884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:23.551891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:23.551974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
ogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 
RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:08.252290Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:31:08.252321Z node 34 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 38us result status StatusSuccess 2025-05-29T15:31:08.252411Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 
TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:08.262621Z node 34 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1084:2866] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-05-29T15:31:08.262643Z node 34 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409550:2][34:1042:2866] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-05-29T15:31:08.262667Z node 34 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1084:2866] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1748532668248545 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: 
[OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1748532668248545 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1748532668248545 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] }
2025-05-29T15:31:08.263226Z node 34 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1084:2866] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5
2025-05-29T15:31:08.263242Z node 34 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409550:2][34:1042:2866] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 }
|74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.4%| [TA] {RESULT} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::CompoundKeyRange
Test command err:
2025-05-29T15:28:50.414032Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890034461739331:2075];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:28:50.414052Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013ef/r3tmp/tmprhkyp4/pdisk_1.dat
2025-05-29T15:28:50.505429Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:28:50.515753Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:28:50.515783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:28:50.519728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 3025, node 1
2025-05-29T15:28:50.545848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:28:50.545863Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:28:50.545865Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:28:50.545915Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
2025-05-29T15:28:50.553302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation
type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:13033 2025-05-29T15:28:50.589329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:28:50.598540Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890033577310226:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:50.598559Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Database/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:28:50.605062Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:28:50.609883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:50.609911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:50.611422Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:28:50.614595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:50.653124Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:28:50.653211Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:28:50.653246Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:28:50.653261Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:28:50.653294Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:28:50.653308Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:28:50.653322Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:28:50.653336Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:28:50.653352Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:28:50.710537Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:50.710561Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:50.712233Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:50.722405Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:28:50.722427Z node 2 :STATISTICS DEBUG: 
tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:28:50.729614Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:28:50.733705Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:28:50.733735Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:28:50.733757Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:28:50.733761Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:28:50.733765Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:28:50.733771Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:28:50.733775Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:28:50.733781Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:28:50.733870Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:28:50.820923Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:28:50.820942Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:7509890033577310811:2313], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:28:50.822501Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:7509890033577310830:2323] 2025-05-29T15:28:50.822512Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:7509890033577310840:2327] 2025-05-29T15:28:50.824949Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7509890033577310830:2323], schemeshard id = 72075186224037897 2025-05-29T15:28:50.824979Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-05-29T15:28:50.825424Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:28:50.825431Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:28:50.825439Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-05-29T15:28:50.826498Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:28:50.827258Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:28:50.828822Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:28:50.828836Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. 
Subscribe on create table tx: 281474976720657 2025-05-29T15:28:50.882833Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890034461740451:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:50.882877Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:50.932955Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-05-29T15:28:50.950669Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:28:50.992663Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037913;self_id=[2:7509890033577311235:2366];tablet_id=72075186224037913;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:28:50.992745Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037913;self_id=[2:7509890033577311235:2366];tablet_id=72075186224037913;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:28:50.992802Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037913;self_id=[2:7509890033577311235:2366];tablet_id=72075186224037913;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:28:50.992834Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037913;self_id=[2:7509890033577311235:2366];tablet_id=72075186224037913;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:28:50.992860Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037913;self_id=[2:7509890033577311235:2366];tablet_id=72075186224037913;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:28:50.992882Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037913;self_id=[2:7509890033577311235:2366];tablet_id=72075186224037913;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:28:50.992900Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037913;self_id=[2:7509890033577311235:2366];tablet_id=72075186224037913;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:28:50.9 ... 3DD8F65 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C 8. /-S/util/thread/pool.h:71: Process @ 0x2651596C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9 10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C 13. ??:0: ?? @ 0x7F558144CAC2 14. ??:0: ?? 
@ 0x7F55814DE84F
Trying to start YDB, gRPC: 21048, MsgBus: 10177
2025-05-29T15:31:07.417150Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890624661553453:2064];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:31:07.417372Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0013ef/r3tmp/tmpWLHlyg/pdisk_1.dat
2025-05-29T15:31:07.458968Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:31:07.460165Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890624661553431:2079] 1748532667417011 != 1748532667417014
TServer::EnableGrpc on GrpcPort 21048, node 1
2025-05-29T15:31:07.469918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:31:07.469932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:31:07.469933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:31:07.469973Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:10177
TClient is connected to server localhost:10177
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:31:07.539570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:31:07.539608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:31:07.540566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:31:07.541083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:07.544415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:07.559187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:07.574494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:07.583170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:07.686449Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890624661555065:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:07.686469Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:07.712437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:07.719174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:07.728953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:07.736139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:07.743361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:07.757458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:07.771595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:07.787553Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890624661555718:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:07.787577Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:07.787594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890624661555723:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:07.788306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:07.791521Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890624661555725:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:07.877304Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890624661555776:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:07.943594Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890624661555792:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:07.943673Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWNmMTg5ZWUtYjZhMzYwZjQtMTc4NjkyODctZjU3M2M1OTA=, ActorId: [1:7509890624661555047:2401], ActorState: ExecuteState, TraceId: 01jweap9cb7fdd0yb0a06ttqkn, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:07.944268Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36
RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13DD8F65
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13DCFF66
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13F70BC6
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13A3FAE2
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x264F71F2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2651596C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2651596C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2651596C
8. /-S/util/thread/pool.h:71: Process @ 0x2651596C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13DE06B9
10. /-S/util/thread/factory.h:15: Execute @ 0x13DDF0A9
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13DDF0A9
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13DDA51C
13. ??:0: ?? @ 0x7F7E0C6B0AC2
14. ??:0: ?? @ 0x7F7E0C74284F
|74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
>> TExternalTableTestReboots::SimpleDropExternalTableWithReboots2
>> TExternalTableTestReboots::DropReplacedExternalTableWithReboots
>> TExternalTableTestReboots::CreateDroppedExternalTableWithReboots
|74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
>> TExternalTableTestReboots::SimpleDropExternalTableWithReboots
|74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
>> TExternalTableTestReboots::ParallelCreateDrop
|74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.5%| [TA] $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TExternalTableTestReboots::DropExternalTableWithReboots
|74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
|74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
>> KqpImmediateEffects::ConflictingKeyR1WR2
>> KqpEffects::InsertAbort_Select_Success
>> KqpImmediateEffects::Insert
>> KqpImmediateEffects::DeleteOnAfterInsertWithIndex
>> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink
>> KqpWrite::InsertRevert
>> KqpImmediateEffects::ConflictingKeyW1WR2
>> KqpImmediateEffects::InsertExistingKey-UseSink
|74.5%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log}
>> KqpWrite::UpsertNullKey
>> KqpEffects::AlterAfterUpsertTransaction+UseSink
|74.6%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|74.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log}
>> BsControllerConfig::AddDriveSerial
|74.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest
|74.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest
>> KqpEffects::InsertAbort_Literal_Success
>> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd
>> KqpEffects::InsertRevert_Literal_Conflict
>> KqpImmediateEffects::ImmediateUpdateSelect
>> KqpScheme::DisableMetadataObjectsOnServerless [FAIL]
>> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink
>> KqpImmediateEffects::ConflictingKeyR1RWR2
>> KqpWrite::ProjectReplace+UseSink
>> KqpImmediateEffects::ConflictingKeyW1RWR2
>> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink
>> KqpImmediateEffects::Interactive
>> KqpWrite::ProjectReplace-UseSink
>> KqpEffects::AlterAfterUpsertTransaction-UseSink
>> BsControllerConfig::AddDriveSerial [GOOD]
>> BsControllerConfig::AddDriveSerialMassive
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DisableMetadataObjectsOnServerless [FAIL]
Test command err:
2025-05-29T15:29:00.556061Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890078888383314:2212];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:29:00.556114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001225/r3tmp/tmpjAURZP/pdisk_1.dat
2025-05-29T15:29:00.641280Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:29:00.654989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:29:00.655020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 10889, node 1
2025-05-29T15:29:00.657171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:29:00.668455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:29:00.668472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:29:00.668474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:29:00.668527Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:18049
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:00.706591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:00.944606Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:440: [WorkloadService] [Service] Started workload service initialization 2025-05-29T15:29:00.946018Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:223: SessionId: ydb://session/3?node_id=1&id=YWJmYTZlNWItNzU5OTExOTAtNThkNThkMDUtMjgwMDc2NTg=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YWJmYTZlNWItNzU5OTExOTAtNThkNThkMDUtMjgwMDc2NTg= 2025-05-29T15:29:00.946268Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:125: [WorkloadService] [Service] Updated node info, noode count: 3 2025-05-29T15:29:00.946278Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:100: [WorkloadService] [Service] Subscribed for config changes 2025-05-29T15:29:00.946280Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:111: [WorkloadService] [Service] Resource pools was enanbled 2025-05-29T15:29:00.948637Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7509890078888383933:2330], Start check tables existence, number paths: 2 2025-05-29T15:29:00.948666Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=YWJmYTZlNWItNzU5OTExOTAtNThkNThkMDUtMjgwMDc2NTg=, ActorId: [1:7509890078888383934:2331], ActorState: unknown state, session actor bootstrapped 2025-05-29T15:29:00.948967Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7509890078888383933:2330], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-05-29T15:29:00.948980Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7509890078888383933:2330], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-05-29T15:29:00.948984Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7509890078888383933:2330], Successfully finished 2025-05-29T15:29:00.948996Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:367: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-05-29T15:29:00.957735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:00.962853Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509890075407095006:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:00.962873Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:29:00.967567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:00.967605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:00.968443Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-05-29T15:29:00.969314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:01.028955Z node 3 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.029034Z node 3 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.029046Z node 3 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.029054Z node 3 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.029069Z node 3 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.029080Z node 3 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.029090Z node 3 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.029099Z node 3 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.029106Z node 3 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.071623Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:01.071666Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:01.076985Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:01.094566Z node 3 :STATISTICS WARN: tx_init.cpp:287: [72075186224037894] TTxInit::Complete. 
EnableColumnStatistics=false 2025-05-29T15:29:01.095117Z node 3 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:01.181370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.198495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:01.198518Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:01.200264Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:29:01.203894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:01.250563Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.250625Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.250637Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.250645Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.250655Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.250663Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.250674Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.250685Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.250693Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:29:01.304162Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:01.304199Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:01.307522Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:01.329566Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:01.337548Z node 2 :STATISTICS WARN: tx_init.cpp:287: [72075186224038895] TTxInit::Complete. EnableColumnStatistics=false 2025-05-29T15:29:01.444603Z node 3 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:440: [WorkloadService] [Service] Started workload service initialization 2025-05-29T15:29:01.444645Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:100: [WorkloadService] [Service] Subscribed for config changes 2025-05-29T15:29:01.444648Z node 3 ... 
nal: 0 2025-05-29T15:31:16.612875Z node 5 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2275: SessionId: ydb://session/3?node_id=5&id=ODQ0NDc5ZjAtZjNjOGM3MWUtMmNlYzc4OWYtYzMyMjE4MWU=, ActorId: [5:7509890662538593659:4809], ActorState: ExecuteState, TraceId: 01jweaphznc82c3f7gxx5zkar3, Sent query response back to proxy, proxyRequestId: 382, proxyId: [5:7509890404840546771:2273] 2025-05-29T15:31:16.613018Z node 5 :KQP_SESSION INFO: kqp_session_actor.cpp:2320: SessionId: ydb://session/3?node_id=5&id=ODQ0NDc5ZjAtZjNjOGM3MWUtMmNlYzc4OWYtYzMyMjE4MWU=, ActorId: [5:7509890662538593659:4809], ActorState: ReadyState, Session closed due to explicit close event 2025-05-29T15:31:16.613036Z node 5 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1454: SessionId: ydb://session/3?node_id=5&id=ODQ0NDc5ZjAtZjNjOGM3MWUtMmNlYzc4OWYtYzMyMjE4MWU=, ActorId: [5:7509890662538593659:4809], ActorState: ReadyState, Sending to Executer TraceId: 0 8 2025-05-29T15:31:16.613047Z node 5 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1512: SessionId: ydb://session/3?node_id=5&id=ODQ0NDc5ZjAtZjNjOGM3MWUtMmNlYzc4OWYtYzMyMjE4MWU=, ActorId: [5:7509890662538593659:4809], ActorState: ReadyState, Created new KQP executer: [5:7509890662538593688:4809] isRollback: 1 2025-05-29T15:31:16.613061Z node 5 :KQP_SESSION INFO: kqp_session_actor.cpp:2478: SessionId: ydb://session/3?node_id=5&id=ODQ0NDc5ZjAtZjNjOGM3MWUtMmNlYzc4OWYtYzMyMjE4MWU=, ActorId: [5:7509890662538593659:4809], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-05-29T15:31:16.613149Z node 5 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2539: SessionId: ydb://session/3?node_id=5&id=ODQ0NDc5ZjAtZjNjOGM3MWUtMmNlYzc4OWYtYzMyMjE4MWU=, ActorId: [5:7509890662538593659:4809], ActorState: CleanupState, EndCleanup, isFinal: 1 2025-05-29T15:31:16.613157Z node 5 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2551: SessionId: ydb://session/3?node_id=5&id=ODQ0NDc5ZjAtZjNjOGM3MWUtMmNlYzc4OWYtYzMyMjE4MWU=, ActorId: [5:7509890662538593659:4809], ActorState: unknown state, Cleanup temp tables: 0 2025-05-29T15:31:16.613178Z node 5 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2642: SessionId: ydb://session/3?node_id=5&id=ODQ0NDc5ZjAtZjNjOGM3MWUtMmNlYzc4OWYtYzMyMjE4MWU=, ActorId: [5:7509890662538593659:4809], ActorState: unknown state, Session actor destroyed 2025-05-29T15:31:16.862203Z node 4 :KQP_SESSION INFO: kqp_session_actor.cpp:2320: SessionId: ydb://session/3?node_id=4&id=OWZmMjY5YjEtNjg2NzgzMDEtOTYxZDZiZDctYzU4YTI4Yjc=, ActorId: [4:7509890402898630339:2331], ActorState: ReadyState, Session closed due to explicit close event 2025-05-29T15:31:16.862231Z node 4 :KQP_SESSION INFO: kqp_session_actor.cpp:2478: SessionId: ydb://session/3?node_id=4&id=OWZmMjY5YjEtNjg2NzgzMDEtOTYxZDZiZDctYzU4YTI4Yjc=, ActorId: [4:7509890402898630339:2331], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-05-29T15:31:16.862235Z node 4 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2539: SessionId: ydb://session/3?node_id=4&id=OWZmMjY5YjEtNjg2NzgzMDEtOTYxZDZiZDctYzU4YTI4Yjc=, ActorId: [4:7509890402898630339:2331], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-05-29T15:31:16.862239Z node 4 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2551: SessionId: ydb://session/3?node_id=4&id=OWZmMjY5YjEtNjg2NzgzMDEtOTYxZDZiZDctYzU4YTI4Yjc=, ActorId: [4:7509890402898630339:2331], ActorState: unknown state, Cleanup temp tables: 0 2025-05-29T15:31:16.862293Z node 4 
:KQP_SESSION DEBUG: kqp_session_actor.cpp:2642: SessionId: ydb://session/3?node_id=4&id=OWZmMjY5YjEtNjg2NzgzMDEtOTYxZDZiZDctYzU4YTI4Yjc=, ActorId: [4:7509890402898630339:2331], ActorState: unknown state, Session actor destroyed 2025-05-29T15:31:16.862342Z node 4 :HIVE WARN: tx__status.cpp:57: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 6 2025-05-29T15:31:16.862521Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-29T15:31:16.862571Z node 4 :HIVE WARN: tx__status.cpp:57: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 5 2025-05-29T15:31:16.862598Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-29T15:31:16.865717Z node 5 :SYSTEM_VIEWS WARN: sysview_service.cpp:811: Summary delivery problem: service id# [5:7509890404840546593:2101], processor id# 72075186224038891, database# /Root/test-shared 2025-05-29T15:31:17.019402Z node 5 :KQP_SESSION DEBUG: kqp_session_actor.cpp:223: SessionId: ydb://session/3?node_id=5&id=ZDJjMDdmZDgtNzIwZDU0NzMtNDdlNDAxNWMtNWNhZjVlMTQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZDJjMDdmZDgtNzIwZDU0NzMtNDdlNDAxNWMtNWNhZjVlMTQ= 2025-05-29T15:31:17.019461Z node 5 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=5&id=ZDJjMDdmZDgtNzIwZDU0NzMtNDdlNDAxNWMtNWNhZjVlMTQ=, ActorId: [5:7509890666833561079:4823], ActorState: unknown state, session actor bootstrapped 2025-05-29T15:31:17.019710Z node 5 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=5&id=ZDJjMDdmZDgtNzIwZDU0NzMtNDdlNDAxNWMtNWNhZjVlMTQ=, ActorId: [5:7509890666833561079:4823], ActorState: ReadyState, TraceId: 01jweapjcvey4k28nk66pj1xy6, received request, proxyRequestId: 384 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: DECLARE $ids AS List<Tuple<Utf8, Utf8>>; SELECT * FROM `//Root/test-shared/.metadata/initialization/migrations` WHERE (componentId, modificationId) IN $ids rpcActor: [5:7509890666833561082:4826] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2025-05-29T15:31:17.019720Z node 5 :KQP_SESSION DEBUG: kqp_session_actor.cpp:264: SessionId: ydb://session/3?node_id=5&id=ZDJjMDdmZDgtNzIwZDU0NzMtNDdlNDAxNWMtNWNhZjVlMTQ=, ActorId: [5:7509890666833561079:4823], ActorState: ReadyState, TraceId: 01jweapjcvey4k28nk66pj1xy6, request placed into pool from cache: default 2025-05-29T15:31:17.019740Z node 5 :KQP_SESSION DEBUG: kqp_session_actor.cpp:575: SessionId: ydb://session/3?node_id=5&id=ZDJjMDdmZDgtNzIwZDU0NzMtNDdlNDAxNWMtNWNhZjVlMTQ=, ActorId: [5:7509890666833561079:4823], ActorState: ExecuteState, TraceId: 01jweapjcvey4k28nk66pj1xy6, Sending CompileQuery request 2025-05-29T15:31:17.022399Z node 5 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:948: [main][5:7509890409135514937:2570][/Root/test-shared/.metadata/initialization/migrations] Sync is done: cookie# 437, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-05-29T15:31:17.022438Z node 5 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:948: [main][5:7509890409135514937:2570][/Root/test-shared/.metadata/initialization/migrations] Sync is done: cookie# 438, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-05-29T15:31:17.022680Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [5:7509890666833561084:4827], status: UNAVAILABLE, issues:
<main>: Error: Table metadata loading, code: 1050
<main>:2:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/initialization/migrations]
<main>: Error: LookupError, code: 2005 2025-05-29T15:31:17.022806Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=5&id=ZDJjMDdmZDgtNzIwZDU0NzMtNDdlNDAxNWMtNWNhZjVlMTQ=, ActorId: [5:7509890666833561079:4823], ActorState: ExecuteState, TraceId: 01jweapjcvey4k28nk66pj1xy6, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2025-05-29T15:31:17.022824Z node 5 :KQP_SESSION INFO: kqp_session_actor.cpp:2478: SessionId: ydb://session/3?node_id=5&id=ZDJjMDdmZDgtNzIwZDU0NzMtNDdlNDAxNWMtNWNhZjVlMTQ=, ActorId: [5:7509890666833561079:4823], ActorState: ExecuteState, TraceId: 01jweapjcvey4k28nk66pj1xy6, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-05-29T15:31:17.022827Z node 5 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2539: SessionId: ydb://session/3?node_id=5&id=ZDJjMDdmZDgtNzIwZDU0NzMtNDdlNDAxNWMtNWNhZjVlMTQ=, ActorId: [5:7509890666833561079:4823], ActorState: ExecuteState, TraceId: 01jweapjcvey4k28nk66pj1xy6, EndCleanup, isFinal: 0 2025-05-29T15:31:17.022945Z node 5 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2275: SessionId: ydb://session/3?node_id=5&id=ZDJjMDdmZDgtNzIwZDU0NzMtNDdlNDAxNWMtNWNhZjVlMTQ=, ActorId: [5:7509890666833561079:4823], ActorState: ExecuteState, TraceId: 01jweapjcvey4k28nk66pj1xy6, Sent query response back to proxy, proxyRequestId: 384, proxyId: [5:7509890404840546771:2273] 2025-05-29T15:31:17.023321Z node 5 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 2 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/initialization/migrations]" end_position { row: 2 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=5&id=ZDJjMDdmZDgtNzIwZDU0NzMtNDdlNDAxNWMtNWNhZjVlMTQ=" tx_control { begin_tx { serializable_read_write { } } } query { yql_text: "DECLARE $ids AS List<Tuple<Utf8, Utf8>>;\nSELECT * FROM `//Root/test-shared/.metadata/initialization/migrations`\nWHERE (componentId, modificationId) IN $ids\n" } parameters { key: "$ids" value { type { list_type { item { tuple_type { elements { type_id: UTF8 } elements { type_id: UTF8 } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } } } } } ; 2025-05-29T15:31:17.023424Z node 5 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot restore objects: cannot execute yql request; 2025-05-29T15:31:17.023496Z node 5 :KQP_SESSION INFO: kqp_session_actor.cpp:2320: SessionId: ydb://session/3?node_id=5&id=ZDJjMDdmZDgtNzIwZDU0NzMtNDdlNDAxNWMtNWNhZjVlMTQ=, ActorId: [5:7509890666833561079:4823], ActorState: ReadyState, Session closed due to explicit close event 2025-05-29T15:31:17.023507Z node 5 :KQP_SESSION INFO: kqp_session_actor.cpp:2478: SessionId: ydb://session/3?node_id=5&id=ZDJjMDdmZDgtNzIwZDU0NzMtNDdlNDAxNWMtNWNhZjVlMTQ=, ActorId: [5:7509890666833561079:4823], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-05-29T15:31:17.023509Z node 5 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2539: SessionId:
ydb://session/3?node_id=5&id=ZDJjMDdmZDgtNzIwZDU0NzMtNDdlNDAxNWMtNWNhZjVlMTQ=, ActorId: [5:7509890666833561079:4823], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-05-29T15:31:17.023511Z node 5 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2551: SessionId: ydb://session/3?node_id=5&id=ZDJjMDdmZDgtNzIwZDU0NzMtNDdlNDAxNWMtNWNhZjVlMTQ=, ActorId: [5:7509890666833561079:4823], ActorState: unknown state, Cleanup temp tables: 0 2025-05-29T15:31:17.023529Z node 5 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2642: SessionId: ydb://session/3?node_id=5&id=ZDJjMDdmZDgtNzIwZDU0NzMtNDdlNDAxNWMtNWNhZjVlMTQ=, ActorId: [5:7509890666833561079:4823], ActorState: unknown state, Session actor destroyed (NThreading::TFutureException) library/cpp/threading/future/core/future-inl.h:58: wait timeout |74.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |74.6%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |74.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |74.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink >> KqpImmediateEffects::InsertDuplicates-UseSink >> TExternalTableTestReboots::CreateDroppedExternalTableAndDropWithReboots [GOOD] >> KqpImmediateEffects::ManyFlushes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Conflict Test command err: Trying to start YDB, gRPC: 65142, MsgBus: 4103 2025-05-29T15:31:13.509958Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890646689603675:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:13.509975Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e1d/r3tmp/tmpvDPWjK/pdisk_1.dat 2025-05-29T15:31:13.552561Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65142, node 1 2025-05-29T15:31:13.567031Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:13.567039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:13.567040Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:13.567077Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4103 2025-05-29T15:31:13.610804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:13.610828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:13.611916Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4103 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:13.635094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.639483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.655029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.669744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.679905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.799881Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890646689605264:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.799906Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.835510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.841896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.853657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.860806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.867585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.875104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.882285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.898141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890646689605917:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.898172Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.898177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890646689605922:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.898914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:13.902535Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890646689605924:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:13.976532Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890646689605975:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:14.053213Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890646689605991:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:14.053288Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjIwNmVhOGEtMzY3MjhiZWEtZjQzOTJlMmMtODM5NWRkMQ==, ActorId: [1:7509890646689605261:2401], ActorState: ExecuteState, TraceId: 01jweapfb9ajtknbaa1bwghyc4, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:14.053849Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F86816F1AC2 14. ??:0: ?? @ 0x7F868178384F Trying to start YDB, gRPC: 22716, MsgBus: 32516 2025-05-29T15:31:17.506890Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890664418324888:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:17.506921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e1d/r3tmp/tmpOct6k4/pdisk_1.dat 2025-05-29T15:31:17.559808Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890664418324857:2079] 1748532677506779 != 1748532677506782 2025-05-29T15:31:17.561693Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22716, node 1 2025-05-29T15:31:17.576236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:17.576253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:17.576255Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:17.576286Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:32516 2025-05-29T15:31:17.608694Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:17.608724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:17.609817Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32516 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:17.642014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.646983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.662192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.720175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.730562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.798078Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890664418326503:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.798098Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.832881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.838985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.850781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.857716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.864813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.871478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.878649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.887496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890664418327156:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.887514Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.887533Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890664418327161:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.888054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:17.892531Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890664418327163:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:17.992766Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890664418327214:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } VERIFY failed (2025-05-29T15:31:18.100638Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:31:18.099808Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890664418327230:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:18.099944Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Yjg1ODk2NjYtYWFjY2E5NS01OTExZTI4NC0xZGU2MGY4Ng==, ActorId: [1:7509890664418326485:2401], ActorState: ExecuteState, TraceId: 01jweapk7zc7w474s64j6npeg6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FDB43A75AC2 14. ??:0: ?? @ 0x7FDB43B0784F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateDroppedExternalTableAndDropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:08.764643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:08.764662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:08.764666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:08.764670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:08.764673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-05-29T15:31:08.764676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:08.764682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:08.764692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:08.764767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:08.764827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:08.774351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:08.774369Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:08.774445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:08.776490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:08.776514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:08.776539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:08.778769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:08.778836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:08.778911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:08.779071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:08.779556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:08.779586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:08.779754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:08.779760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:08.779783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:08.779789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, 
domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:08.779793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:08.779806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:08.780842Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:08.794160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:08.794230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.794281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:08.794326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:08.794334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.794969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:08.794989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:08.795034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.795046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:08.795050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:08.795053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:08.795427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.795441Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:08.795447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:08.795696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.795702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.795707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:08.795711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:08.796168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:08.796446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:08.796478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:08.796619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:08.796636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:08.796650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:08.796693Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:20.938441Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:31:20.938452Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:31:20.938463Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:20.938466Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:206:2207], at schemeshard: 72057594046678944, txId: 1006, path id: 1 2025-05-29T15:31:20.938469Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:206:2207], at schemeshard: 72057594046678944, txId: 1006, path id: 5 2025-05-29T15:31:20.938474Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:206:2207], at schemeshard: 72057594046678944, txId: 1006, path id: 3 2025-05-29T15:31:20.938503Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2025-05-29T15:31:20.938508Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1006:0 ProgressState 2025-05-29T15:31:20.938516Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1006:0 progress is 1/1 2025-05-29T15:31:20.938518Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-05-29T15:31:20.938522Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1006:0 progress is 1/1 2025-05-29T15:31:20.938524Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-05-29T15:31:20.938526Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2025-05-29T15:31:20.938529Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-05-29T15:31:20.938532Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1006:0 2025-05-29T15:31:20.938535Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1006:0 2025-05-29T15:31:20.938542Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:31:20.938545Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 
2025-05-29T15:31:20.938547Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1006, publications: 3, subscribers: 0 2025-05-29T15:31:20.938550Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-05-29T15:31:20.938552Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:31:20.938554Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-05-29T15:31:20.938596Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:31:20.938602Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:31:20.938605Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1006 2025-05-29T15:31:20.938608Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:31:20.938612Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:31:20.938653Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:31:20.938657Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:31:20.938662Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:31:20.938724Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:31:20.938730Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:31:20.938733Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2025-05-29T15:31:20.938755Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 15 2025-05-29T15:31:20.938760Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:20.938917Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:31:20.938928Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:31:20.938934Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2025-05-29T15:31:20.938937Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:31:20.938939Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:20.938946Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2025-05-29T15:31:20.939324Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-05-29T15:31:20.939343Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:31:20.939491Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-05-29T15:31:20.939548Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2025-05-29T15:31:20.939582Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2025-05-29T15:31:20.939587Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2025-05-29T15:31:20.939631Z node 50 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2025-05-29T15:31:20.939643Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2025-05-29T15:31:20.939646Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [50:449:2439] TestWaitNotification: OK eventTxId 1006 2025-05-29T15:31:20.939694Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:20.939712Z node 50 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 26us result status StatusPathDoesNotExist 2025-05-29T15:31:20.939738Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1RWR2 Test command err: Trying to start YDB, gRPC: 20768, MsgBus: 16198 2025-05-29T15:31:13.592802Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890648219481402:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:13.592998Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e0a/r3tmp/tmpbrO6Rl/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20768, node 1 2025-05-29T15:31:13.651853Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:13.652105Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890648219481362:2079] 1748532673592662 != 1748532673592665 2025-05-29T15:31:13.654024Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:13.654036Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:13.654038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:13.654077Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16198 2025-05-29T15:31:13.693870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:13.693893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:13.694995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16198 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:13.718045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.723994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.788199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.810560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.822303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.918876Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890648219482995:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.918905Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.965434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.973408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.987519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.041836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.050234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.064413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.078464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.135770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890652514450947:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.135790Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890652514450952:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.135795Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.136381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:14.139932Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890652514450954:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:14.192996Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890652514451005:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:14.276800Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890652514451021:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:14.276918Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzdjOTc0YTQtYzQ5ZThjOWItZmI5ZDQ2OWQtNTQ0OTI5MGY=, ActorId: [1:7509890648219482977:2401], ActorState: ExecuteState, TraceId: 01jweapfjq9hz2gyv3mst7cmjj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:14.277578Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F1066E01AC2 14. ??:0: ?? @ 0x7F1066E9384F Trying to start YDB, gRPC: 11505, MsgBus: 17088 2025-05-29T15:31:17.654039Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890667724570661:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:17.654061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e0a/r3tmp/tmpuCHndB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11505, node 1 2025-05-29T15:31:17.712441Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:17.715089Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:17.715101Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:17.715102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:17.715146Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17088 TClient is connected to server localhost:17088 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:31:17.755126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:17.755153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-29T15:31:17.756222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:17.780828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.784500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.803566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.822985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.834046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.985998Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890667724572260:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.986039Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:18.032729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.039387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.046771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.054806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.069243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.083559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.097929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.113844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890672019540209:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:18.113878Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890672019540214:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:18.113908Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:18.114713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:18.124338Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890672019540216:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:18.184116Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890672019540268:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } VERIFY failed (2025-05-29T15:31:18.294458Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:31:18.293598Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890672019540284:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:18.293735Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzI2YTJlYzQtZDQ3N2FkNmMtMTFjNmM4Ny1iN2ZlYjdiZQ==, ActorId: [1:7509890667724572242:2401], ActorState: ExecuteState, TraceId: 01jweapkf14t4v94p3a7ygx0vy, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F34D131AAC2 14. ??:0: ?? @ 0x7F34D13AC84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace+UseSink Test command err: Trying to start YDB, gRPC: 8880, MsgBus: 30038 2025-05-29T15:31:13.578384Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890649424642931:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:13.578416Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e12/r3tmp/tmpe7TQJo/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8880, node 1 2025-05-29T15:31:13.632485Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:13.632858Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890649424642899:2079] 1748532673578256 != 1748532673578259 2025-05-29T15:31:13.639941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:13.639953Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:13.639955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:13.639998Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30038 TClient is connected to server localhost:30038 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:31:13.679497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:13.679523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:13.680580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:13.708882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.716453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.779070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.798150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.807826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.948734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890649424644532:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.948755Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.983726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.989707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.000884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.008002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.014518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.021829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.028967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.045133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890653719612479:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.045170Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890653719612484:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.045174Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.045819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:14.049414Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890653719612486:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:14.141539Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890653719612537:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:14.204697Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890653719612553:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:14.204793Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWExNWQzYTgtMTNmMjdiOWItZTM0NDlkYTUtM2M3MDM1ZTk=, ActorId: [1:7509890649424644514:2401], ActorState: ExecuteState, TraceId: 01jweapffw3j55dc566m5vt8xh, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:14.205446Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7EFE641BEAC2 14. ??:0: ?? @ 0x7EFE6425084F Trying to start YDB, gRPC: 27586, MsgBus: 30188 2025-05-29T15:31:17.578584Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890667680428142:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:17.578603Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e12/r3tmp/tmpat4XEA/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27586, node 1 2025-05-29T15:31:17.639111Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:17.640999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:17.641011Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:17.641013Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:17.641057Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30188 TClient is connected to server localhost:30188 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:31:17.679902Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:17.679922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:17.681033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:17.712817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.718175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.780001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.802065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.814095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.888250Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890667680429761:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.888277Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.941966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.949610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.956949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.970423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.984505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.998267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.012921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.028558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890671975397709:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:18.028584Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:18.028635Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890671975397714:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:18.029267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:18.032484Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890671975397716:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:18.123005Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890671975397767:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:18.226278Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890671975397783:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:18.226415Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTViMjMyZDMtODUyNTIwZi1kODI4MDNjNS0xMDk5Njk1Ng==, ActorId: [1:7509890667680429743:2401], ActorState: ExecuteState, TraceId: 01jweapkcc6fandmm18zwmfabz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:18.235087Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FB3693A7AC2 14. ??:0: ?? @ 0x7FB36943984F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink Test command err: Trying to start YDB, gRPC: 2809, MsgBus: 2169 2025-05-29T15:31:13.487944Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890650249043045:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:13.487961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e2e/r3tmp/tmpF087bF/pdisk_1.dat 2025-05-29T15:31:13.535456Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890650249043013:2079] 1748532673487843 != 1748532673487846 2025-05-29T15:31:13.537395Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2809, node 1 2025-05-29T15:31:13.548048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:13.548060Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:13.548061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:13.548097Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2169 2025-05-29T15:31:13.588975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:13.589013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:13.590085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2169 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:13.612527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.619557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.637152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.656261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.666690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.779884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890650249044641:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.779911Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.822470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.829339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.840098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.846855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.901257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.910234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.917142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.933283Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890650249045296:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.933300Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890650249045301:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.933309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.933932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:13.937499Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890650249045303:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:13.991145Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890650249045354:3394] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:14.079618Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890650249045370:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:14.079722Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWFmMWEzNzYtOTk5ZWMxMzEtZGFkMDc5YTAtZGY0YzI5YTc=, ActorId: [1:7509890650249044623:2401], ActorState: ExecuteState, TraceId: 01jweapfcca4ee5fsmjkpmw0x1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:14.080391Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F5F5BF11AC2 14. ??:0: ?? @ 0x7F5F5BFA384F Trying to start YDB, gRPC: 12292, MsgBus: 23683 2025-05-29T15:31:17.605401Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890665032413065:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:17.605430Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e2e/r3tmp/tmpEpKMs8/pdisk_1.dat 2025-05-29T15:31:17.663593Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12292, node 1 2025-05-29T15:31:17.675740Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:17.675752Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:17.675754Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:17.675798Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23683 2025-05-29T15:31:17.706230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:17.706258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:17.707330Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23683 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:17.741561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.747504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.811440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.831689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.842364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.918729Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890665032414665:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.918766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.968946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.976610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.031746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.040743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.054825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.069301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.082945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.104670Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890669327382616:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:18.104701Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:18.104743Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890669327382621:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:18.105636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:18.109722Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890669327382623:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:18.168667Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890669327382674:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:18.289907Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890669327382690:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:18.290027Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzdjM2MwYzItMzg0MGYyOTktNmE4NDBhNDktODIxNGQ2YjU=, ActorId: [1:7509890665032414647:2401], ActorState: ExecuteState, TraceId: 01jweapkerffvr2nenft611pfb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:18.290818Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F1CD08F0AC2 14. ??:0: ?? @ 0x7F1CD098284F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdateSelect Test command err: Trying to start YDB, gRPC: 23263, MsgBus: 6870 2025-05-29T15:31:13.519458Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890650661535790:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:13.519475Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e4f/r3tmp/tmpmU954e/pdisk_1.dat 2025-05-29T15:31:13.576639Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23263, node 1 2025-05-29T15:31:13.592077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:13.592087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:13.592088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:13.592126Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6870 2025-05-29T15:31:13.620731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:13.620746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:13.621820Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6870 WaitRootIsUp 'Root'... 
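------- [EDITOR NOTE] The root cause reported by the compile actor is the inner issue ": Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1", wrapped in the outer ": Fatal: Execution, code: 1060". Read plainly: some compilation pass asked an expression node for a child index it does not have, and an ensure-style bounds check turned that into a fatal compile issue rather than a crash. Below is a minimal sketch of that kind of checked accessor, using a toy TNode stand-in; this is an assumption about the mechanism, not the actual yql_expr.h code, which uses YQL's own ensure/issue machinery.

#include <memory>
#include <stdexcept>
#include <vector>

// Toy stand-in for an expression-tree node; illustrative only.
struct TNode {
    std::vector<std::shared_ptr<TNode>> Children;

    // Bounds-checked child access of the sort that yields a
    // "<file>:<line>: index out of range" diagnostic when violated.
    const TNode& Child(size_t index) const {
        if (index >= Children.size()) {
            throw std::out_of_range("index out of range");
        }
        return *Children[index];
    }
};

The practical reading: the failure is deterministic and happens while compiling the shared CreateSampleTables setup queries (kqp_ut_common.cpp:365 in the trace), before any test-specific logic runs.
-------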
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:13.653484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.661270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.676550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.692655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.701533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.844469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890650661537402:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.844498Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.884931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.892102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.902949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.909970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.917144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.931392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.945387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.962052Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890650661538054:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.962064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890650661538059:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.962077Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.962696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:13.965032Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890650661538061:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:14.062114Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890654956505408:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:14.136753Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890654956505424:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:14.136868Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2FjZGNiNjktMWQyMmQzNWUtMzkwMzdkZmMtYmZkZjg5NDU=, ActorId: [1:7509890650661537384:2401], ActorState: ExecuteState, TraceId: 01jweapfd9acjj3bs04hxwyn3y, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:14.137438Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FE47F42BAC2 14. ??:0: ?? @ 0x7FE47F4BD84F Trying to start YDB, gRPC: 19491, MsgBus: 7774 2025-05-29T15:31:17.543457Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890667573653951:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:17.543480Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e4f/r3tmp/tmp4072wP/pdisk_1.dat 2025-05-29T15:31:17.606449Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19491, node 1 2025-05-29T15:31:17.618200Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:17.618210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:17.618212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:17.618243Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7774 2025-05-29T15:31:17.644486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:17.644517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:17.645635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7774 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:17.668583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.676402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.693896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.752358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.764427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.849536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890667573655551:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.849561Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.881786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.887850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.900427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.913797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.920955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.935141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.949650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.961810Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890667573656203:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.961841Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.961849Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890667573656208:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.962702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:17.969875Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890667573656210:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:18.037133Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890671868623557:3399] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:18.141486Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890671868623573:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:18.142847Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDIxNjY4ZWYtMzYwOWVmOWEtOTdjNDBiZjctNjFmOGQzOTg=, ActorId: [1:7509890667573655533:2401], ActorState: ExecuteState, TraceId: 01jweapka9bvedy794chbhsrng, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:18.150826Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F342D469AC2 14. ??:0: ?? @ 0x7F342D4FB84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink Test command err: Trying to start YDB, gRPC: 19911, MsgBus: 30877 2025-05-29T15:31:13.467154Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890649489288663:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:13.467168Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e3e/r3tmp/tmp3OJM5L/pdisk_1.dat 2025-05-29T15:31:13.520758Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19911, node 1 2025-05-29T15:31:13.532189Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:13.532203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:13.532205Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:13.532245Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30877 2025-05-29T15:31:13.568464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:13.568487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:13.569503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30877 WaitRootIsUp 'Root'... 
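------- [EDITOR NOTE] The identical signature repeats across otherwise unrelated cases (InteractiveTxWithReadAtTheEnd-UseSink, ImmediateUpdateSelect, ForceImmediateEffectsExecution+UseSink, and more below), all dying in the common CreateSampleTables step, which points at the query compiler in the build under test rather than at the individual tests. To iterate on a single case locally, rerunning one test through ya's test filter should reproduce it, e.g. ./ya make -t ydb/core/kqp/ut/effects -F 'KqpImmediateEffects::ImmediateUpdateSelect' -- the -F filter flag is assumed here from common ya usage; check ya make --help in your checkout for the exact spelling.
-------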
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:13.597546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.601847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.618099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.638195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.648326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.820644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890649489290260:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.820693Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.864410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.872018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.881970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.888889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.896081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.910232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.924101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.940252Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890649489290914:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.940279Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890649489290919:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.940280Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.940854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:13.944208Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890649489290921:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:14.024797Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890653784258268:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:14.097918Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890653784258284:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:14.098009Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDkxZTgxOWQtMTkyYjMzZTMtNWQ2ZGU1MmEtYWZhOTEwZTQ=, ActorId: [1:7509890649489290242:2401], ActorState: ExecuteState, TraceId: 01jweapfckddp5p0qhpctej714, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:14.098621Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FA3F2985AC2 14. ??:0: ?? @ 0x7FA3F2A1784F Trying to start YDB, gRPC: 7220, MsgBus: 4853 2025-05-29T15:31:17.525280Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890666427293819:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:17.525297Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e3e/r3tmp/tmpijN3gp/pdisk_1.dat 2025-05-29T15:31:17.576556Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7220, node 1 2025-05-29T15:31:17.595684Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:17.595699Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:17.595701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:17.595748Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4853 2025-05-29T15:31:17.626532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:17.626568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:17.627604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4853 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:17.657599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.665369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.726567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.744766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.755129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.828755Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890666427295410:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.828792Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.875574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.882228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.893201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.899916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.906968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.921419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.936043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.951561Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890666427296064:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.951595Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.951686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890666427296069:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.952440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:17.955643Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890666427296071:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:18.008775Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890670722263418:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:18.097313Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890670722263434:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:18.098839Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTRiOGY0NDQtNjI3NmIyN2MtMzNiNTE0MWUtMjFjODYxNA==, ActorId: [1:7509890666427295392:2401], ActorState: ExecuteState, TraceId: 01jweapk9z1m48kwyezdd5v9jf, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:18.106845Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F6C9047FAC2 14. ??:0: ?? @ 0x7F6C9051184F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1RWR2 Test command err: Trying to start YDB, gRPC: 9279, MsgBus: 21230 2025-05-29T15:31:13.464805Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890647766777251:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:13.464827Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e51/r3tmp/tmp6vcrzf/pdisk_1.dat 2025-05-29T15:31:13.525569Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9279, node 1 2025-05-29T15:31:13.539478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:13.539493Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:13.539495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:13.539539Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21230 2025-05-29T15:31:13.566116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:13.566142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:13.567210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21230 WaitRootIsUp 'Root'... 
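
The stack trace above (frames 0-3) shows the abort path: the compile error is carried in a failed NYdb::TStatus, and the test helper void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &) at kqp_ut_common.h:375 panics on (result.IsSuccess()). A minimal compilable sketch of such a helper follows; it is a reconstruction for illustration only, using a stand-in status type because the real NYdb::TStatus lives in the YDB C++ SDK.

    #include <cstdio>
    #include <cstdlib>
    #include <string>

    // Stand-in for NYdb::TStatus, reduced to the two accessors the sketch needs.
    struct TStatusSketch {
        bool Success;
        std::string Issues;  // e.g. ": Fatal: Execution, code: 1060"
        bool IsSuccess() const { return Success; }
        const std::string& GetIssues() const { return Issues; }
    };

    // Counterpart of AssertSuccessResult: abort the test and echo the issues,
    // which is why the compile error reappears inside the VERIFY message above.
    void AssertSuccessResultSketch(const TStatusSketch& result) {
        if (!result.IsSuccess()) {
            std::fprintf(stderr, "assertion failed: (result.IsSuccess())\n%s\n",
                         result.GetIssues().c_str());
            std::abort();
        }
    }
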
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:13.601760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.606638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.667252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.683795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.693863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.776590Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890647766778839:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.776625Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.820492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.827522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.840164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.847074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.853612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.860898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.868114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.884307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890647766779492:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.884333Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.884352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890647766779497:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.884964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:13.888273Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890647766779499:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:13.945756Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890647766779550:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:14.022184Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890647766779566:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:14.022279Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2MwZDhlOC05YWE0ZTgtNDJmZWQwNzctMmE3ZWFhMmE=, ActorId: [1:7509890647766778820:2400], ActorState: ExecuteState, TraceId: 01jweapfav54f8yh5949yqf8pr, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:14.022924Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7EFF9626BAC2 14. ??:0: ?? @ 0x7EFF962FD84F Trying to start YDB, gRPC: 21045, MsgBus: 61067 2025-05-29T15:31:17.521190Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890667966297672:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:17.521455Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e51/r3tmp/tmp4kfDqQ/pdisk_1.dat 2025-05-29T15:31:17.570620Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21045, node 1 2025-05-29T15:31:17.585571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:17.585582Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:17.585584Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:17.585620Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:61067 TClient is connected to server localhost:61067 WaitRootIsUp 'Root'... 
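
Every failing run carries the same pair of issues: ": Fatal: Execution, code: 1060" wrapping ": Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1". The inner issue is the signature of a bounds-checked accessor in the YQL expression tree rejecting an out-of-range child index. Below is an illustrative sketch of that pattern; the names are hypothetical and the real TExprNode API in yql_expr.h differs.

    #include <memory>
    #include <stdexcept>
    #include <vector>

    struct TExprNodeSketch {
        std::vector<std::shared_ptr<TExprNodeSketch>> Children;

        // Requesting a child the node does not have surfaces as
        // "index out of range" and is reported upward as an issue.
        const TExprNodeSketch& Child(size_t index) const {
            if (index >= Children.size()) {
                throw std::out_of_range("index out of range");
            }
            return *Children[index];
        }
    };
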
TClient::Ls request: Root 2025-05-29T15:31:17.621959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:17.621984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:17.623165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:17.648911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.653151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.717976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.735783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.746502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.819551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890667966299269:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.819591Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.852332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.858426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.864629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.871428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.878661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.886243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.900482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.916261Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890667966299923:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.916287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890667966299928:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.916291Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.916884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:17.920210Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890667966299930:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:18.012996Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890672261267277:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:18.126506Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890672261267293:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:18.126612Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTk3ZWM4NDgtZmI0OTY1ZDYtZmIwZmU2MDktNGQwOTFiZjQ=, ActorId: [1:7509890667966299251:2401], ActorState: ExecuteState, TraceId: 01jweapk8va7rz5q0zqs2rrb9x, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:18.130933Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F0013DE7AC2 14. ??:0: ?? @ 0x7F0013E7984F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::Interactive Test command err: Trying to start YDB, gRPC: 64122, MsgBus: 30753 2025-05-29T15:31:13.572403Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890648512727291:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:13.572427Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e0e/r3tmp/tmpLjSkpQ/pdisk_1.dat 2025-05-29T15:31:13.627744Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64122, node 1 2025-05-29T15:31:13.642333Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:13.642347Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:13.642349Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:13.642387Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30753 2025-05-29T15:31:13.673346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:13.673375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:13.674437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30753 WaitRootIsUp 'Root'... 
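
The TX_PROXY "path exist, request accepts it" message in each run is benign: the pool creator retries after "Transaction ... completed, doublechecking", and a create request that finds the path already present is accepted rather than failed. A sketch of that idempotent-create convention is shown below; the helper is hypothetical and not part of the YDB tree.

    #include <functional>

    enum class ECreateOutcome { Created, AlreadyExists, Failed };

    // Treat a concurrent creation as success, mirroring the
    // "path exist, request accepts it" behaviour in the log.
    bool EnsureCreated(const std::function<ECreateOutcome()>& create) {
        switch (create()) {
            case ECreateOutcome::Created:
            case ECreateOutcome::AlreadyExists:
                return true;
            case ECreateOutcome::Failed:
                return false;
        }
        return false;
    }
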
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:13.704772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.710696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.772754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.792454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.803204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:13.896431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890648512728907:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.896454Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:13.936506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.943392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.952354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.966422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.980632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:13.994528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.008964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.023946Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890652807696855:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.023977Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.024010Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890652807696860:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.024624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:14.028362Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890652807696862:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:14.119273Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890652807696913:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:14.198784Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890652807696929:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:14.198893Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjZjYjY4ODktN2EzMDYzYzQtZGE5YzhiNmUtZWRiNDM0YTE=, ActorId: [1:7509890648512728889:2401], ActorState: ExecuteState, TraceId: 01jweapff738ma8a0bkp9dyrmw, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:14.199625Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FB235CE4AC2 14. ??:0: ?? @ 0x7FB235D7684F Trying to start YDB, gRPC: 3850, MsgBus: 7880 2025-05-29T15:31:17.703048Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890667607921630:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:17.703074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e0e/r3tmp/tmpji4uXx/pdisk_1.dat 2025-05-29T15:31:17.758823Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3850, node 1 2025-05-29T15:31:17.774685Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:17.774697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:17.774699Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:17.774731Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7880 2025-05-29T15:31:17.804219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:17.804252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:17.805372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7880 WaitRootIsUp 'Root'... 
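
Each run opens with the same readiness probe: TClient::Ls("Root") is polled until the scheme shard answers SUCCESS, at which point the log prints "WaitRootIsUp 'Root' success." A generic sketch of such a poll loop follows; the probe callable is an assumption, and the real TClient API in the test harness differs.

    #include <chrono>
    #include <thread>

    // Poll the probe until it reports the root is up or attempts run out.
    template <typename TProbe>
    bool WaitRootIsUpSketch(TProbe probe, int attempts = 100) {
        for (int i = 0; i < attempts; ++i) {
            if (probe()) {
                return true;  // corresponds to "WaitRootIsUp 'Root' success."
            }
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
        }
        return false;
    }
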
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:17.834596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.843575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.906723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.927007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.940067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:18.079287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890671902890537:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:18.079315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:18.136169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.144795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.152703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.167229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.181088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.195311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.209379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:18.231638Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890671902891190:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:18.231675Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:18.231698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890671902891195:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:18.232810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:18.235308Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890671902891197:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:18.310714Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890671902891248:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:18.405246Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890671902891264:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:18.406832Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODZmYjMxZGEtZjkwMzhhOTItYjUzMzc0YjUtYmRhMDVkMjY=, ActorId: [1:7509890671902890519:2401], ActorState: ExecuteState, TraceId: 01jweapkjp6j7nzampcpbzdwbr, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:18.408864Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F36144BBAC2 14. ??:0: ?? @ 0x7F361454D84F >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink >> KqpEffects::InsertAbort_Select_Duplicates+UseSink >> KqpImmediateEffects::DeleteAfterUpsert >> TPersQueueTest::FetchRequest >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction+UseSink >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace-UseSink Test command err: Trying to start YDB, gRPC: 22920, MsgBus: 20453 2025-05-29T15:31:14.257077Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890650967174747:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:14.257106Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dfc/r3tmp/tmpV9cWeH/pdisk_1.dat 2025-05-29T15:31:14.308046Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890650967174719:2079] 1748532674256959 != 1748532674256962 2025-05-29T15:31:14.309653Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22920, node 1 2025-05-29T15:31:14.325898Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:14.325911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:14.325913Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:14.325959Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20453 2025-05-29T15:31:14.358974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:14.359004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:14.360138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient 
is connected to server localhost:20453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:14.381724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:14.386807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:14.450672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:14.469144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:14.479622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:14.613208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890650967176350:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.613237Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.657316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.712724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.767770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.777672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.784941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.792085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.806329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.821346Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890650967177004:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.821363Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.821382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890650967177009:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.821923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:14.826108Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890650967177011:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:14.901054Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890650967177062:3393] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:15.005317Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890650967177078:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:15.005454Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjRjZTI1Y2EtMTkwZWE2YjItZWEyMDQwMDktNTY2OGIwODU=, ActorId: [1:7509890650967176332:2401], ActorState: ExecuteState, TraceId: 01jweapg85evytq6jb6ck85w0z, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:15.006195Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F660ADD8AC2 14. ??:0: ?? @ 0x7F660AE6A84F Trying to start YDB, gRPC: 20704, MsgBus: 18884 2025-05-29T15:31:18.598600Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890670331801342:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:18.598650Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dfc/r3tmp/tmpgpQB9v/pdisk_1.dat 2025-05-29T15:31:18.696007Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20704, node 1 2025-05-29T15:31:18.698608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:18.698643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:18.699989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:18.710589Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:18.710612Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:18.710614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:18.710702Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18884 TClient is connected to server localhost:18884 WaitRootIsUp 'Root'... 
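
By this point the same yql_expr.h:1874 signature has aborted every failing test block above, which points at one compiler regression rather than several flaky tests. A small stand-alone triage tool (not part of the YDB tree) that counts affected "Test command err:" blocks when fed a log like this on stdin:

    #include <iostream>
    #include <string>

    int main() {
        const std::string marker = "Test command err:";
        const std::string signature = "yql_expr.h:1874: index out of range";
        int tests = 0, hits = 0;
        bool inTest = false, hit = false;
        for (std::string line; std::getline(std::cin, line); ) {
            if (line.find(marker) != std::string::npos) {
                if (inTest && hit) ++hits;  // close the previous block
                ++tests;
                inTest = true;
                hit = false;
            }
            if (line.find(signature) != std::string::npos) {
                hit = true;
            }
        }
        if (inTest && hit) ++hits;  // close the final block
        std::cout << hits << " of " << tests << " test blocks hit the signature\n";
        return 0;
    }
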
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:18.765919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:18.770336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:18.833334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:18.853710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:18.864654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:18.987847Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890670331802806:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:18.987873Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:19.043617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:19.051901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:19.062393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:19.078838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:19.090618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:19.104862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:19.119745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:19.135974Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890674626770756:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:19.136006Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:19.137353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890674626770761:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:19.138209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:19.146133Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890674626770763:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:19.197625Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890674626770814:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:19.279681Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890674626770830:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:19.279789Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2E3N2Q5NDEtZjY3MjM3ZGYtNDU5MWVkMS1mNGU0YWE4OA==, ActorId: [1:7509890670331802788:2401], ActorState: ExecuteState, TraceId: 01jweapmez5fk9k23pe1nx8ad1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:19.280451Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FC516E35AC2 14. ??:0: ?? @ 0x7FC516EC784F >> KqpInplaceUpdate::SingleRowSimple+UseSink >> KqpImmediateEffects::ConflictingKeyRW1WR2 >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink >> KqpImmediateEffects::UpsertDuplicates >> KqpEffects::InsertRevert_Literal_Success >> BsControllerConfig::AddDriveSerialMassive [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop [FAIL] >> DataShardVolatile::DistributedWriteLostPlanThenSplit |74.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/query_replay_yt/query_replay_yt |74.7%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |74.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/query_replay/ydb_query_replay |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |74.7%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::AddDriveSerialMassive [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:215:2066] recipient: [1:193:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:215:2066] recipient: [1:193:2076] Leader for TabletID 72057594037932033 is [1:223:2078] sender: [1:224:2066] recipient: [1:193:2076] 2025-05-29T15:31:15.452388Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:31:15.453069Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:31:15.453135Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:31:15.453221Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:31:15.453447Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:31:15.453485Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:31:15.453489Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:31:15.453523Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:31:15.454252Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:31:15.454277Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:31:15.454308Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:31:15.454340Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:31:15.454356Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:31:15.454366Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:223:2078] sender: [1:247:2066] recipient: [1:20:2067] 2025-05-29T15:31:15.464719Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:31:15.464764Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:31:15.475063Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:31:15.475116Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:31:15.475133Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:31:15.475144Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:31:15.475185Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:31:15.475193Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:31:15.475200Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:31:15.475212Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:31:15.485502Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:31:15.485574Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:31:15.495875Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:31:15.495956Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:31:15.496185Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:31:15.496192Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:31:15.496236Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:31:15.496245Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:31:15.498366Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-05-29T15:31:15.508569Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-05-29T15:31:15.508714Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:215:2066] recipient: [11:186:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:215:2066] recipient: [11:186:2076] Leader for TabletID 72057594037932033 is [11:224:2078] sender: [11:226:2066] recipient: [11:186:2076] 2025-05-29T15:31:17.452374Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:31:17.452537Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:31:17.452575Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:31:17.452699Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:31:17.452800Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:31:17.452832Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:31:17.452836Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:31:17.452866Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:31:17.453543Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:31:17.453562Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:31:17.453580Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:31:17.453592Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:31:17.453600Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:31:17.453607Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:224:2078] sender: [11:247:2066] recipient: [11:20:2067] 2025-05-29T15:31:17.463982Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:31:17.464038Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:31:17.474395Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:31:17.474447Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:31:17.474463Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:31:17.474476Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:31:17.474506Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:31:17.474516Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:31:17.474524Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:31:17.474535Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:31:17.484885Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:31:17.484942Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:31:17.495285Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:31:17.495330Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:31:17.495471Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:31:17.495477Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:31:17.495508Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:31:17.495513Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:31:17.495696Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 
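In the AddDriveSerialMassive output around here, node 11 retries the same serial (SN_123) while nodes 21 and 31 below each push ten AddDriveSerial commands (SN_0 through SN_9, BoxId 1) followed by ten matching RemoveDriveSerial commands via TEvControllerConfigRequest. A tiny generator for that textual command stream is sketched below; it reproduces only the printed shape visible in this log — the actual test sends protobuf requests to the BS_CONTROLLER tablet, which this sketch does not model.

#include <cstdio>

// Emits the AddDriveSerial/RemoveDriveSerial command text in the form this
// log prints, e.g. {Command { AddDriveSerial { Serial: "SN_0" BoxId: 1 } } }.
int main() {
    const int kDrives = 10; // SN_0 .. SN_9, as in AddDriveSerialMassive
    const int kBoxId = 1;
    for (int i = 0; i < kDrives; ++i) {
        std::printf("{Command { AddDriveSerial { Serial: \"SN_%d\" BoxId: %d } } }\n", i, kBoxId);
    }
    for (int i = 0; i < kDrives; ++i) {
        std::printf("{Command { RemoveDriveSerial { Serial: \"SN_%d\" } } }\n", i);
    }
    return 0;
}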
2025-05-29T15:31:17.495900Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# ... ommand { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-05-29T15:31:19.514948Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-05-29T15:31:19.515006Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-05-29T15:31:19.515076Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-05-29T15:31:19.515164Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-05-29T15:31:19.515255Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-05-29T15:31:19.515322Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2025-05-29T15:31:19.515413Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-05-29T15:31:19.515494Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-05-29T15:31:19.515602Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-05-29T15:31:19.515703Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-05-29T15:31:19.515795Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-05-29T15:31:19.515882Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-05-29T15:31:19.515940Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-05-29T15:31:19.515998Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:220:2066] recipient: [31:197:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:220:2066] recipient: [31:197:2076] Leader for TabletID 72057594037932033 is [31:226:2078] sender: [31:227:2066] recipient: [31:197:2076] 2025-05-29T15:31:21.497882Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-05-29T15:31:21.498098Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-05-29T15:31:21.498156Z node 31 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-05-29T15:31:21.498347Z 
node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:31:21.498437Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-05-29T15:31:21.498476Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2057} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-05-29T15:31:21.498482Z node 31 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:521} Handle TEvInterconnect::TEvNodesInfo 2025-05-29T15:31:21.498536Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-05-29T15:31:21.499515Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-05-29T15:31:21.499541Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-05-29T15:31:21.499570Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-05-29T15:31:21.499591Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:31:21.499606Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-05-29T15:31:21.499616Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [31:226:2078] sender: [31:247:2066] recipient: [31:20:2067] 2025-05-29T15:31:21.509979Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-05-29T15:31:21.510050Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:31:21.520361Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-05-29T15:31:21.520411Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:31:21.520424Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-05-29T15:31:21.520433Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:31:21.520456Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-05-29T15:31:21.520462Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:31:21.520466Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-05-29T15:31:21.520471Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:31:21.530806Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-05-29T15:31:21.530865Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:31:21.541208Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-05-29T15:31:21.541277Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-05-29T15:31:21.541480Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:557} TTxLoadEverything Complete 2025-05-29T15:31:21.541486Z node 31 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2188} LoadFinished 2025-05-29T15:31:21.541527Z node 31 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-05-29T15:31:21.541535Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:562} TTxLoadEverything InitQueue processed 2025-05-29T15:31:21.541716Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_0" BoxId: 1 } } } 2025-05-29T15:31:21.541938Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_1" BoxId: 1 } } } 2025-05-29T15:31:21.542037Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2025-05-29T15:31:21.542108Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2025-05-29T15:31:21.542177Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2025-05-29T15:31:21.542258Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-05-29T15:31:21.542353Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-05-29T15:31:21.542436Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-05-29T15:31:21.542506Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-05-29T15:31:21.542612Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-05-29T15:31:21.542685Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-05-29T15:31:21.542778Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 
2025-05-29T15:31:21.542855Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-05-29T15:31:21.542928Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-05-29T15:31:21.543004Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-05-29T15:31:21.543080Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-05-29T15:31:21.543155Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-05-29T15:31:21.543234Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-05-29T15:31:21.543314Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-05-29T15:31:21.543392Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } >> KqpImmediateEffects::UpsertAfterInsert >> TExternalTableTestReboots::CreateDroppedExternalTableWithReboots [GOOD] >> TPersQueueTest::FetchRequest [FAIL] >> TPersQueueTest::Init >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots2 [GOOD] >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] >> TSyncBrokerTests::ShouldEnqueue [GOOD] >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId |74.7%| [TA] $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |74.7%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateDroppedExternalTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:11.617224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:11.617241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:11.617244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:11.617248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:11.617252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:11.617255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:11.617261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:11.617270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:11.617348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:11.617402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:11.626783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:11.626802Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:11.626881Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:11.629284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:11.629306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:11.629328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:11.631251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:11.631313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:11.631390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:11.631520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:11.632127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:11.632164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:11.632368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:11.632376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:11.632399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:11.632404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:11.632409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:11.632424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:11.633460Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:11.646894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } 
TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:11.646960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.647007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:11.647039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:11.647046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.647596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:11.647614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:11.647656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.647668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:11.647672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:11.647676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:11.648046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.648060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:11.648066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:11.648411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.648420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.648424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:11.648429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:11.648836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:11.649167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:11.649196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:11.649340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:11.649358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:11.649372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:11.649414Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:31:23.718491Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1006:0 128 -> 240 2025-05-29T15:31:23.718508Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:23.718514Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:31:23.718517Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:23.718665Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-05-29T15:31:23.718724Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 FAKE_COORDINATOR: Erasing txId 1006 2025-05-29T15:31:23.718990Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:23.718995Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:23.719012Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: 
[OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:31:23.719022Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:31:23.719034Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:23.719037Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:207:2208], at schemeshard: 72057594046678944, txId: 1006, path id: 1 2025-05-29T15:31:23.719040Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:207:2208], at schemeshard: 72057594046678944, txId: 1006, path id: 5 2025-05-29T15:31:23.719043Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:207:2208], at schemeshard: 72057594046678944, txId: 1006, path id: 3 2025-05-29T15:31:23.719071Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2025-05-29T15:31:23.719075Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1006:0 ProgressState 2025-05-29T15:31:23.719083Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1006:0 progress is 1/1 2025-05-29T15:31:23.719086Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-05-29T15:31:23.719089Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1006:0 progress is 1/1 2025-05-29T15:31:23.719091Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-05-29T15:31:23.719094Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2025-05-29T15:31:23.719098Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-05-29T15:31:23.719101Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1006:0 2025-05-29T15:31:23.719103Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1006:0 2025-05-29T15:31:23.719110Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:31:23.719113Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:31:23.719116Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1006, publications: 3, subscribers: 0 2025-05-29T15:31:23.719118Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-05-29T15:31:23.719122Z node 49 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:31:23.719125Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-05-29T15:31:23.719164Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:31:23.719171Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:31:23.719174Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1006 2025-05-29T15:31:23.719177Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:31:23.719179Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:31:23.719215Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:31:23.719219Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:31:23.719224Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:31:23.719260Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:31:23.719265Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:31:23.719268Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2025-05-29T15:31:23.719270Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-05-29T15:31:23.719273Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:23.719294Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:31:23.719300Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:31:23.719303Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2025-05-29T15:31:23.719306Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:31:23.719308Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:23.719313Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2025-05-29T15:31:23.719653Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-05-29T15:31:23.719882Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:31:23.719900Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-05-29T15:31:23.719910Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2025-05-29T15:31:23.719951Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2025-05-29T15:31:23.719957Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2025-05-29T15:31:23.720003Z node 49 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2025-05-29T15:31:23.720014Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2025-05-29T15:31:23.720016Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [49:446:2436] TestWaitNotification: OK eventTxId 1006 >> KqpImmediateEffects::InsertExistingKey+UseSink >> KqpImmediateEffects::Upsert ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots2 [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 
72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:11.570855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:11.570874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:11.570878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:11.570882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:11.570886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:11.570889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:11.570895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:11.570905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:11.570973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:11.571032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:11.580389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:11.580408Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:11.580482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:11.582467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:11.582489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:11.582512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:11.584576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:11.584638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:11.584708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:11.584844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:11.585338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:11.585366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:11.585530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:11.585537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:11.585558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:11.585565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:11.585571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:11.585590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:11.586516Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:11.599375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:11.599435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.599481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:11.599513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:11.599520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.600021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:11.600038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:11.600079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.600092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:11.600096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:11.600099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:11.600430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.600442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:11.600447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:11.600715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.600722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.600726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:11.600730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:11.601132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:11.601372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:11.601396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 
is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:11.601527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:11.601542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:11.601556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:11.601595Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:23.784006Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:31:23.784017Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:31:23.784028Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:23.784031Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-05-29T15:31:23.784034Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2025-05-29T15:31:23.784037Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2025-05-29T15:31:23.784065Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:31:23.784069Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:31:23.784078Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:31:23.784080Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:31:23.784085Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:31:23.784088Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation 
IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:31:23.784090Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-05-29T15:31:23.784094Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:31:23.784096Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:31:23.784099Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:31:23.784106Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:31:23.784108Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:31:23.784111Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1004, publications: 3, subscribers: 0 2025-05-29T15:31:23.784114Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-29T15:31:23.784116Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:31:23.784118Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-29T15:31:23.784155Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:23.784161Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:23.784164Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:31:23.784166Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:31:23.784169Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:31:23.784215Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:31:23.784219Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:31:23.784224Z node 50 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:31:23.784258Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:23.784263Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:23.784266Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:31:23.784268Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-29T15:31:23.784271Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:23.784324Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:23.784330Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:23.784332Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:31:23.784335Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:31:23.784337Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:23.784342Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-05-29T15:31:23.784814Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:23.784836Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:31:23.784871Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:23.785070Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got 
TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:31:23.785132Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:31:23.785138Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:31:23.785185Z node 50 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:31:23.785199Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:31:23.785202Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [50:390:2380] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:31:23.785248Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:23.785267Z node 50 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 25us result status StatusPathDoesNotExist 2025-05-29T15:31:23.785292Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] Test command err: 2025-05-29T15:31:24.193806Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:64: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-05-29T15:31:24.193850Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:90: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [1:6:2053], enqueued, active: 1, waiting: 1 2025-05-29T15:31:24.211533Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:64: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-05-29T15:31:24.211563Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:90: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], enqueued, active: 1, waiting: 1 2025-05-29T15:31:24.211567Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:79: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:7:2054], enqueued, active: 1, waiting: 1 >> 
KqpEffects::InsertAbort_Params_Conflict+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:30:28.157288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:28.157314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:28.157321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:28.157329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:28.157345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:28.157349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:28.157360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:28.157375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:28.157474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:28.157557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:28.172751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:28.172773Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:28.172868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:28.175677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:28.175708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:28.175744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:28.178580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:28.178661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:28.178815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:28.178995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:28.179680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:28.179723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:28.179965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:28.179976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:28.180013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:28.180021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:28.180027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:28.180047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:28.181494Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:28.203652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:28.203736Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.203801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:28.203859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:28.203871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.204588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:28.204618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:28.204664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.204675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:28.204682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:28.204691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:28.205166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.205180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:28.205186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:28.205572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.205585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.205592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:28.205599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:28.206344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-05-29T15:30:28.206860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:28.206901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:28.207114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:28.207141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:28.207149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:28.207222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... on { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:23.718782Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:31:23.718826Z node 46 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 54us result status StatusSuccess 2025-05-29T15:31:23.718996Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: 
false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:23.729288Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1016:2806] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-05-29T15:31:23.729314Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1017:2806] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-05-29T15:31:23.729325Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: 
[AsyncIndexChangeSenderMain][72075186233409548:2][46:965:2806] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-05-29T15:31:23.729338Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][46:965:2806] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-05-29T15:31:23.729355Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1016:2806] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1748532683716221 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1748532683716221 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:31:23.729391Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1017:2806] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 1748532683716221 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:31:23.730238Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1016:2806] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2025-05-29T15:31:23.730263Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][46:965:2806] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-05-29T15:31:23.730394Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1017:2806] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-05-29T15:31:23.730402Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][46:965:2806] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots [GOOD] |74.8%| [TA] $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |74.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPersQueueTest::Init [FAIL] >> TPersQueueTest::EventBatching >> KqpInplaceUpdate::SingleRowIf-UseSink >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink >> TExternalTableTestReboots::CreateExternalTableWithReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:12.619419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:12.619446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:12.619452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:12.619457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:12.619463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:12.619467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:12.619477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:12.619491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:12.619607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:12.619698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:12.634225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { 
AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:12.634247Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:12.634389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:12.637105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:12.637131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:12.637168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:12.639929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:12.640013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:12.640119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:12.640321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:12.641093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:12.641137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:12.641367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:12.641381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:12.641410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:12.641419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:12.641426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:12.641444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:12.642649Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:12.655523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { 
WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:12.655600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:12.655661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:12.655697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:12.655704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:12.656382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:12.656402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:12.656460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:12.656477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:12.656483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:12.656489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:12.656796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:12.656804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:12.656808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:12.657043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:12.657050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:12.657054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:12.657060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:12.657449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:12.657715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:12.657746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:12.657910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:12.657928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:12.657944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:12.657988Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:24.813969Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:31:24.813985Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:31:24.814005Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:24.814010Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-05-29T15:31:24.814015Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2025-05-29T15:31:24.814019Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2025-05-29T15:31:24.814066Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:31:24.814073Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:31:24.814084Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:31:24.814089Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:31:24.814094Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:31:24.814098Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:31:24.814102Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-05-29T15:31:24.814108Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:31:24.814112Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:31:24.814117Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:31:24.814128Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:31:24.814132Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 
2025-05-29T15:31:24.814137Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1004, publications: 3, subscribers: 0 2025-05-29T15:31:24.814142Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-29T15:31:24.814145Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:31:24.814150Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-29T15:31:24.814208Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:24.814221Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:24.814226Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:31:24.814231Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:31:24.814235Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:31:24.814313Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:31:24.814319Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:31:24.814328Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:31:24.814393Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:24.814403Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:24.814408Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:31:24.814412Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 11 2025-05-29T15:31:24.814417Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:24.814500Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:24.814510Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:24.814514Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:31:24.814518Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:31:24.814523Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:24.814533Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-05-29T15:31:24.815217Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:24.815242Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:31:24.815283Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:24.815467Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:31:24.815553Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:31:24.815559Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:31:24.815623Z node 50 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:31:24.815638Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:31:24.815643Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [50:390:2380] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:31:24.815713Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:24.815736Z node 50 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 34us result status StatusPathDoesNotExist 2025-05-29T15:31:24.815773Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ManyFlushes Test command err: Trying to start YDB, gRPC: 4454, MsgBus: 23734 2025-05-29T15:31:17.232691Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890664378136445:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:17.232711Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001df5/r3tmp/tmpuqFVz6/pdisk_1.dat 2025-05-29T15:31:17.295011Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4454, node 1 2025-05-29T15:31:17.327022Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:17.327042Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:17.327045Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:17.327102Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:31:17.333553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:17.333591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:17.334663Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23734 TClient is connected to server localhost:23734 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:17.382372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.393016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.454298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.477709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.488620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:17.575257Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890664378138054:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.575295Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.631142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.638970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.648153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.703312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.757825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.767648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.781272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:17.797805Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890664378138710:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.797835Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.797913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890664378138715:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:17.798582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:17.800943Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890664378138717:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:17.895422Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890664378138768:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:17.985313Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890664378138784:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:17.985429Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWY3YTNlMDktMjgxZDFhNzAtYjRhOGM4ZDEtZDQ0MTU3ZmU=, ActorId: [1:7509890664378138036:2401], ActorState: ExecuteState, TraceId: 01jweapk550abcy7rewkvhbyz1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:17.986101Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F26FAF99AC2 14. ??:0: ?? @ 0x7F26FB02B84F Trying to start YDB, gRPC: 6235, MsgBus: 20634 2025-05-29T15:31:21.393189Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890684582919681:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:21.393210Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001df5/r3tmp/tmp7aOk6S/pdisk_1.dat 2025-05-29T15:31:21.446247Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6235, node 1 2025-05-29T15:31:21.462734Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:21.462769Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:21.462771Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:21.462821Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20634 2025-05-29T15:31:21.494681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:21.494728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:21.495788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20634 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:21.524660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:21.536514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:21.554316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:21.573654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:21.585680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:21.737162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890684582921274:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:21.737203Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:21.797464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:21.805553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:21.861584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:21.869198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:21.925267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:21.934254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:21.946978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:21.967778Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890684582921930:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:21.967801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:21.967897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890684582921935:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:21.969019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:21.973224Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890684582921937:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:22.068410Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890688877889284:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:22.189167Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890688877889293:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:22.189418Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWQ5NjFmNzUtYzY5OTJiZGUtODYzZmMwMTEtYzM3MTM2OA==, ActorId: [1:7509890684582921271:2401], ActorState: ExecuteState, TraceId: 01jweapq7e8sx1syvtma6pe0q7, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:22.190950Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F5B7C98EAC2 14. ??:0: ?? @ 0x7F5B7CA2084F >> KqpEffects::UpdateOn_Select >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] >> KqpImmediateEffects::UpdateAfterUpsert >> TExternalTableTestReboots::DropExternalTableWithReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateExternalTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:08.635418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:08.635457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:08.635462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:08.635468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:08.635475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:08.635480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:08.635490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:08.635505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:08.635608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:08.646184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:08.712492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:08.712525Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:08.712612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:08.717464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:08.717490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:08.717523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:08.724525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:08.728777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:08.729551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:08.736122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:08.743950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:08.744007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:08.745939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:08.745953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:08.745982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:08.745988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:08.745994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:08.746012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:08.747204Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:08.766448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:08.766524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.766589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:08.766628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:08.766639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.767349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:08.767378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:08.767435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.767452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:08.767459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:08.767464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:08.767858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.767870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:08.767876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:08.768241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.768253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.768260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:08.768267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:08.768860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:08.769241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:08.769289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:08.769511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:08.769536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:08.770049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:08.770117Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:31:25.191257Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:31:25.191266Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:31:25.191271Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:31:25.191281Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:25.191284Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [68:208:2209], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2025-05-29T15:31:25.191287Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [68:208:2209], at schemeshard: 72057594046678944, txId: 1004, path id: 5 2025-05-29T15:31:25.191290Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [68:208:2209], at schemeshard: 72057594046678944, txId: 1004, path id: 5 2025-05-29T15:31:25.191292Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [68:208:2209], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2025-05-29T15:31:25.191325Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:31:25.191329Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:31:25.191335Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:31:25.191337Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:31:25.191340Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:31:25.191343Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:31:25.191345Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-05-29T15:31:25.191348Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:31:25.191351Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:31:25.191353Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 
1004:0 2025-05-29T15:31:25.191360Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:31:25.191365Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:31:25.191368Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1004, publications: 3, subscribers: 1 2025-05-29T15:31:25.191370Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:31:25.191372Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2025-05-29T15:31:25.191374Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-05-29T15:31:25.191479Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:25.191486Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:25.191489Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:31:25.191491Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2025-05-29T15:31:25.191494Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:31:25.191660Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:25.191667Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:25.191670Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:31:25.191672Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:31:25.191674Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 
2025-05-29T15:31:25.191760Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:25.191766Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:25.191769Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:31:25.191772Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:31:25.191774Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:25.191780Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 1 2025-05-29T15:31:25.191783Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [68:307:2297] 2025-05-29T15:31:25.192007Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:25.192149Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:25.192291Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:25.192303Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:31:25.192306Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [68:338:2328] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 2025-05-29T15:31:25.192366Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirExternalTable/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:25.192386Z node 68 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirExternalTable/ExternalTable" took 24us result status StatusSuccess 2025-05-29T15:31:25.192429Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirExternalTable/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: 
EPathTypeExternalTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "a" Type: "Int32" TypeId: 1 Id: 1 NotNull: true } Columns { Name: "b" Type: "Int32" TypeId: 1 Id: 2 NotNull: true } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:04.893418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:04.893439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:04.893444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:04.893448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:04.893461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:04.893463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:04.893470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:04.893483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:04.893559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:04.893618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:04.902884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:04.902903Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:04.902970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:04.905086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:04.905111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:04.905144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:04.907000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:04.907054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:04.907138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:04.907249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:04.907679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:04.907708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:04.907897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:04.907903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:04.907933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:04.907938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:04.907942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:04.907956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:04.908850Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:04.920923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:04.920990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:04.921043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:04.921076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:04.921084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:04.921574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:04.921593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:04.921624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:04.921631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:04.921635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:04.921639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:04.921932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:04.921939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:04.921944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:04.922165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:04.922171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:04.922175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:04.922181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:04.922620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:04.922926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:04.922956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:04.923092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:04.923109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:04.923114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:04.923163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
TaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 
0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:25.218524Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:31:25.218572Z node 24 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 57us result status StatusSuccess 2025-05-29T15:31:25.218712Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 
TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:25.228986Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:793:2626] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-05-29T15:31:25.229018Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][24:731:2626] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-05-29T15:31:25.229049Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:793:2626] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1748532685215644 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: 
[OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1748532685215644 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1748532685215644 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:31:25.229651Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:793:2626] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-05-29T15:31:25.229665Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][24:731:2626] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> KqpImmediateEffects::DeleteAfterInsert >> KqpInplaceUpdate::BigRow >> KqpImmediateEffects::Replace >> TPersQueueTest::EventBatching [FAIL] >> TPersQueueTest::NoDecompressionMemoryLeaks ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::DropExternalTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:13.477636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:13.477656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:13.477662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:13.477666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 
2025-05-29T15:31:13.477672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:13.477675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:13.477683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:13.477696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:13.477779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:13.477848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:13.491781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:13.491799Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:13.491870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:13.493952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:13.493973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:13.493999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:13.496184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:13.496242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:13.496317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:13.496485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:13.496987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:13.497016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:13.497186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:13.497193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:13.497216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2025-05-29T15:31:13.497221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:13.497225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:13.497238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:13.498195Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:13.510660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:13.510717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:13.510782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:13.510812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:13.510819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:13.511337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:13.511373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:13.511412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:13.511425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:13.511429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:13.511432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:13.511856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:13.511866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:13.511870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:13.512139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:13.512147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:13.512151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:13.512155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:13.512574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:13.512888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:13.512912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:13.513038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:13.513055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:13.513069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:13.513110Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:25.623552Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:31:25.623562Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:31:25.623573Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:25.623576Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:206:2207], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2025-05-29T15:31:25.623579Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:206:2207], at schemeshard: 72057594046678944, txId: 1005, path id: 5 2025-05-29T15:31:25.623584Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:206:2207], at schemeshard: 72057594046678944, txId: 1005, path id: 3 2025-05-29T15:31:25.623614Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-05-29T15:31:25.623618Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1005:0 ProgressState 2025-05-29T15:31:25.623625Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:31:25.623627Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:31:25.623630Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:31:25.623633Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:31:25.623635Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2025-05-29T15:31:25.623638Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:31:25.623641Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:0 2025-05-29T15:31:25.623643Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:0 2025-05-29T15:31:25.623651Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:31:25.623653Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 
2025-05-29T15:31:25.623656Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1005, publications: 3, subscribers: 0 2025-05-29T15:31:25.623658Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-05-29T15:31:25.623661Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:31:25.623663Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-05-29T15:31:25.623699Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:31:25.623705Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:31:25.623708Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:31:25.623710Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:31:25.623714Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:31:25.623756Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:31:25.623760Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:31:25.623765Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:31:25.623797Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:31:25.623802Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:31:25.623805Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:31:25.623807Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 15 2025-05-29T15:31:25.623809Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:25.624000Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:31:25.624010Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:31:25.624013Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:31:25.624015Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:31:25.624018Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:25.624025Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2025-05-29T15:31:25.624410Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:31:25.624432Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:31:25.624535Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:31:25.624589Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2025-05-29T15:31:25.624621Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-05-29T15:31:25.624626Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-05-29T15:31:25.624665Z node 50 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-05-29T15:31:25.624675Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-05-29T15:31:25.624678Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [50:446:2436] TestWaitNotification: OK eventTxId 1005 2025-05-29T15:31:25.624721Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:25.624738Z node 50 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 22us result status StatusPathDoesNotExist 2025-05-29T15:31:25.624759Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpImmediateEffects::Delete >> KqpEffects::InsertAbort_Select_Duplicates-UseSink >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction+UseSink >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink >> KqpImmediateEffects::ConflictingKeyRW1RWR2 >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink Test command err: Trying to start YDB, gRPC: 13929, MsgBus: 20394 2025-05-29T15:31:14.311882Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890651013987842:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:14.311929Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e03/r3tmp/tmp5pLCPp/pdisk_1.dat 2025-05-29T15:31:14.372416Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13929, node 1 2025-05-29T15:31:14.386065Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:14.386078Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:14.386080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:14.386118Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20394 2025-05-29T15:31:14.413049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:14.413083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:14.414096Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:14.446603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:14.454501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:14.471310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:14.488195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:14.499296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:14.642120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890651013989457:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.642155Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.705678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.713684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.722355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.777169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.785510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.799497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.813147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:14.829389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890651013990110:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.829412Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890651013990115:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.829417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:14.830113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:14.833213Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890651013990117:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-05-29T15:31:14.904811Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890651013990168:3395] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:31:14.988000Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890651013990184:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:31:14.988102Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWNkODljN2MtNzM4OWMyODMtNTc3ZmQ1NDQtZGE2OTFmZTE=, ActorId: [1:7509890651013989439:2401], ActorState: ExecuteState, TraceId: 01jweapg8dd1176a7aavr4994y, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:31:14.988750Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F216831AAC2 14. ??:0: ?? @ 0x7F21683AC84F Trying to start YDB, gRPC: 6741, MsgBus: 16165 2025-05-29T15:31:18.572314Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890670943273708:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:18.572337Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e03/r3tmp/tmpT4ZCZo/pdisk_1.dat 2025-05-29T15:31:18.648706Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6741, node 1 2025-05-29T15:31:18.669262Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:18.669277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:18.669279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2 ... s/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F733E5BCAC2 14. ??:0: ?? 
@ 0x7F733E64E84F Trying to start YDB, gRPC: 32732, MsgBus: 5696 2025-05-29T15:31:22.623635Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890689073783187:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:22.623650Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e03/r3tmp/tmpuvet8a/pdisk_1.dat 2025-05-29T15:31:22.668572Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32732, node 1 2025-05-29T15:31:22.684840Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:22.684852Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:22.684853Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:22.684888Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5696 2025-05-29T15:31:22.723905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:22.723926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:22.724979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5696 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:22.749604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.760277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:31:22.823385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.838525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.849828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.921604Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890689073784776:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:22.921626Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:22.971811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:22.978637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:22.989047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:22.996147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.002725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.010002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.017322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.033978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890693368752725:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.034007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.034038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890693368752730:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.034654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:23.037572Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890693368752732:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:31:23.137410Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890693368752783:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:31:23.239777Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890693368752799:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:31:23.239898Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDg3YTFhN2EtZTMwMGFmN2MtMjEzZWU5Y2QtNTBhMmEzOGQ=, ActorId: [1:7509890689073784758:2401], ActorState: ExecuteState, TraceId: 01jweapr8s6wxqwrv5ewv9sgpr, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:31:23.240516Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FCCF8607AC2 14. ??:0: ?? @ 0x7FCCF869984F >> KqpEffects::InsertRevert_Literal_Duplicates >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink >> KqpImmediateEffects::InsertDuplicates+UseSink >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink >> KqpImmediateEffects::UpsertAfterInsertWithIndex >> TPersQueueTest::NoDecompressionMemoryLeaks [FAIL] >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive >> KqpEffects::InsertAbort_Params_Duplicates+UseSink >> TConsistentOpsWithReboots::DropNotNullColumnTableWithReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertExistingKey+UseSink Test command err: Trying to start YDB, gRPC: 16154, MsgBus: 14248 2025-05-29T15:31:20.760053Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890676666085078:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:20.760080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ded/r3tmp/tmpgGt2Lp/pdisk_1.dat 2025-05-29T15:31:20.812777Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16154, node 1 2025-05-29T15:31:20.828154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:20.828171Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:20.828173Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:20.828223Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14248 2025-05-29T15:31:20.861181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:20.861218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:20.862268Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14248 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:20.895774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:20.905784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:20.920761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:20.939704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:20.950204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:21.078876Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890680961053968:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:21.078901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:21.136203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:21.143267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:21.155400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:21.169189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:21.183819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:21.197800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:21.211535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:21.227669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890680961054620:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:21.227708Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:21.227717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890680961054625:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:21.228392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:21.231054Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890680961054627:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:31:21.316944Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890680961054678:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:31:21.403788Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890680961054694:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:31:21.403956Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmNkNGFjYTEtODlhMmZhMGEtODFhYmZhNGQtODMxNDZhZmQ=, ActorId: [1:7509890680961053950:2401], ActorState: ExecuteState, TraceId: 01jweappgbfwh252ryqny34mye, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:31:21.404804Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F6C3AF3BAC2 14. ??:0: ?? @ 0x7F6C3AFCD84F Trying to start YDB, gRPC: 12411, MsgBus: 15752 2025-05-29T15:31:24.504071Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890695641569492:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:24.504118Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ded/r3tmp/tmpx3LozV/pdisk_1.dat 2025-05-29T15:31:24.553607Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12411, node 1 2025-05-29T15:31:24.568140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:24.568153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:24.568154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:24.568197Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15752 TClient is connected to server localhost:15752 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:31:24.605234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:24.605264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-29T15:31:24.606362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:24.616510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:24.624280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:24.685586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:24.705562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:24.714418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:24.776310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890695641571086:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:24.776328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:24.827225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.833569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.843750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.850953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.858128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.872364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.879219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.894593Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890695641571737:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:24.894615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890695641571742:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:24.894616Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:24.895169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:24.899309Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890695641571744:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-05-29T15:31:24.979659Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890695641571795:3393] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:31:25.048113Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890695641571811:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:31:25.048216Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWVlYTYxNTctNzUzNjE2OTctNTczZjQ3Yi1mNjJmZmMxNA==, ActorId: [1:7509890695641571083:2401], ActorState: ExecuteState, TraceId: 01jweapt2y2vjr9swh9mbgsw7b, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:31:25.048834Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FBAFE317AC2 14. ??:0: ?? @ 0x7FBAFE3A984F >> KqpImmediateEffects::UpdateOn >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive [FAIL] >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::DropNotNullColumnTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:08.036132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:08.036150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:08.036153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:08.036157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:08.036161Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:08.036163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:08.036169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:08.036178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:08.036243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:08.036295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:08.044867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:08.044883Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:08.044942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:08.046688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:08.046706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:08.046730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:08.048399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:08.048467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:08.048536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:08.048670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:08.049169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:08.049197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:08.049389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:08.049395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:08.049417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:08.049422Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:08.049425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:08.049437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:08.050259Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:08.069561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:08.069647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.069710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:08.069755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:08.069765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.070512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:08.070537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:08.070582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.070589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:08.070593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:08.070597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:08.070967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.070975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:08.070979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:08.071215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.071220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:08.071224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:08.071229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:08.071628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:08.071915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:08.071939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:08.072090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:08.072106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:08.072121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:08.072178Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:27.822049Z node 80 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:31:27.822054Z node 80 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-05-29T15:31:27.822058Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:27.822219Z node 80 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:27.822230Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:27.822235Z node 80 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:31:27.822239Z node 80 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:31:27.822243Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:31:27.822253Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2025-05-29T15:31:27.822502Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:31:27.822509Z node 80 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 1004:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:27.822565Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:31:27.822590Z node 80 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:31:27.822594Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:31:27.822599Z node 80 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:31:27.822603Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:31:27.822610Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2025-05-29T15:31:27.822614Z node 80 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:31:27.822619Z node 80 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:31:27.822623Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:31:27.822639Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:31:27.823004Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:27.823078Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:27.823440Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:27.824190Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 364 RawX2: 343597386030 } TabletId: 72075186233409546 State: 4 2025-05-29T15:31:27.824208Z node 80 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:31:27.824525Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:31:27.824592Z node 80 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-05-29T15:31:27.825070Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:27.825115Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:31:27.825159Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:31:27.825165Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:31:27.825175Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:31:27.825675Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:31:27.825687Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:31:27.825711Z node 80 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:31:27.825764Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:31:27.825773Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:31:27.825823Z node 80 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:31:27.825838Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:31:27.825843Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [80:516:2488] TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409556 is deleted 2025-05-29T15:31:27.825893Z node 80 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:31:27.825904Z node 80 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409556 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409556 2025-05-29T15:31:27.825963Z node 80 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:27.825995Z node 80 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 40us result status StatusSuccess 2025-05-29T15:31:27.826067Z node 80 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:27.826118Z node 80 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/TestNotNullTable" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:27.826137Z node 80 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB/TestNotNullTable" took 21us result status StatusPathDoesNotExist 2025-05-29T15:31:27.826156Z node 80 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirB/TestNotNullTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirB\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/DirB/TestNotNullTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirB" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict+UseSink Test command err: Trying to start YDB, gRPC: 21144, MsgBus: 5865 2025-05-29T15:31:16.489331Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890659784061234:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:16.489356Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dfb/r3tmp/tmpQOfOJ3/pdisk_1.dat 2025-05-29T15:31:16.544270Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21144, node 1 2025-05-29T15:31:16.560693Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:16.560709Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:16.560711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:16.560756Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5865 2025-05-29T15:31:16.590637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:16.590666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:16.591736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5865 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:16.621165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:16.629708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:16.649335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:16.670356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:16.680987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:16.800776Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890659784062822:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:16.800803Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:16.843739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:16.851727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:16.907701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:16.920754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:16.935073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:16.949204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:16.959652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:16.971676Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890659784063477:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:16.971698Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:16.971702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890659784063482:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:16.972443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:16.975539Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890659784063484:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:17.046449Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890664079030831:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:17.126772Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890664079030847:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:17.126887Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Yzg0ODU4MzMtNWRhMzg1YzMtOWM2M2NjMC0zYjhmMTFhNA==, ActorId: [1:7509890659784062819:2401], ActorState: ExecuteState, TraceId: 01jweapjbb983zfvxdrb6qt4py, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:17.127668Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FB7E7B17AC2 14. ??:0: ?? @ 0x7FB7E7BA984F Trying to start YDB, gRPC: 10508, MsgBus: 12375 2025-05-29T15:31:20.668358Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890677170870467:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:20.668751Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dfb/r3tmp/tmpxmxfaR/pdisk_1.dat 2025-05-29T15:31:20.716131Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10508, node 1 2025-05-29T15:31:20.729893Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:20.729903Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:20.729904Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 20 ... als/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F7CD9FA6AC2 14. ??:0: ??
@ 0x7F7CDA03884F Trying to start YDB, gRPC: 4959, MsgBus: 1680 2025-05-29T15:31:24.619675Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890697724460540:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:24.619696Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dfb/r3tmp/tmpiEy1jb/pdisk_1.dat 2025-05-29T15:31:24.677011Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4959, node 1 2025-05-29T15:31:24.691848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:24.691862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:24.691863Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:24.691902Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1680 2025-05-29T15:31:24.720707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:24.720731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:24.721762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1680 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:24.744417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:24.754439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:31:24.815229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:24.835749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:24.846956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:24.923867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890697724462145:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:24.923901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:24.960251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.966262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.976830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.983543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.991041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.998152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.012167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.027984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890702019430095:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.028003Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.028008Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890702019430100:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.028556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:25.032220Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890702019430102:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:25.121186Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890702019430153:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:25.219242Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890702019430169:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:25.219370Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWY0ZWMyZDUtM2I3NWZhOWItN2U1ZDBlMTAtOTAwZTAwYmI=, ActorId: [1:7509890697724462127:2401], ActorState: ExecuteState, TraceId: 01jweapt7342v693aqrp5nts9a, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:25.220077Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FA0525DEAC2 14. ??:0: ?? @ 0x7FA05267084F >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink >> KqpImmediateEffects::AlreadyBrokenImmediateEffects >> KqpInplaceUpdate::SingleRowArithm+UseSink >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink >> TExternalTableTestReboots::ParallelCreateDrop [GOOD] >> KqpImmediateEffects::UpdateAfterInsert >> KqpEffects::UpdateOn_Literal >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive [FAIL] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::ParallelCreateDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:12.763058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:12.763077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:31:12.763081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:12.763084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:12.763088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:12.763091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:12.763096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:12.763106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:12.763172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:12.763228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:12.772597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:12.772615Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:12.772689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:12.774823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:12.774847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:12.774871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:12.778046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:12.778122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:12.778210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:12.778435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:12.779225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:12.779262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:12.779475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-05-29T15:31:12.779485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:12.779510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:12.779517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:12.779523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:12.779540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:12.780846Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:12.795331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:12.795388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:12.795429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:12.795461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:12.795468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:12.796039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:12.796062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:12.796112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:12.796131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-05-29T15:31:12.796137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:12.796142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:12.796584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:12.796596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:12.796601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:12.796898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:12.796907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:12.796912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:12.796918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:12.797564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:12.797957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:12.797984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:12.798149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:12.798171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:12.798192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:12.798255Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
2025-05-29T15:31:29.101918Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:29.101947Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:31:29.101964Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:31:29.101984Z node 67 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:29.101989Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [67:208:2209], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-05-29T15:31:29.101994Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [67:208:2209], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2025-05-29T15:31:29.101998Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [67:208:2209], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2025-05-29T15:31:29.102051Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:31:29.102058Z node 67 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:31:29.102071Z node 67 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:31:29.102078Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:31:29.102084Z node 67 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:31:29.102087Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:31:29.102092Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-05-29T15:31:29.102097Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:31:29.102102Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:31:29.102106Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:31:29.102119Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:31:29.102123Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 
72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:31:29.102129Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1004, publications: 3, subscribers: 0 2025-05-29T15:31:29.102133Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-29T15:31:29.102137Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:31:29.102141Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-29T15:31:29.102207Z node 67 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:29.102219Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:29.102224Z node 67 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:31:29.102228Z node 67 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:31:29.102233Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:31:29.102315Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:31:29.102322Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:31:29.102333Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:31:29.102410Z node 67 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:29.102421Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:29.102426Z node 67 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:31:29.102430Z node 67 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 
72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-29T15:31:29.102434Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:29.102475Z node 67 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:29.102484Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:31:29.102487Z node 67 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:31:29.102492Z node 67 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:31:29.102496Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:29.102504Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-05-29T15:31:29.103130Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:29.103414Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:31:29.103434Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:29.103448Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:31:29.103502Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:31:29.103512Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:31:29.103591Z node 67 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:31:29.103612Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:31:29.103618Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [67:392:2382] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:31:29.103693Z node 67 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DropMe" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:29.103720Z node 67 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DropMe" took 37us result status StatusPathDoesNotExist 2025-05-29T15:31:29.103753Z node 67 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DropMe\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/DropMe" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpImmediateEffects::ConflictingKeyW1WRR2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::BigRow Test command err: Trying to start YDB, gRPC: 20399, MsgBus: 32117 2025-05-29T15:31:22.799501Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890687866802619:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:22.799524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dd9/r3tmp/tmpfHQbv8/pdisk_1.dat 2025-05-29T15:31:22.864506Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20399, node 1 2025-05-29T15:31:22.881358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:22.881373Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:22.881375Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:22.881414Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:32117 2025-05-29T15:31:22.900932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:22.900958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:22.902059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32117 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:22.929825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.939347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.003122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.020180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.075124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.120645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890692161771510:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.120671Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.153167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.158632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.171511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.184918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.191908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.206420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.220337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.237169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890692161772161:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.237197Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890692161772166:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.237208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.237832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:23.239895Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890692161772168:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:23.327235Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890692161772219:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:23.408683Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890692161772235:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:23.408902Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YWU2MzdlNTMtZjNjYWQ4YzEtYzU5NzdjMjYtMTBjMDUzMTA=, ActorId: [1:7509890692161771492:2401], ActorState: ExecuteState, TraceId: 01jweaprf46cd6qs6qzym1vpa6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:23.409612Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7EFE9B247AC2 14. ??:0: ?? @ 0x7EFE9B2D984F Trying to start YDB, gRPC: 15768, MsgBus: 14856 2025-05-29T15:31:26.227878Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890704347540446:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:26.227899Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dd9/r3tmp/tmpij4z89/pdisk_1.dat 2025-05-29T15:31:26.279820Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:26.279886Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890704347540414:2079] 1748532686227783 != 1748532686227786 TServer::EnableGrpc on GrpcPort 15768, node 1 2025-05-29T15:31:26.299031Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:26.299050Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:26.299051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:26.299102Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14856 2025-05-29T15:31:26.329282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:26.329308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:26.330371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14856 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:26.359244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.367901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.385825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.405083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.416549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.526869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890704347542046:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.526891Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.562047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.569019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.580269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.593953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.601243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.615758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.629622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.645658Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890704347542697:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.645690Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.645716Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890704347542702:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.646366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:26.649019Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890704347542704:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:26.709395Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890704347542755:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:26.799182Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890704347542771:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:26.799274Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDAyYTE3YTYtN2Q3M2FiMTQtZWJjM2U0NS03M2E3NWQ4ZA==, ActorId: [1:7509890704347542043:2401], ActorState: ExecuteState, TraceId: 01jweapvsnb8gj2h1sav9e6qs3, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:26.799841Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F30CC4CFAC2 14. ??:0: ?? @ 0x7F30CC56184F >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink >> KqpImmediateEffects::ReplaceDuplicates ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Duplicates-UseSink Test command err: Trying to start YDB, gRPC: 17985, MsgBus: 6630 2025-05-29T15:31:22.648909Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890687040865168:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:22.648938Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001de7/r3tmp/tmpnU0kyh/pdisk_1.dat 2025-05-29T15:31:22.691215Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17985, node 1 2025-05-29T15:31:22.706999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:22.707012Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:22.707013Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:22.707052Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6630 TClient is connected to server localhost:6630 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:31:22.749538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:22.749564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:22.750683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:22.773450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.785482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.849789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.867883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.878141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.954801Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890687040866758:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:22.954827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.005552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.011847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.024122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.078679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.087663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.101419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.115472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.131006Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890691335834709:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.131023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890691335834714:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.131027Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.131560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:23.135544Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890691335834716:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:23.201015Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890691335834767:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:23.278236Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890691335834783:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:23.278343Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTg3NTExZjQtNzhmYjVlZjItY2E5MWY2YjMtYzc4NWU5YmY=, ActorId: [1:7509890687040866740:2401], ActorState: ExecuteState, TraceId: 01jweaprbtb12atkhxeyh97cjg, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:23.278985Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FEAF7241AC2 14. ??:0: ?? @ 0x7FEAF72D384F Trying to start YDB, gRPC: 11046, MsgBus: 29394 2025-05-29T15:31:26.413023Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890704255334996:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:26.413042Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001de7/r3tmp/tmpvG60eG/pdisk_1.dat 2025-05-29T15:31:26.468905Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11046, node 1 2025-05-29T15:31:26.483067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:26.483079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:26.483080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:26.483121Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29394 2025-05-29T15:31:26.514316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:26.514361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:26.515410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29394 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:26.543921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.554877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.618431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.636638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.647358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.732102Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890704255336591:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.732132Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.769660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.776937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.831958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.846719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.860529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.874620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.888698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.904312Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890704255337245:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.904341Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.904344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890704255337250:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.904967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:26.908349Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890704255337252:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:26.966403Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890704255337303:3393] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:27.050912Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890704255337319:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:27.051007Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDk1YmY5ZTItZmNjMDhjZjYtNjE5ZjlkOWEtMzY1YjkxMWU=, ActorId: [1:7509890704255336588:2401], ActorState: ExecuteState, TraceId: 01jweapw1r9qkqvbzgdm4nstg9, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:27.051608Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F5A7F0B4AC2 14. ??:0: ?? @ 0x7F5A7F14684F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted Test command err: Trying to start YDB, gRPC: 3376, MsgBus: 25445 2025-05-29T15:31:22.985898Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890689192343649:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:22.985924Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dd3/r3tmp/tmpnLYrSw/pdisk_1.dat 2025-05-29T15:31:23.040567Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3376, node 1 2025-05-29T15:31:23.056484Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:23.056498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:23.056500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:23.056553Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25445 2025-05-29T15:31:23.087061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:23.087084Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:23.088228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25445 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:23.119577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.130733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.191053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.207790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.221003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.289635Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890693487312542:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.289668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.326800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.381678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.387897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.394591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.401746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.409065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.416665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.432200Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890693487313198:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.432250Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890693487313203:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.432249Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.432916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:23.436450Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890693487313205:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:23.504630Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890693487313256:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:23.579830Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890693487313272:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:23.579955Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTc5YThkMmQtNDJhODU4ODUtOGI0ODdjNzItYTMwNDBmMDI=, ActorId: [1:7509890693487312524:2401], ActorState: ExecuteState, TraceId: 01jweaprn75qbd558qdazhjkp1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:23.580587Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F7DD3EFDAC2 14. ??:0: ?? @ 0x7F7DD3F8F84F Trying to start YDB, gRPC: 1821, MsgBus: 30112 2025-05-29T15:31:26.660805Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890706274374457:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:26.660821Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dd3/r3tmp/tmpCPaMYs/pdisk_1.dat 2025-05-29T15:31:26.713155Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1821, node 1 2025-05-29T15:31:26.731716Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:26.731733Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:26.731735Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:26.731773Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30112 2025-05-29T15:31:26.761359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:26.761387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:26.762544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30112 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:26.796750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.802361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.863934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.881838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.891311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.023659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890710569343360:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.023688Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.057245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.063607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.069628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.076431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.083654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.090922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.097688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.106723Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890710569344011:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.106757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890710569344016:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.106764Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.107361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:27.111294Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890710569344018:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:27.204263Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890710569344069:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:27.272591Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890710569344085:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:31:27.272675Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzQ3Mzg2ZWYtZjYyM2M0N2MtOGFhODMzMmMtNGNmZmMzYjA=, ActorId: [1:7509890710569343342:2401], ActorState: ExecuteState, TraceId: 01jweapw823sgh4cgqm1hjxjks, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:31:27.273217Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36
RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95
1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C
8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519
10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C
13. ??:0: ?? @ 0x7FA4F7D73AC2
14. ??:0: ?? @ 0x7FA4F7E0584F
>> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteAfterInsert
Test command err:
Trying to start YDB, gRPC: 29513, MsgBus: 8508
2025-05-29T15:31:22.665388Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890688496243836:2064];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:31:22.665404Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dea/r3tmp/tmpq3qHtZ/pdisk_1.dat
2025-05-29T15:31:22.710494Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:31:22.712184Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890688496243808:2079] 1748532682665284 != 1748532682665287
TServer::EnableGrpc on GrpcPort 29513, node 1
2025-05-29T15:31:22.725897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:31:22.725906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:31:22.725907Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:31:22.725939Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:8508
TClient is connected to server localhost:8508
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response:
2025-05-29T15:31:22.766833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:31:22.766861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:31:22.767928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:31:22.791595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:22.801534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:22.817135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:22.837877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:22.850130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:22.961218Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890688496245445:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:22.961243Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:22.996278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.002905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.009921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.016966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.023587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.030892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.038004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.054284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890692791213393:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.054302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890692791213398:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.054308Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.054853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:31:23.058487Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890692791213400:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:31:23.144037Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890692791213451:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:31:23.220676Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890692791213467:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:31:23.220779Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTllMmQxNDctYmI3OTRhZjktNDQyNmMzYjUtNTAxZjc1ZDE=, ActorId: [1:7509890688496245442:2401], ActorState: ExecuteState, TraceId: 01jweapr9e540pxfj3a2k85kar, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:31:23.221393Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36
RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95
1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C
8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519
10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C
13. ??:0: ?? @ 0x7FD6D706AAC2
14. ??:0: ?? @ 0x7FD6D70FC84F
Trying to start YDB, gRPC: 21195, MsgBus: 26104
2025-05-29T15:31:26.227083Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890705273445043:2072];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:31:26.227109Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dea/r3tmp/tmpmPeRgS/pdisk_1.dat
2025-05-29T15:31:26.279558Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 21195, node 1
2025-05-29T15:31:26.299930Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:31:26.299940Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:31:26.299941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:31:26.299976Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:26104
2025-05-29T15:31:26.328373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:31:26.328410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:31:26.329507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:26104
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:31:26.359353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:26.367725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:26.431970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:26.490056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:26.499296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:26.544715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890705273446637:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:26.544739Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:26.584054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:31:26.591243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:31:26.645787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:31:26.657321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:31:26.671258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:31:26.685727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:31:26.699604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:31:26.715625Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890705273447290:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:26.715646Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:26.715673Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890705273447295:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:26.716390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:31:26.719386Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890705273447297:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:31:26.773735Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890705273447348:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:31:26.863811Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890705273447364:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:31:26.863937Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzczZDMxOTctZDMxMDU3MTYtZjQ4YTRlM2ItODBjNzI2OWE=, ActorId: [1:7509890705273446619:2401], ActorState: ExecuteState, TraceId: 01jweapvvv52nywysm2thhemdq, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:31:26.864617Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36
RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95
1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C
8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519
10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C
13. ??:0: ?? @ 0x7FF7223EFAC2
14. ??:0: ?? @ 0x7FF72248184F
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink
Test command err:
Trying to start YDB, gRPC: 10754, MsgBus: 24522
2025-05-29T15:31:22.789618Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890686138060667:2066];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:31:22.789759Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dde/r3tmp/tmpAn2Oim/pdisk_1.dat
2025-05-29T15:31:22.841108Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 10754, node 1
2025-05-29T15:31:22.855301Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:31:22.855313Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:31:22.855315Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:31:22.855356Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:24522
2025-05-29T15:31:22.890977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:31:22.891005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:31:22.892106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:24522
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:31:22.920458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:22.933897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:22.998820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:23.019512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:23.031030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:23.139707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890690433029566:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.139739Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.174475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.180917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.191965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.199250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.213144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.227173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.234321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.249848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890690433030219:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.249875Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.249882Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890690433030224:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.250496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:31:23.254157Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890690433030226:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:31:23.314503Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890690433030277:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:31:23.383008Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890690433030293:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:31:23.383100Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTJiMzg2YzYtMTY0OGU2Y2QtZjUyZWQ1NzMtNTA1MWQxMDU=, ActorId: [1:7509890690433029548:2401], ActorState: ExecuteState, TraceId: 01jweaprfhfxprmm3yskqevmts, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:31:23.383697Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36
RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95
1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C
8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519
10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C
13. ??:0: ?? @ 0x7F8CEEEA2AC2
14. ??:0: ?? @ 0x7F8CEEF3484F
Trying to start YDB, gRPC: 15289, MsgBus: 18434
2025-05-29T15:31:26.510332Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890704624547567:2240];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:31:26.510454Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dde/r3tmp/tmple5u9c/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 15289, node 1
2025-05-29T15:31:26.568727Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:31:26.576797Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:31:26.576808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:31:26.576810Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:31:26.576841Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:18434
2025-05-29T15:31:26.610892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:31:26.610917Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:31:26.611916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:18434
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:31:26.640083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:26.645278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:26.706385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:26.722826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:26.735091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:26.958602Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890704624548983:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:26.958634Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:26.998011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:31:27.005820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:31:27.013691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:31:27.067794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:31:27.076757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:31:27.083805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:31:27.090920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:31:27.106895Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890708919516933:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:27.106919Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890708919516938:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:27.106923Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:27.107477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:31:27.111291Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890708919516940:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:31:27.170203Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890708919516991:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:31:27.243102Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890708919517007:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:31:27.243195Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTM0NmQ2MmYtOTgxOWQ4ZWQtMTMyZWViNDMtZmFhZjdjNGU=, ActorId: [1:7509890704624548965:2401], ActorState: ExecuteState, TraceId: 01jweapw822fv6zece8q4p9exe, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:31:27.243816Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36
RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95
1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C
8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519
10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C
13. ??:0: ?? @ 0x7F06C590BAC2
14. ??:0: ?? @ 0x7F06C599D84F
>> KqpImmediateEffects::InsertConflictTxAborted
>> KqpEffects::DeletePkPrefixWithIndex
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Duplicates
Test command err:
Trying to start YDB, gRPC: 65268, MsgBus: 6843
2025-05-29T15:31:22.934381Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890688996924620:2068];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:31:22.934403Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dd5/r3tmp/tmpYRWI2d/pdisk_1.dat
2025-05-29T15:31:22.990383Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 65268, node 1
2025-05-29T15:31:23.003670Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:31:23.003685Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:31:23.003688Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:31:23.003728Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:6843
2025-05-29T15:31:23.035574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:31:23.035596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:31:23.036737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:6843
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:31:23.066078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:23.070869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:23.132276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:23.149357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:23.159318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:23.230989Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890693291893510:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.231013Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.267228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.274533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.282810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.337138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.345755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.352933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.360296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.376400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890693291894164:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.376419Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.376433Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890693291894169:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.376945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:31:23.380444Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890693291894171:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:31:23.440897Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890693291894222:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:31:23.508518Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890693291894238:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:31:23.508628Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjJhZDA3OGUtOTg1NzlhMmUtYzU2YmU1ODQtZWY1ZDY3NGY=, ActorId: [1:7509890693291893507:2401], ActorState: ExecuteState, TraceId: 01jweaprkg5anqq1b2mfkbd4kw, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:31:23.509170Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36
RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95
1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C
8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519
10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C
13. ??:0: ?? @ 0x7FA5F30A0AC2
14. ??:0: ?? @ 0x7FA5F313284F
Trying to start YDB, gRPC: 12726, MsgBus: 27982
2025-05-29T15:31:26.688912Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890704693871552:2064];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:31:26.688938Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dd5/r3tmp/tmphV6AfO/pdisk_1.dat
2025-05-29T15:31:26.745876Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 12726, node 1
2025-05-29T15:31:26.765985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:31:26.765999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:31:26.766001Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:31:26.766058Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:27982
2025-05-29T15:31:26.789944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:31:26.789971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:31:26.791092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:27982
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:26.832328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.841468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.903259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.919223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.930873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.059501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890708988840453:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.059533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.099076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.153578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.208203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.216913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.224498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.238592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.252398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.268507Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890708988841109:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.268547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890708988841114:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.268546Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.269276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:27.271992Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890708988841116:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:27.363223Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890708988841167:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:27.444494Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890708988841183:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:27.444605Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDViZDkxZTktNmFkM2M3OGQtZGJiYzA3MTctOWVhYzNiNmE=, ActorId: [1:7509890708988840450:2401], ActorState: ExecuteState, TraceId: 01jweapwd4az8bf5x4xm8dzkzm, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:27.445266Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F8A98E7DAC2 14. ??:0: ?? @ 0x7F8A98F0F84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1RWR2 Test command err: Trying to start YDB, gRPC: 26301, MsgBus: 6229 2025-05-29T15:31:22.800008Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890686880072741:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:22.800033Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ddc/r3tmp/tmp3Jyszg/pdisk_1.dat 2025-05-29T15:31:22.868056Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26301, node 1 2025-05-29T15:31:22.884167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:22.884178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:22.884180Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:22.884222Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:31:22.901092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:22.901118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:22.902179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6229 TClient is connected to server localhost:6229 WaitRootIsUp 'Root'... 
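Every failed query in this report carries the same pair of issues: "Execution, code: 1060" wrapping "yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1". That points at a bounds check on expression-tree child access during query compilation. The sketch below is a hypothetical reconstruction of such a guard; TExprNodeSketch and Child are illustrative names, not the real declarations in yql_expr.h:

    #include <cstddef>
    #include <memory>
    #include <stdexcept>
    #include <vector>

    // Illustrative expression node; the real TExprNode lives in
    // yql/essentials/ast/yql_expr.h and its accessors may differ.
    struct TExprNodeSketch {
        std::vector<std::shared_ptr<TExprNodeSketch>> Children;

        const TExprNodeSketch& Child(size_t index) const {
            if (index >= Children.size()) {
                // The guard that fires: asking a node for a child it does
                // not have surfaces as the "index out of range" issue with
                // code 1, which then fails the whole compilation.
                throw std::out_of_range("yql_expr.h: index out of range");
            }
            return *Children[index];
        }
    };

    int main() {
        TExprNodeSketch node;          // a node with no children
        try {
            node.Child(1);             // reproduces the failure mode
        } catch (const std::out_of_range&) {
            // the compile actor reports INTERNAL_ERROR instead of crashing
        }
    }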
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:22.948700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.954135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.015203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.035484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.045566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.110635Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890691175041640:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.110657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.153366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.159568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.170929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.177942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.184923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.191718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.198827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.215370Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890691175042291:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.215390Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.215402Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890691175042296:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.216113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:23.219428Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890691175042298:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:23.310008Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890691175042349:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:23.396877Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890691175042365:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:23.396947Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWVjNjA4MWMtNmVhMDRkNmQtYzliMjk3NzItMmMwMjdlN2Y=, ActorId: [1:7509890691175041637:2401], ActorState: ExecuteState, TraceId: 01jweapreefwnpdjdsw7b4wg84, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:23.397566Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FD4EACB5AC2 14. ??:0: ?? @ 0x7FD4EAD4784F Trying to start YDB, gRPC: 21442, MsgBus: 17133 2025-05-29T15:31:26.535707Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890704101834263:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:26.535731Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ddc/r3tmp/tmprJUAXV/pdisk_1.dat 2025-05-29T15:31:26.583734Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21442, node 1 2025-05-29T15:31:26.600705Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:26.600716Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:26.600718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:26.600766Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17133 TClient is connected to server localhost:17133 2025-05-29T15:31:26.636652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:26.636679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... 
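One reason these failures take down the whole test binary rather than a single test case: per frames 5-12 of the traces (future-inl.h -> async.h -> util/thread/pool), the assertion fires inside a thread-pool task, so registar.cpp can only Panic with "assertion failed in non-unittest thread". Plain C++ reproduces the effect; an assert on a worker thread aborts the entire process:

    #include <cassert>
    #include <thread>

    int main() {
        std::thread worker([] {
            const bool resultIsSuccess = false;  // stand-in for result.IsSuccess()
            assert(resultIsSuccess);             // abort() kills the whole binary,
        });                                      // not just this worker thread
        worker.join();                           // the process dies before a clean join
    }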
TClient::Ls request: Root 2025-05-29T15:31:26.637726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:26.662824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.668027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.730315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.750557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.761287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.976692Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890704101835882:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.976720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.018839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.026482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.080988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.091759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.105410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.119552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.133724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.149250Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890708396803832:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.149275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890708396803837:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.149276Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.149847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:27.153587Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890708396803839:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:27.231345Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890708396803890:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:27.317931Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890708396803906:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:27.318007Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmM2N2ZhMDgtMjdjZmZhMy1mMTc0ZmI1MC1hZjdkZjcyZQ==, ActorId: [1:7509890704101835879:2401], ActorState: ExecuteState, TraceId: 01jweapw9c5xwqsr84kfjwef74, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:27.318625Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F343BBC1AC2 14. ??:0: ?? @ 0x7F343BC5384F >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay [FAIL] >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink Test command err: Trying to start YDB, gRPC: 1356, MsgBus: 8263 2025-05-29T15:31:22.785534Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890685576522386:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:22.785553Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001de2/r3tmp/tmptTc4xA/pdisk_1.dat 2025-05-29T15:31:22.844673Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:22.844756Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890685576522355:2079] 1748532682785421 != 1748532682785424 TServer::EnableGrpc on GrpcPort 1356, node 1 2025-05-29T15:31:22.862897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:22.862909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:22.862910Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:22.862953Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8263 2025-05-29T15:31:22.886916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:22.886941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:22.887886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8263 
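The WorkloadService noise preceding each failure looks like a benign create-if-missing race rather than the failure cause: fetching the default pool returns NOT_FOUND, a TPoolCreatorActor creates it, and a concurrent create that finds /Root/.metadata/workload_manager/pools/default already present is accepted ("path exist, request accepts it") and doublechecked on retry. A sketch of that idempotent sequence, with all names illustrative:

    #include <iostream>
    #include <set>
    #include <string>

    enum class EStatus { Success, NotFound, AlreadyExists };

    std::set<std::string> SchemeBoard;   // stand-in for the scheme state

    EStatus FetchPool(const std::string& path) {
        return SchemeBoard.count(path) ? EStatus::Success : EStatus::NotFound;
    }

    EStatus CreatePool(const std::string& path) {
        // "path exist, request accepts it": creation is idempotent by design
        return SchemeBoard.insert(path).second ? EStatus::Success
                                               : EStatus::AlreadyExists;
    }

    EStatus EnsureDefaultPool(const std::string& path) {
        if (FetchPool(path) == EStatus::Success) {
            return EStatus::Success;              // fast path: pool already there
        }
        const EStatus created = CreatePool(path); // NOT_FOUND -> try to create
        if (created == EStatus::AlreadyExists) {
            return FetchPool(path);               // lost the race: doublecheck
        }
        return created;
    }

    int main() {
        const std::string pool = "/Root/.metadata/workload_manager/pools/default";
        EnsureDefaultPool(pool);                  // first caller creates the pool
        EnsureDefaultPool(pool);                  // later callers hit the fast path
        std::cout << "pool entries: " << SchemeBoard.size() << '\n';  // 1
    }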
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:22.930671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.940643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.957007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.971966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.028715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.092337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890689871491285:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.092364Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.133691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.140406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.149870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.157045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.163706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.171028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.178144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.193631Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890689871491936:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.193653Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.193655Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890689871491941:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.194289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:23.198293Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890689871491943:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:23.263099Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890689871491994:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:23.359886Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890689871492010:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:23.359969Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWRkMjVmYTgtYjk4Y2RiMGQtNGNlNGY2NDYtNDE3MjlmZmQ=, ActorId: [1:7509890689871491267:2401], ActorState: ExecuteState, TraceId: 01jweaprdsa7tk2pksd22xsarv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:23.360636Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F6E7DF5DAC2 14. ??:0: ?? @ 0x7F6E7DFEF84F Trying to start YDB, gRPC: 4079, MsgBus: 3186 2025-05-29T15:31:26.526773Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890706393839169:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:26.526802Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001de2/r3tmp/tmpy4Sgmk/pdisk_1.dat 2025-05-29T15:31:26.595893Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4079, node 1 2025-05-29T15:31:26.609101Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:26.609119Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:26.609120Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:26.609173Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3186 2025-05-29T15:31:26.628359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:26.628381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:26.629481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3186 WaitRootIsUp 'Root'... 
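The NET_CLASSIFIER warnings during each server start read as a config fallback chain: the distributable config is empty, the file fallback is empty too, and the server continues after "got bad distributable configuration". Roughly, under assumed loader names:

    #include <iostream>
    #include <optional>
    #include <string>

    // Both sources come back empty in these runs; hypothetical loaders.
    std::optional<std::string> LoadDistributableConfig() { return std::nullopt; }
    std::optional<std::string> LoadConfigFromFile()      { return std::nullopt; }

    std::optional<std::string> InitNetClassifier() {
        if (auto cfg = LoadDistributableConfig()) {
            return cfg;                      // preferred: distributable config
        }
        std::cerr << "will try to initialize from file: (empty maybe)\n";
        if (auto cfg = LoadConfigFromFile()) {
            return cfg;                      // fallback: a local file
        }
        std::cerr << "got bad distributable configuration\n";
        return std::nullopt;                 // the server keeps starting anyway
    }

    int main() {
        InitNetClassifier();
    }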
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:26.670518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.676588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.737635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.757765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.769606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.888956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890706393840770:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.888986Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.935371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.942687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.951285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.959083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.013887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.021086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.035430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.051702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890710688808722:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.051724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.051750Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890710688808727:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.052372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:27.055130Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890710688808729:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:27.131025Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890710688808780:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:27.201265Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890710688808796:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:27.201368Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDIyMGJhNjAtZTBjMTE2Y2QtNGQwZjBlMWYtYjhhNmZlZDM=, ActorId: [1:7509890706393840752:2401], ActorState: ExecuteState, TraceId: 01jweapw6ba59fqd2xrn15c20q, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:27.201975Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FC9D94BEAC2 14. ??:0: ?? @ 0x7FC9D955084F >> KqpInplaceUpdate::SingleRowArithm-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsertWithIndex Test command err: Trying to start YDB, gRPC: 14848, MsgBus: 2733 2025-05-29T15:31:23.653504Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890691213651601:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:23.653526Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dd2/r3tmp/tmpOtypGx/pdisk_1.dat 2025-05-29T15:31:23.705678Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:23.706460Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890691213651571:2079] 1748532683653392 != 1748532683653395 TServer::EnableGrpc on GrpcPort 14848, node 1 2025-05-29T15:31:23.717506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:23.717521Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:23.717523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:23.717569Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2733 2025-05-29T15:31:23.755434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:23.755457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:23.756569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2733 WaitRootIsUp 'Root'... 
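The recurring FLAT_TX_SCHEMESHARD warning "Operation part proposed ok, but propose itself is undo unsafe" appears to flag multi-part schema operations whose accepted sub-operation (ESchemeOpCreateTable and friends) could not be rolled back if a later part failed; it is a warning, not an error, and every table in these runs is still created. An illustrative model, with no claim to match YDB's real types:

    #include <cstdint>
    #include <iostream>
    #include <string>
    #include <vector>

    // A multi-part schema operation: each sub-operation is proposed, and
    // parts whose propose step cannot be rolled back are flagged.
    struct TSubOperation {
        std::string Type;    // e.g. "ESchemeOpCreateTable"
        bool ProposeOk;      // the part itself validated fine
        bool UndoSafe;       // whether the propose could be undone later
    };

    void Propose(const std::vector<TSubOperation>& parts, uint64_t opId) {
        for (size_t i = 0; i < parts.size(); ++i) {
            const auto& part = parts[i];
            if (part.ProposeOk && !part.UndoSafe) {
                std::cout << "WARN: Operation part proposed ok, but propose itself"
                          << " is undo unsafe, suboperation type: " << part.Type
                          << ", opId: " << opId << ":" << i << '\n';
            }
        }
    }

    int main() {
        Propose({{"ESchemeOpCreateTable", true, false}}, 281474976715658ULL);
    }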
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:23.782435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.788667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.853764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.914111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.926525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.970677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890691213653221:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.970711Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:24.006586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.013926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.025392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.031859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.039038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.046653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.061002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.076419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890695508621173:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:24.076444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890695508621178:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:24.076448Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:24.077323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:24.080558Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890695508621180:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:24.168530Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890695508621231:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:24.241756Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890695508621247:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:24.241875Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=N2Y5NmY2ZWMtOTViYjBhZWQtYjc1OTU1M2YtM2UwZmJlNTg=, ActorId: [1:7509890691213653218:2401], ActorState: ExecuteState, TraceId: 01jweaps9c68ezpyghsesrc2tt, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:24.242544Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F5502A21AC2 14. ??:0: ?? @ 0x7F5502AB384F Trying to start YDB, gRPC: 20452, MsgBus: 22897 2025-05-29T15:31:27.198061Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890708951268931:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:27.198082Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dd2/r3tmp/tmpkPhFIr/pdisk_1.dat 2025-05-29T15:31:27.261832Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20452, node 1 2025-05-29T15:31:27.275301Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:27.275314Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:27.275316Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:27.275360Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22897 2025-05-29T15:31:27.299033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:27.299056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:27.300095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22897 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:27.340148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.352371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.413110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.433273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.443900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.495073Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890708951270525:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.495099Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.527490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.533865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.545805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.552806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.560290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.574212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.581217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.596879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890708951271178:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.596892Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890708951271183:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.596897Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.597548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:27.601287Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890708951271185:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:27.694546Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890708951271236:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:27.786258Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890708951271252:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:27.786360Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=N2UxNDlhMGItMWQ2M2IzNjEtNmI0MjIzNzYtM2U4Y2ZmZQ==, ActorId: [1:7509890708951270507:2401], ActorState: ExecuteState, TraceId: 01jweapwqcbyta388e8asb0jpx, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:27.786978Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F542CD0BAC2 14. ??:0: ?? @ 0x7F542CD9D84F >> KqpImmediateEffects::ImmediateUpdate >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink >> KqpImmediateEffects::ConflictingKeyW1RR2 >> KqpEffects::InsertAbort_Params_Duplicates-UseSink >> TGRpcStreamingTest::ClientNeverWrites >> TGRpcStreamingTest::ReadFinish |74.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest |74.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay [FAIL] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay >> TExternalTableTestReboots::DropReplacedExternalTableWithReboots [GOOD] >> TGRpcStreamingTest::ClientNeverWrites [GOOD] >> TGRpcStreamingTest::ReadFinish [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateOn Test command err: Trying to start YDB, gRPC: 21106, MsgBus: 8636 2025-05-29T15:31:24.528431Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890694933747931:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:24.528706Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dd1/r3tmp/tmphxVVAP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21106, node 1 2025-05-29T15:31:24.588011Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:24.588136Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890694933747902:2079] 1748532684528278 != 1748532684528281 2025-05-29T15:31:24.592311Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:24.592321Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:24.592322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:24.592356Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration TClient is connected to server localhost:8636 TClient is connected to server localhost:8636 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:31:24.629439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:24.629471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:24.630787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:24.655971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:24.660453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:24.675534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:24.692814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:24.705517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:24.821080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890694933749536:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:24.821108Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:24.862076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.868125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.922907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.977427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.991104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:24.998119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.005148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.021228Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890699228717488:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.021247Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890699228717493:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.021255Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.021916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:25.025347Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890699228717495:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:25.099237Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890699228717546:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:25.177959Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890699228717562:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:25.178057Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmNiZWMwZTItNjBkNjUzMjItMjE0NTkwNDQtNzVmOTIwOWI=, ActorId: [1:7509890694933749533:2401], ActorState: ExecuteState, TraceId: 01jweapt6w7r33s89n1vswta5e, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:25.178859Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FED82AC6AC2 14. ??:0: ?? @ 0x7FED82B5884F Trying to start YDB, gRPC: 64234, MsgBus: 6488 2025-05-29T15:31:28.401546Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890711837773422:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:28.401569Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dd1/r3tmp/tmpxTvG4J/pdisk_1.dat 2025-05-29T15:31:28.459964Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64234, node 1 2025-05-29T15:31:28.474148Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:28.474159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:28.474161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:28.474205Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6488 2025-05-29T15:31:28.502867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:28.502893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:28.503950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6488 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:28.520304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:28.530645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:28.546838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:28.568584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:28.579714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:28.694538Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890711837775037:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:28.694576Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:28.743258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:28.750390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:28.764010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:28.771083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:28.825956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:28.834381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:28.848660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:28.864362Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890711837775691:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:28.864386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:28.864398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890711837775696:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:28.865116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:28.868231Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890711837775698:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:28.924734Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890711837775749:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:29.026997Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890711837775765:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:29.027127Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTdhMzg5MWEtYTdjMTQ4MzYtMzJjY2Y3NDAtZTM3NDc0MTE=, ActorId: [1:7509890711837775034:2401], ActorState: ExecuteState, TraceId: 01jweapxz0529e5we2jwpvbaph, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:29.027842Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F47AC4E7AC2 14. ??:0: ?? @ 0x7F47AC57984F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] Test command err: 2025-05-29T15:31:31.764973Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890723852547400:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:31.765011Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00192f/r3tmp/tmpJx13aX/pdisk_1.dat 2025-05-29T15:31:31.808983Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890723852547379:2079] 1748532691764798 != 1748532691764801 2025-05-29T15:31:31.810080Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:31.820332Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:227: [0x1116fedcc380] stream accepted Name# Session ok# true peer# ipv6:[::1]:47446 2025-05-29T15:31:31.820444Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:301: [0x1116fedcc380] facade attach Name# Session actor# [1:7509890723852547918:2250] peer# ipv6:[::1]:47446 2025-05-29T15:31:31.820457Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:325: [0x1116fedcc380] facade read Name# Session peer# ipv6:[::1]:47446 2025-05-29T15:31:31.820479Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:401: [0x1116fedcc380] facade write Name# Session data# peer# ipv6:[::1]:47446 2025-05-29T15:31:31.820584Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:511: [0x1116fedcc380] facade finish Name# Session peer# ipv6:[::1]:47446 grpc status# (0) message# 2025-05-29T15:31:31.820593Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:456: [0x1116fedcc380] write finished Name# Session ok# true peer# ipv6:[::1]:47446 2025-05-29T15:31:31.820599Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:187: Received TEvWriteFinished, success = 1 2025-05-29T15:31:31.820657Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:353: [0x1116fedcc380] read finished Name# Session ok# false data# peer# ipv6:[::1]:47446 2025-05-29T15:31:31.820670Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:268: [0x1116fedcc380] stream done notification Name# Session ok# true peer# ipv6:[::1]:47446 
2025-05-29T15:31:31.820673Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:547: [0x1116fedcc380] stream finished Name# Session ok# true peer# ipv6:[::1]:47446 grpc status# (0) message# 2025-05-29T15:31:31.820680Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:181: Received TEvReadFinished, success = 0 2025-05-29T15:31:31.820682Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:194: Received TEvNotifiedWhenDone 2025-05-29T15:31:31.820685Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:580: [0x1116fedcc380] deregistering request Name# Session peer# ipv6:[::1]:47446 (finish done) 2025-05-29T15:31:31.867504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:31.867550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:31.868687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::DropReplacedExternalTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:11.554530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:11.554547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:11.554551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:11.554555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:11.554558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:11.554561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:11.554567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 
10 2025-05-29T15:31:11.554578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:11.554649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:11.554707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:11.563562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:11.563580Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:11.563665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:11.566002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:11.566028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:11.566060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:11.568330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:11.568396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:11.568464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:11.568606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:11.569077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:11.569099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:11.569246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:11.569252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:11.569272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:11.569278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:11.569284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:11.569301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: 
Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:11.570245Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:11.582474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:11.582542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.582603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:11.582637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:11.582645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.583257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:11.583278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:11.583319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.583334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:11.583338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:11.583341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:11.583613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.583623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:11.583628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:11.583836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.583842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:11.583846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:11.583851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:11.584250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:11.584520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:11.584547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:11.584690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:11.584707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:11.584722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:11.584763Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:31:31.838089Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1005:0 128 -> 240 2025-05-29T15:31:31.838112Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:31.838122Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:31:31.838128Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:31.838319Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:31:31.838456Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 FAKE_COORDINATOR: Erasing txId 1005 2025-05-29T15:31:31.838548Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:31.838553Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:31.838573Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:31:31.838588Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:31:31.838605Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:31.838610Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [83:206:2207], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2025-05-29T15:31:31.838615Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [83:206:2207], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2025-05-29T15:31:31.838620Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [83:206:2207], at schemeshard: 72057594046678944, txId: 1005, path id: 3 2025-05-29T15:31:31.838658Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-05-29T15:31:31.838664Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1005:0 ProgressState 2025-05-29T15:31:31.838675Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 
2025-05-29T15:31:31.838679Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:31:31.838684Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:31:31.838688Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:31:31.838692Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2025-05-29T15:31:31.838697Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:31:31.838702Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:0 2025-05-29T15:31:31.838708Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:0 2025-05-29T15:31:31.838718Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:31:31.838722Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:31:31.838727Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1005, publications: 3, subscribers: 0 2025-05-29T15:31:31.838731Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-05-29T15:31:31.838735Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:31:31.838756Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-29T15:31:31.838809Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:31:31.838818Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:31:31.838822Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:31:31.838827Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:31:31.838830Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:31:31.838887Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:31:31.838893Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:31:31.838901Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:31:31.838945Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:31:31.838952Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:31:31.838956Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:31:31.838960Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-29T15:31:31.838966Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:31.839011Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:31:31.839020Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:31:31.839024Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:31:31.839027Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:31:31.839031Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:31.839039Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2025-05-29T15:31:31.839554Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:31:31.839634Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 
2025-05-29T15:31:31.839655Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:31:31.839852Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2025-05-29T15:31:31.839898Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-05-29T15:31:31.839905Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-05-29T15:31:31.839963Z node 83 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-05-29T15:31:31.839980Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-05-29T15:31:31.839984Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [83:418:2408] TestWaitNotification: OK eventTxId 1005 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish [GOOD] Test command err: 2025-05-29T15:31:31.821519Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890725763715955:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:31.821551Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00192d/r3tmp/tmp1FOFbZ/pdisk_1.dat 2025-05-29T15:31:31.863322Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890725763715934:2079] 1748532691821352 != 1748532691821355 2025-05-29T15:31:31.864510Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:31.871602Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:227: [0x53b63eb2e380] stream accepted Name# Session ok# true peer# ipv6:[::1]:58122 2025-05-29T15:31:31.871691Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:301: [0x53b63eb2e380] facade attach Name# Session actor# [1:7509890725763716472:2250] peer# ipv6:[::1]:58122 2025-05-29T15:31:31.871700Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:325: [0x53b63eb2e380] facade read Name# Session peer# ipv6:[::1]:58122 2025-05-29T15:31:31.871722Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:511: [0x53b63eb2e380] facade finish Name# Session peer# ipv6:[::1]:58122 grpc status# (0) message# 2025-05-29T15:31:31.871825Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:353: [0x53b63eb2e380] read finished Name# Session ok# false data# peer# ipv6:[::1]:58122 2025-05-29T15:31:31.871846Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:268: [0x53b63eb2e380] stream done notification Name# Session ok# true peer# ipv6:[::1]:58122 2025-05-29T15:31:31.871846Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:265: Received TEvReadFinished, success = 0 2025-05-29T15:31:31.871881Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:547: [0x53b63eb2e380] stream finished 
Name# Session ok# true peer# ipv6:[::1]:58122 grpc status# (0) message# 2025-05-29T15:31:31.871899Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:580: [0x53b63eb2e380] deregistering request Name# Session peer# ipv6:[::1]:58122 (finish done) 2025-05-29T15:31:31.923988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:31.924010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:31.925051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected >> TGRpcStreamingTest::WritesDoneFromClient |74.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink Test command err: Trying to start YDB, gRPC: 61074, MsgBus: 7009 2025-05-29T15:31:25.230144Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890699716699822:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:25.230463Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dce/r3tmp/tmpBuxguq/pdisk_1.dat 2025-05-29T15:31:25.281625Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890699716699788:2079] 1748532685229998 != 1748532685230001 2025-05-29T15:31:25.284166Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61074, node 1 2025-05-29T15:31:25.299580Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:25.299591Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:25.299592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:25.299624Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7009 2025-05-29T15:31:25.331730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:25.331752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:25.332830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7009 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:25.359800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.367762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.386010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.407738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.418917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.548552Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890699716701425:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.548578Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.583389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.590650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.600074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.607212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.614289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.628813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.642540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.658407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890699716702079:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.658436Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.658440Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890699716702084:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.659118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:25.661954Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890699716702086:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:25.747331Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890699716702137:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:25.842024Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890699716702153:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:25.842166Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWEzMjc2ODgtYTZlYzQzLTFmMzM5ZjNiLWJkYzY0YWMw, ActorId: [1:7509890699716701407:2401], ActorState: ExecuteState, TraceId: 01jweapttt5n76gmv2bamx0c18, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:25.842873Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FA49BC6EAC2 14. ??:0: ?? @ 0x7FA49BD0084F Trying to start YDB, gRPC: 3506, MsgBus: 3664 2025-05-29T15:31:29.055029Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890716574820441:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:29.055053Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dce/r3tmp/tmpcZUxcG/pdisk_1.dat 2025-05-29T15:31:29.110598Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3506, node 1 2025-05-29T15:31:29.123730Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:29.123741Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:29.123764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:29.123801Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3664 2025-05-29T15:31:29.156460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:29.156486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:29.157534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3664 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:29.186473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.193330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.255875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.275947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.285732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.349637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890716574822035:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.349676Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.391697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.397276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.408379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.422174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.435981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.442982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.449782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.466139Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890716574822688:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.466159Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.466206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890716574822693:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.466899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:29.470653Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890716574822695:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:29.569447Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890716574822746:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:29.678305Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890716574822762:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:29.678421Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NGY1NGExY2ItYTU0ZDE4MWUtODBhYmQ4NzAtMjAyZmM1NmY=, ActorId: [1:7509890716574822032:2401], ActorState: ExecuteState, TraceId: 01jweapyhs509rxg9qttafn69d, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:29.679074Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FA5AAD73AC2 14. ??:0: ?? @ 0x7FA5AAE0584F >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay [FAIL] >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink Test command err: Trying to start YDB, gRPC: 15739, MsgBus: 11507 2025-05-29T15:31:25.397470Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890700497039635:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:25.397496Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dc9/r3tmp/tmpEwfVf7/pdisk_1.dat 2025-05-29T15:31:25.448292Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15739, node 1 2025-05-29T15:31:25.464973Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:25.464989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:25.464991Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:25.465033Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11507 2025-05-29T15:31:25.498534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:25.498559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:25.499650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11507 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:25.523910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.528450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.543399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.561584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.573643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.704470Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890700497041233:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.704504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.740618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.747635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.761407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.775224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.782031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.796316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.803157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.819318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890700497041885:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.819347Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.819348Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890700497041890:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.820013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:25.823415Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890700497041892:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:25.902146Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890700497041943:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:26.021326Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890700497041959:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:26.021471Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2QxZTJmMzUtNjcyZTIxN2UtYTlhM2ZhYzEtMzViZDA3YTE=, ActorId: [1:7509890700497041215:2401], ActorState: ExecuteState, TraceId: 01jweaptzt9xnp1k8xab6jn2z1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:26.021991Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F97D852EAC2 14. ??:0: ?? @ 0x7F97D85C084F Trying to start YDB, gRPC: 9805, MsgBus: 10407 2025-05-29T15:31:29.246944Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890716919398123:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:29.246961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dc9/r3tmp/tmpuDA5DT/pdisk_1.dat 2025-05-29T15:31:29.295283Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9805, node 1 2025-05-29T15:31:29.312524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:29.312534Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:29.312536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:29.312568Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10407 2025-05-29T15:31:29.347264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:29.347290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:29.348316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10407 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:29.373126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.386199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.403113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.420144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.430209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.548096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890716919399709:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.548125Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.585638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.592547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.604433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.659006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.667286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.681585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.695740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.712319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890716919400365:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.712346Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.712348Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890716919400370:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.713172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:29.715086Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890716919400372:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:29.816227Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890716919400423:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:29.904106Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890716919400439:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:29.904217Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDgzY2M0M2ItMWQ3OTEyNGUtMTBhNzU5NzItYWVkMjk1Zg==, ActorId: [1:7509890716919399706:2401], ActorState: ExecuteState, TraceId: 01jweapysf9akayycn6kg2zg95, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:29.904954Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FFA22C1CAC2 14. ??:0: ?? @ 0x7FFA22CAE84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::AlreadyBrokenImmediateEffects Test command err: Trying to start YDB, gRPC: 6694, MsgBus: 29797 2025-05-29T15:31:25.424976Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890699108550979:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:25.425010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dc7/r3tmp/tmpIJ5y6z/pdisk_1.dat 2025-05-29T15:31:25.494174Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6694, node 1 2025-05-29T15:31:25.506405Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:25.506427Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:25.506429Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:25.506471Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29797 2025-05-29T15:31:25.526786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:25.526827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:25.527921Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29797 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:25.568591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.576562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.637445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.658097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.668860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.734960Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890699108552597:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.734986Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.769946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.776542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.788781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.796061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.803172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.817124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.824219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.840434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890699108553249:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.840463Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.840485Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890699108553254:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.841071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:25.844321Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890699108553256:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:25.897865Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890699108553307:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:26.016081Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890699108553323:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:26.016204Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmFkMjM0NTEtNGRmYjMzNWMtNjMzMmM1MzYtODg3NTY5NTk=, ActorId: [1:7509890699108552594:2401], ActorState: ExecuteState, TraceId: 01jweapv0geyqccv505p6xp3e9, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:26.016809Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FD317B61AC2 14. ??:0: ?? @ 0x7FD317BF384F Trying to start YDB, gRPC: 14987, MsgBus: 29740 2025-05-29T15:31:29.143312Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890717798890053:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:29.143596Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dc7/r3tmp/tmpsnaooH/pdisk_1.dat 2025-05-29T15:31:29.199829Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14987, node 1 2025-05-29T15:31:29.216375Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:29.216391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:29.216393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:29.216424Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29740 2025-05-29T15:31:29.244812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:29.244844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:29.245981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29740 WaitRootIsUp 'Root'... 
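Both runs above fail with one signature: query compilation aborts with INTERNAL_ERROR ("index out of range" raised from yql/essentials/ast/yql_expr.h:1874), CreateSampleTables feeds the failed NYdb::TStatus into NKikimr::NKqp::AssertSuccessResult, and the VERIFY fires on a thread-pool ("non-unittest") thread, panicking the whole test binary rather than failing a single case. Below is a minimal sketch of such an assertion helper, reconstructed from the logged signature and condition; the real implementation lives in ydb/core/kqp/ut/common/kqp_ut_common.h and may differ, and the SDK header path is an assumption:

    // Hypothetical reconstruction, not the actual kqp_ut_common.h source.
    #include <util/system/yassert.h>  // Y_ABORT_UNLESS (VERIFY-style assert)
    #include <ydb/public/sdk/cpp/client/ydb_types/status/status.h>  // NYdb::TStatus (path assumed)

    namespace NKikimr::NKqp {

    inline void AssertSuccessResult(const NYdb::TStatus& result) {
        // The log shows the failed condition verbatim: (result.IsSuccess()).
        // A VERIFY that fires outside the unittest thread panics the whole
        // process, which is why one compile error kills the entire run.
        Y_ABORT_UNLESS(result.IsSuccess(), "%s", result.GetIssues().ToString().c_str());
    }

    } // namespace NKikimr::NKqp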
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:29.286937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.292545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.354364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.375609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.386147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.476461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890717798891644:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.476501Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.510914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.517187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.571798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.583391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.597196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.611427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.625549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.641553Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890717798892298:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.641578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890717798892303:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.641585Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.642218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:29.645607Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890717798892305:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:29.723444Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890717798892356:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:29.844373Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890717798892372:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:29.844491Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzQzOTYyYjctNzFjMmMyYTctYjhkOWQxYjktNWE0OGRkZg==, ActorId: [1:7509890717798891641:2401], ActorState: ExecuteState, TraceId: 01jweapyq9ag0pcp48jcj5qygy, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:29.845088Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F1186C42AC2 14. ??:0: ?? @ 0x7F1186CD484F |74.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink |74.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers >> KqpEffects::UpdateOn_Params >> KqpBatchUpdate::NotIdempotent |74.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] Test command err: 2025-05-29T15:31:32.790250Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890728510121449:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:32.790275Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001917/r3tmp/tmppI16BC/pdisk_1.dat 2025-05-29T15:31:32.848249Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:227: [0x12c03efe0380] stream accepted Name# Session ok# true peer# ipv6:[::1]:47666 2025-05-29T15:31:32.848377Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:301: [0x12c03efe0380] facade attach Name# Session actor# [1:7509890728510121966:2250] peer# ipv6:[::1]:47666 2025-05-29T15:31:32.848391Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:325: [0x12c03efe0380] facade read Name# Session peer# ipv6:[::1]:47666 2025-05-29T15:31:32.848472Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:353: [0x12c03efe0380] read finished Name# Session ok# false data# peer# ipv6:[::1]:47666 2025-05-29T15:31:32.848498Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:302: Received TEvReadFinished, success = 0 2025-05-29T15:31:32.848513Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:511: [0x12c03efe0380] facade finish Name# Session peer# ipv6:[::1]:47666 grpc status# (9) message# Everything is A-OK 2025-05-29T15:31:32.848653Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:268: [0x12c03efe0380] stream done notification Name# Session 
ok# true peer# ipv6:[::1]:47666 2025-05-29T15:31:32.848668Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:547: [0x12c03efe0380] stream finished Name# Session ok# true peer# ipv6:[::1]:47666 grpc status# (9) message# Everything is A-OK 2025-05-29T15:31:32.848673Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:580: [0x12c03efe0380] deregistering request Name# Session peer# ipv6:[::1]:47666 (finish done) 2025-05-29T15:31:32.848685Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:312: Received TEvNotifiedWhenDone 2025-05-29T15:31:32.850538Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:32.850805Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890728510121426:2079] 1748532692790058 != 1748532692790061 2025-05-29T15:31:32.892635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:32.892669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:32.893832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateAfterInsert Test command err: Trying to start YDB, gRPC: 22909, MsgBus: 17138 2025-05-29T15:31:25.559723Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890701473675490:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:25.559750Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dc4/r3tmp/tmpgsEkAE/pdisk_1.dat 2025-05-29T15:31:25.615846Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22909, node 1 2025-05-29T15:31:25.628687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:25.628697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:25.628698Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:25.628732Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17138 2025-05-29T15:31:25.660848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:25.660882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:25.661959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17138 WaitRootIsUp 'Root'... 
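For contrast, the TGRpcStreamingTest::WritesDoneFromClient trace above shows the passing half-close flow: the server reads until the client calls WritesDone (the "read finished ... ok# false" event), then finishes the stream with gRPC status 9 and the message "Everything is A-OK". A rough sketch of that server-side pattern using the synchronous gRPC C++ API follows — the test itself drives YDB's async grpc_streaming facade, and Msg is a placeholder type:

    #include <grpcpp/grpcpp.h>

    // Msg stands in for the real request/response protobuf; the actual test
    // uses ydb/core/grpc_streaming's async facade rather than the sync API.
    template <class Msg>
    grpc::Status HandleSession(grpc::ServerContext* /*ctx*/,
                               grpc::ServerReaderWriter<Msg, Msg>* stream) {
        Msg in;
        // Read() returns false once the peer half-closes with WritesDone(),
        // matching "read finished Name# Session ok# false" in the trace.
        while (stream->Read(&in)) {
            // this scenario expects no client payload
        }
        // Status code 9 is FAILED_PRECONDITION — the "grpc status# (9)
        // message# Everything is A-OK" the server logs before "stream done".
        return grpc::Status(grpc::StatusCode::FAILED_PRECONDITION, "Everything is A-OK");
    }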
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:25.690711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.694224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.754973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.777112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.786784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:25.868799Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890701473677087:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.868840Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:25.905780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.913105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.968066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.977787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.984727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:25.992124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.006536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.022477Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890705768645037:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.022507Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.022512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890705768645042:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.023279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:26.026128Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890705768645044:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:26.122204Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890705768645095:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:26.193838Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890705768645111:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:26.194007Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTk1NmFlNWUtY2ZkZDE1NDAtMjRhN2ZhMzUtNzFiNjU5NzI=, ActorId: [1:7509890701473677069:2401], ActorState: ExecuteState, TraceId: 01jweapv66aht8s65n3hgygtz3, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:26.194798Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F0B6BF7EAC2 14. ??:0: ?? @ 0x7F0B6C01084F Trying to start YDB, gRPC: 3492, MsgBus: 24845 2025-05-29T15:31:29.420716Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890718467033617:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:29.420742Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dc4/r3tmp/tmpQAeMmW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3492, node 1 2025-05-29T15:31:29.477531Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:29.477615Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890718467033586:2079] 1748532689420579 != 1748532689420582 2025-05-29T15:31:29.486475Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:29.486489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:29.486491Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:29.486536Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24845 2025-05-29T15:31:29.521932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:29.521964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:29.523024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24845 WaitRootIsUp 'Root'... 
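One pattern repeats at every server bootstrap in this log and is benign: the workload service fails to fetch the "default" resource pool (NOT_FOUND), submits ESchemeOpCreateResourcePool, retries with "Transaction ... completed, doublechecking", and the create finally reports "path exist, request accepts it" — a concurrent creator won the race and the operation is accepted as idempotent. In client terms the flow reduces to a create-or-accept loop; the sketch below is an illustration under that reading, not the actual TPoolFetcherActor/TPoolCreatorActor logic, and every name in it is a placeholder:

    // Illustration of the idempotent create-or-accept flow seen in the log;
    // not the real workload-service actor implementation.
    enum class EStatus { Success, NotFound, AlreadyExists, Retryable };

    EStatus FetchPool()  { return EStatus::NotFound; }      // stand-in for the pool fetch round-trip
    EStatus CreatePool() { return EStatus::AlreadyExists; } // stand-in for ESchemeOpCreateResourcePool

    bool EnsureDefaultPool() {
        if (FetchPool() == EStatus::Success) {
            return true;                 // pool already provisioned
        }
        switch (CreatePool()) {
            case EStatus::Success:
            case EStatus::AlreadyExists: // "path exist, request accepts it"
                return true;
            case EStatus::Retryable:     // "Scheduled retry ... doublechecking"
            default:
                return false;            // caller re-schedules, as the actor does
        }
    }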
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:29.551746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.561513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.622687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.643563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.654723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.747970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890718467035223:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.747996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.782425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.789401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.800101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.854799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.863349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.877696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.891389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.907390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890718467035877:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.907416Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890718467035882:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.907418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.908086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:29.911298Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890718467035884:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:29.997440Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890718467035936:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:30.067829Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890718467035952:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:30.067917Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmJiNWM2MzAtNzkzNDlmNGYtNDRkOTE2MTAtNmZkYzQzMDM=, ActorId: [1:7509890718467035205:2401], ActorState: ExecuteState, TraceId: 01jweapyzk69qxgk61nn5ye3j3, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:30.068567Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F29F57C0AC2 14. ??:0: ?? @ 0x7F29F585284F >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> BasicStatistics::TwoServerlessDbs >> Yq_1::DescribeConnection ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:31:33.250289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:33.250310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:33.250314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:33.250318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:33.250330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:33.250333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:33.250339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:33.250360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-05-29T15:31:33.250455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:33.250524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:33.260403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:31:33.260430Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:33.263320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:33.263475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:33.263543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:33.265194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:33.265395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:33.265521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:33.265585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:33.266174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:33.266231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:33.266543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:33.266555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:33.266586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:33.266594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:33.266601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:33.266636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:31:33.268037Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:31:33.289187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:33.289287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:33.289360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:33.289407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:33.289418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:33.290249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:33.290275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:33.290339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:33.290362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:33.290369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:33.290375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:33.290815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:33.290826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:33.290832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:33.291141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:33.291152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:33.291158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:33.291167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:33.291875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 
0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:33.292240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:33.292281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:33.292479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:33.292504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:33.292512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:33.292580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:31:33.292588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:33.292621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:31:33.292633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:31:33.292997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:33.293005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:33.293049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:31:33.359988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:31:33.359994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-05-29T15:31:33.360004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:31:33.360175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:31:33.360187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-05-29T15:31:33.360191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-05-29T15:31:33.360195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-29T15:31:33.360200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-29T15:31:33.360210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-05-29T15:31:33.360319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6285: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 320 } } CommitVersion { Step: 5000002 TxId: 101 } 2025-05-29T15:31:33.360326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-05-29T15:31:33.360346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 320 } } CommitVersion { Step: 5000002 TxId: 101 } 2025-05-29T15:31:33.360363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 
ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 320 } } CommitVersion { Step: 5000002 TxId: 101 } 2025-05-29T15:31:33.360514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-05-29T15:31:33.360520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-05-29T15:31:33.360534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-05-29T15:31:33.360542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:31:33.360552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 310 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-05-29T15:31:33.360563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:33.360568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:31:33.360573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:31:33.360579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 101:0 129 -> 240 2025-05-29T15:31:33.360951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:31:33.361288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-05-29T15:31:33.361306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:31:33.361325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:31:33.361342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-05-29T15:31:33.361349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 101:0 ProgressState 2025-05-29T15:31:33.361363Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:31:33.361368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:31:33.361374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#101:0 progress is 1/1 2025-05-29T15:31:33.361377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:31:33.361383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-05-29T15:31:33.361395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2316] message: TxId: 101 2025-05-29T15:31:33.361402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-05-29T15:31:33.361409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 101:0 2025-05-29T15:31:33.361415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 101:0 2025-05-29T15:31:33.361438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-29T15:31:33.361805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-05-29T15:31:33.361815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:339:2317] TestWaitNotification: OK eventTxId 101 2025-05-29T15:31:33.361937Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:33.361988Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 58us result status StatusSuccess 2025-05-29T15:31:33.362110Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { 
DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ReplaceDuplicates Test command err: Trying to start YDB, gRPC: 9917, MsgBus: 12577 2025-05-29T15:31:26.281733Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890704588526967:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:26.281765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dba/r3tmp/tmpyAUrFi/pdisk_1.dat 2025-05-29T15:31:26.337884Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9917, node 1 2025-05-29T15:31:26.351424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:26.351437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:26.351439Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:26.351479Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12577 2025-05-29T15:31:26.382592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:26.382621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:26.383696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12577 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:26.413199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.419168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.482422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.502394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.513203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.631527Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890704588528564:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.631565Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.666535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.673404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.685186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.691467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.698881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.706168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.720432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.735921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890704588529215:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.735949Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.735957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890704588529220:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.736614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:26.740317Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890704588529222:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:26.836584Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890704588529273:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:26.913383Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890704588529289:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:26.913523Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Y2Q5ZWJlLTRiNWNkOTE1LTllZDNmZTg1LTYyZGE1ZTQ0, ActorId: [1:7509890704588528561:2401], ActorState: ExecuteState, TraceId: 01jweapvwf9qm6dze8vwmtte3h, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:26.914211Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FFA6D6AAAC2 14. ??:0: ?? @ 0x7FFA6D73C84F Trying to start YDB, gRPC: 3496, MsgBus: 19265 2025-05-29T15:31:30.150429Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890719772744445:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:30.150453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dba/r3tmp/tmpgUeMbA/pdisk_1.dat 2025-05-29T15:31:30.209352Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3496, node 1 2025-05-29T15:31:30.225175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:30.225188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:30.225190Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:30.225233Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19265 2025-05-29T15:31:30.250950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:30.250972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:30.252024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19265 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:30.278133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.284559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.350226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.408718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.419458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.455031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890719772746051:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.455054Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.504907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.511903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.521567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.576340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.584195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.598378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.605296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.622208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890719772746706:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.622238Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890719772746711:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.622259Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.623026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:30.625431Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890719772746713:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:30.687942Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890719772746764:3395] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:30.792689Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890719772746780:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:30.792771Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjZhYzFlNTktODE0ZDljYmQtNzQzZjQ4MmMtYTQxMmUzMzA=, ActorId: [1:7509890719772746033:2401], ActorState: ExecuteState, TraceId: 01jweapznx6r6hrvzafmz6nrmy, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:30.793319Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F2CC46DDAC2 14. ??:0: ?? @ 0x7F2CC476F84F |75.0%| [TA] $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |75.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::ReplaceExistingKey >> TConsistentOpsWithReboots::DropWithData |75.0%| [TA] {RESULT} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> IntermediateDirsReboots::CreateTableWithIntermediateDirsAndRejectInTable >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay [FAIL] >> TPersQueueTest::PartitionsMapping >> KqpEffects::AlterDuringUpsertTransaction+UseSink >> KqpBatchDelete::DeleteOn |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |75.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1WRR2 Test command err: Trying to start YDB, gRPC: 12985, MsgBus: 27142 2025-05-29T15:31:26.280080Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890705980904350:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:26.280100Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dbf/r3tmp/tmp6fSfnK/pdisk_1.dat 2025-05-29T15:31:26.333342Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12985, node 1 2025-05-29T15:31:26.349131Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:26.349143Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:26.349145Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:26.349187Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27142 2025-05-29T15:31:26.381341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:26.381380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:26.382481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27142 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:26.409813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.416121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:31:26.432108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.451807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.462652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.617414Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890705980905950:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.617435Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.657599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.664696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.678532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.692450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.706315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.720896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.735083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:26.750594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890705980906602:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.750619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.750667Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890705980906607:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:26.751447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:26.754428Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890705980906609:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:26.845617Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890705980906660:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:26.932099Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890705980906676:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:26.932193Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjQxNDI1NTMtN2E0MmUyMjItMjZjYzExYzktZjNkYTRiOWE=, ActorId: [1:7509890705980905932:2401], ActorState: ExecuteState, TraceId: 01jweapvwy6pn39mk3feh7qtvj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:26.932907Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F1A2F1D7AC2 14. ??:0: ?? @ 0x7F1A2F26984F Trying to start YDB, gRPC: 7241, MsgBus: 10410 2025-05-29T15:31:30.077911Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890722874975574:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:30.077943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001dbf/r3tmp/tmpoIEhlL/pdisk_1.dat 2025-05-29T15:31:30.139843Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7241, node 1 2025-05-29T15:31:30.154830Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:30.154846Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:30.154848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:30.154898Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10410 2025-05-29T15:31:30.179239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:30.179269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:30.180380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10410 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:30.209369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.220590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.283972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.301155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.311889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.390414Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890722874977166:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.390435Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.430983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.485663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.493338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.548486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.555922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.610995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.619361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.635917Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890722874977825:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.635950Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.635990Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890722874977830:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.636681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:30.639366Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890722874977832:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:30.723819Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890722874977883:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:30.792375Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890722874977899:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:30.792465Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzczYmNkNTQtNzg0NmIyYmYtNWFmMjgzODYtOWI4Njk0ODk=, ActorId: [1:7509890722874977148:2401], ActorState: ExecuteState, TraceId: 01jweapzpb2r78c1vycmw6k38p, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:30.792979Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F70EC7F6AC2 14. ??:0: ?? @ 0x7F70EC88884F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Write_Only_Big_Messages_In_Wide_Transactions_Query [FAIL] Test command err: 2025-05-29T15:30:29.281308Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890457693527062:2195];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:29.281382Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:30:29.458832Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0022e1/r3tmp/tmp1oqPzO/pdisk_1.dat 2025-05-29T15:30:29.645110Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890457693526908:2079] 1748532629268888 != 1748532629268891 2025-05-29T15:30:29.686864Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:29.709073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:29.709106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11848, node 1 2025-05-29T15:30:29.710503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:29.722406Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:30:29.722422Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:30:29.932703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/0022e1/r3tmp/yandexf9orjo.tmp 2025-05-29T15:30:29.932718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/0022e1/r3tmp/yandexf9orjo.tmp 2025-05-29T15:30:29.932904Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: 
/home/runner/.ya/build/build_root/ciyv/0022e1/r3tmp/yandexf9orjo.tmp 2025-05-29T15:30:29.932986Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:30:30.064215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890461988494835:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:30.064241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:30.071390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890461988494847:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:30.107665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-05-29T15:30:30.126499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-29T15:30:30.126625Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890461988494849:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-05-29T15:30:30.175603Z INFO: TTestServer started on Port 31843 GrpcPort 11848 TClient is connected to server localhost:31843 2025-05-29T15:30:30.246685Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890461988494906:2306] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } PQClient connected to localhost:11848 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1748532630166 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 184467440737... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-29T15:30:30.304263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-29T15:30:30.308637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:30.345736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-05-29T15:30:30.351986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:30:30.592097Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890461988494923:2330], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:30:30.593056Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTgyMjQwMjQtYThhZWQyMjUtNmEwODkxZDQtYTIyYzAxMzk=, ActorId: [1:7509890461988494833:2321], ActorState: ExecuteState, TraceId: 01jwean4hg66v62zsxfgkd4w0h, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:30:30.593666Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:30:30.743217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:30.753697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:30.781698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:30:31.055438Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890461988495357:2380], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:31.055562Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2FjNmIzODAtYTEyM2RiOTgtZWUwMmY2ZGQtZDg5NTZhNzI=, ActorId: [1:7509890461988495354:2378], ActorState: ExecuteState, TraceId: 01jwean58d6jwvagkdx91b9sgm, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:30:31.886879Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 187 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C00AEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DB87C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x26260E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x26260598) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator Disconnected 2025-05-29T15:30:49.196462Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:49.196758Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:49.197638Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:49.199338Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:30:49.346580Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509890543443308886:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:49.346595Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509890543443308863:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:49.346648Z node 21 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:49.347155Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:30:49.347196Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509890543443308921:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:49.347208Z node 21 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:49.348639Z node 21 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [21:7509890543443308892:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:30:49.350582Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:49.405457Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:49.422324Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:49.436205Z node 21 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [21:7509890543443309166:2553] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:49.439480Z node 21 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [21:7509890543443309174:2371], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:30:49.439573Z node 21 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=21&id=Yjc3N2EwOWYtMWU1MGVjOWYtMzE1NDUyMjgtNDIxYjc0Mjg=, ActorId: [21:7509890543443308860:2334], ActorState: ExecuteState, TraceId: 01jweanqc228hk2gw7ej0qcqs7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:30:49.439719Z node 21 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:30:49.491128Z node 21 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [21:7509890543443309212:2380], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:49.491232Z node 21 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=21&id=NmEwMGYzN2QtMWY1ODdiMTItNGUxOWFhNzQtNGU3ZDhkYTI=, ActorId: [21:7509890543443309209:2378], ActorState: ExecuteState, TraceId: 01jweanqg7cdge2t374wavk54e, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C00AEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DB87C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x26260E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x26260598) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x2625F7E2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x2625D837) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x26255FB8) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262550B6) std::__y1::__unique_if::__unique_single std::__y1::make_unique[abi:fe200000](char const*&, NKikimr::Tests::TServerSettings&)+70 (0x13A73A36) NYdb::NTopic::NTests::NTestSuiteTxUsage::TFixture::SetUp(NUnitTest::TTestContext&)+392 (0x13A73508) NYdb::NTopic::NTests::NTestSuiteTxUsage::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13AE86D1) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13DBA67E) NYdb::NTopic::NTests::NTestSuiteTxUsage::TCurrentTest::Execute()+422 (0x13AE80A6) NUnitTest::TTestFactory::Execute()+803 (0x13DBADF3) NUnitTest::RunMain(int, char**)+3021 (0x13DCC99D) ??+0 (0x7F76D6F6AD90) __libc_start_main+128 (0x7F76D6F6AE40) _start+41 (0x12ADC029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertConflictTxAborted Test command err: Trying to start YDB, gRPC: 27530, MsgBus: 2763 2025-05-29T15:31:26.746440Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890702939489948:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:26.746481Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001db5/r3tmp/tmpCO4fC0/pdisk_1.dat 2025-05-29T15:31:26.808183Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:26.808264Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890702939489918:2079] 1748532686746284 != 1748532686746287 TServer::EnableGrpc on GrpcPort 27530, node 1 2025-05-29T15:31:26.823939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:26.823964Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:26.823966Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:26.824013Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2763 2025-05-29T15:31:26.848251Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:26.848282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:26.849350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2763 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:26.875957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.881954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.898705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.918898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.928965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.069625Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890707234458844:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.069649Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.109315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.164126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.175105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.229550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.284110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.294655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.308510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.324574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890707234459501:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.324607Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.324621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890707234459506:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.325321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:27.328257Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890707234459508:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:27.387488Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890707234459559:3395] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:27.461685Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890707234459575:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:27.461775Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTQ3MmUzMGQtNTNmNTQ5OC0xYTk2ZjIwMi0xODllOWUwYQ==, ActorId: [1:7509890707234458826:2401], ActorState: ExecuteState, TraceId: 01jweapwew92hmy38w1yw6nby9, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:27.462476Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7EFE4B5F1AC2 14. ??:0: ?? @ 0x7EFE4B68384F Trying to start YDB, gRPC: 32319, MsgBus: 30525 2025-05-29T15:31:30.637387Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890722832722280:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:30.637411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001db5/r3tmp/tmpHGFe9E/pdisk_1.dat 2025-05-29T15:31:30.684149Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32319, node 1 2025-05-29T15:31:30.699497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:30.699510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:30.699511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:30.699542Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30525 TClient is connected to server localhost:30525 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:31:30.738627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:30.738660Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:30.739802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:30.745227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.755676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.819265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.835913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.846536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.923948Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890722832723876:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.923985Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.957825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.965659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.976790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.990553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.004291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.018892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.032744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.048538Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890727127691825:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.048547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890727127691830:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.048571Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.049205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:31.052574Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890727127691832:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:31.111901Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890727127691883:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:31.200009Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890727127691899:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:31.200107Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzNjMGQ1NmMtZTI0OWFmYjAtMjcyNDgxYWYtZmY4NmVhODA=, ActorId: [1:7509890722832723873:2401], ActorState: ExecuteState, TraceId: 01jweaq0385r3syacpq4hw9j4w, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:31.200811Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F3AC109BAC2 14. ??:0: ?? @ 0x7F3AC112D84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_48_Query [FAIL] Test command err: 2025-05-29T15:30:29.477584Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890461163947142:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:29.477626Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:30:29.515289Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0022bd/r3tmp/tmphXW74N/pdisk_1.dat 2025-05-29T15:30:29.646192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:29.646238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:29.672601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:29.675815Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890461163947123:2079] 1748532629477409 != 1748532629477412 2025-05-29T15:30:29.690248Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9971, node 1 2025-05-29T15:30:29.721994Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:30:29.722341Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:529: SchemeBoardDelete /Root Strong=0 2025-05-29T15:30:29.932808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/0022bd/r3tmp/yandexz9LO9W.tmp 2025-05-29T15:30:29.932824Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/0022bd/r3tmp/yandexz9LO9W.tmp 2025-05-29T15:30:29.932930Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: 
/home/runner/.ya/build/build_root/ciyv/0022bd/r3tmp/yandexz9LO9W.tmp 2025-05-29T15:30:29.933008Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:30:30.063911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890465458915044:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:30.063948Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:30.071586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890465458915056:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:30.103841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-05-29T15:30:30.126215Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890465458915058:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-05-29T15:30:30.178181Z INFO: TTestServer started on Port 64034 GrpcPort 9971 TClient is connected to server localhost:64034 2025-05-29T15:30:30.243489Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890465458915115:2305] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } PQClient connected to localhost:9971 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1748532630159 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 184467440737... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:30.295692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:30.303466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:30.317841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:30:30.475255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-05-29T15:30:30.592056Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890465458915132:2330], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:30:30.593084Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWMzNDcyOTEtNTQzNjY3MDUtZDNjNWFkODAtOTg3Mjc1ZQ==, ActorId: [1:7509890465458915042:2321], ActorState: ExecuteState, TraceId: 01jwean4hg4dc80ear20anazkf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:30:30.593660Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:30:30.743217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:30.751210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:30:30.781700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:30:31.055439Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890465458915567:2379], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:31.055536Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWMyOTJmNmYtMTg1NWM1YTUtMzM1MjkyMjktZWEyNjg1ZGI=, ActorId: [1:7509890465458915564:2377], ActorState: ExecuteState, TraceId: 01jwean58dcc3yz66e2pwdq0jq, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:30:31.886892Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 191 } assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C00AEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DB87C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x26260E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x26260598) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x2625F7E2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap Disconnected 2025-05-29T15:30:49.028857Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:49.029183Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:49.029772Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:30:49.038896Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-29T15:30:49.172840Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509890546322336405:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:49.172871Z node 21 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:49.172908Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509890546322336417:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:49.173478Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:30:49.173662Z node 21 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [21:7509890546322336449:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:49.173677Z node 21 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:49.174925Z node 21 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [21:7509890546322336419:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:30:49.178025Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:30:49.232185Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:30:49.241016Z node 21 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [21:7509890546322336623:2521] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:30:49.244546Z node 21 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [21:7509890546322336642:2360], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-29T15:30:49.244941Z node 21 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=21&id=NTBlNDZkMDQtMjkxMDM3ZGQtYmQxYTBmMzEtNzY2ZDU4MWM=, ActorId: [21:7509890546322336387:2334], ActorState: ExecuteState, TraceId: 01jweanq6ma520qse08jvztnhy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-29T15:30:49.245052Z node 21 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-05-29T15:30:49.246188Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-05-29T15:30:49.268759Z node 21 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [21:7509890546322336738:2380], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:30:49.268858Z node 21 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=21&id=YWFkNzM0Ni05YzE3ODE3Yy0yOGM5MzZhNS00YWQwYmZiNA==, ActorId: [21:7509890546322336735:2378], ActorState: ExecuteState, TraceId: 01jweanq9972qt0mapqvxm5qbp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x13C00AEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13DB87C9) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x26260E34) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x26260598) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x2625F7E2) NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x2625D837) NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x26255FB8) NYdb::NTopic::NTests::TTopicSdkTestSetup::TTopicSdkTestSetup(TBasicString> const&, NKikimr::Tests::TServerSettings const&, bool)+582 (0x262550B6) std::__y1::__unique_if::__unique_single std::__y1::make_unique[abi:fe200000](char const*&, NKikimr::Tests::TServerSettings&)+70 (0x13A73A36) NYdb::NTopic::NTests::NTestSuiteTxUsage::TFixture::SetUp(NUnitTest::TTestContext&)+392 (0x13A73508) NYdb::NTopic::NTests::NTestSuiteTxUsage::TCurrentTest::Execute()::'lambda'()::operator()() const+49 (0x13AE86D1) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13DBA67E) NYdb::NTopic::NTests::NTestSuiteTxUsage::TCurrentTest::Execute()+422 (0x13AE80A6) NUnitTest::TTestFactory::Execute()+803 (0x13DBADF3) NUnitTest::RunMain(int, char**)+3021 (0x13DCC99D) ??+0 (0x7F7C7BE3AD90) __libc_start_main+128 (0x7F7C7BE3AE40) _start+41 (0x12ADC029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink Test command err: Trying to start YDB, gRPC: 6675, MsgBus: 31029 2025-05-29T15:31:22.752799Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890686873357814:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:22.753068Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001de5/r3tmp/tmp7zMaC6/pdisk_1.dat 2025-05-29T15:31:22.808966Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6675, node 1 2025-05-29T15:31:22.822856Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:22.822874Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:22.822877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:22.822920Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31029 2025-05-29T15:31:22.854161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:22.854188Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:22.855270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31029 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:22.886815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.891777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.954516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.971791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:22.983180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:23.064102Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890691168326701:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.064122Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.117571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.123591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.135915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.142902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.197158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.205710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.212877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:23.222852Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890691168327355:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.222875Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.222876Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890691168327360:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:23.223497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:23.226002Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890691168327362:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:23.280466Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890691168327413:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:23.363006Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890691168327429:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:23.363147Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTllOTI3ZjMtN2U5NjhjODktNDA5M2U5OWEtMTJlNDZmNzI=, ActorId: [1:7509890691168326698:2401], ActorState: ExecuteState, TraceId: 01jweaprenfx5sesyg452vw5rb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:23.363800Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F6271FE6AC2 14. ??:0: ?? @ 0x7F627207884F Trying to start YDB, gRPC: 7981, MsgBus: 14088 2025-05-29T15:31:26.477094Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890703279891628:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:26.477248Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001de5/r3tmp/tmpNRzKLO/pdisk_1.dat 2025-05-29T15:31:26.525215Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7981, node 1 2025-05-29T15:31:26.543527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:26.543540Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:26.543542Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 202 ... st/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F45A1498AC2 14. ??:0: ?? 
@ 0x7F45A152A84F Trying to start YDB, gRPC: 15720, MsgBus: 21472 2025-05-29T15:31:30.169190Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890721149299259:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:30.169210Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001de5/r3tmp/tmpYn7HNM/pdisk_1.dat 2025-05-29T15:31:30.220536Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15720, node 1 2025-05-29T15:31:30.238787Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:30.238802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:30.238803Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:30.238844Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21472 2025-05-29T15:31:30.271159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:30.271185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:30.272271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:30.290515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.299325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:31:30.314207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.373902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.386603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.479274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890721149300860:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.479298Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.538054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.544796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.556536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.570450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.625232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.633349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.647610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.664239Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890721149301514:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.664271Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.664290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890721149301519:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.665008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:30.667176Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890721149301521:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:30.753922Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890721149301572:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:30.877086Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890721149301588:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:30.877197Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODgyZGU2ZDUtMWJmY2U1ZDAtMThkYTRjNWMtNzZlZGQ4N2U=, ActorId: [1:7509890721149300842:2401], ActorState: ExecuteState, TraceId: 01jweapzq7dkgx2q5z81ytbt9f, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:30.877923Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F985784FAC2 14. ??:0: ?? @ 0x7F98578E184F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink Test command err: Trying to start YDB, gRPC: 3345, MsgBus: 27665 2025-05-29T15:31:26.740863Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890702868449319:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:26.740882Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001db4/r3tmp/tmpYiqzEZ/pdisk_1.dat 2025-05-29T15:31:26.786914Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3345, node 1 2025-05-29T15:31:26.803856Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:26.803864Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:26.803866Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:26.803895Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27665 TClient is connected to server localhost:27665 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:31:26.842154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:26.842173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-29T15:31:26.843005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:26.867136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.871576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.889660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.908750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:26.919007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.033398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890707163418209:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.033422Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.066706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.121720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.133483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.147574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.161069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.168073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.175366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.191075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890707163418862:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.191107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.191106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890707163418867:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.191691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:27.195521Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890707163418869:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:27.252325Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890707163418920:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:27.342351Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890707163418936:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:27.342436Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjZjNjQ2NDYtYzhhNGU5OWQtNjMxZDZlZTYtZWM1NjZkNzI=, ActorId: [1:7509890707163418191:2401], ActorState: ExecuteState, TraceId: 01jweapwapdehm093c8q7hmj7y, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:27.342986Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F18FEE9FAC2 14. ??:0: ?? @ 0x7F18FEF3184F Trying to start YDB, gRPC: 13584, MsgBus: 26251 2025-05-29T15:31:30.444971Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890722969959572:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:30.445003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001db4/r3tmp/tmpvvTznj/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13584, node 1 2025-05-29T15:31:30.500323Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:30.500410Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890722969959542:2079] 1748532690444858 != 1748532690444861 2025-05-29T15:31:30.510048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:30.510060Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:30.510062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:30.510104Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26251 2025-05-29T15:31:30.546193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:30.546214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:30.547265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26251 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:30.575549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.580357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.598205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.620996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.631688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:30.745074Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890722969961176:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.745110Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.780196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.787097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.793994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.801439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.815406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.829557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.843957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.859337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890722969961828:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.859374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.859397Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890722969961833:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.860065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:30.863323Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890722969961835:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:30.956249Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890722969961886:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:31.033676Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890722969961902:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:31.033802Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDk1ZWQ3ZjctNTBiNjk0ZTctNjBlY2E4YzAtMjQyNWVjZGM=, ActorId: [1:7509890722969961173:2401], ActorState: ExecuteState, TraceId: 01jweapzxb14eqe7x8f1hzt79v, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:31.034494Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FD4C5975AC2 14. ??:0: ?? @ 0x7FD4C5A0784F >> ColumnStatistics::CountMinSketchStatistics >> TConsistentOpsWithReboots::DropIndexedTableAndForceDropSimultaneously >> TColumnShardTestReadWrite::WriteReadExoticTypes >> KqpInplaceUpdate::SingleRowIf+UseSink |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdate Test command err: Trying to start YDB, gRPC: 31806, MsgBus: 8763 2025-05-29T15:31:27.142890Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890706758817205:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:27.142933Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001db1/r3tmp/tmplQMh5v/pdisk_1.dat 2025-05-29T15:31:27.196115Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31806, node 1 2025-05-29T15:31:27.208355Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:27.208366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:27.208368Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:27.208401Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8763 2025-05-29T15:31:27.243899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:27.243935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:27.244959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8763 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:27.263139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.273377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.336293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.353910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.365203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.423960Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890706758818794:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.423986Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.462825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.470252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.483417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.496892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.504059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.511430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.525349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.582893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890706758819449:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.582905Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890706758819454:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.582912Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.583518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:27.587035Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890706758819456:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:27.674476Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890706758819507:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:27.763810Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890706758819523:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:27.763923Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWUxNzY5YzItYTQxMmM0NzgtMjI4NGY0ODMtYjljMGExZDc=, ActorId: [1:7509890706758818776:2401], ActorState: ExecuteState, TraceId: 01jweapwpy00k16cvwz6nddxn0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:27.764577Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FC4E6484AC2 14. ??:0: ?? @ 0x7FC4E651684F Trying to start YDB, gRPC: 15996, MsgBus: 21500 2025-05-29T15:31:31.037442Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890726188180393:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:31.037464Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001db1/r3tmp/tmpRsKqYS/pdisk_1.dat 2025-05-29T15:31:31.092987Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15996, node 1 2025-05-29T15:31:31.106145Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:31.106157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:31.106160Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:31.106218Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21500 2025-05-29T15:31:31.138674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:31.138716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:31.139866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21500 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:31.169555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.173424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.192094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.252141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.263011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.338540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890726188181993:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.338564Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.389074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.395769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.402834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.409857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.464487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.473085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.480579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.496859Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890726188182649:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.496888Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890726188182654:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.496900Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.497582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:31.500408Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890726188182656:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:31.578899Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890726188182707:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:31.679917Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890726188182723:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:31.680021Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Mzg5YTU4MTctMjU2OGFkYzAtYTUxNmFmYTgtNjgyZjkyMmY=, ActorId: [1:7509890726188181990:2401], ActorState: ExecuteState, TraceId: 01jweaq0h84yyec7zz0534hm5f, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:31.680628Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F4966ED8AC2 14. ??:0: ?? @ 0x7F4966F6A84F >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 >> KqpImmediateEffects::UnobservedUncommittedChangeConflict >> KqpBatchUpdate::MultiStatement >> TPersQueueTest::PartitionsMapping [FAIL] >> TPersQueueTest::MessageMetadata >> TSolomonReboots::CreateAlterSolomonWithReboots >> KqpEffects::InsertAbort_Params_Conflict-UseSink |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CreateIndexedTableAndForceDrop >> IntermediateDirsReboots::CreateSolomonWithIntermediateDirs >> KqpBatchDelete::Large_3 |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:30:23.571353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:23.571376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:23.571383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:23.571390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:23.571406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:23.571410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:23.571420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:23.571435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:23.571533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:23.571607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:23.586033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:23.586056Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:23.586141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:23.588810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:23.588848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:23.588892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:23.591935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:23.592038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:23.592165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:23.592392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:23.593117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:23.593159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:23.593395Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:23.593403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:23.593437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:23.593445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:23.593452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:23.593468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:23.594695Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:23.612138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:23.612209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.612263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:23.612310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:23.612321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.612905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:23.612930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:23.612962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.612969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 
ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:23.612974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:23.612978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:23.613357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.613367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:23.613371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:23.613672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.613681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:23.613688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:23.613695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:23.614284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:23.614608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:23.614639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:23.614847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:23.614873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:23.614882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:23.614949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: 
Ch ... shold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 
RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:34.516139Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:31:34.516174Z node 54 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 43us result status StatusSuccess 2025-05-29T15:31:34.516281Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:34.526567Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1171:2948] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-05-29T15:31:34.526596Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1140:2948] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-05-29T15:31:34.526623Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1171:2948] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1748532694511323 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 
28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1748532694511323 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1748532694511323 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:31:34.527288Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1171:2948] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-05-29T15:31:34.527310Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1140:2948] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> BasicStatistics::TwoTables |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> Yq_1::DescribeConnection [FAIL] >> Yq_1::DeleteQuery ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapTiering::LoadTtlSettings 2025-05-29 15:31:32,750 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-29 15:31:32,786 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 3532302 46.0M 46.0M 23.3M test_tool run_ut @/home/runner/.ya/build/build_root/ciyv/0026dc/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/chunk186/testing_out_stuff/test_tool.args 3533022 170M 172M 137M └─ ydb-core-kqp-ut-olap --trace-path-append /home/runner/.ya/build/build_root/ciyv/0026dc/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/chunk186/ytest.repor Test command err: Trying to start YDB, gRPC: 26080, MsgBus: 1569 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0026dc/r3tmp/tmpPkioas/pdisk_1.dat 2025-05-29T15:21:33.836986Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509888158758918495:2079] 1748532093669010 != 1748532093669013 2025-05-29T15:21:33.837038Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:21:33.837701Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26080, node 1 2025-05-29T15:21:33.884162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:21:33.884174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:21:33.884176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:21:33.884213Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:21:33.895178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:21:33.895221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:21:33.899189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1569 TClient is connected to server localhost:1569 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:21:34.056161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:21:34.067220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:21:34.390191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:21:34.414354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.414422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.414488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.414512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.414535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.414557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.414577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.414603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.414623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.414644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.414667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.414694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.416156Z node 1 :TX_TIERING INFO: log.cpp:784: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-05-29T15:21:34.419641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:21:34.419701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:21:34.419764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:21:34.419787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:21:34.419809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:21:34.419832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:21:34.419852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:21:34.419870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:21:34.419894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:21:34.419914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:21:34.419935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:21:34.419958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:21:34.420473Z node 1 :TX_TIERING INFO: log.cpp:784: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-05-29T15:21:34.424785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:21:34.424804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:21:34.424819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:21:34.424827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:21:34.424851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:21:34.424857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... 
.cpp:121;internal_queue=0;external_queue=0; 2025-05-29T15:31:32.179227Z node 1 :TX_COLUMNSHARD_ACTUALIZATION DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163053886553:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:364;event=StartTtl;rw_tasks_count=0; 2025-05-29T15:31:32.179227Z node 1 :TX_COLUMNSHARD_ACTUALIZATION DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:364;event=StartTtl;rw_tasks_count=0; 2025-05-29T15:31:32.179241Z node 1 :TX_COLUMNSHARD_BLOBS_TIER INFO: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163053886553:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;storage_id=__MEMORY;tablet_id=72075186224037889;fline=gc_info.h:24;event=extract_for_gc_skip;reason=no_data; 2025-05-29T15:31:32.179241Z node 1 :TX_COLUMNSHARD_BLOBS_TIER INFO: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;storage_id=__MEMORY;tablet_id=72075186224037888;fline=gc_info.h:24;event=extract_for_gc_skip;reason=no_data; 2025-05-29T15:31:32.179245Z node 1 :TX_COLUMNSHARD_BLOBS_TIER INFO: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;storage_id=__MEMORY;tablet_id=72075186224037888;fline=storage.cpp:39;event=start_gc_skipped;reason=cannot_extract; 2025-05-29T15:31:32.179245Z node 1 :TX_COLUMNSHARD_BLOBS_TIER INFO: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163053886553:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;storage_id=__MEMORY;tablet_id=72075186224037889;fline=storage.cpp:39;event=start_gc_skipped;reason=cannot_extract; 2025-05-29T15:31:32.179270Z node 1 :TX_COLUMNSHARD_ACTUALIZATION DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:335;event=StartTtl;external=0; 2025-05-29T15:31:32.179272Z node 1 :TX_COLUMNSHARD_ACTUALIZATION DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163053886553:2332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:335;event=StartTtl;external=0; 2025-05-29T15:31:32.179275Z node 1 :TX_COLUMNSHARD_ACTUALIZATION DEBUG: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:364;event=StartTtl;rw_tasks_count=0; 2025-05-29T15:31:32.179278Z node 1 :TX_COLUMNSHARD_ACTUALIZATION DEBUG: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163053886553:2332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:364;event=StartTtl;rw_tasks_count=0; 2025-05-29T15:31:32.179280Z node 1 :TX_COLUMNSHARD_BLOBS_TIER INFO: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;storage_id=__MEMORY;tablet_id=72075186224037888;fline=gc_info.h:24;event=extract_for_gc_skip;reason=no_data; 2025-05-29T15:31:32.179282Z node 1 
:TX_COLUMNSHARD_BLOBS_TIER INFO: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163053886553:2332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;storage_id=__MEMORY;tablet_id=72075186224037889;fline=gc_info.h:24;event=extract_for_gc_skip;reason=no_data; 2025-05-29T15:31:32.179282Z node 1 :TX_COLUMNSHARD_BLOBS_TIER INFO: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509888163053886524:2330];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;storage_id=__MEMORY;tablet_id=72075186224037888;fline=storage.cpp:39;event=start_gc_skipped;reason=cannot_extract; 2025-05-29T15:31:32.179285Z node 1 :TX_COLUMNSHARD_BLOBS_TIER INFO: log.cpp:784: tablet_id=72075186224037889;self_id=[1:7509888163053886553:2332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;storage_id=__MEMORY;tablet_id=72075186224037889;fline=storage.cpp:39;event=start_gc_skipped;reason=cannot_extract; 2025-05-29T15:31:32.179308Z node 1 :TX_COLUMNSHARD_ACTUALIZATION DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:335;event=StartTtl;external=0; 2025-05-29T15:31:32.179315Z node 1 :TX_COLUMNSHARD_ACTUALIZATION DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;queue=ttl;external_count=0;fline=tiering.cpp:214;event=ExtractTtlTasks;total_portions=0;tasks=0; 2025-05-29T15:31:32.179318Z node 1 :TX_COLUMNSHARD_ACTUALIZATION DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;queue=ttl;external_count=0;fline=scheme.cpp:67;rw_count=0; 2025-05-29T15:31:32.179321Z node 1 :TX_COLUMNSHARD_ACTUALIZATION DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;queue=ttl;external_count=0;fline=scheme.cpp:121;internal_queue=0;external_queue=0; 2025-05-29T15:31:32.179324Z node 1 :TX_COLUMNSHARD_ACTUALIZATION DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:364;event=StartTtl;rw_tasks_count=0; 2025-05-29T15:31:32.179330Z node 1 :TX_COLUMNSHARD_BLOBS_TIER INFO: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;storage_id=__MEMORY;tablet_id=72075186224037890;fline=gc_info.h:24;event=extract_for_gc_skip;reason=no_data; 2025-05-29T15:31:32.179332Z node 1 :TX_COLUMNSHARD_BLOBS_TIER INFO: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;storage_id=__MEMORY;tablet_id=72075186224037890;fline=storage.cpp:39;event=start_gc_skipped;reason=cannot_extract; 2025-05-29T15:31:32.179346Z node 1 :TX_COLUMNSHARD_ACTUALIZATION DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:335;event=StartTtl;external=0; 2025-05-29T15:31:32.179351Z node 1 :TX_COLUMNSHARD_ACTUALIZATION 
DEBUG: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:364;event=StartTtl;rw_tasks_count=0; 2025-05-29T15:31:32.179355Z node 1 :TX_COLUMNSHARD_BLOBS_TIER INFO: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;storage_id=__MEMORY;tablet_id=72075186224037890;fline=gc_info.h:24;event=extract_for_gc_skip;reason=no_data; 2025-05-29T15:31:32.179358Z node 1 :TX_COLUMNSHARD_BLOBS_TIER INFO: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509888163053886529:2331];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;storage_id=__MEMORY;tablet_id=72075186224037890;fline=storage.cpp:39;event=start_gc_skipped;reason=cannot_extract; 2025-05-29T15:31:32.747176Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890731444377577:9145], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:32.747320Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODM4ZDk4N2EtN2QwOGE0M2ItMjgzZWY0ZWQtYzBjYjQxZGY=, ActorId: [1:7509890731444377547:9188], ActorState: ExecuteState, TraceId: 01jweaq1r3d1468sp4agtt190n, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweaq1r13nb704y5jhfpc2me 2025-05-29T15:31:32.747775Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ODM4ZDk4N2EtN2QwOGE0M2ItMjgzZWY0ZWQtYzBjYjQxZGY=" tx_control { tx_id: "01jweaq1r13nb704y5jhfpc2me" commit_tx: true } query { yql_text: "DECLARE $objects AS List<Struct<componentId:Utf8,modificationId:Utf8,instant:Uint32>>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748532692 } } } } } ; 2025-05-29T15:31:32.747814Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/0026dc/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/chunk186/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/0026dc/ydb/core/kqp/ut/olap/test-results/unittest/testing_out_stuff/chunk186/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable >> TPersQueueTest::MessageMetadata [FAIL] >> TPersQueueTest::LOGBROKER_7820 |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] Test command err: 2025-05-29T15:31:34.718209Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:31:34.720571Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:31:34.720625Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:31:34.721162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:31:34.721200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:31:34.721224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:31:34.721237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784:
tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:31:34.721249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:31:34.721265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:31:34.721275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:31:34.721288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:31:34.721299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:31:34.721313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:31:34.721333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:31:34.721365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:31:34.726041Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:31:34.726090Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:31:34.726099Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:31:34.726125Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:31:34.726161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:31:34.726171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:31:34.726175Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:31:34.726182Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:31:34.726189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:31:34.726194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:31:34.726197Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:31:34.726209Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:31:34.726214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:31:34.726219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:31:34.726222Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:31:34.726229Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:31:34.726233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:31:34.726238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:31:34.726241Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:31:34.726251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:31:34.726257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:31:34.726259Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:31:34.726265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-05-29T15:31:34.726272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:31:34.726275Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:31:34.726291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:31:34.726296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:31:34.726299Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:31:34.726312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:31:34.726316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:31:34.726319Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:31:34.726328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:31:34.726332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:31:34.726335Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:31:34.726341Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:31:34.726358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:31:34.726366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:31:34.726369Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:31:34.726421Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-05-29T15:31:34.726428Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-05-29T15:31:34.726435Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute; ... us] request_id: binary;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:31:36.311344Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-05-29T15:31:36.311351Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:230;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-05-29T15:31:36.311359Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:250;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-05-29T15:31:36.311381Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:394:2406];bytes=2759;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-05-29T15:31:36.311390Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:31:36.311397Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: 
TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:31:36.311404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:31:36.311419Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:31:36.311426Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:31:36.311433Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:31:36.311437Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:410: Scan [1:395:2407] finished for tablet 9437184 2025-05-29T15:31:36.311468Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:394:2406];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1748532696310233,"name":"_full_task","f":1748532696310233,"d_finished":0,"c":0,"l":1748532696311441,"d":1208},"events":[{"name":"bootstrap","f":1748532696310255,"d_finished":244,"c":1,"l":1748532696310499,"d":244},{"a":1748532696311417,"name":"ack","f":1748532696311325,"d_finished":81,"c":1,"l":1748532696311406,"d":105},{"a":1748532696311416,"name":"processing","f":1748532696310594,"d_finished":489,"c":10,"l":1748532696311406,"d":514},{"name":"ProduceResults","f":1748532696310399,"d_finished":229,"c":13,"l":1748532696311435,"d":229},{"a":1748532696311435,"name":"Finish","f":1748532696311435,"d_finished":0,"c":0,"l":1748532696311441,"d":6},{"name":"task_result","f":1748532696310596,"d_finished":395,"c":9,"l":1748532696311307,"d":395}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:31:36.311474Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:394:2406];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-05-29T15:31:36.311497Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:394:2406];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1748532696310233,"name":"_full_task","f":1748532696310233,"d_finished":0,"c":0,"l":1748532696311477,"d":1244},"events":[{"name":"bootstrap","f":1748532696310255,"d_finished":244,"c":1,"l":1748532696310499,"d":244},{"a":1748532696311417,"name":"ack","f":1748532696311325,"d_finished":81,"c":1,"l":1748532696311406,"d":141},{"a":1748532696311416,"name":"processing","f":1748532696310594,"d_finished":489,"c":10,"l":1748532696311406,"d":550},{"name":"ProduceResults","f":1748532696310399,"d_finished":229,"c":13,"l":1748532696311435,"d":229},{"a":1748532696311435,"name":"Finish","f":1748532696311435,"d_finished":0,"c":0,"l":1748532696311477,"d":42},{"name":"task_result","f":1748532696310596,"d_finished":395,"c":9,"l":1748532696311307,"d":395}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:31:36.311505Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-05-29T15:31:36.310164Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7928;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7928;selected_rows=0; 2025-05-29T15:31:36.311508Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:190;event=scan_aborted;reason=unexpected on destructor; 2025-05-29T15:31:36.311529Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: TEST_STEP=11;SelfId=[1:395:2407];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink Test command err: Trying to start YDB, gRPC: 64624, MsgBus: 21905 2025-05-29T15:31:29.212860Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890716559342650:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:29.212877Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001da9/r3tmp/tmpXVwEZo/pdisk_1.dat TServer::EnableGrpc on GrpcPort 64624, node 1 2025-05-29T15:31:29.261233Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:29.268857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:29.268870Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:29.268872Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:29.268914Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21905 TClient is connected to server localhost:21905 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:31:29.313711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:29.313731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:29.314846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:29.343589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.347983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.365576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:31:29.381743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.391785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.502657Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890716559344241:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.502687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.542285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.549542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.562145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.569063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.623488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.632377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.646836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.661943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890716559344896:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.661964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.661992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890716559344901:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.662681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:29.666450Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890716559344903:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:29.735009Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890716559344954:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:29.804394Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890716559344970:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:29.804519Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmRhZTFmNTctMmUxNTMyNi0zMWQ4OWFiNC0xMjA4ZDhiMg==, ActorId: [1:7509890716559344223:2401], ActorState: ExecuteState, TraceId: 01jweapyqxe3vpgwsbek57wrvm, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:29.805150Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F0994DABAC2 14. ??:0: ?? @ 0x7F0994E3D84F Trying to start YDB, gRPC: 14844, MsgBus: 28458 2025-05-29T15:31:33.115036Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890733186420258:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:33.115343Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001da9/r3tmp/tmpp2nmuG/pdisk_1.dat 2025-05-29T15:31:33.167605Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14844, node 1 2025-05-29T15:31:33.181236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:33.181255Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:33.181257Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:33.181307Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28458 2025-05-29T15:31:33.216274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:33.216301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:33.217325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28458 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:33.246222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.250881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.269291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.286514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.298536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.445466Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890733186421849:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.445487Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.481187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.489244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.544120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.599040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.608500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.622753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.636407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.652545Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890733186422505:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.652569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890733186422510:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.652576Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.653138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:33.656421Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890733186422512:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:33.710821Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890733186422563:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:33.796887Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890733186422579:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:33.796982Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmI5ZTE2MGYtOTZlZTJiZjQtYWUwMTcyMjAtYWNkYzU2NjE=, ActorId: [1:7509890733186421831:2401], ActorState: ExecuteState, TraceId: 01jweaq2mm4aarzxn5d5tdgm2y, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:33.797657Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F8A799E7AC2 14. ??:0: ?? @ 0x7F8A79A7984F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Params Test command err: Trying to start YDB, gRPC: 27923, MsgBus: 30793 2025-05-29T15:31:29.499896Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890718021440955:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:29.500103Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001da4/r3tmp/tmpZUwXtK/pdisk_1.dat 2025-05-29T15:31:29.547383Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890718021440923:2079] 1748532689499752 != 1748532689499755 2025-05-29T15:31:29.548577Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27923, node 1 2025-05-29T15:31:29.562816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:29.562826Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:29.562828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:29.562879Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30793 2025-05-29T15:31:29.601309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:29.601336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:29.602422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30793 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:29.631142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.639696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.701026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.718048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.730943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.877888Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890718021442557:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.877910Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.914672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.921031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.975385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.982144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.996561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.010682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.025013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.040000Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890722316410506:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.040031Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.040050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890722316410511:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.040793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:30.044600Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890722316410513:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:30.114986Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890722316410564:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:30.225301Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890722316410580:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:30.225398Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTdlZTcyYTAtZmM1NWJkMzMtMWJkYzM1ZjQtODk1MjViMDE=, ActorId: [1:7509890718021442539:2401], ActorState: ExecuteState, TraceId: 01jweapz3q356e9qnxtt7h67m6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:30.226006Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F6D561CBAC2 14. ??:0: ?? @ 0x7F6D5625D84F Trying to start YDB, gRPC: 11877, MsgBus: 29692 2025-05-29T15:31:33.255526Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890735900188610:2068];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:33.255695Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001da4/r3tmp/tmpdx1PDz/pdisk_1.dat 2025-05-29T15:31:33.303272Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11877, node 1 2025-05-29T15:31:33.318295Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:33.318310Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:33.318312Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:33.318360Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29692 TClient is connected to server localhost:29692 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:31:33.357034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:33.357065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:33.358055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:33.382660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.386966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.407581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.429986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.442188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.557675Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890735900190207:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.557701Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.592300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.600734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.608485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.622482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.677874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.686320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.700019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.715526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890735900190865:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.715545Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.715588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890735900190870:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.716304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:33.719427Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890735900190872:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:33.800217Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890735900190923:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:33.881230Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890735900190939:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:33.881354Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjhkY2Q2OWEtZjliZTA0OGEtNThhZTY0MjQtN2EzNTEyMTU=, ActorId: [1:7509890735900190204:2401], ActorState: ExecuteState, TraceId: 01jweaq2pkbr9rb7kwqnv8n5z5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:33.882054Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FFA9B5AFAC2 14. ??:0: ?? @ 0x7FFA9B64184F >> BasicStatistics::NotFullStatisticsDatashard ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::NotIdempotent Test command err: Trying to start YDB, gRPC: 5881, MsgBus: 3266 2025-05-29T15:31:33.348977Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890735365470010:2059];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:33.348991Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00278f/r3tmp/tmp7aelUQ/pdisk_1.dat 2025-05-29T15:31:33.417921Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:33.418905Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890735365469993:2079] 1748532693348815 != 1748532693348818 TServer::EnableGrpc on GrpcPort 5881, node 1 2025-05-29T15:31:33.455049Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:33.455425Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:33.455433Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:33.455479Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3266 2025-05-29T15:31:33.484975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:33.485367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:33.486626Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3266 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:33.523119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.530145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.594265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.610806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.624369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.701790Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890735365471650:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.701825Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.769942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.776934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.790218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.797090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.804461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.818422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.832424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.851409Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890735365472304:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.851433Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.851438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890735365472309:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.852562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:33.859736Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890735365472311:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:33.945461Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890735365472362:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:34.076713Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890735365472378:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:34.076822Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDExYWZlMTAtMzJiZDAyMjAtZTA4ZjMyZTktZGY4OWViMzU=, ActorId: [1:7509890735365471632:2401], ActorState: ExecuteState, TraceId: 01jweaq2ttcc2e1msh4sckzcfp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:34.078199Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7FEE77AFCAC2 14. ??:0: ?? @ 0x7FEE77B8E84F >> IntermediateDirsReboots::CreateSolomonWithIntermediateDirsForceDrop |75.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateKesusWithIntermediateDirsForceDrop |75.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink Test command err: Trying to start YDB, gRPC: 27977, MsgBus: 28709 2025-05-29T15:31:29.538079Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890718011269848:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:29.538098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001da3/r3tmp/tmp4eOv6i/pdisk_1.dat 2025-05-29T15:31:29.591304Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27977, node 1 2025-05-29T15:31:29.606031Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:29.606043Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:29.606045Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:29.606098Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28709 2025-05-29T15:31:29.639634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:29.639665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:29.640804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28709 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:29.669080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.673831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.690261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.710466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.721681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:29.830130Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890718011271437:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.830158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:29.883910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.890291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:29.945038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.000331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.010638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.024927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.038563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:30.054828Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890722306239389:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.054859Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.054861Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890722306239394:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.055431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:30.058094Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890722306239396:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:30.150988Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890722306239447:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:30.217277Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890722306239463:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:30.217360Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmVkMjgyMTgtNTUxYjNlZWEtN2Y2YmZmZS00OGM4OGE3MA==, ActorId: [1:7509890718011271434:2401], ActorState: ExecuteState, TraceId: 01jweapz464k9b10byn915kwy1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:30.217975Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FE84CB76AC2 14. ??:0: ?? @ 0x7FE84CC0884F Trying to start YDB, gRPC: 3233, MsgBus: 20347 2025-05-29T15:31:33.426867Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890735070853788:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:33.426883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001da3/r3tmp/tmpbTuQCR/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3233, node 1 2025-05-29T15:31:33.485686Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:33.490820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:33.490833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:33.490836Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:33.490890Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20347 2025-05-29T15:31:33.527473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:33.527535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:33.528570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20347 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:33.544276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.554937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.619583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.639523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.651583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:33.775807Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890735070855375:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.775833Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.819239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.826246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.839696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.853774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.867700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.882070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.896022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:33.911646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890735070856027:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.911662Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890735070856032:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.911673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:33.912497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:33.915496Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890735070856034:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:34.014934Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890739365823381:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:34.110037Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890739365823397:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:34.110194Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTY2NTI4NjMtZmJmNmVhNDItYmZiNjQ5MDYtZDBjMmY3NmI=, ActorId: [1:7509890735070855372:2401], ActorState: ExecuteState, TraceId: 01jweaq2wq030ewt05ma1f6daz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:34.110868Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FC89AC80AC2 14. ??:0: ?? @ 0x7FC89AD1284F >> TPersQueueTest::LOGBROKER_7820 [FAIL] >> TPersQueueTest::InflightLimit >> Yq_1::DeleteQuery [FAIL] >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink >> HttpRequest::Status |75.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateWithIntermediateDirsForceDrop >> IntermediateDirsReboots::CreateDirWithIntermediateDirs >> KqpEffects::AlterDuringUpsertTransaction-UseSink |75.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DeleteOn Test command err: Trying to start YDB, gRPC: 12668, MsgBus: 13923 2025-05-29T15:31:34.341889Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890740573746349:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:34.341916Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00278d/r3tmp/tmpz72NO0/pdisk_1.dat 2025-05-29T15:31:34.383762Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890740573746327:2079] 1748532694341751 != 1748532694341754 2025-05-29T15:31:34.386268Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12668, node 1 2025-05-29T15:31:34.395485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:34.395498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:34.395500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:34.395541Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13923 TClient is connected to server localhost:13923 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:31:34.443392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:34.443418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:34.444517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:34.467748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:34.472944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:34.536568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:34.554991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:34.566144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:34.676666Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890740573747961:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:34.676703Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:34.710669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:34.717557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:34.728265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:34.783118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:34.838490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:34.847093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:34.854111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:34.869933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890740573748620:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:34.869956Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:34.869961Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890740573748625:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:34.870599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:34.874264Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890740573748627:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:34.937888Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890740573748678:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:35.006256Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890740573748694:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:35.006394Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MzM3NzE0MGYtODMzZjNiMDAtNzhlZWQ0NTEtM2E3NGRiNjg=, ActorId: [1:7509890740573747943:2401], ActorState: ExecuteState, TraceId: 01jweaq3tn1fzwbw2j3r713gtb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:35.007119Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7FA9885D0AC2 14. ??:0: ?? @ 0x7FA98866284F |75.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |75.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |75.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TPersQueueTest::InflightLimit [FAIL] >> KqpOlapAggregations::Aggregation_Avg_Null >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] >> KqpOlapJson::CompactionVariants ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowIf+UseSink Test command err: Trying to start YDB, gRPC: 31844, MsgBus: 61576 2025-05-29T15:31:30.891074Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890723018658054:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:30.891098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d9f/r3tmp/tmpr5PbsH/pdisk_1.dat 2025-05-29T15:31:30.937706Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31844, node 1 2025-05-29T15:31:30.956141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:30.956153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:30.956155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:30.956198Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:61576 2025-05-29T15:31:30.991871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:30.991898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:30.992952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61576 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:31.019279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.030102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.093081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.111074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.121852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.206587Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890727313626940:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.206620Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.242611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.249379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.256109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.263333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.318372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.326705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.340746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.356034Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890727313627594:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.356068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890727313627599:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.356067Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.356802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:31.360372Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890727313627601:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:31.454473Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890727313627652:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:31.530558Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890727313627668:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:31.530654Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjBjMDM5OTgtNTQ3OWFjNDItN2UxM2Q1ZmYtMTBlYzc3NzI=, ActorId: [1:7509890727313626922:2401], ActorState: ExecuteState, TraceId: 01jweaq0cvc7dv04pbzyk2kky1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:31.531326Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FE0FC2ACAC2 14. ??:0: ?? @ 0x7FE0FC33E84F Trying to start YDB, gRPC: 8233, MsgBus: 1153 2025-05-29T15:31:34.832746Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890739069425799:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:34.832769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d9f/r3tmp/tmpimN3a0/pdisk_1.dat 2025-05-29T15:31:34.887558Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8233, node 1 2025-05-29T15:31:34.902153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:34.902165Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:34.902166Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:34.902202Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1153 2025-05-29T15:31:34.934064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:34.934090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:34.935163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1153 WaitRootIsUp 'Root'... 
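[Editor's note on the failure mode above, repeated in the near-identical dumps that follow: KQP compilation aborts with INTERNAL_ERROR ("yql/essentials/ast/yql_expr.h:1874: index out of range"), and the harness helper named in the trace, NKikimr::NKqp::AssertSuccessResult (ydb/core/kqp/ut/common/kqp_ut_common.h:375), turns the failed NYdb::TStatus into a VERIFY panic. A minimal standalone sketch of that check, reconstructed only from the assertion text in the trace; TStatus below is a stand-in, not the real NYdb type:

#include <cstdio>
#include <cstdlib>
#include <string>

struct TStatus {                          // stand-in for NYdb::TStatus
    bool Success;
    std::string Issues;
    bool IsSuccess() const { return Success; }
};

void AssertSuccessResult(const TStatus& result) {
    if (!result.IsSuccess()) {
        // Matches the log line: "assertion failed ... (result.IsSuccess())".
        std::fprintf(stderr, "assertion failed: (result.IsSuccess())\n%s\n",
                     result.Issues.c_str());
        std::abort();
    }
}

int main() {
    AssertSuccessResult(TStatus{false, ": Fatal: Execution, code: 1060"});
    return 0;
}

Because the panic is raised from a non-unittest (thread pool) thread — "requirement UnittestThread failed" — the registar aborts the whole test binary instead of failing one test case, which is why every chunk below reports the same trace.]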
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:34.963070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:34.975825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:34.994634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.015905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.027835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.148660Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890743364394691:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.148681Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.181741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.188717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.243677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.253306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.267406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.281854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.295358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.311704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890743364395346:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.311739Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.311749Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890743364395351:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.312554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:35.315231Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890743364395353:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:35.376417Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890743364395404:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:35.464190Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890743364395420:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:35.464285Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NGI2ZDRhMjQtNzA1MTFmYS04NmZmN2I2My02YTUxZWZjNQ==, ActorId: [1:7509890743364394673:2401], ActorState: ExecuteState, TraceId: 01jweaq48f4tc27h829tgrdnjh, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:35.464911Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F0C35E74AC2 14. ??:0: ?? @ 0x7F0C35F0684F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1WRR2 Test command err: Trying to start YDB, gRPC: 29814, MsgBus: 22841 2025-05-29T15:31:31.141064Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890726431797015:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:31.141080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d97/r3tmp/tmpxiEclS/pdisk_1.dat 2025-05-29T15:31:31.188783Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29814, node 1 2025-05-29T15:31:31.203888Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:31.203903Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:31.203905Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:31.203946Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22841 TClient is connected to server localhost:22841 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:31:31.242168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:31.242190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:31.243202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:31.269943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.282486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.298038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.318648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.329679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.438823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890726431798600:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.438864Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.479967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.534915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.543042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.549578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.556979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.564254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.578457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.594142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890726431799253:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.594160Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890726431799258:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.594165Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.594858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:31.598119Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890726431799260:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:31.691900Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890726431799311:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:31.765070Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890726431799327:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:31.765165Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjQ4NjI2NTctZDRiYjRiYWItN2NiYzcxZi03YzljOGE4Nw==, ActorId: [1:7509890726431798597:2401], ActorState: ExecuteState, TraceId: 01jweaq0m98a6v2tmk4m8vcjxb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:31.765721Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FF5F453FAC2 14. ??:0: ?? @ 0x7FF5F45D184F Trying to start YDB, gRPC: 63128, MsgBus: 12715 2025-05-29T15:31:34.926571Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890738759665652:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:34.926597Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d97/r3tmp/tmpnGVu4W/pdisk_1.dat 2025-05-29T15:31:34.981765Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63128, node 1 2025-05-29T15:31:34.995403Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:34.995414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:34.995416Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:34.995448Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12715 2025-05-29T15:31:35.027625Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:35.027654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:35.028756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12715 WaitRootIsUp 'Root'... 
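[Editor's note on the warning sequence repeated in every run above — TPoolFetcherActor NOT_FOUND, TPoolCreatorActor create, schemereq "path exist, request accepts it", then "completed, doublechecking" retry: this is the default resource pool being bootstrapped lazily and concurrently. The create is accepted even when the path already exists, and the creator re-checks afterwards. A rough sketch of that ensure-exists pattern; EnsureDefaultPool, FetchPool, and CreatePool are hypothetical stand-ins, not YDB APIs:

#include <stdexcept>
#include <string>

enum class EFetch { Ok, NotFound };

static bool PoolExists = false;  // stands in for schemeshard state

// Hypothetical stand-ins for the fetch/create round trips.
EFetch FetchPool(const std::string&) { return PoolExists ? EFetch::Ok : EFetch::NotFound; }
void CreatePool(const std::string&) { PoolExists = true; }  // duplicate create is accepted

void EnsureDefaultPool(const std::string& path) {
    if (FetchPool(path) == EFetch::NotFound) {  // "Failed to fetch pool info, NOT_FOUND"
        // Many sessions race here; the schemeshard accepts a duplicate create
        // ("path exist, request accepts it"), so the call is idempotent.
        CreatePool(path);
        // "Scheduled retry ... completed, doublechecking": re-fetch to confirm
        // the pool is actually visible before serving queries.
        if (FetchPool(path) == EFetch::NotFound)
            throw std::runtime_error("default pool still missing after create");
    }
}

int main() {
    EnsureDefaultPool("/Root/.metadata/workload_manager/pools/default");
    return 0;
}

These WARN/ERROR lines are therefore expected bootstrap noise; the fatal failures in these runs come from the separate yql_expr.h compile crash, not from the pool race.]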
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:35.058020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.064835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.081330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.103324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.115175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.235757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890743054634537:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.235779Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.275205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.282162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.295108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.302041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.316464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.331036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.344667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.359839Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890743054635189:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.359856Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.359899Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890743054635194:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.360595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:35.364342Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890743054635196:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:35.416715Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890743054635247:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:35.515675Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890743054635263:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:35.515776Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YzIzYjE4MjktODQyNjBkMGYtZDBmZjJlYTYtYTk3ZjUxNzY=, ActorId: [1:7509890743054634519:2401], ActorState: ExecuteState, TraceId: 01jweaq49z5evwzh02kd8zxj6a, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:35.516486Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F5E581A2AC2 14. ??:0: ?? @ 0x7F5E5823484F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:31:36.468819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:36.468840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:36.468844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:36.468848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:36.468860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:36.468862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:36.468869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:36.468880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:36.468968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:36.469033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:36.477693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:31:36.477715Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:36.479859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:36.479951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:36.479989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:36.481715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:36.481882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:36.481964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:36.482012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:36.482413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:36.482457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:36.482664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:36.482671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:36.482688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:36.482693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:36.482698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:36.482721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:31:36.483820Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:31:36.496244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:36.496328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at 
schemeshard: 72057594046678944 2025-05-29T15:31:36.496389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:36.496425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:36.496432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:36.497133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:36.497156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:36.497216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:36.497223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:36.497228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:36.497232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:36.497585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:36.497593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:36.497598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:36.497889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:36.497897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:36.497902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:36.497907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:36.498359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:36.498702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 
1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:36.498750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:36.498908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:36.498927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:36.498933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:36.498984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:31:36.498989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:36.499013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:31:36.499022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:31:36.499357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:36.499364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:36.499403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
057594046678944 2025-05-29T15:31:38.460816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:31:38.460827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:31:38.460843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:31:38.460854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:31:38.460866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:31:38.462000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:31:38.462024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:31:38.462039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:31:38.462068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-05-29T15:31:38.462077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 103:0 ProgressState 2025-05-29T15:31:38.462094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:31:38.462097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:31:38.462102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#103:0 progress is 1/1 2025-05-29T15:31:38.462104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:31:38.462108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-05-29T15:31:38.462126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2802:4003] message: TxId: 103 2025-05-29T15:31:38.462134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-05-29T15:31:38.462147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 103:0 2025-05-29T15:31:38.462153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 103:0 2025-05-29T15:31:38.462400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-05-29T15:31:38.463034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 
2025-05-29T15:31:38.463045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:4020:5151] TestWaitNotification: OK eventTxId 103 2025-05-29T15:31:38.463166Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:38.463224Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 65us result status StatusSuccess 2025-05-29T15:31:38.463329Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "saved_at" Type: "Datetime" TypeId: 49 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 4 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 5 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Disabled { } Version: 3 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 
72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2025-05-29T15:31:38.463864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterSchema { AlterColumns { Name: "data" DefaultValue: "10" } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:38.463898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: alter_table.cpp:282: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 104:0, at schemeshard: 72057594046678944 2025-05-29T15:31:38.469131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: schema update error: sparsed columns are disabled, at schemeshard: 72057594046678944 2025-05-29T15:31:38.470722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "schema update error: sparsed columns are disabled" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:38.470772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusSchemeError, reason: schema update error: sparsed columns are disabled, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable 
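[Editor's note: unlike the KQP crashes above, this schemeshard suite finishes [GOOD] — the StatusSchemeError ("sparsed columns are disabled") for txId 104 appears to be the asserted outcome, i.e. adding a DefaultValue to a column-table column must be refused while the sparse-column feature is off. A toy model of that expectation; ProposeAlterColumnTable and the enum below are illustrative, not schemeshard code:

#include <cassert>
#include <string>

enum class EStatus { StatusAccepted, StatusSchemeError };

struct TProposeResult {
    EStatus Status;
    std::string Reason;
};

// Toy model of the propose path exercised by txId 104 above.
TProposeResult ProposeAlterColumnTable(bool sparsedColumnsEnabled) {
    if (!sparsedColumnsEnabled) {
        // Matches the audit record: a DefaultValue on a column-table column
        // is refused while the sparse-column feature is disabled.
        return {EStatus::StatusSchemeError,
                "schema update error: sparsed columns are disabled"};
    }
    return {EStatus::StatusAccepted, ""};
}

int main() {
    // The [GOOD] verdict above implies the test asserts the rejection:
    TProposeResult r = ProposeAlterColumnTable(/*sparsedColumnsEnabled=*/false);
    assert(r.Status == EStatus::StatusSchemeError);
    return 0;
}
]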
TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-05-29T15:31:38.470856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-05-29T15:31:38.470861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-05-29T15:31:38.470954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-05-29T15:31:38.470975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-29T15:31:38.470978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:4358:5489] TestWaitNotification: OK eventTxId 104 >> TColumnShardTestReadWrite::WriteReadNoCompression ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::MultiStatement Test command err: Trying to start YDB, gRPC: 20318, MsgBus: 30227 2025-05-29T15:31:35.307317Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890741174739389:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:35.307523Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00278c/r3tmp/tmpegHn5y/pdisk_1.dat 2025-05-29T15:31:35.370056Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:35.370446Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890741174739368:2079] 1748532695307111 != 1748532695307114 TServer::EnableGrpc on GrpcPort 20318, node 1 2025-05-29T15:31:35.385169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:35.385182Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:35.385184Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:35.385223Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30227 TClient is connected to server localhost:30227 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:35.448821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:35.448861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:35.449850Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:35.449945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.454086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.516226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.537707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.547930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.666488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890741174741002:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.666508Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.699227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.707773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.762754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.771328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.785517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.799684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.813873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.829547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890741174741655:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.829561Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890741174741660:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.829571Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.830412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:35.833592Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890741174741662:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:35.925402Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890741174741713:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:36.009704Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890741174741729:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:36.009807Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NGFiMjE0NmEtM2Y1OThmZDQtZWNlMjlkMDItN2ExYjYyNTg=, ActorId: [1:7509890741174740984:2401], ActorState: ExecuteState, TraceId: 01jweaq4rn8atp6f6zcsk3yfr5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:36.010537Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7F7AB2ABCAC2 14. ??:0: ?? @ 0x7F7AB2B4E84F |75.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_3 Test command err: Trying to start YDB, gRPC: 24523, MsgBus: 16262 2025-05-29T15:31:35.435888Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890742100348505:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:35.435954Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00278b/r3tmp/tmpobaUor/pdisk_1.dat 2025-05-29T15:31:35.494510Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890742100348483:2079] 1748532695435706 != 1748532695435709 2025-05-29T15:31:35.495852Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24523, node 1 2025-05-29T15:31:35.508046Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:35.508055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:35.508056Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:35.508090Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16262 2025-05-29T15:31:35.538463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:35.538482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:35.539499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16262 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:35.572397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.579563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.641728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.661026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.670844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.719853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890742100350114:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.719884Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.749638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.757016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.764141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.778432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.834003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.840775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.848736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.864009Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890742100350769:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.864028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.864043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890742100350774:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.864781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:35.868509Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890742100350776:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:35.959430Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890742100350827:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:36.034785Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890742100350843:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:36.034917Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZGM4MzM0MWQtMTNjMjg4MTUtZjEwMmE4MGUtODc1Zjg5YzY=, ActorId: [1:7509890742100350097:2401], ActorState: ExecuteState, TraceId: 01jweaq4sqdx16c1qzsdqwv8bb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:36.035627Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7FCB4FF05AC2 14. ??:0: ?? @ 0x7FCB4FF9784F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UnobservedUncommittedChangeConflict Test command err: Trying to start YDB, gRPC: 16778, MsgBus: 29118 2025-05-29T15:31:31.199620Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890725205519950:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:31.199643Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d96/r3tmp/tmpR40iha/pdisk_1.dat 2025-05-29T15:31:31.260801Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16778, node 1 2025-05-29T15:31:31.271749Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:31.271761Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:31.271763Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:31.271802Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29118 2025-05-29T15:31:31.300644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:31.300666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:31.301716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29118 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:31.318572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.329635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.394560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.453586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.463883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:31.494042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890725205521557:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.494070Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.538858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.545536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.557565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.571343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.585370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.599709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.613755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.629331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890725205522210:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.629355Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890725205522215:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.629359Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.630044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:31.633243Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890725205522217:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:31.710605Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890725205522268:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:31.799304Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890725205522284:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:31.799432Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTZhNGYwNzItZDllMzdlZi02YTRjZWIwYS1lMTI1ZGQ5Ng==, ActorId: [1:7509890725205521554:2401], ActorState: ExecuteState, TraceId: 01jweaq0nd70p97jxzpthp3e45, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:31.800126Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F086A57AAC2 14. ??:0: ?? @ 0x7F086A60C84F Trying to start YDB, gRPC: 10274, MsgBus: 9357 2025-05-29T15:31:35.159725Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890744071489243:2218];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:35.159841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d96/r3tmp/tmpjyFFk6/pdisk_1.dat 2025-05-29T15:31:35.212115Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10274, node 1 2025-05-29T15:31:35.228435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:35.228451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:35.228453Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:35.228504Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9357 2025-05-29T15:31:35.260296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:35.260326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:35.261400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9357 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:35.293971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.300395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.318209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.337613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.349563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.469143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890744071490677:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.469177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.508996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.516604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.526171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.533071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.587521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.595978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.602684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.619145Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890744071491332:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.619170Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.619183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890744071491337:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:35.619689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:35.622775Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890744071491339:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:35.687389Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890744071491390:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:35.761649Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890744071491406:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:35.761756Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjIwYjhlZTgtNWZkYzQ4Y2YtZDI4ZTNiNWQtM2UxZmQxOWU=, ActorId: [1:7509890744071490674:2401], ActorState: ExecuteState, TraceId: 01jweaq4j29x2sxt6ht3jap5mv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:35.762509Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F883CFA4AC2 14. ??:0: ?? @ 0x7F883D03684F |75.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest |75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict-UseSink Test command err: Trying to start YDB, gRPC: 5157, MsgBus: 62536 2025-05-29T15:31:27.522456Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890709072725693:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:27.522476Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001daa/r3tmp/tmpMHmtGN/pdisk_1.dat 2025-05-29T15:31:27.576235Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5157, node 1 2025-05-29T15:31:27.585714Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:27.585724Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:27.585725Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:27.585774Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62536 2025-05-29T15:31:27.623516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:27.623544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:62536 2025-05-29T15:31:27.624592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:27.652938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.656263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.717440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.734251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.745629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:27.833490Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890709072727285:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.833517Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.868886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.876531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.889649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.903389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.917285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.931738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.945644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:27.961946Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890709072727942:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.961977Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.961983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890709072727947:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:27.962656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:27.965158Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890709072727949:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:28.065798Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890713367695296:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:28.187094Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890713367695312:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:28.187233Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmQ1MTdhNGQtNTNhM2Y1ZWMtZDg4YjZhYzctM2Y4MGMwY2E=, ActorId: [1:7509890709072727267:2401], ActorState: ExecuteState, TraceId: 01jweapx2s58frkh1ywk5x7sqm, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:28.187954Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F9955877AC2 14. ??:0: ?? @ 0x7F995590984F Trying to start YDB, gRPC: 8724, MsgBus: 65012 2025-05-29T15:31:31.361939Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890727977687269:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:31.361962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001daa/r3tmp/tmpTj65NK/pdisk_1.dat 2025-05-29T15:31:31.408544Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8724, node 1 2025-05-29T15:31:31.420899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:31.420911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:31.420913Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 202 ... @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F12F0D76AC2 14. ??:0: ?? 
@ 0x7F12F0E0884F Trying to start YDB, gRPC: 2916, MsgBus: 1789 2025-05-29T15:31:35.335774Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890741022635722:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:35.335887Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001daa/r3tmp/tmpQNshgx/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2916, node 1 2025-05-29T15:31:35.396168Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:35.396259Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890741022635688:2079] 1748532695335640 != 1748532695335643 2025-05-29T15:31:35.401788Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:35.401800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:35.401802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:35.401849Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1789 2025-05-29T15:31:35.437397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:35.437440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:1789 2025-05-29T15:31:35.438400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:35.466208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:35.470674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:31:35.486937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:35.545603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:35.557426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:35.658156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890741022637329:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:35.658180Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:35.699593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:31:35.707302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:31:35.762317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:31:35.771518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:31:35.785751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:31:35.800211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:31:35.813912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:31:35.829656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890741022637983:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:35.829680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:35.829690Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890741022637988:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:35.830363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:31:35.833342Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890741022637990:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:31:35.900804Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890741022638041:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:31:36.024571Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890741022638057:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:31:36.024671Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDVhYTBmMzEtZjY4NGI3MC1jNzIzOTRjYi1hZThlNGMwNA==, ActorId: [1:7509890741022637326:2401], ActorState: ExecuteState, TraceId: 01jweaq4rn61r8dtzqb34r3tg2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:31:36.025359Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36
RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95
1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C
8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519
10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C
13. ??:0: ?? @ 0x7F71F28DBAC2
14. ??:0: ?? @ 0x7F71F296D84F
>> KqpOlapAggregations::Aggregation_Avg_Null [GOOD]
>> KqpOlapAggregations::Aggregation_Avg_NullGroupBy
>> TConsistentOpsWithReboots::CopyWithData
|75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest
>> BasicStatistics::TwoNodes
|75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest
|75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest
>> IntermediateDirsReboots::CreateTableWithIntermediateDirs
>> BasicStatistics::Serverless
>> BasicStatistics::TwoTables [FAIL]
|75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest
|75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest
|75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest
>> BasicStatistics::TwoServerlessDbs [FAIL]
|75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::DeleteQuery [FAIL]
Test command err:
2025-05-29T15:31:33.794579Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890733118290185:2075];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:31:33.794915Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
E0529 15:31:33.836413493 59865 dns_resolver_ares.cc:452] no server name supplied in dns URI
E0529 15:31:33.836460884 59865 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:///
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0008ae/r3tmp/tmpzMjNYK/pdisk_1.dat
2025-05-29T15:31:34.134162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:31:34.134193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:31:34.136008Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 3971, node 1
TClient is connected to server localhost:27214
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:31:34.206413Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:31:34.206835Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:31:34.206845Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:31:34.206848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:31:34.206906Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
2025-05-29T15:31:34.449515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:34.840251Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/quotas". Create session OK
2025-05-29T15:31:34.840272Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/quotas"
2025-05-29T15:31:34.840274Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/quotas"
2025-05-29T15:31:34.840311Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/connections". Create session OK
2025-05-29T15:31:34.840328Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/connections"
2025-05-29T15:31:34.840330Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/connections"
2025-05-29T15:31:34.840683Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/jobs". Create session OK
2025-05-29T15:31:34.840696Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/jobs"
2025-05-29T15:31:34.840697Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/jobs"
2025-05-29T15:31:34.840701Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/nodes". Create session OK
2025-05-29T15:31:34.840704Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/nodes"
2025-05-29T15:31:34.840706Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/nodes"
2025-05-29T15:31:34.840825Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/tenants".
Create session OK
2025-05-29T15:31:34.840835Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/tenants"
2025-05-29T15:31:34.840836Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/compute_databases". Create session OK
2025-05-29T15:31:34.840837Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/tenants"
2025-05-29T15:31:34.840838Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/compute_databases"
2025-05-29T15:31:34.840839Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/compute_databases"
2025-05-29T15:31:34.840895Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/bindings". Create session OK
2025-05-29T15:31:34.840902Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/bindings"
2025-05-29T15:31:34.840904Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/bindings"
2025-05-29T15:31:34.840914Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/result_sets". Create session OK
2025-05-29T15:31:34.840916Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/result_sets"
2025-05-29T15:31:34.840917Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/result_sets"
2025-05-29T15:31:34.840957Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/idempotency_keys". Create session OK
2025-05-29T15:31:34.840965Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/idempotency_keys"
2025-05-29T15:31:34.840966Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/idempotency_keys"
2025-05-29T15:31:34.841011Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/pending_small". Create session OK
2025-05-29T15:31:34.841019Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/pending_small"
2025-05-29T15:31:34.841020Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/pending_small"
2025-05-29T15:31:34.841021Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/tenant_acks". Create session OK
2025-05-29T15:31:34.841023Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/tenant_acks"
2025-05-29T15:31:34.841024Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/tenant_acks"
2025-05-29T15:31:34.841081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480
2025-05-29T15:31:34.841100Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/queries". Create session OK
2025-05-29T15:31:34.841101Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "Root/yq/mappings".
Create session OK
2025-05-29T15:31:34.841102Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/mappings"
2025-05-29T15:31:34.841102Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "Root/yq/queries"
2025-05-29T15:31:34.841103Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/mappings"
2025-05-29T15:31:34.841104Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "Root/yq/queries"
2025-05-29T15:31:34.841692Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created directory "Root/yq"
2025-05-29T15:31:34.841701Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create directory "Root/yq":
2025-05-29T15:31:34.842786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
2025-05-29T15:31:34.843078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
2025-05-29T15:31:34.843322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:31:34.843537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
2025-05-29T15:31:34.843715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:31:34.843919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:31:34.844083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480
2025-05-29T15:31:34.844187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890737413258687:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:34.844193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890737413258703:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:34.844206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890737413258704:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:34.844218Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:34.844276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself ... FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225109Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225113Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225124Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225130Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225133Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225144Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225151Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225153Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225157Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225167Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225173Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225176Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225180Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225191Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225198Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225202Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225205Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225218Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225225Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225227Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225236Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225243Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225247Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225250Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225256Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225261Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225266Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225272Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225279Z node 4 :FQ_QUOTA_SERVICE ERROR: 
quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225284Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225290Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225301Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225308Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225311Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225314Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225327Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225333Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225336Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225347Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225354Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225357Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225369Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225376Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225379Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225388Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225395Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225398Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225401Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225410Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225418Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225420Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225424Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225435Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225442Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225445Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225450Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225455Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225461Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225468Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225475Z node 4 :FQ_QUOTA_SERVICE ERROR: 
quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225481Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225487Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225491Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225495Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225502Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225513Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225520Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225523Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225529Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225538Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225541Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225551Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225558Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225561Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225569Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225575Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225578Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225581Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225588Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225594Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225606Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225612Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225615Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225627Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225634Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225637Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225649Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225665Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225669Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225681Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-05-29T15:31:37.225691Z node 4 :FQ_QUOTA_SERVICE ERROR: 
quota_manager.cpp:648: SyncQuota finished with error:
2025-05-29T15:31:37.225694Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error:
assertion failed at ydb/services/fq/ut_integration/fq_ut.cpp:57, TString (anonymous namespace)::CreateNewHistoryAndWaitFinish(const TString &, NYdb::NFq::TClient &, const TString &, const FederatedQuery::QueryMeta::ComputeStatus &): (result.GetStatus() == EStatus::SUCCESS) failed: (BAD_REQUEST != SUCCESS)
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
, with diff: (BAD_REQ|S)U(|CC)ES(T|S)
TBackTrace::Capture()+28 (0x139D25FC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B861F9)
??+0 (0x1388D3A4)
NTestSuiteYq_1::TTestCaseDeleteQuery::Execute_(NUnitTest::TTestContext&)+1074 (0x138AADA2)
NTestSuiteYq_1::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x138BC937)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B880AE)
NTestSuiteYq_1::TCurrentTest::Execute()+415 (0x138BC24F)
NUnitTest::TTestFactory::Execute()+803 (0x13B88823)
NUnitTest::RunMain(int, char**)+3021 (0x13B9A3CD)
??+0 (0x7F8D1881ED90)
__libc_start_main+128 (0x7F8D1881EE40)
_start+41 (0x129E1029)
>> TSchemeShardTTLUtility::ValidateTiers [GOOD]
|75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest
>> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD]
>> BasicStatistics::NotFullStatisticsDatashard [FAIL]
|75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest
|75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::ValidateTiers [GOOD]
|75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest
|75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest
|75.3%| [TA] $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log}
>> BasicStatistics::NotFullStatisticsColumnshard
|75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest
|75.3%| [TA] {RESULT} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::InflightLimit [FAIL]
Test command err:
2025-05-29T15:31:22.828873Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890688923852042:2075];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:31:22.829146Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-05-29T15:31:22.832158Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890688296324598:2169];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:31:22.859336Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created
2025-05-29T15:31:22.864441Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created
2025-05-29T15:31:22.866086Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001e94/r3tmp/tmplsuHt8/pdisk_1.dat
2025-05-29T15:31:22.896374Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 62456, node 1
2025-05-29T15:31:22.910246Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ciyv/001e94/r3tmp/yandex60IiwQ.tmp
2025-05-29T15:31:22.910256Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ciyv/001e94/r3tmp/yandex60IiwQ.tmp
2025-05-29T15:31:22.910318Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ciyv/001e94/r3tmp/yandex60IiwQ.tmp
2025-05-29T15:31:22.910372Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
2025-05-29T15:31:22.914446Z INFO: TTestServer started on Port 31154 GrpcPort 62456
TClient is connected to server localhost:31154
PQClient connected to localhost:62456
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response:
2025-05-29T15:31:22.929556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:31:22.929584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:31:22.931043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:31:22.969530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:31:22.969558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:31:22.970649Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-05-29T15:31:22.970874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-05-29T15:31:22.970919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
waiting...
waiting...
2025-05-29T15:31:22.983537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
waiting...
waiting...
2025-05-29T15:31:23.145356Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890693218820343:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.145385Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.145488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890693218820356:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.146174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480
2025-05-29T15:31:23.146291Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890693218820387:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.146315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:23.149763Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890693218820358:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking }
2025-05-29T15:31:23.174164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.182510Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7509890692591292174:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-05-29T15:31:23.182622Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=MzYwNTYyN2EtYjlhODUzZjAtMmEwNGZmMmMtNDA2ZDVjMzc=, ActorId: [2:7509890692591292135:2309], ActorState: ExecuteState, TraceId: 01jweaprcy7pvsn0frgf7089r9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-05-29T15:31:23.183101Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-05-29T15:31:23.233675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:31:23.248040Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890693218820638:2884] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:31:23.252010Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890693218820656:2358], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-05-29T15:31:23.252099Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTk3YWFhMWQtNWJmMDdmZS01ODEyMzFlZi1hYmUwZDc0ZQ==, ActorId: [1:7509890693218820326:2334], ActorState: ExecuteState, TraceId: 01jweaprc74abx6zt3f1ff4j58, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-05-29T15:31:23.252243Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-05-29T15:31:23.298691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
=== Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);
2025-05-29T15:31:23.322002Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890693218820845:2382], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:31:23.322107Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjAxNjdmOGYtNjZkODJiMm ... : 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:31:37.649510Z node 27 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:31:37.649533Z node 27 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:31:37.651146Z node 27 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:31:37.653976Z node 27 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:31:37.653997Z node 27 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:31:37.654444Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:37.655243Z node 27 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 28 Cookie 28
2025-05-29T15:31:37.655469Z node 27 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected
waiting...
2025-05-29T15:31:37.669124Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
waiting...
waiting...
2025-05-29T15:31:37.865825Z node 28 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [28:7509890749912330888:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:37.865851Z node 28 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [28:7509890749912330865:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:37.865863Z node 28 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:31:37.867174Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480
2025-05-29T15:31:37.871580Z node 28 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [28:7509890749912330892:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking }
2025-05-29T15:31:37.885569Z node 27 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [27:7509890751796583218:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-05-29T15:31:37.885643Z node 27 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=27&id=NGQwZmIzMzktYTgwZTFjOGEtMWFiMzQzYjItYTA1MWY5NDg=, ActorId: [27:7509890751796583192:2334], ActorState: ExecuteState, TraceId: 01jweaq6rsfk6vfsecw15sn9tv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-05-29T15:31:37.885751Z node 27 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-05-29T15:31:37.886257Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:31:37.946691Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:31:37.951053Z node 28 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [28:7509890749912330927:2169] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:31:37.954771Z node 28 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [28:7509890749912330956:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-05-29T15:31:37.954857Z node 28 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=28&id=YzIxYTU5MjQtNTUyNDY4ZmQtOWVlYTFhY2YtMzc2NWRmZDQ=, ActorId: [28:7509890749912330861:2309], ActorState: ExecuteState, TraceId: 01jweaq6r939xbv2pzvedjxy6y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-05-29T15:31:37.955025Z node 28 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-05-29T15:31:38.012784Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
=== Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);
2025-05-29T15:31:38.035785Z node 27 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [27:7509890756091550904:2376], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:31:38.035884Z node 27 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=27&id=ZjM3YjI2ZDktNjM1ZDQ1YmMtYmYwZGE1ZDctMjg3ZGIyMzk=, ActorId: [27:7509890756091550901:2374], ActorState: ExecuteState, TraceId: 01jweaq6xa8fzeyq6hyag5eqck, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/testlib/test_pq_client.h:537, TMaybe NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TString, const NYdb::TParams &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
TBackTrace::Capture()+28 (0x13B9173C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13D44119)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQueryWithParams(TBasicString>, NYdb::Dev::TParams const&)+932 (0x139C81C4)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::RunYqlDataQuery(TBasicString>)+88 (0x139F6148)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::InitDCs(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&)+1170 (0x13995592)
NKikimr::NPersQueueTests::TFlatMsgBusPQClient::FullInit(THashMap>, NKikimr::NPersQueueTests::TPQTestClusterInfo, THash>>, TEqualTo>>, std::__y1::allocator>>>, TBasicString> const&, TBasicString> const&)+327 (0x139C2D47)
NPersQueue::TTestServer::StartServer(bool, TMaybe>, NMaybe::TPolicyUndefinedExcept>)+888 (0x138D5468)
NPersQueue::TTestServer::TTestServer(NKikimr::Tests::TServerSettings const&, bool, TVector> const&, NActors::NLog::EPriority, TMaybe, TDelete>, NMaybe::TPolicyUndefinedExcept>)+1563 (0x138D474B)
??+0 (0x139AE84C)
NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TTestCaseInflightLimit::Execute_(NUnitTest::TTestContext&)+40 (0x139ADEF8)
NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x139BF897)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13D45FCE)
NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute()+436 (0x139BF1B4)
NUnitTest::TTestFactory::Execute()+803 (0x13D46743)
NUnitTest::RunMain(int, char**)+3021 (0x13D582DD)
??+0 (0x7F4994EF7D90)
__libc_start_main+128 (0x7F4994EF7E40)
_start+41 (0x129C9029)
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterDuringUpsertTransaction-UseSink
Test command err:
Trying to start YDB, gRPC: 8894, MsgBus: 23147
2025-05-29T15:31:30.666099Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890719844774353:2070];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:31:30.666393Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001da0/r3tmp/tmpezYES0/pdisk_1.dat
2025-05-29T15:31:30.715775Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:31:30.715838Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890719844774312:2079] 1748532690665897 != 1748532690665900
TServer::EnableGrpc on GrpcPort 8894, node 1
2025-05-29T15:31:30.726795Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:31:30.726805Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:31:30.726806Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:31:30.726839Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:23147
TClient is connected to server localhost:23147
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response:
2025-05-29T15:31:30.767309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:31:30.767331Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:31:30.768393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:31:30.777410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:30.783985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:30.801288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:30.820903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:30.830971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:31:30.961167Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890719844775945:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.961189Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:30.996715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.003543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.010807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.018271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.032149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.086983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.095231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:31.111476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890724139743896:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.111508Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890724139743901:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.111520Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:31.112089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:31.115354Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890724139743903:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:31.187434Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890724139743955:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:31.260320Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890724139743971:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:31.260406Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OWI5ZTE0ODctYmE3NDBiN2ItNjEyZDQwMjItYzAyYjJjOWQ=, ActorId: [1:7509890719844775927:2401], ActorState: ExecuteState, TraceId: 01jweaq057acwktkthyhrhf8sf, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:31.260993Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
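Note on the failure mechanics: every run in this digest dies in the same harness check, AssertSuccessResult at ydb/core/kqp/ut/common/kqp_ut_common.h:375, which requires result.IsSuccess() on the status returned while creating the sample tables. Below is a minimal standalone sketch of that shape; only the helper's name, its NYdb::TStatus parameter, and the IsSuccess() predicate come from the trace, while the TStatus stand-in and the issue printing are assumptions for illustration.

```cpp
#include <cassert>
#include <iostream>
#include <string>

// Stand-in for NYdb::TStatus: only the two calls the sketch needs.
struct TStatus {
    bool Success = false;
    std::string Issues;  // flattened issue text
    bool IsSuccess() const { return Success; }
    const std::string& GetIssues() const { return Issues; }
};

// Shape of the failing check: require success, otherwise surface the
// issue text (the ": Fatal: ..." lines reprinted below in this log).
void AssertSuccessResult(const TStatus& result) {
    if (!result.IsSuccess()) {
        std::cerr << result.GetIssues() << '\n';
    }
    assert(result.IsSuccess());
}

int main() {
    AssertSuccessResult(TStatus{true, {}});  // passes
    // AssertSuccessResult(TStatus{false, ": Fatal: Execution, code: 1060"});
}
```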
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7FF15DC8BAC2 14. ??:0: ?? @ 0x7FF15DD1D84F Trying to start YDB, gRPC: 17307, MsgBus: 17963 2025-05-29T15:31:34.265985Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890740446203818:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:34.266006Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001da0/r3tmp/tmp33kgKU/pdisk_1.dat 2025-05-29T15:31:34.323398Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17307, node 1 2025-05-29T15:31:34.337395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:34.337408Z node 1 :NET_CLASSIFIER WAR ... /ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F3F0C58BAC2 14. ??:0: ?? 
@ 0x7F3F0C61D84F Trying to start YDB, gRPC: 5615, MsgBus: 19699 2025-05-29T15:31:37.920619Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890753646135949:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:37.920643Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001da0/r3tmp/tmphpzAvm/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5615, node 1 2025-05-29T15:31:37.974594Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:37.982946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:37.982961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:37.982963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:37.983008Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19699 TClient is connected to server localhost:19699 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:31:38.021643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:38.021662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:38.022724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:38.029229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:38.039047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:31:38.057605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:38.072878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:38.082879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:38.209708Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890757941104843:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.209733Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.242589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.248441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.256129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.262625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.269720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.277064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.332570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.342574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890757941105497:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.342593Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.342620Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890757941105502:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.343295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:38.345901Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890757941105504:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:38.434696Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890757941105555:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:38.514300Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890757941105571:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:38.514416Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWFiZTdkZGMtZjFlYWM4Y2MtNDAwMTk1MzgtY2U0NWYxNTE=, ActorId: [1:7509890757941104825:2401], ActorState: ExecuteState, TraceId: 01jweaq776fchf6kctf2bv2bja, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:38.515225Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
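The root issue the compile actor reports is identical on every retry: an out-of-range child access inside the YQL expression AST (yql/essentials/ast/yql_expr.h:1874), raised as issue code 1 and wrapped into INTERNAL_ERROR, code 1060, by KQP. The sketch below only illustrates that failure shape; the accessor body, AddChild, and the exception type are hypothetical and differ from the real TExprNode implementation.

```cpp
#include <cstddef>
#include <memory>
#include <stdexcept>
#include <vector>

// Hypothetical expression node: the point is the index check before
// dereferencing, which is what fires at yql_expr.h:1874.
class TExprNode {
public:
    using TPtr = std::shared_ptr<TExprNode>;

    const TExprNode& Child(std::size_t index) const {
        if (index >= Children_.size()) {
            // In the log this becomes issue code 1 ("index out of range"),
            // reported to the session as INTERNAL_ERROR (code 1060).
            throw std::out_of_range("index out of range");
        }
        return *Children_[index];
    }

    void AddChild(TPtr child) { Children_.push_back(std::move(child)); }

private:
    std::vector<TPtr> Children_;
};

int main() {
    TExprNode root;
    root.AddChild(std::make_shared<TExprNode>());
    root.Child(0);     // fine
    // root.Child(1);  // would throw: the condition this log keeps hitting
}
```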
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F41F404DAC2 14. ??:0: ?? @ 0x7F41F40DF84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:30:24.466137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:24.466160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:24.466166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:24.466172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:24.466187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:24.466192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:24.466201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:24.466215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:24.466329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:24.466422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:24.478721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:24.478764Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:24.478851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:24.481605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:24.481637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:24.481670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:24.484424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:24.484499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:24.484623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:24.484795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:24.485462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:24.485503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:24.485790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:24.485800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:24.485836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:24.485844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:24.485851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:24.485869Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:24.487230Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:24.502369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:24.502454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:24.502525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:24.502568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:24.502577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:24.503396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:24.503421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:24.503465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:24.503475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:24.503481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:24.503488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:24.503852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:24.503861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:24.503864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:24.504114Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:24.504121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:24.504125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:24.504131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:24.504606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:24.504973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:24.505023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:24.505197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:24.505223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:24.505230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:24.505293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
BrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 
RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:40.811192Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:31:40.811229Z node 54 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 46us result status StatusSuccess 2025-05-29T15:31:40.811329Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { 
Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:40.821599Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1098:2873] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-05-29T15:31:40.821632Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1058:2873] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-05-29T15:31:40.821661Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1098:2873] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1748532700809138 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b 
TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1748532700809138 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1748532700809138 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:31:40.822394Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1098:2873] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-05-29T15:31:40.822420Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1058:2873] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink Test command err: Trying to start YDB, gRPC: 30535, MsgBus: 19144 2025-05-29T15:31:33.871569Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890734726804579:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:33.871589Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d90/r3tmp/tmp4Xgh16/pdisk_1.dat 2025-05-29T15:31:33.927587Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30535, node 1 2025-05-29T15:31:33.943261Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:33.943272Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:33.943274Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:33.943303Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19144 2025-05-29T15:31:33.972516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:33.972549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:33.973644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19144 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:34.003430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:34.011151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:34.025723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:34.047164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:34.057178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:34.192507Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890739021773474:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:34.192524Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:34.242697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:34.248746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:34.258904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:34.265290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:34.272635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:34.327269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:34.335924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:34.351662Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890739021774131:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:34.351682Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:34.351699Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890739021774136:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:34.352201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:34.356182Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890739021774138:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:34.423402Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890739021774189:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:34.516421Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890739021774205:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:34.516521Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWJmZjA1YzMtNzhiNDMyYzItNTAwODc0MjEtODJkYWEyMzQ=, ActorId: [1:7509890739021773471:2401], ActorState: ExecuteState, TraceId: 01jweaq3af20pdy97nrd2gc42y, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:34.517054Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
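The "assertion failed in non-unittest thread" / "requirement UnittestThread failed" pair that follows each compile error comes from the harness itself: CreateSampleTables runs on a thread-pool thread (via the future in the trace), and the registrar can only convert a failed assertion into a reported test failure on the thread that runs the test, so off-thread it panics and aborts, producing the VERIFY line and the numbered backtrace. A rough sketch of that guard, under the assumption of a plain thread-id check (the real registar.cpp logic is more involved):

```cpp
#include <cstdlib>
#include <iostream>
#include <stdexcept>
#include <thread>

// Assumption: captured once when the test harness starts.
static const std::thread::id UnittestThread = std::this_thread::get_id();

void RaiseError(const char* message) {
    if (std::this_thread::get_id() != UnittestThread) {
        // Off the unittest thread there is no test context to fail, so the
        // only safe outcome is a hard abort: the "VERIFY failed" line above.
        std::cerr << "VERIFY failed: " << message << '\n';
        std::abort();
    }
    // On the unittest thread the failure is thrown and reported normally.
    throw std::runtime_error(message);
}

int main() {
    try {
        RaiseError("assertion failed");  // reported as a test failure
    } catch (const std::exception& e) {
        std::cerr << e.what() << '\n';
    }
    // std::thread([] { RaiseError("assertion failed"); }).join();  // aborts
}
```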
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F9B5D0BCAC2 14. ??:0: ?? @ 0x7F9B5D14E84F Trying to start YDB, gRPC: 26804, MsgBus: 5668 2025-05-29T15:31:37.742923Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890752130183675:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:37.742948Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001d90/r3tmp/tmpXPZgiN/pdisk_1.dat 2025-05-29T15:31:37.799002Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26804, node 1 2025-05-29T15:31:37.810858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:37.810875Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:37.810878Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:37.810928Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5668 2025-05-29T15:31:37.844460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:37.844486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:37.845519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5668 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:37.873769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:37.882686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:37.945100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:37.964597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:37.977258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:38.042539Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890756425152563:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.042574Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.076829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.083618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.138383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.151341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.158523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.172542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.179471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.195695Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890756425153219:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.195717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890756425153224:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.195722Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.196308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:38.199366Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890756425153226:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:31:38.278751Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890756425153277:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:38.344377Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890756425153293:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:38.344463Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTc3OWE4NjQtYzY5MmYzOTYtMWNhYTQ1Y2YtYTNlNGFiNmY=, ActorId: [1:7509890756425152545:2401], ActorState: ExecuteState, TraceId: 01jweaq72k6qhykbjcaddg9d47, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:38.345049Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x15F23B95 1. /-S/util/system/yassert.cpp:55: Panic @ 0x15F1AB96 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x160BC066 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15D88C82 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x2855B9A2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x2857CF4C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x2857CF4C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x2857CF4C 8. /-S/util/thread/pool.h:71: Process @ 0x2857CF4C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x15F2B519 10. /-S/util/thread/factory.h:15: Execute @ 0x15F29F09 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x15F29F09 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x15F2537C 13. ??:0: ?? @ 0x7F5688602AC2 14. ??:0: ?? @ 0x7F568869484F >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] >> YdbSdkSessionsPool::StressTestAsync/1 [GOOD] >> IntermediateDirsReboots::CreateWithIntermediateDirs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] Test command err: 2025-05-29T15:31:39.169210Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:31:39.172205Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:31:39.172267Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:31:39.172842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:31:39.172888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:31:39.172925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:31:39.172945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:31:39.172966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:31:39.172981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:31:39.172999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:31:39.173019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:31:39.173031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:31:39.173044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:31:39.173058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:31:39.173076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:139:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:31:39.178530Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:31:39.178580Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:31:39.178588Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:31:39.178617Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:31:39.178657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:31:39.178666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:31:39.178670Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:31:39.178677Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:31:39.178684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:31:39.178689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:31:39.178692Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:31:39.178705Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:31:39.178710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:31:39.178715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:31:39.178718Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:31:39.178725Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:31:39.178730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:31:39.178751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:31:39.178758Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:31:39.178772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:31:39.178779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:31:39.178784Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:31:39.178791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:31:39.178798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:31:39.178801Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:31:39.178818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:31:39.178825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:31:39.178828Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:31:39.178856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:31:39.178862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:31:39.178866Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:31:39.178874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:31:39.178879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:31:39.178882Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:31:39.178887Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:31:39.178893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:31:39.178898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:31:39.178901Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:31:39.178955Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2025-05-29T15:31:39.178962Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2025-05-29T15:31:39.178968Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute; ... 
g;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:31:41.725233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-05-29T15:31:41.725241Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:230;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-05-29T15:31:41.725248Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:250;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-05-29T15:31:41.725273Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1014:2870];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-05-29T15:31:41.725281Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:31:41.725289Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:31:41.725295Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:31:41.725312Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:31:41.725318Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:31:41.725325Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:31:41.725329Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:410: Scan [1:1015:2871] finished for tablet 9437184 2025-05-29T15:31:41.725361Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1014:2870];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1748532701724177,"name":"_full_task","f":1748532701724177,"d_finished":0,"c":0,"l":1748532701725333,"d":1156},"events":[{"name":"bootstrap","f":1748532701724200,"d_finished":241,"c":1,"l":1748532701724441,"d":241},{"a":1748532701725310,"name":"ack","f":1748532701725213,"d_finished":84,"c":1,"l":1748532701725297,"d":107},{"a":1748532701725309,"name":"processing","f":1748532701724548,"d_finished":470,"c":10,"l":1748532701725297,"d":494},{"name":"ProduceResults","f":1748532701724339,"d_finished":228,"c":13,"l":1748532701725327,"d":228},{"a":1748532701725327,"name":"Finish","f":1748532701725327,"d_finished":0,"c":0,"l":1748532701725333,"d":6},{"name":"task_result","f":1748532701724550,"d_finished":376,"c":9,"l":1748532701725190,"d":376}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:31:41.725366Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1014:2870];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-05-29T15:31:41.725389Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: 
TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1014:2870];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1748532701724177,"name":"_full_task","f":1748532701724177,"d_finished":0,"c":0,"l":1748532701725370,"d":1193},"events":[{"name":"bootstrap","f":1748532701724200,"d_finished":241,"c":1,"l":1748532701724441,"d":241},{"a":1748532701725310,"name":"ack","f":1748532701725213,"d_finished":84,"c":1,"l":1748532701725297,"d":144},{"a":1748532701725309,"name":"processing","f":1748532701724548,"d_finished":470,"c":10,"l":1748532701725297,"d":531},{"name":"ProduceResults","f":1748532701724339,"d_finished":228,"c":13,"l":1748532701725327,"d":228},{"a":1748532701725327,"name":"Finish","f":1748532701725327,"d_finished":0,"c":0,"l":1748532701725370,"d":43},{"name":"task_result","f":1748532701724550,"d_finished":376,"c":9,"l":1748532701725190,"d":376}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-05-29T15:31:41.725399Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-05-29T15:31:41.724093Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-05-29T15:31:41.725402Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:190;event=scan_aborted;reason=unexpected on destructor; 2025-05-29T15:31:41.725423Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: TEST_STEP=11;SelfId=[1:1015:2871];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> KqpBatchDelete::SimpleOnePartition >> IntermediateDirsReboots::CreateSubDomainWithIntermediateDirs |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |75.4%| [TA] $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |75.4%| [TA] $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CreateIndexedTableWithReboots >> IntermediateDirsReboots::Fake [GOOD] >> IntermediateDirsReboots::CreateTableWithIntermediateDirsAndRejectInSolomon |75.4%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} |75.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TSolomonReboots::AdoptDropSolomonWithReboots >> KqpOlapAggregations::Aggregation_Avg_NullGroupBy [GOOD] |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::Fake [GOOD] >> IntermediateDirsReboots::CreateDirWithIntermediateDirsForceDrop ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_NullGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 3181, MsgBus: 9785 2025-05-29T15:31:38.650289Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890757134789236:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:38.650313Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00263c/r3tmp/tmpU3SPR1/pdisk_1.dat 2025-05-29T15:31:38.706880Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890757134789213:2079] 1748532698650119 != 1748532698650122 2025-05-29T15:31:38.708162Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3181, node 1 2025-05-29T15:31:38.720980Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:38.720994Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:38.720996Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:38.721039Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9785 TClient is connected to server localhost:9785 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:38.780543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:38.780574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:38.781565Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:38.787363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:38.793816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:31:38.804223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890757134789915:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:31:38.804288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890757134789915:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:31:38.804329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890757134789915:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:31:38.804347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890757134789915:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:31:38.804372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890757134789915:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:31:38.804390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;self_id=[1:7509890757134789915:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:31:38.804408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890757134789915:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:31:38.804429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890757134789915:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:31:38.804442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890757134789915:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:31:38.804457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890757134789915:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:31:38.804468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890757134789915:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:31:38.804488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890757134789915:2314];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:31:38.804895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:31:38.804908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:31:38.804920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:31:38.804923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:31:38.804934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:31:38.804937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:31:38.804943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:31:38.804945Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:31:38.804953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:31:38.804956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:31:38.804960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:31:38.804967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:31:38.804981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:31:38.804990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:31:38.805007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:31:38.805012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:31:38.805020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:31:38.805025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:31:38.805030Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:31:38.805037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:31:38.805039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0Chu ... 
4: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:31:39.669036Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:31:39.669042Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:31:39.669048Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:31:39.669050Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:31:39.669054Z node 2 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:31:39.669058Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:31:39.669061Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:31:39.669087Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:31:39.669092Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:31:39.706600Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:31:39.707552Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:31:39.708451Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; 2025-05-29T15:31:39.709311Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; 
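The REQUEST above is the query under test in KqpOlapAggregations::Aggregation_Avg_NullGroupBy: AVG(level) over a nullable column, grouped by id, with the aggregate pushed down to the column shards. In the AST printed further down, TKqpOlapAgg emits partial _yql_agg_0_sum and _yql_agg_0_cnt columns per shard, and the final stage divides them ((Div (Nth ... '0) (Nth ... '1))), so a group whose level values are all NULL yields NULL. A rough sketch of that finalization step, using illustrative names rather than real YDB API:

    #include <cstdint>
    #include <optional>

    // Partial aggregate produced by the pushed-down TKqpOlapAgg:
    // one (sum, count) pair per GROUP BY key. Names are illustrative.
    struct TPartialAvg {
        double Sum = 0.0;       // corresponds to _yql_agg_0_sum
        std::uint64_t Cnt = 0;  // corresponds to _yql_agg_0_cnt
    };

    // Final stage: AVG = sum / count; NULL when the group had no
    // non-NULL values, mirroring the Div step in the AST below.
    std::optional<double> FinalizeAvg(const TPartialAvg& partial) {
        if (partial.Cnt == 0) {
            return std::nullopt;
        }
        return partial.Sum / static_cast<double>(partial.Cnt);
    }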
2025-05-29T15:31:39.816487Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890759285168215:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.816507Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7509890759285168189:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.816528Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.817271Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:31:39.819133Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7509890759285168218:2367], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:31:39.913944Z node 2 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [2:7509890759285168269:2451] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:42.487335Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532700000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.id","Name":"Sort"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Sort-Aggregate"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","Path":"\/Root\/tableWithNulls","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle (KeyColumns: [\"id\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType 
(TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1795) '('"_id" '"a05551b1-ea312616-8d9cf7ef-4d95cae6") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $26 (Int32 '1)) (let $27 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"6")) $26) $27))) (RangeCreate (AsList '($27 '((Just (Int32 '"7")) $26)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Double)) (let $11 (OptionalType (TupleType $10 (DataType 'Uint64)))) (let $12 '('"id" $1)) (let $13 '('('"_logical_id" '1854) '('"_id" '"760dcbec-541457bf-5fa630ec-fb1ed8eb") '('"_wide_channels" (StructType '('_yql_agg_0 $11) $12)))) (let $14 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $29 '('"id")) (let $30 '('('"UsedKeyColumns" $29) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $28 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $30 (lambda '($33) (block '( (let $34 '('"_yql_agg_0_sum" '"sum" '"level")) (let $35 '('"_yql_agg_0_cnt" '"count" '"level")) (return (TKqpOlapAgg $33 '($34 $35) $29)) ))))) (let $32 (lambda '($36 $37 $38) (block '( (let $39 (IfPresent $37 (lambda '($40) (Just '((Convert $40 'Double) $36))) (Nothing $11))) (return $39 $38) )))) (return (FromFlow (WideMap $31 $32))) ))) $13)) (let $15 (DqCnHashShuffle (TDqOutput $14 '0) '('1))) (let $16 (OptionalType $10)) (let $17 (StructType '('"column1" $16) $12)) (let $18 '('('"_logical_id" '3442) '('"_id" '"e35fda37-ad44ec54-6e529032-fbfb1e2") '('"_wide_channels" $17))) (let $19 (DqPhyStage '($15) (lambda '($41) (block '( (let $42 (lambda '($55 $56) (block '( (let $57 (IfPresent $56 (lambda '($58) (Just (Div (Nth $58 '0) (Nth $58 '1)))) (Nothing $16))) (return $57 $55) )))) (let $43 (WideCombiner (ToFlow $41) '"" (lambda '($44 $45) $45) (lambda '($46 $47 $48) $47) (lambda '($49 $50 $51 $52) (IfPresent $50 (lambda '($53) (IfPresent $52 (lambda '($54) (Just '((AggrAdd (Nth $53 '0) (Nth $54 '0)) (AggrAdd (Nth $53 '1) (Nth $54 '1))))) $50)) $52)) $42)) (return (FromFlow (WideSort $43 '('('1 (Bool 'true)))))) ))) $18)) (let $20 (DqCnMerge (TDqOutput $19 '0) '('('1 '"Asc")))) (let $21 (DqPhyStage '($20) (lambda '($59) (FromFlow (NarrowMap (ToFlow $59) (lambda '($60 $61) (AsStruct '('"column1" $60) '('"id" $61)))))) '('('"_logical_id" '3454) '('"_id" '"b10958f9-4006772e-69e41f92-22541cb2")))) (let $22 '($14 $19 $21)) (let $23 (DqCnResult (TDqOutput $21 '0) '('"id" '"column1"))) (let $24 (KqpTxResultBinding $9 '0 '0)) (let $25 (KqpPhysicalTx $22 '($23) '('($7 $24)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $25) '((KqpTxResultBinding (ListType $17) '1 '0)) '('('"type" '"scan_query")))) ) |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TSolomonReboots::CreateDropSolomonWithReboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessDbs [FAIL] Test command err: 2025-05-29T15:31:34.113251Z node 1 
:KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:31:34.113291Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:31:34.113307Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001511/r3tmp/tmp6OPUgf/pdisk_1.dat 2025-05-29T15:31:34.205254Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26284, node 1 2025-05-29T15:31:34.311877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:34.311897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:34.311902Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:34.311958Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:31:34.312599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:31:34.389115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:34.389144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:34.400763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13876 2025-05-29T15:31:34.732069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.520121Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:31:35.528827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:35.528865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:35.583025Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:31:35.583593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:35.726465Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:35.726624Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:35.726794Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-05-29T15:31:35.726822Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:35.726834Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:35.726870Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:35.726883Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:35.726896Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:35.726920Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:35.879508Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:35.879549Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:35.891122Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:35.923354Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:35.929919Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:31:35.929949Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:31:35.936172Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:31:35.936370Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:31:35.936406Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:31:35.936411Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:31:35.936415Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:31:35.936420Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:31:35.936423Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:31:35.936428Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:31:35.936570Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:31:35.953008Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:35.953034Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:1863:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:35.954032Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1874:2605] 2025-05-29T15:31:35.955113Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2622] 
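For readability, the query whose JSON plan and AST are dumped in the KqpOlapAggregations output above is repeated here verbatim from its REQUEST line:

    --!syntax_v1
    PRAGMA Kikimr.OptUseFinalizeByKey;
    SELECT id, AVG(level)
    FROM `/Root/tableWithNulls`
    WHERE id BETWEEN 6 AND 7
    GROUP BY id
    ORDER BY id;

As the AST shows, the AVG is decomposed into pushed-down sum and count aggregates over level (_yql_agg_0_sum and _yql_agg_0_cnt in the TKqpOlapAgg node) and finalized as Div(sum, count) in a later stage, before the merge sort on id.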
2025-05-29T15:31:35.955344Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1905:2622], schemeshard id = 72075186224037897 2025-05-29T15:31:35.955876Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-05-29T15:31:35.959269Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:31:35.959281Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:31:35.959291Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-05-29T15:31:35.962165Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:31:35.964466Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:31:35.964498Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:31:36.055582Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:31:36.135788Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:31:36.199535Z node 2 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. 
Column diff is empty, finishing 2025-05-29T15:31:36.674786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:31:37.101522Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:37.209484Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7809: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-05-29T15:31:37.209507Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7825: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-05-29T15:31:37.209517Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:2564:2936], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-05-29T15:31:37.209657Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:2566:2938] 2025-05-29T15:31:37.209714Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2566:2938], schemeshard id = 72075186224037899 2025-05-29T15:31:37.943115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.287715Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:38.445673Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7809: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-05-29T15:31:38.445696Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7825: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037905 2025-05-29T15:31:38.445715Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:3043:3134], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037905 2025-05-29T15:31:38.446142Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:3045:3135] 2025-05-29T15:31:38.446215Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:3045:3135], schemeshard id = 72075186224037905 2025-05-29T15:31:39.254662Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3165:3392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.254705Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.258718Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-05-29T15:31:39.326912Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3317:3428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.326954Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.335092Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3322:3432]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:31:39.335150Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-05-29T15:31:39.335188Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-05-29T15:31:39.335197Z node 1 :STATISTICS DEBUG: service_impl.cpp:1219: ConnectToSA(), pipe client id = [1:3325:3435] 2025-05-29T15:31:39.335209Z node 1 :STATISTICS DEBUG: service_impl.cpp:1248: SyncNode(), pipe client id = [1:3325:3435] 2025-05-29T15:31:39.335397Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:3326:3262] 2025-05-29T15:31:39.335485Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:3325:3435], server id = [2:3326:3262], tablet id = 72075186224037894, status = OK 2025-05-29T15:31:39.335560Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:133: [72075186224037894] EvConnectNode, pipe server id = [2:3326:3262], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-05-29T15:31:39.335578Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:314: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-05-29T15:31:39.335631Z node 1 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 1 2025-05-29T15:31:39.335641Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 1, ReplyToActorId = [1:3322:3432], StatRequests.size() = 1 2025-05-29T15:31:39.339332Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3330:3439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.339375Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.339486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3335:3444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.341066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-05-29T15:31:39.570922Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:217: [72075186224037894] EvFastPropagateCheck 2025-05-29T15:31:39.570954Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:357: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-05-29T15:31:39.633315Z node 1 :STATISTICS DEBUG: service_impl.cpp:1189: EvRequestTimeout, pipe client id = [1:3325:3435], schemeshard count = 1 2025-05-29T15:31:39.867864Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3337:3446], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-05-29T15:31:39.988547Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:3446:3517] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:39.990614Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3469:3533]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:31:39.990655Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-05-29T15:31:39.990660Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 2, ReplyToActorId = [1:3469:3533], StatRequests.size() = 1 2025-05-29T15:31:39.996288Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:3458:3528], status: INTERNAL_ERROR, issues:
: Fatal: Default error
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:39.996811Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjQ3MmYyMi1lZWExMDU1Yi1iZWIzYzJjYi01NjYyNTgyMQ==, ActorId: [1:3328:3437], ActorState: ExecuteState, TraceId: 01jweaq85y3fve3ssqrc51p386, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/statistics/ut_common/ut_common.cpp:237, Ydb::StatusIds::StatusCode NKikimr::NStat::ExecuteYqlScript(TTestEnv &, const TString &, bool): (response.operation().status() == Ydb::StatusIds::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS) , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x137F904C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x139AC3D9) NKikimr::NStat::ExecuteYqlScript(NKikimr::NStat::TTestEnv&, TBasicString> const&, bool)+1783 (0x25EAECD7) ??+0 (0x136E9066) ??+0 (0x136DDB6A) NKikimr::NStat::NTestSuiteBasicStatistics::TTestCaseTwoServerlessDbs::Execute_(NUnitTest::TTestContext&)+1038 (0x136E010E) NKikimr::NStat::NTestSuiteBasicStatistics::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136E84F7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x139AE28E) NKikimr::NStat::NTestSuiteBasicStatistics::TCurrentTest::Execute()+436 (0x136E7D54) NUnitTest::TTestFactory::Execute()+803 (0x139AEA03) NUnitTest::RunMain(int, char**)+3021 (0x139BCD1D) ??+0 (0x7F4BF2890D90) __libc_start_main+128 (0x7F4BF2890E40) _start+41 (0x1283B029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables [FAIL] Test command err: 2025-05-29T15:31:36.141124Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:31:36.141166Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:31:36.141183Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001503/r3tmp/tmpuN9Au1/pdisk_1.dat 2025-05-29T15:31:36.229300Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31785, node 1 2025-05-29T15:31:36.340509Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:36.340527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:36.340531Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:36.340625Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:31:36.341118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:31:36.416768Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:36.416804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:36.428919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7986 2025-05-29T15:31:36.763386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:31:37.518011Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:31:37.524535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:37.524565Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:37.567540Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:31:37.568180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:37.710863Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:37.711025Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:37.711154Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-05-29T15:31:37.711179Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:37.711191Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:37.711227Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:37.711238Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:37.711250Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:37.711267Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:37.861513Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:37.861551Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:37.872906Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:37.907257Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:37.915373Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:31:37.915403Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:31:37.921953Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:31:37.922018Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:31:37.922045Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:31:37.922052Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:31:37.922059Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:31:37.922065Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:31:37.922071Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:31:37.922079Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:31:37.922211Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:31:37.935820Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:37.935854Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:1859:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:37.937099Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1871:2604] 2025-05-29T15:31:37.937882Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1889:2614] 
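All three BasicStatistics failures in this block (TwoServerlessDbs, TwoTables, NotFullStatisticsDatashard) trip the same check at ydb/core/statistics/ut_common/ut_common.cpp:237 after KQP compilation aborts with the yql_expr.h:1874 "index out of range" error. Below is a minimal, self-contained sketch of that check, reconstructed only from the assertion text in the log; the response plumbing and helper names are stand-ins, not the actual source:

    #include <cstdio>
    #include <cstdlib>
    #include <string>

    // Minimal stand-ins for the real test types; purely illustrative.
    enum class EStatus { SUCCESS, INTERNAL_ERROR };
    struct TResponse { EStatus Status; };

    // Hypothetical script runner standing in for the real KQP session call.
    // In the failing runs the compile actor reports INTERNAL_ERROR
    // ("yql/essentials/ast/yql_expr.h:1874: index out of range").
    static TResponse RunYqlScript(const std::string& script) {
        (void)script;
        return {EStatus::INTERNAL_ERROR};
    }

    // Sketch of ExecuteYqlScript at ut_common.cpp:237: the real code asserts
    // response.operation().status() == Ydb::StatusIds::SUCCESS, which is what
    // produces "(INTERNAL_ERROR != SUCCESS)" in the backtraces in this block.
    static EStatus ExecuteYqlScript(const std::string& script, bool mustSucceed = true) {
        TResponse response = RunYqlScript(script);
        if (mustSucceed && response.Status != EStatus::SUCCESS) {
            std::fprintf(stderr, "assertion failed: (INTERNAL_ERROR != SUCCESS)\n");
            std::abort();
        }
        return response.Status;
    }

    int main() { ExecuteYqlScript("SELECT 1;"); }

The point of the sketch: the assertion only surfaces the compiler failure, which is consistent with every statistics suite that runs a YQL script through this helper failing in the same way.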
2025-05-29T15:31:37.937973Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1889:2614], schemeshard id = 72075186224037897 2025-05-29T15:31:37.939940Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-05-29T15:31:37.944990Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:31:37.945008Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:31:37.945020Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-05-29T15:31:37.950718Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:31:37.953201Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:31:37.953244Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:31:38.046878Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:31:38.126061Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:31:38.199625Z node 2 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:31:38.852017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2212:3058], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.852050Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.854983Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-05-29T15:31:38.966098Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2349:3092], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.966154Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.966637Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2354:3096]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:31:38.966671Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-05-29T15:31:38.966681Z node 1 :STATISTICS DEBUG: service_impl.cpp:1219: ConnectToSA(), pipe client id = [1:2356:3098] 2025-05-29T15:31:38.966690Z node 1 :STATISTICS DEBUG: service_impl.cpp:1248: SyncNode(), pipe client id = [1:2356:3098] 2025-05-29T15:31:38.966876Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:2357:2865] 2025-05-29T15:31:38.966929Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:2356:3098], server id = [2:2357:2865], tablet id = 72075186224037894, status = OK 2025-05-29T15:31:38.966960Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:133: [72075186224037894] EvConnectNode, pipe server id = [2:2357:2865], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-05-29T15:31:38.966973Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:314: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-05-29T15:31:38.967031Z node 1 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 1 2025-05-29T15:31:38.967039Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 1, ReplyToActorId = [1:2354:3096], StatRequests.size() = 1 2025-05-29T15:31:38.969981Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2361:3102], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.970009Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.970078Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2366:3107], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:38.971338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:31:39.122811Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:217: [72075186224037894] EvFastPropagateCheck 2025-05-29T15:31:39.122842Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:357: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-05-29T15:31:39.227033Z node 1 :STATISTICS DEBUG: service_impl.cpp:1189: EvRequestTimeout, pipe client id = [1:2356:3098], schemeshard count = 1 2025-05-29T15:31:39.563622Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2368:3109], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:31:39.663569Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:2467:3176] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:39.666012Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2490:3192]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:31:39.666066Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-05-29T15:31:39.666073Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 2, ReplyToActorId = [1:2490:3192], StatRequests.size() = 1 2025-05-29T15:31:39.673294Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2478:3186], status: INTERNAL_ERROR, issues:
: Fatal: Default error
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:39.673821Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTc2YTEwOGYtNTNmZDVlMjAtZDBhYjMyYjctZTg3ZjhlYjI=, ActorId: [1:2359:3100], ActorState: ExecuteState, TraceId: 01jweaq7tg3f74pzpfwhzbr7fz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/statistics/ut_common/ut_common.cpp:237, Ydb::StatusIds::StatusCode NKikimr::NStat::ExecuteYqlScript(TTestEnv &, const TString &, bool): (response.operation().status() == Ydb::StatusIds::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS) , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x137F904C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x139AC3D9) NKikimr::NStat::ExecuteYqlScript(NKikimr::NStat::TTestEnv&, TBasicString> const&, bool)+1783 (0x25EAECD7) ??+0 (0x136E9066) ??+0 (0x136DDB6A) NKikimr::NStat::NTestSuiteBasicStatistics::TTestCaseTwoTables::Execute_(NUnitTest::TTestContext&)+453 (0x136DEEF5) NKikimr::NStat::NTestSuiteBasicStatistics::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136E84F7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x139AE28E) NKikimr::NStat::NTestSuiteBasicStatistics::TCurrentTest::Execute()+436 (0x136E7D54) NUnitTest::TTestFactory::Execute()+803 (0x139AEA03) NUnitTest::RunMain(int, char**)+3021 (0x139BCD1D) ??+0 (0x7FB80AB9CD90) __libc_start_main+128 (0x7FB80AB9CE40) _start+41 (0x1283B029) |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestAsync/1 [GOOD] |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::Fake [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard [FAIL] Test command err: 2025-05-29T15:31:37.285181Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:31:37.285241Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:31:37.285268Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0014fb/r3tmp/tmp1X6Qpm/pdisk_1.dat 2025-05-29T15:31:37.396495Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12290, node 1 2025-05-29T15:31:37.499183Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:37.499204Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:37.499208Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:37.499254Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:31:37.499821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:31:37.575846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:37.575879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:37.587466Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29117 2025-05-29T15:31:37.918544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.750428Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:31:38.759296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:38.759327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:38.813330Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:31:38.814042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:38.965573Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:38.965764Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:38.965918Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-05-29T15:31:38.965972Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:38.965988Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:38.966036Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:38.966055Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:38.966086Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:38.966116Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:39.116253Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:39.116305Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:39.127870Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:39.159800Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:39.165441Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:31:39.165466Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:31:39.171501Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:31:39.171733Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:31:39.171757Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:31:39.171763Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:31:39.171768Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:31:39.171774Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:31:39.171779Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:31:39.171785Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:31:39.172131Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:31:39.188341Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:39.188374Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:1863:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:39.189524Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1874:2605] 2025-05-29T15:31:39.190801Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2622] 
2025-05-29T15:31:39.191066Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1905:2622], schemeshard id = 72075186224037897 2025-05-29T15:31:39.191723Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-05-29T15:31:39.195827Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:31:39.195841Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:31:39.195854Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-05-29T15:31:39.198694Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:31:39.200436Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:31:39.200462Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:31:39.297155Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:31:39.382286Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:31:39.435639Z node 2 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:31:39.928665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2212:3057], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.928694Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.931873Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-05-29T15:31:40.194902Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2517:3106], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:40.194941Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:40.195352Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2522:3110]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:31:40.195380Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-05-29T15:31:40.195390Z node 1 :STATISTICS DEBUG: service_impl.cpp:1219: ConnectToSA(), pipe client id = [1:2524:3112] 2025-05-29T15:31:40.195398Z node 1 :STATISTICS DEBUG: service_impl.cpp:1248: SyncNode(), pipe client id = [1:2524:3112] 2025-05-29T15:31:40.195531Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:2525:2985] 2025-05-29T15:31:40.195583Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:2524:3112], server id = [2:2525:2985], tablet id = 72075186224037894, status = OK 2025-05-29T15:31:40.195632Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:133: [72075186224037894] EvConnectNode, pipe server id = [2:2525:2985], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-05-29T15:31:40.195645Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:314: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-05-29T15:31:40.195682Z node 1 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 1 2025-05-29T15:31:40.195689Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 1, ReplyToActorId = [1:2522:3110], StatRequests.size() = 1 2025-05-29T15:31:40.197869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2529:3116], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:40.197889Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:40.197957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2534:3121], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:40.199116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:31:40.311163Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:217: [72075186224037894] EvFastPropagateCheck 2025-05-29T15:31:40.311190Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:357: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-05-29T15:31:40.394599Z node 1 :STATISTICS DEBUG: service_impl.cpp:1189: EvRequestTimeout, pipe client id = [1:2524:3112], schemeshard count = 1 2025-05-29T15:31:40.649358Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2536:3123], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:31:40.752511Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:2649:3193] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:40.755160Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2672:3209]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:31:40.755209Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-05-29T15:31:40.755215Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 2, ReplyToActorId = [1:2672:3209], StatRequests.size() = 1 2025-05-29T15:31:40.762881Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2660:3203], status: INTERNAL_ERROR, issues:
: Fatal: Default error
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:40.763454Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTA1NTEwMTctN2NhNjBkM2EtZThmZDJkZWItNjliMDA0ZTM=, ActorId: [1:2527:3114], ActorState: ExecuteState, TraceId: 01jweaq90xa4cf53dt7scxp7d6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/statistics/ut_common/ut_common.cpp:237, Ydb::StatusIds::StatusCode NKikimr::NStat::ExecuteYqlScript(TTestEnv &, const TString &, bool): (response.operation().status() == Ydb::StatusIds::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS) , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x137F904C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x139AC3D9) NKikimr::NStat::ExecuteYqlScript(NKikimr::NStat::TTestEnv&, TBasicString> const&, bool)+1783 (0x25EAECD7) NKikimr::NStat::CreateUniformTable(NKikimr::NStat::TTestEnv&, TBasicString> const&, TBasicString> const&)+877 (0x25EAF9AD) NKikimr::NStat::NTestSuiteBasicStatistics::TTestCaseNotFullStatisticsDatashard::Execute_(NUnitTest::TTestContext&)+396 (0x136E42DC) NKikimr::NStat::NTestSuiteBasicStatistics::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136E84F7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x139AE28E) NKikimr::NStat::NTestSuiteBasicStatistics::TCurrentTest::Execute()+436 (0x136E7D54) NUnitTest::TTestFactory::Execute()+803 (0x139AEA03) NUnitTest::RunMain(int, char**)+3021 (0x139BCD1D) ??+0 (0x7F7C15983D90) __libc_start_main+128 (0x7F7C15983E40) _start+41 (0x1283B029) |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::Fake [GOOD] >> TReplicationWithRebootsTests::Alter >> TReplicationWithRebootsTests::Create >> BasicStatistics::TwoNodes [FAIL] |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |75.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |75.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> BasicStatistics::Serverless [FAIL] >> TReplicationWithRebootsTests::CreateInParallelWithInitialController >> KqpOlapJson::CompactionVariants [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::SimpleOnePartition Test command err: Trying to start YDB, gRPC: 26676, MsgBus: 29185 2025-05-29T15:31:42.216171Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890772275296828:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:42.216189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002778/r3tmp/tmpXbpGCF/pdisk_1.dat 2025-05-29T15:31:42.267597Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:42.267698Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890772275296807:2079] 1748532702216052 != 1748532702216055 TServer::EnableGrpc on GrpcPort 26676, node 1 2025-05-29T15:31:42.280721Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:42.280734Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:42.280736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:42.280767Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29185 TClient is connected to server localhost:29185 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-29T15:31:42.318447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:42.318471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:42.319544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:42.330013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:42.339483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:42.403872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
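A note on a pattern that recurs in every suite above: the TX_PROXY ERROR for '/Root/.metadata/workload_manager/pools/default' carries "error: path exist, request accepts it" and is preceded by "Transaction ... completed, doublechecking", i.e. the pool creator treats an already-existing path as an acceptable outcome and re-reads it before use, so the line is noisy but not itself a failure. A self-contained sketch of that create-then-doublecheck idiom follows; the names are hypothetical and do not reproduce the actual TPoolCreatorActor code:

    #include <atomic>
    #include <cstdio>

    // Hypothetical stand-ins for the schemeshard create/describe calls.
    enum class ECreateStatus { Success, AlreadyExists };
    static std::atomic<bool> PoolExists{false};

    static ECreateStatus CreateResourcePool() {
        // "error: path exist, request accepts it": losing the race to a
        // concurrent creator is treated as an acceptable outcome.
        bool expected = false;
        return PoolExists.compare_exchange_strong(expected, true)
                   ? ECreateStatus::Success
                   : ECreateStatus::AlreadyExists;
    }

    static bool DescribePool() { return PoolExists.load(); }

    int main() {
        const ECreateStatus st = CreateResourcePool();
        // "Transaction ... completed, doublechecking": after either outcome
        // the creator re-reads the path to confirm the pool is visible.
        if (DescribePool()) {
            std::puts(st == ECreateStatus::Success ? "created, doublechecked"
                                                   : "existed, doublechecked");
        }
        return 0;
    }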
2025-05-29T15:31:42.419307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:42.429164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:31:42.525480Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890772275298445:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:42.525509Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:42.577432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:31:42.585700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:31:42.640876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:31:42.652702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:31:42.666733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:31:42.680640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:31:42.695083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:31:42.711202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890772275299098:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:42.711223Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:42.711272Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890772275299103:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:42.712067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:31:42.714617Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890772275299105:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:31:42.775255Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890772275299156:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:42.858101Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890772275299172:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:42.858219Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmIzZTk2ODEtMWVkODBkNDEtZWE0YzU0OTgtNDZhMzg5YjY=, ActorId: [1:7509890772275298427:2401], ActorState: ExecuteState, TraceId: 01jweaqbfp5gzxp6cnwe0jxqnc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:31:42.858956Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7F02C98F4AC2 14. ??:0: ?? @ 0x7F02C998684F |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TestProgram::SimpleFunction [GOOD] |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::CompactionVariants [FAIL] Test command err: Trying to start YDB, gRPC: 10825, MsgBus: 20718 2025-05-29T15:31:38.793929Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890756768545432:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:38.793952Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002683/r3tmp/tmpGoTh6b/pdisk_1.dat 2025-05-29T15:31:38.846121Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890756768545408:2079] 1748532698793729 != 1748532698793732 2025-05-29T15:31:38.847551Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10825, node 1 2025-05-29T15:31:38.858266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:38.858276Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:38.858277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:38.858316Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20718 TClient is connected to server localhost:20718 WaitRootIsUp 'Root'... 
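Both failed suites in this block (KqpBatchDelete::SimpleOnePartition above and KqpOlapJson::CompactionVariants below) abort on the same compiler-side guard: yql/essentials/ast/yql_expr.h:1874, "index out of range", surfaced to the session as INTERNAL_ERROR via ReplyQueryCompileError. That message is what a bounds-checked child accessor on the expression tree raises when a rewrite asks for a child position that does not exist; since it fires from two unrelated suites, the trigger is likely shared compiler code rather than either test. A standalone sketch of that kind of accessor -- illustrative only, not the actual TExprNode implementation:

    #include <memory>
    #include <stdexcept>
    #include <vector>

    // Sketch of a guarded child accessor; the real yql_expr.h throws
    // through its own exception machinery with a file:line prefix,
    // which is how "yql_expr.h:1874: index out of range" reaches the log.
    class TExprNodeSketch {
    public:
        const TExprNodeSketch& Child(size_t index) const {
            if (index >= Children_.size()) {
                throw std::out_of_range("index out of range");
            }
            return *Children_[index];
        }
    private:
        std::vector<std::unique_ptr<TExprNodeSketch>> Children_;
    };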
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:31:38.923454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:38.923478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:31:38.924522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:38.925454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:31:39.084797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890761063513365:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.084830Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.215384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:31:39.223718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:31:39.223767Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:31:39.223826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:31:39.223844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:31:39.223863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:31:39.223881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:31:39.223896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:31:39.223912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:31:39.223934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:31:39.223954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:31:39.223970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:31:39.223987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:31:39.224394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:31:39.224405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:31:39.224415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:31:39.224418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:31:39.224431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:31:39.224437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:31:39.224444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:31:39.224450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:31:39.224456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:31:39.224460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:31:39.224464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:31:39.224471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:31:39.224484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:31:39.224496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:31:39.224512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:31:39.224521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:31:39.224532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:31:39.224540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_N ... pshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:31:39.224549Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:31:39.224559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:31:39.224562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:31:39.224626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-05-29T15:31:39.224632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-05-29T15:31:39.271295Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715658; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-05-29T15:31:39.274624Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890761063513567:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.274640Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.277031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:31:39.283666Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715659; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0`) 2025-05-29T15:31:39.287082Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890761063513597:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.287098Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.289331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:31:39.298027Z node 1 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4", "a" : "a4"}')) 2025-05-29T15:31:39.310663Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890761063513628:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.310685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890761063513633:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.310693Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:39.311361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-05-29T15:31:39.312851Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890761063513635:2412], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-05-29T15:31:39.395283Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890761063513686:2453] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:39.436937Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890761063513702:2416], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:39.437076Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODYxNGU0NzAtZjc4M2JhYjktMjViYjY1MGMtOGY0MzA5ODk=, ActorId: [1:7509890761063513626:2407], ActorState: ExecuteState, TraceId: 01jweaq85ed9xz5p2yphe9647z, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:31:40.226903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:31:40.227724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:31:41.227779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:31:41.227863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:31:42.228336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:31:42.228694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:31:43.228719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:31:43.228820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:31:43.794467Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7509890756768545432:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:31:43.794520Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-05-29T15:31:44.229142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:31:44.229221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:31:45.229580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; 2025-05-29T15:31:45.229705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890761063513441:2332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:830;event=skip_compaction;reason=disabled; assertion failed at ydb/core/kqp/ut/olap/combinatory/execute.h:41, virtual TConclusionStatus NKikimr::NKqp::TDataCommand::DoExecute(TKikimrRunner &): (prepareResult.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13D433CB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13EFB238 2. /-S/ydb/core/kqp/ut/olap/combinatory/execute.h:41: DoExecute @ 0x265B8EA9 3. /-S/ydb/core/kqp/ut/olap/combinatory/abstract.h:75: Execute @ 0x265B0763 4. /tmp//-S/ydb/core/kqp/ut/olap/combinatory/executor.cpp:22: Execute @ 0x265B0763 5. /-S/ydb/core/kqp/ut/olap/combinatory/variator.h:27: Execute @ 0x13B8D5E7 6. /tmp//-S/ydb/core/kqp/ut/olap/json_ut.cpp:556: Execute_ @ 0x13B8D5E7 7. /tmp//-S/ydb/core/kqp/ut/olap/json_ut.cpp:27: operator() @ 0x13B90F46 8. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13EFD0ED 9. /tmp//-S/ydb/core/kqp/ut/olap/json_ut.cpp:27: Execute @ 0x13B9090C 10. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13EFD862 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13F0F40C 12. ??:0: ?? @ 0x7FC16B495D8F 13. ??:0: ?? @ 0x7FC16B495E3F 14. ??:0: ?? @ 0x12AB1028 |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |75.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::SimpleFunction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"i\":\"2\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N4 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N4->N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":4}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"15","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"4":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":4},"0":{"p":{"i":"2","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":15,"id":0}}}; 
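The digraph/JSON pair above is the same columnshard SSA program printed twice: once as DOT (renderable with graphviz) and once as the parsed JSON, where "w" is the per-node weight and the red chain N4->N1->N2->N0->N3 is the execution order (ReserveMemory -> FetchOriginalData -> AssembleOriginalData -> Calculation -> Projection over column "uid", producing projected column 15). A small standalone sketch -- not YDB code -- that re-derives that chain from the "edges"/"inputs" section of the JSON with a Kahn-style topological sort:

    #include <cstdio>
    #include <map>
    #include <vector>

    int main() {
        // node -> nodes it depends on, copied from the "inputs" arrays above
        std::map<int, std::vector<int>> deps = {
            {0, {2}}, {1, {4}}, {2, {1}}, {3, {0}}, {4, {}},
        };
        std::vector<int> order;
        std::map<int, bool> done;
        while (order.size() < deps.size()) {
            for (auto& [node, in] : deps) {
                if (done[node]) continue;
                bool ready = true;
                for (int d : in) ready = ready && done[d];
                if (ready) { done[node] = true; order.push_back(node); }
            }
        }
        for (int n : order) std::printf("N%d ", n);  // prints: N4 N1 N2 N0 N3
        std::puts("");
    }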
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10UInt64TypeE; >> IntermediateDirsReboots::CreateWithIntermediateDirsForceDrop [GOOD] >> TestProgram::YqlKernel >> TestProgram::YqlKernel [GOOD] |75.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateWithIntermediateDirsForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:37.958681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:37.958700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:37.958704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:37.958708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:37.958713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:37.958717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:37.958724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:37.958734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: 
Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:37.958821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:37.958885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:37.969668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:37.969685Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:37.969764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:37.971769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:37.971788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:37.971812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:37.974079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:37.974153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:37.974248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:37.974432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:37.975074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:37.975100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:37.975263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:37.975269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:37.975285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:37.975290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:37.975294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:37.975305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] 
recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:37.976111Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:37.988243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:37.988296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.988340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:37.988376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:37.988383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.988927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:37.988946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:37.988987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.988995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:37.989000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:37.989005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:37.989320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.989327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:37.989331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:37.989584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.989589Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.989593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:37.989596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:37.989998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:37.990294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:37.990315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:37.990461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:37.990482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:37.990498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:37.990542Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
ESHARD INFO: schemeshard__operation_common.cpp:416: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2025-05-29T15:31:47.499493Z node 40 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:31:47.499495Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:31:47.499499Z node 40 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:31:47.499501Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:31:47.499504Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2025-05-29T15:31:47.499506Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:31:47.499509Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:31:47.499511Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:31:47.499519Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:31:47.499522Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 4, subscribers: 1 2025-05-29T15:31:47.499525Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-05-29T15:31:47.499529Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-05-29T15:31:47.499531Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-29T15:31:47.499534Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-05-29T15:31:47.499774Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:47.499788Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:47.499792Z node 40 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:47.499795Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [40:208:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2025-05-29T15:31:47.499799Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [40:208:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 
5 2025-05-29T15:31:47.499801Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [40:208:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-05-29T15:31:47.499804Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [40:208:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:31:47.499853Z node 40 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:47.499861Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:47.499864Z node 40 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:47.499867Z node 40 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:31:47.499870Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:31:47.499943Z node 40 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:47.499950Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:47.499952Z node 40 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:47.499957Z node 40 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:31:47.499959Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:47.500033Z node 40 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:47.500040Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:47.500042Z node 40 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:47.500045Z node 40 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:31:47.500048Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:47.500231Z node 40 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:47.500240Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:47.500243Z node 40 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:47.500246Z node 40 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:31:47.500249Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:31:47.500256Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2025-05-29T15:31:47.500259Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [40:304:2294] 2025-05-29T15:31:47.500514Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:47.500536Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:47.500759Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:47.500774Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:47.500783Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:31:47.500786Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [40:305:2295] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2025-05-29T15:31:47.500853Z node 40 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/x" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:47.500876Z node 40 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/x" took 28us result status StatusPathDoesNotExist 2025-05-29T15:31:47.500907Z node 40 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/x\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 1003, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/x" PathId: 3 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |75.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest |75.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest |75.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoNodes [FAIL] Test command err: 2025-05-29T15:31:40.380537Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:524:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:31:40.380587Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:31:40.380619Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0014d7/r3tmp/tmpmfIwtD/pdisk_1.dat 2025-05-29T15:31:40.500906Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20421, node 1 2025-05-29T15:31:40.580609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:40.580626Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:40.580630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:40.580705Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:31:40.581237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:31:40.667926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:40.667962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:40.679199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7728 2025-05-29T15:31:41.042249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:31:42.039282Z node 3 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 3 2025-05-29T15:31:42.039398Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:31:42.049088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:42.049120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:42.049193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:42.049203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:42.082402Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:31:42.082509Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-05-29T15:31:42.083219Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:42.083421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:42.252117Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.252265Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.252397Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.252432Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.252478Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.252494Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.252510Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.252540Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.252556Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.402582Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:42.402620Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:42.402810Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:42.402818Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:42.413904Z node 2 :HIVE WARN: hive_impl.cpp:771: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-05-29T15:31:42.414057Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:42.414404Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:42.450070Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:42.459529Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:31:42.459562Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:31:42.467248Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:31:42.467541Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:31:42.467570Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:31:42.467576Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:31:42.467583Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: 
[72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:31:42.467589Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:31:42.467595Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:31:42.467602Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:31:42.467747Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:31:42.483182Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:42.483213Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:2285:2602], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:42.484613Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:2297:2611] 2025-05-29T15:31:42.485686Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:2329:2627] 2025-05-29T15:31:42.485788Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2329:2627], schemeshard id = 72075186224037897 2025-05-29T15:31:42.486377Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-05-29T15:31:42.490109Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:31:42.490121Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:31:42.490130Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-05-29T15:31:42.493430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:31:42.495268Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:31:42.495299Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:31:42.587533Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:31:42.708798Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:31:42.802931Z node 2 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:31:43.536629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2669:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:43.536670Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:43.540343Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-05-29T15:31:43.666080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2821:3113], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:43.666122Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:43.666592Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2826:3117]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:31:43.666628Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-05-29T15:31:43.666636Z node 1 :STATISTICS DEBUG: service_impl.cpp:1219: ConnectToSA(), pipe client id = [1:2828:3119] 2025-05-29T15:31:43.666644Z node 1 :STATISTICS DEBUG: service_impl.cpp:1248: SyncNode(), pipe client id = [1:2828:3119] 2025-05-29T15:31:43.666830Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:2829:2812] 2025-05-29T15:31:43.666891Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:2828:3119], server id = [2:2829:2812], tablet id = 72075186224037894, status = OK 2025-05-29T15:31:43.666922Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:133: [72075186224037894] EvConnectNode, pipe server id = [2:2829:2812], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-05-29T15:31:43.666934Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:314: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-05-29T15:31:43.666974Z node 1 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 1 2025-05-29T15:31:43.666982Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 1, ReplyToActorId = [1:2826:3117], StatRequests.size() = 1 2025-05-29T15:31:43.669398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2833:3123], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:43.669418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:43.669484Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2838:3128], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:43.670629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:31:43.875019Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:217: [72075186224037894] EvFastPropagateCheck 2025-05-29T15:31:43.875048Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:357: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-05-29T15:31:43.947592Z node 1 :STATISTICS DEBUG: service_impl.cpp:1189: EvRequestTimeout, pipe client id = [1:2828:3119], schemeshard count = 1 2025-05-29T15:31:44.303829Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2840:3130], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:31:44.427817Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:2944:3199] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:44.430027Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2967:3215]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:31:44.430070Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-05-29T15:31:44.430075Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 2, ReplyToActorId = [1:2967:3215], StatRequests.size() = 1 2025-05-29T15:31:44.435066Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2955:3209], status: INTERNAL_ERROR, issues:
: Fatal: Default error
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:44.435518Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTMzYWEwY2YtYmRhYjE0ZTYtZmI0NzNlNzQtOTdkZmI3YWY=, ActorId: [1:2831:3121], ActorState: ExecuteState, TraceId: 01jweaqcdda82adr4fdhjavrgb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/statistics/ut_common/ut_common.cpp:237, Ydb::StatusIds::StatusCode NKikimr::NStat::ExecuteYqlScript(TTestEnv &, const TString &, bool): (response.operation().status() == Ydb::StatusIds::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS) , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x137F904C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x139AC3D9) NKikimr::NStat::ExecuteYqlScript(NKikimr::NStat::TTestEnv&, TBasicString> const&, bool)+1783 (0x25EAECD7) ??+0 (0x136E9066) ??+0 (0x136DDB6A) NKikimr::NStat::NTestSuiteBasicStatistics::TTestCaseTwoNodes::Execute_(NUnitTest::TTestContext&)+447 (0x136DEB5F) NKikimr::NStat::NTestSuiteBasicStatistics::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136E84F7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x139AE28E) NKikimr::NStat::NTestSuiteBasicStatistics::TCurrentTest::Execute()+436 (0x136E7D54) NUnitTest::TTestFactory::Execute()+803 (0x139AEA03) NUnitTest::RunMain(int, char**)+3021 (0x139BCD1D) ??+0 (0x7F3F78FE1D90) __libc_start_main+128 (0x7F3F78FE1E40) _start+41 (0x1283B029) |75.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteOverload ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"3,4\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:3,4"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, 
label="N2(9):{\"i\":\"3\",\"p\":{\"address\":{\"name\":\"sum\",\"id\":3}},\"o\":\"3\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"3,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"3","p":{"address":{"name":"sum","id":3}},"o":"3","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"3,4","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"3,4","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless [FAIL] Test command err: 2025-05-29T15:31:40.451183Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:31:40.451227Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:31:40.451245Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0014d5/r3tmp/tmpnu4wdF/pdisk_1.dat 2025-05-29T15:31:40.547585Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6990, node 1 2025-05-29T15:31:40.651554Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:40.651573Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:40.651579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:40.651648Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:31:40.652335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:31:40.729265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:40.729302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:40.741342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6408 2025-05-29T15:31:41.072966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:31:41.842035Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:31:41.850774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:41.850808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:41.904624Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:31:41.905281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:42.053673Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.053821Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.053949Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-05-29T15:31:42.053975Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.053986Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.054021Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.054034Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.054045Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.054066Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:42.205105Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:42.205144Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:42.216212Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:42.244765Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:42.250658Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:31:42.250682Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:31:42.255821Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:31:42.256004Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:31:42.256021Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:31:42.256026Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:31:42.256030Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:31:42.256034Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:31:42.256038Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:31:42.256043Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:31:42.256157Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:31:42.271197Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:42.271221Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:1864:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:42.272282Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1873:2605] 2025-05-29T15:31:42.273035Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2621] 
2025-05-29T15:31:42.273139Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1905:2621], schemeshard id = 72075186224037897 2025-05-29T15:31:42.274438Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-05-29T15:31:42.281210Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:31:42.281232Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:31:42.281245Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-05-29T15:31:42.284911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:31:42.286806Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:31:42.286839Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:31:42.378404Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:31:42.460889Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:31:42.513419Z node 2 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:31:43.003126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:31:43.438537Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:43.539388Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7809: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-05-29T15:31:43.539409Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7825: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-05-29T15:31:43.539420Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:2564:2937], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-05-29T15:31:43.539695Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:2566:2939] 2025-05-29T15:31:43.539808Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2566:2939], schemeshard id = 72075186224037899 2025-05-29T15:31:44.306826Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2689:3228], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:44.306871Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:44.310650Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-05-29T15:31:44.374878Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2845:3265], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:44.374913Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:44.379614Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2850:3269]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:31:44.379652Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-05-29T15:31:44.379682Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-05-29T15:31:44.379688Z node 1 :STATISTICS DEBUG: service_impl.cpp:1219: ConnectToSA(), pipe client id = [1:2853:3272] 2025-05-29T15:31:44.379695Z node 1 :STATISTICS DEBUG: service_impl.cpp:1248: SyncNode(), pipe client id = [1:2853:3272] 2025-05-29T15:31:44.379856Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:2854:3065] 2025-05-29T15:31:44.379918Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:2853:3272], server id = [2:2854:3065], tablet id = 72075186224037894, status = OK 2025-05-29T15:31:44.379965Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:133: [72075186224037894] EvConnectNode, pipe server id = [2:2854:3065], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-05-29T15:31:44.379978Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:314: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-05-29T15:31:44.380029Z node 1 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 1 2025-05-29T15:31:44.380038Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 1, ReplyToActorId = [1:2850:3269], StatRequests.size() = 1 2025-05-29T15:31:44.382469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2858:3276], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:44.382504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:44.382599Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2863:3281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:44.384095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-05-29T15:31:44.494012Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:217: [72075186224037894] EvFastPropagateCheck 2025-05-29T15:31:44.494044Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:357: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-05-29T15:31:44.556154Z node 1 :STATISTICS DEBUG: service_impl.cpp:1189: EvRequestTimeout, pipe client id = [1:2853:3272], schemeshard count = 1 2025-05-29T15:31:44.767110Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2865:3283], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-05-29T15:31:44.931610Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:2969:3346] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:44.933745Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2992:3362]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:31:44.933789Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-05-29T15:31:44.933795Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 2, ReplyToActorId = [1:2992:3362], StatRequests.size() = 1 2025-05-29T15:31:44.939269Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2982:3358], status: INTERNAL_ERROR, issues:
: Fatal: Default error
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:31:44.939662Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTA1MzE4OTgtODg3OWI3YTgtZjhmM2U1OTgtMjhlYjUyZTk=, ActorId: [1:2856:3274], ActorState: ExecuteState, TraceId: 01jweaqd3p48aj2mdr2vbwy0h4, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/statistics/ut_common/ut_common.cpp:237, Ydb::StatusIds::StatusCode NKikimr::NStat::ExecuteYqlScript(TTestEnv &, const TString &, bool): (response.operation().status() == Ydb::StatusIds::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS) , with diff: (INT|SUCC)E(RNAL_ERROR|SS) TBackTrace::Capture()+28 (0x137F904C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x139AC3D9) NKikimr::NStat::ExecuteYqlScript(NKikimr::NStat::TTestEnv&, TBasicString> const&, bool)+1783 (0x25EAECD7) ??+0 (0x136E9066) ??+0 (0x136DDB6A) NKikimr::NStat::NTestSuiteBasicStatistics::TTestCaseServerless::Execute_(NUnitTest::TTestContext&)+752 (0x136DFB40) NKikimr::NStat::NTestSuiteBasicStatistics::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136E84F7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x139AE28E) NKikimr::NStat::NTestSuiteBasicStatistics::TCurrentTest::Execute()+436 (0x136E7D54) NUnitTest::TTestFactory::Execute()+803 (0x139AEA03) NUnitTest::RunMain(int, char**)+3021 (0x139BCD1D) ??+0 (0x7F21F17BBD90) __libc_start_main+128 (0x7F21F17BBE40) _start+41 (0x1283B029) >> HttpRequest::Status [GOOD] >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK [GOOD] >> IntermediateDirsReboots::CreateDirWithIntermediateDirs [GOOD] |75.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest |75.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT [GOOD] >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status [GOOD] Test command err: 2025-05-29T15:31:38.130195Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:31:38.130258Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:31:38.130290Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0014f2/r3tmp/tmpP4HwYz/pdisk_1.dat 2025-05-29T15:31:38.234689Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25550, node 1 2025-05-29T15:31:38.338263Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:38.338284Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:38.338288Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:38.338341Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:31:38.338886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:31:38.415867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:38.415907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:38.428193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26277 2025-05-29T15:31:38.764462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:31:39.479726Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:31:39.487200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:39.487232Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:39.540358Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:31:39.540842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:39.682219Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:39.682385Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:39.682513Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-05-29T15:31:39.682540Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:39.682551Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:39.682586Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:39.682611Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:39.682625Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:39.682643Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:39.833246Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:39.833294Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:39.844769Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:39.881886Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:39.890181Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:31:39.890208Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:31:39.897725Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:31:39.897965Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:31:39.897990Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:31:39.897996Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:31:39.898003Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:31:39.898009Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:31:39.898015Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:31:39.898022Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:31:39.898395Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:31:39.914266Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:39.914295Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:1863:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:39.915411Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1874:2605] 2025-05-29T15:31:39.916447Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2622] 
2025-05-29T15:31:39.916774Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1905:2622], schemeshard id = 72075186224037897 2025-05-29T15:31:39.917755Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-05-29T15:31:39.922514Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:31:39.922535Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:31:39.922548Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-05-29T15:31:39.925595Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:31:39.927325Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:31:39.927357Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:31:40.026148Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:31:40.112302Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:31:40.178598Z node 2 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:31:40.658919Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2212:3057], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:40.658952Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:40.663561Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-05-29T15:31:40.718147Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2357:2872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:31:40.718216Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2357:2872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:31:40.718257Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2357:2872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:31:40.718280Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2357:2872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:31:40.718295Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2357:2872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:31:40.718312Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2357:2872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:31:40.718325Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2357:2872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:31:40.718343Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2357:2872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_re ... 
d: 72075186224037897 2025-05-29T15:31:42.145083Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:31:42.145172Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:31:42.145221Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:31:42.145269Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:31:42.145315Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:31:42.145505Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:31:42.145546Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:31:42.145665Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:31:42.145705Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:31:42.145743Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715660; 2025-05-29T15:31:42.954031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3772:3211], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:42.954168Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:42.955319Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-05-29T15:31:42.976631Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:31:42.976733Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:31:42.976799Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:31:42.976878Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:31:42.976955Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:31:42.977175Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:31:42.977252Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:31:42.977584Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:31:42.977706Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:31:42.977843Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-29T15:31:43.683453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3911:3257], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:43.683489Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:43.686165Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715662:0, at schemeshard: 72075186224037897 2025-05-29T15:31:43.705664Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:31:43.705785Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:31:43.705923Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:31:43.705971Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:31:43.706015Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:31:43.706061Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:31:43.706105Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:31:43.706228Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:31:43.706273Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; 2025-05-29T15:31:43.706474Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715662; waiting actualization: 0/0.000012s 2025-05-29T15:31:48.204824Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:5814:5399] 2025-05-29T15:31:48.205584Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:478: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. 
Status STATUS_NO_OPERATION Answer: 'No analyze operation' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |75.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateDirWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID
72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:37.975261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:37.975282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:37.975286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:37.975290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:37.975294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:37.975297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:37.975303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:37.975314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:37.975381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:37.975443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:37.984567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:37.984586Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:37.984653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:37.986626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:37.986648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:37.986676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:37.989562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2025-05-29T15:31:37.989637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:37.989725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:37.989892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:37.990511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:37.990545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:37.990828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:37.990841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:37.990873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:37.990880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:37.990886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:37.990904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:37.992262Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:38.006005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:38.006074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:38.006125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:38.006162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:38.006171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:38.006832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:38.006853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:38.006900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:38.006907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:38.006911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:38.006914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:38.007261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:38.007269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:38.007273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:38.007532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:38.007540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:38.007543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:38.007548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:38.007993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:38.008320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:38.008347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send 
Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:38.008539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:38.008557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:38.008572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:38.008618Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 6 2025-05-29T15:31:48.486970Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 6 2025-05-29T15:31:48.486972Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2025-05-29T15:31:48.486975Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 3 2025-05-29T15:31:48.487276Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:48.487288Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:48.487293Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:48.487296Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-05-29T15:31:48.487299Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:48.487387Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:48.487393Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:48.487396Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:48.487398Z node 44 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 6 2025-05-29T15:31:48.487400Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:48.487487Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:48.487494Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:48.487496Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:48.487499Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2025-05-29T15:31:48.487501Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:31:48.487719Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:48.487728Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:48.487731Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:48.487733Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-05-29T15:31:48.487738Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:31:48.487772Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:48.487777Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:48.487780Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:48.487782Z node 44 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-05-29T15:31:48.487784Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-29T15:31:48.487789Z node 44 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:31:48.488342Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:48.488375Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:48.488384Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:48.488392Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:48.488649Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:31:48.488691Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:31:48.488697Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:31:48.488756Z node 44 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:31:48.488770Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:31:48.488774Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [44:348:2338] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:31:48.488827Z node 44 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:48.488853Z node 44 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 37us result status StatusSuccess 2025-05-29T15:31:48.488912Z node 44 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:48.488942Z node 44 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:48.488955Z node 44 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 14us result status StatusPathDoesNotExist 2025-05-29T15:31:48.488968Z node 44 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |75.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=2; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71;event=parsing;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=timestamp;fline=native.cpp:71;event=parsing;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_tx_id;fline=native.cpp:71;event=parsing;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_type;fline=native.cpp:71;event=parsing;size=1072;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=1072;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_id;fline=native.cpp:71;event=parsing;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=uid;fline=native.cpp:71;event=parsing;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=message;fline=native.cpp:71;event=parsing;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=94;data_size=66;sum=94;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=254;data_size=242;sum=254;count=2;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=326;data_size=314;sum=326;count=1;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;schema_version:1;level:0;;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;schema_version:1;level:0;;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;schema_version:1;level:0;;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;schema_version:1;level:0;;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;schema_version:1;level:0;;column_size:6184;index_size:0;meta:((produced=INSERTED;));); |75.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2},{\"name\":\"vat\",\"id\":4}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"i\":\"2,4\",\"o\":\"10001\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"10001,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2},{\"name\":\"vat\",\"id\":4}]},\"o\":\"2,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N4->N2->N0->N5[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":4},{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2},{"name":"vat","id":4}]},"o":"2,4","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"uid","id":2},{"name":"vat","id":4}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"10001,4","t":"Projection"},"w":27,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"2,4","o":"10001","t":"Aggregation"},"w":18,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int32TypeE; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=timestamp;fline=native.cpp:71;event=parsing;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_tx_id;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_type;fline=native.cpp:71;event=parsing;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_id;fline=native.cpp:71;event=parsing;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=uid;fline=native.cpp:71;event=parsing;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=message;fline=native.cpp:71;event=parsing;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=94;data_size=68;sum=94;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=254;data_size=244;sum=254;count=2;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=326;data_size=316;sum=326;count=1;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=timestamp;fline=native.cpp:71;event=parsing;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_tx_id;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_type;fline=native.cpp:71;event=parsing;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_id;fline=native.cpp:71;event=parsing;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=uid;fline=native.cpp:71;event=parsing;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=message;fline=native.cpp:71;event=parsing;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=94;data_size=76;sum=188;count=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=254;data_size=252;sum=508;count=4;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=326;data_size=324;sum=652;count=2;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=timestamp;fline=native.cpp:71;event=parsing;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_tx_id;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_type;fline=native.cpp:71;event=parsing;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_id;fline=native.cpp:71;event=parsing;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=uid;fline=native.cpp:71;event=parsing;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=message;fline=native.cpp:71;event=parsing;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=94;data_size=76;sum=282;count=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=254;data_size=252;sum=762;count=6;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=326;data_size=324;sum=978;count=3;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71;ev ... _engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37592;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:33376;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:33376;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:2;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36944;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:3;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36944;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36976;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:4;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36976;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37024;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:5;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37024;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37072;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:6;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37072;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37136;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:7;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37136;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37120;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:8;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37120;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37160;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:9;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37160;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37088;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:10;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37088;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37560;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37560;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37488;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37488;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
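[editor's note] Each IsVisible record above pairs a portion's metadata with the read snapshot (snapshot=plan_step=3;tx_id=1), after which the engine emits portion_selected or portion_skipped. In these logs a snapshot is the (plan_step, tx_id) pair ordered lexicographically, and a committed portion is visible when its commit snapshot is not newer than the read snapshot; whether a visible portion is then selected or skipped is a separate read-filter decision. A minimal sketch of the visibility comparison, with struct and field names of my own choosing rather than YDB's:

#include <cstdint>
#include <iostream>
#include <tuple>

struct TSnapshot {
    uint64_t PlanStep = 0;
    uint64_t TxId = 0;
    // Lexicographic order: the plan step is compared first, the tx id breaks ties.
    bool operator<=(const TSnapshot& rhs) const {
        return std::tie(PlanStep, TxId) <= std::tie(rhs.PlanStep, rhs.TxId);
    }
};

// A committed portion is visible to a reader iff it was committed
// at or before the reader's snapshot.
bool IsVisible(const TSnapshot& committedAt, const TSnapshot& readAt) {
    return committedAt <= readAt;
}

int main() {
    TSnapshot read{3, 1};                          // snapshot=plan_step=3;tx_id=1 from the log
    std::cout << IsVisible({2, 1}, read)           // committed earlier -> visible=1
              << IsVisible({3, 1}, read)           // committed exactly at the snapshot -> visible=1
              << IsVisible({4, 1}, read) << "\n";  // committed later -> not visible
}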
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37592;index_size:0;meta:((produced=INSERTED;));); |75.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> IntermediateDirsReboots::CreateKesusWithIntermediateDirsForceDrop [GOOD] >> TestProgram::JsonValueBinary >> TColumnEngineTestLogs::IndexWriteOverload [GOOD] >> TestProgram::JsonValueBinary [GOOD] |75.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateKesusWithIntermediateDirsForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for 
TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:37.419230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:37.419252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:37.419258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:37.419263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:37.419269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:37.419273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:37.419282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:37.419295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:37.419392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:37.419460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:37.432745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:37.432764Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:37.432852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:37.435294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:37.435321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:37.435353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:37.437813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:37.437898Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:37.438009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:37.438174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:37.438964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:37.439003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:37.439250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:37.439262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:37.439290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:37.439299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:37.439304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:37.439323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:37.440477Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:37.455024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:37.455086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.455138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:37.455174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:37.455182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.455679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:37.455696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:37.455735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.455740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:37.455744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:37.455747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:37.456036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.456042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:37.456046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:37.456250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.456255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.456259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:37.456263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:37.456654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:37.456904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:37.456928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
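[editor's note] The trace above shows schemeshard's two-phase transaction flow: TTxOperationPropose answers StatusAccepted, the operation walks its internal states ("Change state for txid 1:0 2 -> 3" while TCreateParts runs, "3 -> 128" while NSubDomainState::TConfigureParts runs, then TPropose waits), and only afterwards the coordinator assigns a global step (FAKE_COORDINATOR: Add transaction: 1 at step: 5000001). A compressed sketch of that state walk; the enum names mirror the actors named in the log, and the terminal value 240 is a made-up placeholder, not taken from this trace:

#include <iostream>

// Hypothetical stand-ins for the numeric states in the log:
// 2 = CreateParts, 3 = ConfigureParts, 128 = Propose (waiting for a plan step).
enum class EState { CreateParts = 2, ConfigureParts = 3, Propose = 128, Done = 240 };

EState Next(EState s, bool planStepArrived) {
    switch (s) {
        case EState::CreateParts:    return EState::ConfigureParts; // "no shards to create, do next state"
        case EState::ConfigureParts: return EState::Propose;        // "Change state for txid 1:0 3 -> 128"
        case EState::Propose:        return planStepArrived ? EState::Done : EState::Propose;
        default:                     return s;
    }
}

int main() {
    EState s = EState::CreateParts;
    s = Next(s, false);  // 2 -> 3
    s = Next(s, false);  // 3 -> 128: propose sent to coordinator 72057594046316545
    s = Next(s, true);   // TEvOperationPlan with step 5000001 completes the operation
    std::cout << static_cast<int>(s) << "\n";
}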
2025-05-29T15:31:37.457055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:37.457070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:37.457084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:37.457129Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 003 2025-05-29T15:31:49.170254Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:49.170261Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:49.170265Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:207:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2025-05-29T15:31:49.170268Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:207:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2025-05-29T15:31:49.170271Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:207:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-05-29T15:31:49.170273Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:207:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:31:49.170339Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:49.170365Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:49.170380Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:49.170385Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:31:49.170388Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:31:49.170484Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 
PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:49.170492Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:49.170495Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:49.170500Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:31:49.170502Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:49.170728Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:49.170756Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:49.170763Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:49.170767Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:31:49.170772Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:49.170860Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:49.170867Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:49.170870Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:49.170872Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:31:49.170875Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:31:49.170881Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication 
complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2025-05-29T15:31:49.170884Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [49:305:2295] 2025-05-29T15:31:49.170948Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 Leader for TabletID 72057594037968897 is [49:219:2217] sender: [49:345:2058] recipient: [49:15:2062] 2025-05-29T15:31:49.171286Z node 49 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-05-29T15:31:49.171335Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:49.171380Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:31:49.171423Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:49.171451Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:31:49.171455Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:31:49.171462Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:31:49.171466Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:31:49.171469Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:31:49.171472Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:31:49.171475Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:31:49.171493Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:49.171816Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:49.171834Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:49.171843Z node 49 :FLAT_TX_SCHEMESHARD 
DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:31:49.171846Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [49:306:2296] 2025-05-29T15:31:49.172116Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:31:49.172161Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2025-05-29T15:31:49.172224Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/x" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:49.172248Z node 49 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/x" took 29us result status StatusPathDoesNotExist 2025-05-29T15:31:49.172275Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/x\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/x" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TestProgram::JsonExistsBinary >> TestScript::StepMerging [GOOD] >> TColumnEngineTestLogs::IndexReadWithPredicates |75.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteOverload [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; 
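[editor's note] The TTxAckPublishToSchemeBoard sequence a few lines above ("Publication in-flight, count: 4" down to 1, then "Publication complete, notify & remove ... subscribers: 1") is a per-transaction countdown: every path touched by txId 1003 must be acked by the scheme board populator before waiters are notified. A minimal sketch of that bookkeeping, with container and method names of my own choosing; the count is printed before the erase, matching the 4/3/2/1 progression in the log:

#include <cstdint>
#include <initializer_list>
#include <iostream>
#include <set>
#include <unordered_map>

class TPublicationTracker {
    // txId -> path ids still waiting for a TEvUpdateAck.
    std::unordered_map<uint64_t, std::set<uint64_t>> InFlight;
public:
    void StartPublication(uint64_t txId, std::initializer_list<uint64_t> pathIds) {
        InFlight[txId] = pathIds;
    }
    // Returns true when the last ack arrives and waiters can be notified.
    bool AckPublish(uint64_t txId, uint64_t pathId) {
        auto it = InFlight.find(txId);
        if (it == InFlight.end()) return false;
        std::cout << "Publication in-flight, count: " << it->second.size() << "\n";
        it->second.erase(pathId);
        if (!it->second.empty()) return false;
        InFlight.erase(it);  // "Publication complete, notify & remove"
        return true;
    }
};

int main() {
    TPublicationTracker t;
    t.StartPublication(1003, {1, 3, 4, 5});      // the four path ids acked in the log
    t.AckPublish(1003, 5);                        // count: 4
    t.AckPublish(1003, 1);                        // count: 3
    t.AckPublish(1003, 3);                        // count: 2
    std::cout << t.AckPublish(1003, 4) << "\n";   // count: 1, then complete -> notify
}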
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=timestamp;fline=native.cpp:71;event=parsing;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_tx_id;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_type;fline=native.cpp:71;event=parsing;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_id;fline=native.cpp:71;event=parsing;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=uid;fline=native.cpp:71;event=parsing;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=message;fline=native.cpp:71;event=parsing;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=94;data_size=68;sum=94;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=382;data_size=372;sum=382;count=2;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=454;data_size=444;sum=454;count=1;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=timestamp;fline=native.cpp:71;event=parsing;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_tx_id;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_type;fline=native.cpp:71;event=parsing;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_id;fline=native.cpp:71;event=parsing;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=uid;fline=native.cpp:71;event=parsing;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=message;fline=native.cpp:71;event=parsing;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=94;data_size=76;sum=188;count=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=382;data_size=380;sum=764;count=4;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=454;data_size=452;sum=908;count=2;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=timestamp;fline=native.cpp:71;event=parsing;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_tx_id;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_type;fline=native.cpp:71;event=parsing;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_id;fline=native.cpp:71;event=parsing;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=uid;fline=native.cpp:71;event=parsing;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=message;fline=native.cpp:71;event=parsing;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=94;data_size=76;sum=282;count=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=382;data_size=380;sum=1146;count=6;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=454;data_size=452;sum=1362;count=3;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; 
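[editor's note] The simple_arrays_cache.h records show a small cache of constant arrays: the very first request logs insert_to_cache (key=uint64::0;records=1;size=8), and every later request for the same typed constant logs slice_from_cache, cutting the needed number of rows from the cached array instead of rebuilding it. A sketch of that memoization pattern under assumed names; a plain std::vector stands in for the Arrow array, and the real cache's keying and slicing almost certainly differ:

#include <iostream>
#include <memory>
#include <string>
#include <unordered_map>
#include <vector>

// Stand-in for a cached constant column: a reusable buffer we slice from.
using TArray = std::vector<uint64_t>;

class TSimpleArraysCache {
    std::unordered_map<std::string, std::shared_ptr<TArray>> Cache;
public:
    // Key mirrors the log's "uint64::0": type name plus constant value.
    std::shared_ptr<TArray> Slice(const std::string& key, uint64_t value, size_t records) {
        auto it = Cache.find(key);
        if (it == Cache.end()) {
            std::cout << "event=insert_to_cache;key=" << key << ";\n";
            it = Cache.emplace(key, std::make_shared<TArray>(records, value)).first;
        } else if (it->second->size() < records) {
            it->second->resize(records, value);  // grow once, reuse afterwards
        }
        std::cout << "event=slice_from_cache;key=" << key << ";records=" << records << ";\n";
        // Return the prefix of the cached array (a copy here; Arrow would zero-copy slice).
        return std::make_shared<TArray>(it->second->begin(), it->second->begin() + records);
    }
};

int main() {
    TSimpleArraysCache cache;
    cache.Slice("uint64::0", 0, 1);  // first use: insert, then slice
    cache.Slice("uint64::0", 0, 1);  // later uses: slice only, as in the log
}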
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71; ... nt=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=262424;count=380;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=41254;count=761; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=235462;count=762;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=262894;count=381;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=41364;count=763; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=235860;count=764;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=263364;count=382;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=41474;count=765; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=236258;count=766;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=263834;count=383;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=41584;count=767; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=236656;count=768;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=264304;count=384;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=41694;count=769; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=237054;count=770;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=264774;count=385;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=41804;count=771; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=237452;count=772;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=265244;count=386;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=41914;count=773; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=237850;count=774;size_of_meta=144; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=265714;count=387;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=42024;count=775; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=238248;count=776;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=266184;count=388;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=42134;count=777; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=238646;count=778;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=266654;count=389;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=42244;count=779; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=239044;count=780;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=267124;count=390;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=42354;count=781; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=239442;count=782;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=267594;count=391;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=42464;count=783; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=239840;count=784;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=268064;count=392;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=42574;count=785; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=240238;count=786;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=268534;count=393;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=42684;count=787; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=240636;count=788;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=269004;count=394;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=42794;count=789; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=241034;count=790;size_of_meta=144; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=269474;count=395;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=42904;count=791; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=241432;count=792;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=269944;count=396;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=43014;count=793; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=241830;count=794;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=270414;count=397;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=43124;count=795; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=242228;count=796;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=270884;count=398;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=43234;count=797; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=242626;count=798;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=271354;count=399;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=43344;count=799; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=243024;count=800;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=271824;count=400;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=43454;count=801; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=243422;count=802;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=272294;count=401;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=43564;count=803; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=243820;count=804;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=272764;count=402;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=43674;count=805; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=244218;count=806;size_of_meta=144; 
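[editor's note] The constructor_meta.cpp and constructor_portion.cpp lines are memory accounting for portion construction: each record carries the object's own memory_size and data_size plus a running sum and count over all objects registered so far (between consecutive constructor_meta.cpp:54 records the sum grows by exactly one memory_size, e.g. 42904 at count=791 to 43014 at count=793 with memory_size=110). A sketch of such an accumulator; the struct and method names are mine:

#include <cstdint>
#include <iostream>

struct TAllocationCounter {
    uint64_t Sum = 0;
    uint64_t Count = 0;
    void Register(uint64_t memorySize, uint64_t dataSize, const char* where) {
        Sum += memorySize;  // running total across every registered object
        ++Count;
        std::cout << "fline=" << where << ";memory_size=" << memorySize
                  << ";data_size=" << dataSize << ";sum=" << Sum
                  << ";count=" << Count << ";\n";
    }
};

int main() {
    TAllocationCounter meta;
    meta.Register(110, 80, "constructor_meta.cpp:54");  // sum grows by 110 per object,
    meta.Register(110, 80, "constructor_meta.cpp:54");  // matching the deltas in the log
}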
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=273234;count=403;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=80;sum=43784;count=807; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=384;sum=244616;count=808;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=456;sum=273704;count=404;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=102;data_size=72;sum=43886;count=809; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=13126;data_size=13112;sum=257742;count=810;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=13198;data_size=13184;sum=286902;count=405;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=86;sum=43996;count=811; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=390;sum=258140;count=812;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=462;sum=287372;count=406;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=110;data_size=86;sum=44106;count=813; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=398;data_size=390;sum=258538;count=814;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=470;data_size=462;sum=287842;count=407;size_of_portion=216; >> TestProgram::JsonExistsBinary [GOOD] >> TestProgram::Like >> TColumnEngineTestLogs::IndexReadWithPredicates [GOOD] >> TestProgram::YqlKernelStartsWith [GOOD] |75.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D 
VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
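Editor's note: the graph_constructed entries above embed a complete Graphviz DOT document ("digraph program { ... }"), with the escaped quotes and \n breaks already in valid DOT label syntax. A sketch for pulling that text out of a log line and rendering it, assuming Graphviz's standard `dot` binary is installed:

```python
# Sketch: extract the embedded Graphviz source from a graph_constructed log
# line and render it to SVG with the `dot` tool (assumed to be on PATH).
import re
import subprocess

def extract_dot(log_line: str) -> str:
    # Greedy match: assumes the digraph is the last brace block on the line.
    match = re.search(r"digraph program \{.*\}", log_line)
    if not match:
        raise ValueError("no digraph found in line")
    return match.group(0)

def render_svg(dot_text: str, out_path: str = "program.svg") -> None:
    # dot reads DOT from stdin and writes the SVG rendering to stdout.
    svg = subprocess.run(["dot", "-Tsvg"], input=dot_text.encode(),
                         capture_output=True, check=True).stdout
    with open(out_path, "wb") as fh:
        fh.write(svg)
```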
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t" ... { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 
7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 
7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10DoubleTypeE; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicates [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=timestamp;fline=native.cpp:71;event=parsing;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_tx_id;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_type;fline=native.cpp:71;event=parsing;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_id;fline=native.cpp:71;event=parsing;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=uid;fline=native.cpp:71;event=parsing;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=message;fline=native.cpp:71;event=parsing;size=6840;columns=1; 
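Editor's note: in the json_binary dumps above, each test value is printed twice — first as the hex of the plain UTF-8 JSON text, then as the hex of the engine's binary JsonDocument serialization. The plain variant decodes directly; for instance 7B226B6579223A2276616C7565227D is {"key":"value"}. A sketch decoding the first array only (the JsonDocument layout is internal and is left as raw bytes):

```python
# Decode the plain-text json_binary hex dumps from the log above.
# 0x7B = '{', 0x22 = '"', and so on; the second (JsonDocument) array uses an
# internal binary layout and is deliberately not decoded here.
plain_hex = [
    "7B226B6579223A2276616C7565227D",
    "7B226B6579223A31307D",
    "7B226B6579223A302E317D",
    "7B226B6579223A66616C73657D",
    "7B22616E6F74686572223A2276616C7565227D",
    "5B5D",
]

for value in plain_hex:
    print(bytes.fromhex(value).decode("utf-8"))
# -> {"key":"value"}, {"key":10}, {"key":0.1}, {"key":false},
#    {"another":"value"}, []
```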
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=94;data_size=68;sum=94;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=254;data_size=244;sum=254;count=2;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=326;data_size=316;sum=326;count=1;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=timestamp;fline=native.cpp:71;event=parsing;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_tx_id;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_type;fline=native.cpp:71;event=parsing;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_id;fline=native.cpp:71;event=parsing;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=uid;fline=native.cpp:71;event=parsing;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=message;fline=native.cpp:71;event=parsing;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=94;data_size=76;sum=188;count=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=254;data_size=252;sum=508;count=4;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=326;data_size=324;sum=652;count=2;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=timestamp;fline=native.cpp:71;event=parsing;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_tx_id;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_type;fline=native.cpp:71;event=parsing;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=resource_id;fline=native.cpp:71;event=parsing;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=uid;fline=native.cpp:71;event=parsing;size=8016;columns=1; 
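Editor's note: each upsert_portion entry above describes the written portion in a compact "(key:value;...)" form with a nested meta:((...)) block. A sketch that lifts the flat numeric fields out of such a descriptor — the grammar is inferred from the samples in this log, and the nested meta block is skipped on purpose:

```python
# Sketch: pull the flat numeric fields out of a portion descriptor such as
# (portion_id:2;path_id:1;records_count:1000;schema_version:1;level:0;;
#  column_size:36912;index_size:0;meta:((produced=INSERTED;)););
import re

FIELD = re.compile(r"(\w+):(\d+)")

def parse_portion(descriptor: str) -> dict:
    head = descriptor.split("meta:", 1)[0]  # drop the nested meta block
    return {key: int(value) for key, value in FIELD.findall(head)}

sample = ("(portion_id:2;path_id:1;records_count:1000;schema_version:1;"
          "level:0;;column_size:36912;index_size:0;meta:((produced=INSERTED;)););")
print(parse_portion(sample))
# {'portion_id': 2, 'path_id': 1, 'records_count': 1000,
#  'schema_version': 1, 'level': 0, 'column_size': 36912, 'index_size': 0}
```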
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=message;fline=native.cpp:71;event=parsing;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:54;memory_size=94;data_size=76;sum=282;count=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_meta.cpp:75;memory_size=254;data_size=252;sum=762;count=6;size_of_meta=144; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=constructor_portion.cpp:40;memory_size=326;data_size=324;sum=978;count=3;size_of_portion=216; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;field_name=_yql_plan_step;fline=native.cpp:71;ev ... 
_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37592;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:33376;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:33376;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:2;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36944;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:3;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36944;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36976;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:4;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:36976;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37024;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:5;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37024;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37072;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:6;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37072;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37136;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:7;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37136;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37120;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:8;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37120;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37160;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_selected;pathId=1;portion=(portion_id:9;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37160;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37088;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:10;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37088;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37560;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37560;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37488;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37488;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;));); 
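Editor's note: in the IndexReadWithPredicates output, every portion passes the IsVisible snapshot check (portion_info.h:380), but column_engine_logs.cpp:457 then reports either portion_selected or portion_skipped depending on the predicate; the dump above interleaves both outcomes across read passes. A sketch tallying the two events per pathId from such a log:

```python
# Sketch: tally portion_selected vs portion_skipped events per pathId from
# the column_engine_logs.cpp:457 entries shown above.
import re
from collections import Counter

EVENT = re.compile(r"event=(portion_selected|portion_skipped);pathId=(\d+)")

def tally(log_text: str) -> Counter:
    return Counter(EVENT.findall(log_text))

# Usage (counts reflect all read passes present in the captured text):
#   counts = tally(open("test.log").read())
#   counts[("portion_selected", "1")], counts[("portion_skipped", "1")]
```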
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:380;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:457;event=portion_skipped;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;schema_version:1;level:0;;column_size:37592;index_size:0;meta:((produced=INSERTED;));); >> TestProgram::Like [GOOD] >> TestProgram::YqlKernelStartsWithScalar [GOOD] |75.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExistsBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
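Editor's note: each graph dump above ends with a chain such as N0->N5->N2->N3->N1->N4[color=red] printed alongside the node definitions; reading that red edge chain as the intended execution order, a sketch that recovers it (assuming, from these dumps, that the chain is the only edge statement tagged [color=red]):

```python
# Sketch: recover the red-highlighted execution chain (e.g.
# N0->N5->N2->N3->N1->N4) from a graph_constructed dump.
import re

def red_chain(dot_text: str) -> list:
    match = re.search(r"((?:N\d+->)+N\d+)\[color=red\]", dot_text)
    return match.group(1).split("->") if match else []

print(red_chain("N0->N5->N2->N3->N1->N4[color=red]; };"))
# ['N0', 'N5', 'N2', 'N3', 'N1', 'N4']
```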
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE; |75.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestScript::StepMerging [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N6(0):{\"p\":{\"v\":\"001\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N0(0):{\"p\":{\"v\":\"uid\"},\"o\":\"16\",\"t\":\"Const\"}\n"]; N2[shape=box, label="N4(15):{\"i\":\"7,16\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"17\",\"t\":\"Calculation\"}\nREMOVE:16"]; N1 -> N2[label="1"]; N4 -> N2[label="2"]; N3[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N10 -> N3[label="1"]; N4[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N3 -> N4[label="1"]; N5[shape=box, label="N7(15):{\"i\":\"7,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"18\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N5[label="1"]; N4 -> N5[label="2"]; N6[shape=box, label="N5(23):{\"i\":\"17\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"19\",\"t\":\"Calculation\"}\nREMOVE:17"]; N2 -> N6[label="1"]; N7[shape=box, label="N8(23):{\"i\":\"18\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"20\",\"t\":\"Calculation\"}\nREMOVE:18"]; N5 -> N7[label="1"]; N8[shape=box, label="N9(54):{\"i\":\"19,20\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"21\",\"t\":\"Calculation\"}\nREMOVE:19,20"]; N6 -> N8[label="1"]; N7 -> N8[label="2"]; N9[shape=box, label="N10(54):{\"i\":\"21\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N8 -> N9[label="1"]; N10[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N1->N10->N3->N4->N2->N6->N0->N5->N7->N8->N9[color=red]; }; 
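Editor's note: the TestProgram::Like dump above composes two YQL kernels — StartsWith against the constant "uid" (column 16) and EndsWith against "001" (column 15) — and merges the resulting boolean columns in a final calculation. Read that way, the program behaves like a pattern of the form LIKE 'uid%001'. A plain-Python rendering of that interpretation (this is inferred from the kernel names and constants in the dump; the combining function being a logical AND, and the equivalent LIKE pattern, are assumptions, not YDB's evaluation code):

```python
# Interpretation sketch only: the Like program appears to compute
# StartsWith(string, "uid") AND EndsWith(string, "001").
def like_uid_001(value: str) -> bool:
    return value.startswith("uid") and value.endswith("001")

assert like_uid_001("uid-123-001")
assert not like_uid_001("uid-123-002")
assert not like_uid_001("xid-123-001")
```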
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1},{"from":4}]},{"owner_id":3,"inputs":[{"from":10}]},{"owner_id":4,"inputs":[{"from":3}]},{"owner_id":5,"inputs":[{"from":0},{"from":4}]},{"owner_id":6,"inputs":[{"from":2}]},{"owner_id":7,"inputs":[{"from":5}]},{"owner_id":8,"inputs":[{"from":6},{"from":7}]},{"owner_id":9,"inputs":[{"from":8}]},{"owner_id":10,"inputs":[]}],"nodes":{"1":{"p":{"p":{"v":"uid"},"o":"16","t":"Const"},"w":0,"id":1},"3":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":3},"8":{"p":{"i":"19,20","p":{"kernel":{"class_name":"SIMPLE"}},"o":"21","t":"Calculation"},"w":54,"id":8},"2":{"p":{"i":"7,16","p":{"kernel":{"class_name":"SIMPLE"}},"o":"17","t":"Calculation"},"w":15,"id":2},"0":{"p":{"p":{"v":"001"},"o":"15","t":"Const"},"w":0,"id":0},"5":{"p":{"i":"7,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"18","t":"Calculation"},"w":15,"id":5},"9":{"p":{"i":"21","t":"Projection"},"w":54,"id":9},"7":{"p":{"i":"18","p":{"kernel":{"class_name":"SIMPLE"}},"o":"20","t":"Calculation"},"w":23,"id":7},"4":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":4},"10":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":10},"6":{"p":{"i":"17","p":{"kernel":{"class_name":"SIMPLE"}},"o":"19","t":"Calculation"},"w":23,"id":6}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow11BooleanTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow11BooleanTypeE; >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] >> IntermediateDirsReboots::CreateSolomonWithIntermediateDirsForceDrop [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"Lorem\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"7,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"7,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"Lorem"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE; >> TestProgram::YqlKernelEquals [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> 
IntermediateDirsReboots::CreateSolomonWithIntermediateDirsForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:37.293849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:37.293868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:37.293873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:37.293878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:37.293883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:37.293887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:37.293894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:37.293905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:37.293986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:37.294054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:37.306857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:37.306875Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:37.306957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] 
recipient: [1:15:2062] 2025-05-29T15:31:37.309860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:37.309895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:37.309929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:37.312923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:37.313001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:37.313102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:37.313275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:37.313960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:37.313994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:37.314225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:37.314234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:37.314259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:37.314266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:37.314272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:37.314288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:37.315499Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:37.334599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:37.334666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, 
path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.334715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:37.334772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:37.334782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.335389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:37.335412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:37.335454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.335462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:37.335467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:37.335472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:37.335909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.335918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:37.335923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:37.336260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.336267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:37.336272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:37.336279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:37.336870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:37.337288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send 
tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:37.337318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:37.337483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:37.337504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:37.337522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:37.337583Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... ard Send, to populator: [53:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-05-29T15:31:50.040514Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [53:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:31:50.040599Z node 53 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:50.040611Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:50.040616Z node 53 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:50.040622Z node 53 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:31:50.040630Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:31:50.040809Z node 53 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:50.040823Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 
2025-05-29T15:31:50.040828Z node 53 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:50.040833Z node 53 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:31:50.040838Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:50.041118Z node 53 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:50.041135Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:50.041141Z node 53 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:50.041146Z node 53 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:31:50.041151Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:50.041431Z node 53 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:50.041449Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:50.041454Z node 53 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:50.041459Z node 53 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:31:50.041464Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:31:50.041481Z node 53 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2025-05-29T15:31:50.041487Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [53:306:2296] 
2025-05-29T15:31:50.041575Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:31:50.041583Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:31:50.041695Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 Leader for TabletID 72057594037968897 is [53:218:2216] sender: [53:346:2058] recipient: [53:15:2062] 2025-05-29T15:31:50.042078Z node 53 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-05-29T15:31:50.042132Z node 53 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2025-05-29T15:31:50.042162Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:50.042226Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:31:50.042293Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:31:50.042321Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:31:50.042387Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:31:50.042394Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:31:50.042406Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:31:50.042413Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:31:50.042420Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:31:50.042424Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:31:50.042431Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:31:50.042765Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:50.042818Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:50.042897Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:50.042911Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:31:50.042917Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [53:307:2297] 2025-05-29T15:31:50.043411Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:31:50.043437Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:31:50.043453Z node 53 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2025-05-29T15:31:50.043583Z node 53 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/x" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:50.043620Z node 53 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/x" took 48us result status StatusPathDoesNotExist 2025-05-29T15:31:50.043663Z node 53 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/x\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/x" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TestProgram::CountWithNulls [GOOD] |75.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 
72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:30:26.540726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:26.540757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:26.540764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:26.540771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:26.540790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:26.540795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:26.540806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:26.540821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:26.540937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:26.541044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:26.557349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:26.557378Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:26.557503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:26.561258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:26.561301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:26.561341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:26.565177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2025-05-29T15:30:26.565293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:26.565493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:26.565722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:26.566648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:26.566703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:26.567066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:26.567086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:26.567136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:26.567147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:26.567156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:26.567184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:26.569125Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:26.593697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:26.593798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:26.593881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:26.593939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:26.593953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:26.594993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:26.595033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:26.595101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:26.595115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:26.595122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:26.595129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:26.595774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:26.595791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:26.595798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:26.596218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:26.596233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:26.596240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:26.596249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:26.597050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:26.597575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:26.597632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send 
Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:26.597893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:26.597927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:26.597937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:26.598021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... TaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } 
InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:49.886940Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:31:49.886975Z node 93 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 42us result status StatusSuccess 2025-05-29T15:31:49.887065Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 
300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:49.897281Z node 93 
:CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:790:2623] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-05-29T15:31:49.897316Z node 93 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][93:728:2623] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-05-29T15:31:49.897346Z node 93 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:790:2623] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1748532709884585 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1748532709884585 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1748532709884585 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:31:49.898101Z node 93 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:790:2623] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-05-29T15:31:49.898125Z node 93 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][93:728:2623] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } |75.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"10,11\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"10","p":{"address":{"name":"i16","id":10}},"o":"10","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"i16","id":10},{"name":"float","id":11}]},"o":"10,11","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"i16","id":10},{"name":"float","id":11}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"11","p":{"address":{"name":"float","id":11}},"o":"11","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"10,11","p":{"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9FloatTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9FloatTypeE; digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"10,11\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; } FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow9UInt8TypeE; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"i\":\"2\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N4 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"10001\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, 
label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N4->N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":4}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"4":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":4},"0":{"p":{"i":"2","p":{"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:502;T=N5arrow10UInt64TypeE; |75.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest |75.8%| [TA] $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |75.8%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> StatisticsSaveLoad::ForbidAccess |75.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest |75.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest |75.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest |75.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest |75.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest >> IntermediateDirsReboots::CreateWithIntermediateDirs [GOOD] >> IntermediateDirsReboots::CreateDirWithIntermediateDirsForceDrop [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:42.020896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:42.020913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:42.020917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:42.020921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:42.020925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:42.020927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:42.020933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:42.020941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:42.021013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:42.021072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:42.030570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:42.030588Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:42.030654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:42.032584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:42.032607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:42.032638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:42.034722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:42.034814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:42.034911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:42.035077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:42.035624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
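Every record in this stderr follows one shape: an ISO-8601 timestamp, "node <id>", ":<COMPONENT> <SEVERITY>:", usually a "file.cpp:line:" origin, then a free-form message that runs until the next timestamp. A minimal sketch for splitting a flattened blob like the above back into records; the shape and the severity set are inferred from the lines visible here, not taken from YDB's logging API:

import re

RECORD = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
    r"node (?P<node>\d+) "
    r":(?P<component>\S+) (?P<severity>TRACE|DEBUG|INFO|NOTICE|WARN|ERROR):"
)

def split_records(blob):
    """Yield (header_fields, message) pairs from a flattened log blob.

    The message keeps its "file.cpp:line:" origin prefix and extends to the
    start of the next timestamped record.
    """
    heads = list(RECORD.finditer(blob))
    for cur, nxt in zip(heads, heads[1:] + [None]):
        end = nxt.start() if nxt else len(blob)
        yield cur.groupdict(), blob[cur.end():end].strip()

# e.g. pulling one transaction's lifecycle out of the noise:
# [msg for hdr, msg in split_records(blob) if "txId: 1003" in msg]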
2025-05-29T15:31:42.035653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:42.035837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:42.035843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:42.035875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:42.035882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:42.035888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:42.035906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:42.036871Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:42.048918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:42.048978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.049016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:42.049044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:42.049051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.049523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:42.049541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:42.049573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.049579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:42.049583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:42.049586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:42.049949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.049958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:42.049961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:42.050274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.050283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.050287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:42.050292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:42.050864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:42.051219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:42.051244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:42.051393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:42.051413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:42.051431Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:42.051486Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... nerId: 72057594046678944, LocalPathId: 3], 6 2025-05-29T15:31:52.966425Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 6 2025-05-29T15:31:52.966427Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2025-05-29T15:31:52.966429Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 2 2025-05-29T15:31:52.966563Z node 46 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:52.966572Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:52.966575Z node 46 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:52.966578Z node 46 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-05-29T15:31:52.966580Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:52.966798Z node 46 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:52.966807Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:52.966810Z node 46 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:52.966812Z node 46 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 6 2025-05-29T15:31:52.966815Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:52.966880Z node 46 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:52.966886Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:52.966891Z node 46 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:52.966893Z node 46 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2025-05-29T15:31:52.966895Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:31:52.966993Z node 46 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:52.967000Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:52.967002Z node 46 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:52.967005Z node 46 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-05-29T15:31:52.967007Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:31:52.967186Z node 46 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:52.967191Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:52.967194Z node 46 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:52.967196Z node 46 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 2 2025-05-29T15:31:52.967199Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-29T15:31:52.967204Z node 46 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:31:52.967556Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:52.967569Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:52.967583Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:52.967766Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:52.967776Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:31:52.967805Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:31:52.967810Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:31:52.967846Z node 46 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:31:52.967857Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:31:52.967860Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [46:352:2342] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:31:52.967903Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:52.967921Z node 46 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 24us result status StatusSuccess 2025-05-29T15:31:52.967957Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 
PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "z" PathId: 6 PartitionsCount: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:52.967987Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:52.967998Z node 46 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 12us result status StatusPathDoesNotExist 2025-05-29T15:31:52.968010Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateDirWithIntermediateDirsForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:43.328075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:43.328092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:43.328096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:43.328099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:43.328103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:43.328106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:43.328111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:43.328121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:43.328197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:43.328265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:43.339558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:43.339579Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:43.339683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:43.341908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:43.341935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:43.341964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:43.345482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:43.345616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:43.345756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:43.345974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:43.346870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:43.346919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:43.347216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:43.347232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:43.347265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:43.347275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:43.347282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:43.347302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:43.348827Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:43.368171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:43.368230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:43.368284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:43.368324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:43.368334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:43.368940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:43.368958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:43.368997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:43.369005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:43.369009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:43.369013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 
1:0 2 -> 3 2025-05-29T15:31:43.369375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:43.369383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:43.369388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:43.369671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:43.369679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:43.369683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:43.369689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:43.370273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:43.370674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:43.370704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:43.370879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:43.370901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:43.370918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:43.370983Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
rd__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-05-29T15:31:53.012843Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-05-29T15:31:53.012847Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [41:306:2296] 2025-05-29T15:31:53.012911Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.013116Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.013123Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:53.013126Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [41:207:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2025-05-29T15:31:53.013130Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [41:207:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2025-05-29T15:31:53.013132Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [41:207:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-05-29T15:31:53.013135Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [41:207:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:31:53.013248Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.013259Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.013264Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:53.013268Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:31:53.013273Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:53.013329Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.013337Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.013340Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:53.013343Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:31:53.013345Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:31:53.013372Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:31:53.013376Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:31:53.013382Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:31:53.013398Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.013403Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.013405Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:53.013408Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:31:53.013410Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:31:53.013480Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.013486Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.013489Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:53.013491Z node 41 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at 
schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:31:53.013495Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:31:53.013501Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2025-05-29T15:31:53.013504Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [41:305:2295] 2025-05-29T15:31:53.013735Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.013884Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.013911Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-05-29T15:31:53.013960Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:31:53.013964Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:31:53.013971Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:31:53.013976Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:31:53.013981Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:31:53.014178Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.014192Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.014199Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:31:53.014202Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [41:306:2296] 2025-05-29T15:31:53.014388Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2025-05-29T15:31:53.014440Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/x" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:53.014457Z node 41 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/x" took 22us result status StatusPathDoesNotExist 2025-05-29T15:31:53.014482Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/x\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/x" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TConsistentOpsWithReboots::CreateIndexedTableAndForceDrop [GOOD] >> TReplicationWithRebootsTests::Alter [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CreateIndexedTableAndForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:35.479783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:35.479803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:35.479808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:35.479812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:35.479816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:35.479820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:35.479828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:35.479837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:35.479910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:35.479973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:35.492231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:35.492252Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:35.492340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:35.494732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:35.494784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:35.494816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:35.497177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:35.497251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:35.497356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:35.497529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:35.498130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:35.498164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:35.498394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:35.498404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:35.498434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:35.498441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:35.498447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:35.498463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:35.499728Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:35.516927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:35.516980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.517026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:35.517067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:35.517078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.517643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:35.517664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:35.517704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.517711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:35.517716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:35.517720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:35.518084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.518096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:35.518101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:35.518430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.518440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.518445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:35.518451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:35.519033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:35.519370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:35.519395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:35.519540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:35.519560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:35.519575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:35.519632Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
chemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-05-29T15:31:53.475710Z node 74 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409546 2025-05-29T15:31:53.475757Z node 74 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-05-29T15:31:53.476104Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:31:53.476133Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 1 Forgetting tablet 72075186233409546 2025-05-29T15:31:53.476395Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:31:53.476422Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 Forgetting tablet 72075186233409547 2025-05-29T15:31:53.476473Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:31:53.476476Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 8], at schemeshard: 72057594046678944 2025-05-29T15:31:53.476482Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-05-29T15:31:53.476486Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-05-29T15:31:53.476489Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:31:53.476491Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-29T15:31:53.476494Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:31:53.476497Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:31:53.476500Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:31:53.476502Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: 
TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:31:53.476505Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:31:53.476507Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:31:53.476510Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:31:53.476608Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:53.476653Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:53.476856Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:53.476866Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:53.477287Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:53.477311Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:31:53.477318Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409548 2025-05-29T15:31:53.477327Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:31:53.477334Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:31:53.477336Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409546 2025-05-29T15:31:53.477343Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:31:53.477346Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:31:53.477369Z node 74 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 6 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:31:53.477404Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:31:53.477408Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 
2025-05-29T15:31:53.477446Z node 74 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:31:53.477457Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:31:53.477460Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [74:610:2560] TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted wait until 72075186233409554 is deleted wait until 72075186233409555 is deleted 2025-05-29T15:31:53.477508Z node 74 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:31:53.477515Z node 74 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-05-29T15:31:53.477520Z node 74 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2025-05-29T15:31:53.477524Z node 74 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2025-05-29T15:31:53.477530Z node 74 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2025-05-29T15:31:53.477535Z node 74 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2025-05-29T15:31:53.477538Z node 74 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 2025-05-29T15:31:53.477544Z node 74 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409553 2025-05-29T15:31:53.477550Z node 74 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409554 2025-05-29T15:31:53.477555Z node 74 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409555 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 Deleted tabletId 72075186233409554 Deleted tabletId 72075186233409555 2025-05-29T15:31:53.477624Z node 74 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:53.477642Z node 74 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 26us result status StatusSuccess 2025-05-29T15:31:53.477693Z node 74 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::Alter [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:45.319508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:45.319533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:45.319537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:45.319541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:45.319550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:45.319552Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:45.319559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:45.319569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:45.319647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:45.319723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:45.332265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:45.332285Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:45.332363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:45.334535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:45.334558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:45.335025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:45.338607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:45.338677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:45.343697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:45.344639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:45.346469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:45.346534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:45.348117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:45.348128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:45.348150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:45.348156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:45.348161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:45.348175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:45.349360Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:45.366494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:45.366558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.366603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:45.367144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:45.367173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.367780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:45.367797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:45.367836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.367842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:45.367845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:45.367848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:45.368159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.368171Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:45.368176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:45.368443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.368450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.368453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:45.368457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:45.369288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:45.369657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:45.369685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:45.369851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:45.369877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:45.369883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:45.370513Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
d__operation_side_effects.cpp:693: Ack tablet strongly msg opId: 1003:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1003 2025-05-29T15:31:53.713649Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:31:53.713655Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:384: Ack coordinator stepId#5000004 first txId#1003 countTxs#1 2025-05-29T15:31:53.713659Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:354: Ack mediator stepId#5000004 2025-05-29T15:31:53.713662Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 1003:0 2025-05-29T15:31:53.713686Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [35:126:2151], Recipient [35:126:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:31:53.713689Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 1003 2025-05-29T15:31:53.713699Z node 35 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:53.713702Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-29T15:31:53.713733Z node 35 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:53.713736Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [35:208:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-05-29T15:31:53.713812Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-05-29T15:31:53.713817Z node 35 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-05-29T15:31:53.713824Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:31:53.713827Z node 35 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:31:53.713829Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:31:53.713833Z node 35 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:0 progress is 1/1 2025-05-29T15:31:53.713835Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:31:53.713837Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2025-05-29T15:31:53.713841Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-05-29T15:31:53.713844Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:31:53.713846Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:31:53.713862Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:31:53.713866Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 1, subscribers: 0 2025-05-29T15:31:53.713868Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-05-29T15:31:53.713937Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 274137603, Sender [35:208:2209], Recipient [35:126:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Version: 3 } 2025-05-29T15:31:53.713941Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:31:53.713951Z node 35 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.713958Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.713961Z node 35 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:53.713964Z node 35 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-05-29T15:31:53.713967Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:31:53.713975Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:31:53.713977Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:31:53.714337Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:31:53.714410Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:53.714414Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:31:53.714441Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- 
TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:31:53.714446Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:31:53.714479Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [35:458:2412], Recipient [35:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:31:53.714483Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:31:53.714485Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:31:53.714500Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [35:424:2378], Recipient [35:126:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1003 2025-05-29T15:31:53.714503Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:31:53.714511Z node 35 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:31:53.714522Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:31:53.714526Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [35:456:2410] 2025-05-29T15:31:53.714543Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [35:458:2412], Recipient [35:126:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:31:53.714550Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:31:53.714554Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 2025-05-29T15:31:53.714592Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [35:459:2413], Recipient [35:126:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-05-29T15:31:53.714595Z node 35 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:31:53.714602Z node 35 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:53.714627Z node 35 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 23us result status StatusSuccess 2025-05-29T15:31:53.714677Z node 35 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication" PathDescription { Self { Name: "Replication" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication 
CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 2 ControllerId: 72075186233409546 State { Done { FailoverMode: FAILOVER_MODE_FORCE } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> IntermediateDirsReboots::CreateTableWithIntermediateDirsAndRejectInTable [GOOD] >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD] >> KqpDataIntegrityTrails::BrokenReadLock-UseSink >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink |75.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |75.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |75.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |75.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink |75.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |75.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |75.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |75.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |75.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |75.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |75.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |75.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |75.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |75.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |75.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateTableWithIntermediateDirsAndRejectInTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is 
[0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:34.063167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:34.063188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:34.063194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:34.063199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:34.063205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:34.063209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:34.063218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:34.063229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:34.063316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:34.063385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:34.072448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:34.072463Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:34.072526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:34.074509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:34.074531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:34.074552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:34.076728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:34.076787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:34.076863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:34.077064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:34.077670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:34.077701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:34.077883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:34.077889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:34.077912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:34.077917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:34.077921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:34.077934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:34.078900Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:34.091226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:34.091283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.091324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:34.091354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 
1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:34.091361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.091911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:34.091927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:34.091963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.091969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:34.091972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:34.091975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:34.092302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.092310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:34.092313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:34.092569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.092577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.092582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:34.092588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:34.093039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:34.093361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:34.093383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: 
[1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:34.093535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:34.093551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:34.093564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:34.093603Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... pp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 10 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:55.498229Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:55.498232Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 10], version: 5 2025-05-29T15:31:55.498234Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 3 2025-05-29T15:31:55.498330Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 11 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:55.498336Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 11 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:55.498339Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:55.498343Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 11], version: 3 2025-05-29T15:31:55.498345Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 4 2025-05-29T15:31:55.498350Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/5, is published: true 2025-05-29T15:31:55.499085Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:55.499102Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:55.499115Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:4, at schemeshard: 72057594046678944 2025-05-29T15:31:55.499125Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:55.499139Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:55.499147Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:4, at schemeshard: 72057594046678944 2025-05-29T15:31:55.499204Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:55.499239Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:4, at schemeshard: 72057594046678944 2025-05-29T15:31:55.499244Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:4 ProgressState 2025-05-29T15:31:55.499251Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:4 progress is 5/5 2025-05-29T15:31:55.499253Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 5/5 2025-05-29T15:31:55.499256Z node 88 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:4 progress is 5/5 2025-05-29T15:31:55.499258Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 5/5 2025-05-29T15:31:55.499261Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 5/5, is published: true 2025-05-29T15:31:55.499264Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 5/5 2025-05-29T15:31:55.499268Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:31:55.499271Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:31:55.499277Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-05-29T15:31:55.499280Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:31:55.499285Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:31:55.499288Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2025-05-29T15:31:55.499290Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:31:55.499292Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 
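[Annotation, not part of the trace] The entries around this point record how schemeshard tracks a multi-part operation: each suboperation (1003:0 through 1003:4) reports done, progress advances toward 5/5, and only when every part is done and the scheme updates are published ("ready parts: 5/5, is published: true") does the operation notify waiters and remove its tx state. Below is a minimal illustrative sketch of that bookkeeping; all names are hypothetical and it makes no claim to match YDB's actual classes.

// Illustrative sketch only -- not YDB source. Models the "ready parts: N/M",
// IsReadyToDone / IsReadyToNotify bookkeeping visible in the trace above.
#include <cstdint>
#include <iostream>
#include <vector>

struct TOperationSketch {
    uint64_t TxId;
    std::vector<bool> PartDone;   // one flag per suboperation (1003:0 .. 1003:4)
    bool Published = false;       // all scheme-board updates acknowledged

    std::size_t DoneParts() const {
        std::size_t n = 0;
        for (bool d : PartDone) n += d;
        return n;
    }
    bool IsReadyToDone() const { return DoneParts() == PartDone.size(); }
    // Waiters are notified only when every part is done *and* the new scheme
    // has been published -- mirroring "ready parts: 5/5, is published: true".
    bool IsReadyToNotify() const { return IsReadyToDone() && Published; }
};

int main() {
    TOperationSketch op{1003, std::vector<bool>(5, false)};
    op.Published = true;  // assume publication already acked, as in the trace
    for (std::size_t part = 0; part < op.PartDone.size(); ++part) {
        op.PartDone[part] = true;
        std::cout << "part " << op.TxId << ":" << part << " done, progress "
                  << op.DoneParts() << "/" << op.PartDone.size() << "\n";
    }
    if (op.IsReadyToNotify())
        std::cout << "operation " << op.TxId << " complete, removing tx state\n";
}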
2025-05-29T15:31:55.499295Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-05-29T15:31:55.499297Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:3 2025-05-29T15:31:55.499299Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:3 2025-05-29T15:31:55.499302Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2025-05-29T15:31:55.499305Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:4 2025-05-29T15:31:55.499307Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:4 2025-05-29T15:31:55.499315Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2025-05-29T15:31:55.499486Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:31:55.499760Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:31:55.499767Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:31:55.499804Z node 88 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:31:55.499817Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:31:55.499820Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [88:433:2405] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:31:55.499865Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:55.499886Z node 88 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 28us result status StatusSuccess 2025-05-29T15:31:55.499946Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 9 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_name" PathId: 11 SchemeshardId: 
72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:55.499980Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:55.499993Z node 88 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 14us result status StatusPathDoesNotExist 2025-05-29T15:31:55.500006Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> IntermediateDirsReboots::CreateSolomonWithIntermediateDirs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:30:22.462295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:22.462322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:22.462328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:22.462334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:22.462349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:22.462353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:22.462363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:22.462376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:22.462466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:22.462532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:22.476192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:22.476213Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:22.476284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:22.478900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:22.478938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:22.478980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:22.481724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:22.481786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:22.481886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:22.482064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-05-29T15:30:22.482675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:22.482709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:22.482980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:22.482995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:22.483034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:22.483042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:22.483049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:22.483068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:22.484852Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:22.507860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:22.507945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.508004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:22.508054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:22.508065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.508825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:22.508851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , 
status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:22.508893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.508903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:22.508908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:22.508914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:22.509374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.509389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:22.509396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:22.509823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.509833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:22.509839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:22.509846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:22.510510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:22.511470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:22.511517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:22.511726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:22.511759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 
TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:22.511766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:22.511858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 
16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:55.569990Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:31:55.570036Z node 94 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 52us result status StatusSuccess 2025-05-29T15:31:55.570155Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 
500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx |75.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |75.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |75.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |75.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink |76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateSolomonWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: 
[1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:35.464250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:35.464274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:35.464280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:35.464285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:35.464290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:35.464295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:35.464304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:35.464315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:35.464402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:35.464471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:35.478621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:35.478641Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:35.478761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:35.481296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:35.481322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:35.481355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2025-05-29T15:31:35.484678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:35.484806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:35.484918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:35.485156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:35.486037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:35.486076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:35.486368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:35.486381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:35.486412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:35.486420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:35.486426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:35.486444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:35.487758Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:35.502157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:35.502211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.502253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:35.502288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:35.502295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.502811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:35.502830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:35.502869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.502878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:35.502883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:35.502888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:35.503301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.503316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:35.503322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:35.503675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.503684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.503688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:35.503692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:35.504104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:35.504437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:35.504461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] 
recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:35.504582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:35.504598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:35.504611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:35.504656Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... .257063Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 2 2025-05-29T15:31:56.257629Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:56.257647Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:56.257651Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:56.257655Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-05-29T15:31:56.257659Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:56.257911Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:56.257923Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:56.257927Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:56.257930Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 6 2025-05-29T15:31:56.257933Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] 
was 2 2025-05-29T15:31:56.258453Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:56.258469Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:56.258473Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:56.258476Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2025-05-29T15:31:56.258479Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:31:56.258536Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:56.258545Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:56.258550Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:56.258557Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-05-29T15:31:56.258561Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:31:56.258604Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:56.258616Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:31:56.258620Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:31:56.258625Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 2 2025-05-29T15:31:56.258629Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 
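[Annotation, not part of the trace] The countdown above ("Publication in-flight, count: 5 → 4 → 3 → 2 → 1") is the scheme-board publication barrier: per transaction, the schemeshard remembers which path versions still await acknowledgment, decrements on each matching TEvUpdateAck, and declares publication complete at zero (the very next entry). A minimal sketch under those assumptions follows; names are hypothetical, not YDB's actual implementation.

// Illustrative sketch only -- not YDB source. Models the per-tx ack
// countdown: each expected (pathId, version) ack shrinks the in-flight
// set, and the transaction is "published" when the set is empty.
#include <cstdint>
#include <iostream>
#include <map>
#include <utility>
#include <vector>

struct TPublicationSketch {
    // pathId -> minimum path version that must be acked for this tx
    std::map<uint64_t, uint64_t> Expected;

    // Returns true when the last expected ack arrives ("Publication complete").
    bool AckPublish(uint64_t pathId, uint64_t version) {
        auto it = Expected.find(pathId);
        if (it == Expected.end() || version < it->second)
            return false;  // unknown path or stale version -- ignore the ack
        Expected.erase(it);
        std::cout << "Publication in-flight, count: " << Expected.size() << "\n";
        return Expected.empty();
    }
};

int main() {
    TPublicationSketch pub;
    // Five paths with the versions seen in the trace above.
    pub.Expected = {{1, 8}, {3, 6}, {4, 6}, {5, 5}, {6, 2}};
    std::vector<std::pair<uint64_t, uint64_t>> acks =
        {{1, 8}, {3, 6}, {4, 6}, {5, 5}, {6, 2}};
    for (auto [path, ver] : acks)
        if (pub.AckPublish(path, ver))
            std::cout << "Publication complete, notify & remove\n";
}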
2025-05-29T15:31:56.258635Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:31:56.259473Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:56.259497Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:56.260500Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:56.260523Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:31:56.260543Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:31:56.260602Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:31:56.260609Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:31:56.260667Z node 86 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:31:56.260683Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:31:56.260686Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [86:431:2402] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:31:56.260746Z node 86 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:56.260791Z node 86 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 55us result status StatusSuccess 2025-05-29T15:31:56.260857Z node 86 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeSolomonVolume CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SolomonVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 
72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SolomonDescription { Name: "z" PathId: 6 PartitionCount: 2 Partitions { PartitionId: 0 TabletId: 72075186233409546 ShardIdx: 1 } Partitions { PartitionId: 1 TabletId: 72075186233409547 ShardIdx: 2 } BoundChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } BoundChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } BoundChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } BoundChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:56.260897Z node 86 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:56.260911Z node 86 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 15us result status StatusPathDoesNotExist 2025-05-29T15:31:56.260924Z node 86 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
|76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest
|76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest
>> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink
>> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink
|76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest
|76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest
|76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest
|76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest
|76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest
|76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest
>> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink
>> KqpDataIntegrityTrails::Ddl
>> KqpDataIntegrityTrails::Select
|76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest
|76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest
>> TReplicationWithRebootsTests::Create [GOOD]
>> KqpDataIntegrityTrails::BrokenReadLock+UseSink
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::Create [GOOD]
Test command err:
==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:45.319512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:45.319536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:45.319541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:45.319545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:45.319553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:45.319557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:45.319564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:45.319575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:45.319669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:45.319743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:45.332260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:45.332281Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:31:45.332361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:45.334546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:45.334567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:45.335019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:45.338608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:45.338671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:45.343700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:45.344641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:45.346494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:45.346538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:45.348081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:45.348092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:45.348118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:45.348124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:45.348129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:45.348147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:45.349342Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:45.365391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: 
"pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:45.366165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.366223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:45.366833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:45.366851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.367556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:45.367578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:45.367637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.367645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:45.367652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:45.367657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:45.368016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.368025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:45.368028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:45.368301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.368308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.368312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:45.368318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:45.369213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 
2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:45.369537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:45.369575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:45.369752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:45.369770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:45.369775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:45.370492Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... e 2025-05-29T15:31:58.755570Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:31:58.755575Z node 56 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1 2025-05-29T15:31:58.755577Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:31:58.755582Z node 56 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1002:0 progress is 1/1 2025-05-29T15:31:58.755584Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:31:58.755587Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2025-05-29T15:31:58.755590Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-05-29T15:31:58.755593Z node 56 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1002:0 2025-05-29T15:31:58.755596Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1002:0 2025-05-29T15:31:58.755613Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:31:58.755617Z node 56 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1002, publications: 2, subscribers: 0 2025-05-29T15:31:58.755620Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-05-29T15:31:58.755622Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-05-29T15:31:58.755766Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 274137603, Sender [56:208:2209], Recipient [56:126:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2025-05-29T15:31:58.755773Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:31:58.755786Z node 56 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:31:58.755796Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:31:58.755802Z node 56 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2025-05-29T15:31:58.755806Z node 56 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-05-29T15:31:58.755810Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:31:58.755823Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:31:58.755895Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 274137603, Sender [56:208:2209], Recipient [56:126:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Version: 2 } 2025-05-29T15:31:58.755900Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:31:58.755905Z node 56 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:31:58.755911Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1002 2025-05-29T15:31:58.755914Z node 56 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2025-05-29T15:31:58.755916Z node 56 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 
72057594046678944, LocalPathId: 3], version: 2 2025-05-29T15:31:58.755919Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:31:58.755924Z node 56 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 0 2025-05-29T15:31:58.755927Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:31:58.756364Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:31:58.756640Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-05-29T15:31:58.756649Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:31:58.756663Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-05-29T15:31:58.756667Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2025-05-29T15:31:58.756713Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-05-29T15:31:58.756722Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-05-29T15:31:58.756768Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [56:431:2385], Recipient [56:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:31:58.756774Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:31:58.756778Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:31:58.756800Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [56:428:2382], Recipient [56:126:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1002 2025-05-29T15:31:58.756805Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:31:58.756817Z node 56 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-05-29T15:31:58.756835Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-05-29T15:31:58.756842Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [56:429:2383] 2025-05-29T15:31:58.756864Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [56:431:2385], Recipient [56:126:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 
2025-05-29T15:31:58.756869Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:31:58.756873Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 2025-05-29T15:31:58.756923Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [56:432:2386], Recipient [56:126:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-05-29T15:31:58.756928Z node 56 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:31:58.756939Z node 56 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:31:58.756976Z node 56 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 33us result status StatusSuccess 2025-05-29T15:31:58.757052Z node 56 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication" PathDescription { Self { Name: "Replication" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 ControllerId: 72075186233409546 State { StandBy { } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> QueryStats::Ranges [GOOD]
|76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest
|76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest
>> StatisticsSaveLoad::ForbidAccess [FAIL]
|76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest
|76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest
|76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest
|76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest
|76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest
|76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest
>> QueryStats::Ranges [GOOD]
|76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest
>> IntermediateDirsReboots::CreateTableWithIntermediateDirs [GOOD]
>> TSolomonReboots::AdoptDropSolomonWithReboots [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateTableWithIntermediateDirs [GOOD]
Test command err:
==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:40.184788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:40.184806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:40.184811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:40.184816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:40.184821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:40.184825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:40.184834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:40.184848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:40.184925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured:
HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:40.184984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:40.194144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:40.194163Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:40.194231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:40.196096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:40.196116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:40.196139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:40.198145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:40.198232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:40.198318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:40.198477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:40.199058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:40.199089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:40.199271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:40.199277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:40.199298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:40.199302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:40.199306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:40.199319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:40.200451Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:40.214038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:40.214100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:40.214141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:40.214175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:40.214183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:40.214711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:40.214732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:40.214794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:40.214803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:40.214807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:40.214810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:40.215171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:40.215181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:40.215185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:40.215486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:40.215500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:40.215505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:40.215511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:40.216081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:40.216433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:40.216456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:40.216578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:40.216595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:40.216609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:40.216652Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:00.560122Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:00.560124Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:00.560126Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-05-29T15:32:00.560128Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:32:00.560149Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:00.560155Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:00.560159Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:00.560161Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-05-29T15:32:00.560164Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-05-29T15:32:00.560168Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/4, is published: true 2025-05-29T15:32:00.560948Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:00.560963Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2025-05-29T15:32:00.560972Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:00.560980Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2025-05-29T15:32:00.561013Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:00.561023Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:00.561033Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:00.561039Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944 2025-05-29T15:32:00.561043Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:3 ProgressState 2025-05-29T15:32:00.561049Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4 2025-05-29T15:32:00.561052Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:00.561055Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4 2025-05-29T15:32:00.561057Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:00.561060Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: true 2025-05-29T15:32:00.561063Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:00.561066Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:00.561069Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:00.561074Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:32:00.561080Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:00.561082Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:00.561085Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:32:00.561088Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:00.561090Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:00.561092Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:32:00.561095Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:3 2025-05-29T15:32:00.561097Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:3 2025-05-29T15:32:00.561104Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:00.561464Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for 
txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:00.561470Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:00.561502Z node 84 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:00.561513Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:00.561517Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [84:421:2393] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:00.561558Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:00.561580Z node 84 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 28us result status StatusSuccess 2025-05-29T15:32:00.561646Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "z" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:00.561688Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:00.561701Z node 84 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 14us result status StatusPathDoesNotExist 2025-05-29T15:32:00.561713Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TSolomonReboots::AdoptDropSolomonWithReboots [GOOD]
Test command err:
==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:42.818882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:42.818905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:42.818910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:42.818915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:42.818920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:42.818925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition,
limit 10000 2025-05-29T15:31:42.818933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:42.818946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:42.819048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:42.819127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:42.833003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:42.833024Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:42.833113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:42.835631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:42.835660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:42.835690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:42.838838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:42.838934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:42.839052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:42.839321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:42.840072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:42.840116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:42.840373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:42.840383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:42.840416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:42.840424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:42.840430Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:42.840448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:42.841687Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:42.861597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:42.861684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.861748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:42.861795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:42.861807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.862573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:42.862601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:42.862664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.862676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:42.862682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:42.862688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:42.863221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.863239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 
72057594046678944 2025-05-29T15:31:42.863246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:42.864022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.864034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.864040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:42.864048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:42.864711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:42.865176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:42.865207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:42.865380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:42.865405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:42.865423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:42.865491Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
xPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:00.704401Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [74:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-05-29T15:32:00.704404Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [74:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2025-05-29T15:32:00.704435Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:00.704440Z node 74 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:416: [72057594046678944] TDeleteParts opId# 1004:0 ProgressState 2025-05-29T15:32:00.704444Z node 74 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:32:00.704447Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:32:00.704450Z node 74 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:32:00.704452Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:32:00.704455Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-05-29T15:32:00.704458Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:32:00.704460Z node 74 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:32:00.704463Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:32:00.704479Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:00.704483Z node 74 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2025-05-29T15:32:00.704485Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-29T15:32:00.704488Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-29T15:32:00.704534Z node 74 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:00.704541Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:00.704544Z node 74 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:00.704547Z node 74 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-29T15:32:00.704549Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:00.704580Z node 74 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:00.704586Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:00.704588Z node 74 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:00.704591Z node 74 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-29T15:32:00.704593Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-29T15:32:00.704598Z node 74 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-05-29T15:32:00.704853Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:32:00.704860Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:32:00.705037Z node 74 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409546 2025-05-29T15:32:00.705097Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:32:00.705123Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:32:00.705153Z node 74 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409547 2025-05-29T15:32:00.705355Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:32:00.705369Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: 
DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409546 2025-05-29T15:32:00.705504Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:32:00.705508Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:32:00.705514Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 Forgetting tablet 72075186233409547 2025-05-29T15:32:00.705542Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:00.705558Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:00.706021Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:32:00.706029Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409546 2025-05-29T15:32:00.706048Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:32:00.706052Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409547 2025-05-29T15:32:00.706104Z node 74 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:32:00.706136Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:00.706142Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:32:00.706182Z node 74 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:00.706194Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:00.706197Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [74:487:2458] TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-05-29T15:32:00.706236Z node 74 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:32:00.706245Z node 74 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 2025-05-29T15:32:00.706286Z node 74 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:00.706303Z node 74 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "MyRoot/Solomon" took 22us result status StatusPathDoesNotExist 2025-05-29T15:32:00.706323Z node 74 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |76.1%| [TA] $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |76.1%| [TA] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> IntermediateDirsReboots::CreateSubDomainWithIntermediateDirs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateSubDomainWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:42.262438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:42.262455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:42.262459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, 
StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:42.262463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:42.262467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:42.262469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:42.262475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:42.262485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:42.262567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:42.262631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:42.272138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:42.272155Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:42.272230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:42.274345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:42.274392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:42.274418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:42.276586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:42.276657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:42.276745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:42.276970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:42.277617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:42.277658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:42.277891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:42.277898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:42.277918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:42.277925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:42.277930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:42.277944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:42.279114Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:42.294653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:42.294705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.294760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:42.294794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:42.294801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.295275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:42.295288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:42.295319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.295325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:42.295328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to 
create, do next state 2025-05-29T15:31:42.295331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:42.295604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.295610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:42.295613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:42.295805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.295809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.295813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:42.295817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:42.296203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:42.296469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:42.296488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:42.296603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:42.296619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:42.296633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:42.296677Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
EMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 6 2025-05-29T15:32:02.218165Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2025-05-29T15:32:02.218167Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 3 2025-05-29T15:32:02.218440Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:02.218452Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:02.218455Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:02.218459Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-05-29T15:32:02.218462Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:32:02.218537Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:02.218544Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:02.218546Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:02.218549Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 6 2025-05-29T15:32:02.218551Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:32:02.218652Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:02.218660Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:02.218666Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication 
in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:02.218668Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2025-05-29T15:32:02.218671Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:32:02.218837Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:02.218845Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:02.218848Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:02.218850Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-05-29T15:32:02.218853Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:32:02.218884Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:02.218890Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:02.218893Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:02.218896Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-05-29T15:32:02.218898Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-05-29T15:32:02.218903Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:02.219223Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:02.219234Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:02.219241Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:02.219436Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:02.219447Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:02.219484Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:02.219490Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:02.219531Z node 83 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:02.219543Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:02.219546Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [83:455:2422] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:02.219595Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:02.219619Z node 83 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 32us result status StatusSuccess 2025-05-29T15:32:02.219681Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 6 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 6 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:02.219714Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:02.219727Z node 83 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 14us result status StatusPathDoesNotExist 2025-05-29T15:32:02.219740Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTable[TabletReboots] >> TCdcStreamWithRebootsTests::CreateStreamExplicitReady[PipeResets] >> TCdcStreamWithRebootsTests::WithPqTransactions[TabletReboots] >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableExplicitReady[PipeResets] >> TCdcStreamWithRebootsTests::CreateStream[TabletReboots] >> TCdcStreamWithRebootsTests::CreateStreamWithVirtualTimestamps[PipeResets] |76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> IncrementalRestoreScan::ChangeSenderSimple >> IncrementalRestoreScan::Empty >> IncrementalRestoreScan::ChangeSenderEmpty >> IntermediateDirsReboots::CreateTableWithIntermediateDirsAndRejectInSolomon [GOOD] |76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateTableWithIntermediateDirsAndRejectInSolomon [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:42.733427Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:42.733447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:42.733453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:42.733458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:42.733463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:42.733467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:42.733477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:42.733489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:42.733576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:42.733644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:42.747039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:42.747060Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:42.747152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:42.749870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:42.749897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:42.749928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:42.752525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:42.752583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:42.752660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:42.752787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-05-29T15:31:42.753410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:42.753444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:42.753639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:42.753645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:42.753666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:42.753671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:42.753675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:42.753688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:42.754605Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:42.768105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:42.768166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.768212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:42.768255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:42.768265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.768820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:42.768839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , 
status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:42.768884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.768891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:42.768894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:42.768897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:42.769180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.769189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:42.769194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:42.769438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.769444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.769447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:42.769452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:42.769855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:42.770147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:42.770172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:42.770303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:42.770320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 
TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:42.770333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:42.770387Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... .406522Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 9], 2 2025-05-29T15:32:03.407107Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:03.407125Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:03.407130Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:03.407135Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-05-29T15:32:03.407139Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:32:03.407352Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:03.407372Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:03.407377Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:03.407381Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 6 2025-05-29T15:32:03.407388Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-29T15:32:03.407876Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:03.407891Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:03.407895Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication 
in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:03.407900Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 6 2025-05-29T15:32:03.407904Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-05-29T15:32:03.407963Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:03.407972Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:03.407975Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:03.407979Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 5 2025-05-29T15:32:03.407982Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2025-05-29T15:32:03.408021Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:03.408030Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:03.408034Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:03.408038Z node 86 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 2 2025-05-29T15:32:03.408042Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 4 2025-05-29T15:32:03.408049Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:03.408741Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:03.408785Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:03.409630Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:03.409652Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:03.409668Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:03.409716Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:03.409721Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:03.409759Z node 86 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:03.409772Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:03.409775Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [86:431:2402] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:03.409819Z node 86 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:03.409842Z node 86 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 30us result status StatusSuccess 2025-05-29T15:32:03.409891Z node 86 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeSolomonVolume CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 8 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SolomonVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SolomonDescription { Name: "z" PathId: 9 PartitionCount: 2 Partitions { PartitionId: 0 TabletId: 72075186233409546 ShardIdx: 1 } Partitions { PartitionId: 1 TabletId: 72075186233409547 ShardIdx: 2 } BoundChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } BoundChannels { StoragePoolName: "pool-1" 
StoragePoolKind: "pool-kind-1" } BoundChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } BoundChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } } } PathId: 9 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:03.409925Z node 86 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:03.409938Z node 86 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 13us result status StatusPathDoesNotExist 2025-05-29T15:32:03.409950Z node 86 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> IncrementalRestoreScan::Empty [GOOD] |76.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::Empty [GOOD] Test command err: 2025-05-29T15:32:03.761327Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:32:03.761401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:32:03.761418Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000eda/r3tmp/tmp9Vz33R/pdisk_1.dat 2025-05-29T15:32:03.921794Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:178: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:596:2521] Exhausted 2025-05-29T15:32:03.921829Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:127: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:596:2521] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2025-05-29T15:32:03.921837Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:191: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:596:2521] Finish Done >> TCdcStreamWithRebootsTests::DropStreamCreatedWithInitialScan[PipeResets] |76.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableExplicitReady[PipeResets] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] Test command err: 2025-05-29T15:32:03.785017Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:32:03.785064Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:32:03.785082Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001006/r3tmp/tmpk4WbSi/pdisk_1.dat 2025-05-29T15:32:03.943720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-05-29T15:32:03.943792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:32:03.943846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-05-29T15:32:03.943914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:32:03.943925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:32:03.944117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-05-29T15:32:03.944137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-05-29T15:32:03.944175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:32:03.944184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-05-29T15:32:03.944189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:03.944194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:03.944300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:32:03.944313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:32:03.944318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:03.944381Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:32:03.944387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:32:03.944392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-05-29T15:32:03.944399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:03.945064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:03.945197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:03.945237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:32:03.945438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-05-29T15:32:03.945446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-05-29T15:32:03.945458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-05-29T15:32:03.957961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-05-29T15:32:03.957980Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:03.958040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage 2025-05-29T15:32:03.958049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:03.960732Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532723407474 != 1748532723407478 2025-05-29T15:32:04.002557Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:32:04.002814Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:32:04.002866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:04.002891Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:04.015158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:04.086674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-29T15:32:04.086729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-05-29T15:32:04.086753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-05-29T15:32:04.086825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:32:04.086832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-05-29T15:32:04.086858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-29T15:32:04.086869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-05-29T15:32:04.087006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-29T15:32:04.087011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-29T15:32:04.087032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-29T15:32:04.087035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:572:2500], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-05-29T15:32:04.087072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:32:04.087077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046644480] TDone opId# 1:0 ProgressState 2025-05-29T15:32:04.087084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:32:04.087087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:32:04.087090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:32:04.087092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:32:04.087095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-29T15:32:04.087098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: 
TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:32:04.087101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1:0 2025-05-29T15:32:04.087103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1:0 2025-05-29T15:32:04.087108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-05-29T15:32:04.087111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-05-29T15:32:04.087116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-05-29T15:32:04.087449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-05-29T15:3 ... shard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 1/1, is published: true 2025-05-29T15:32:04.529178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:592:2518] message: TxId: 281474976715658 2025-05-29T15:32:04.529184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-05-29T15:32:04.529188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715658:0 2025-05-29T15:32:04.529193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715658:0 2025-05-29T15:32:04.529208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-29T15:32:04.529313Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:59:2106] Handle TEvNavigate describe path /Root/IncrBackupTable 2025-05-29T15:32:04.530930Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:808:2666] HANDLE EvNavigateScheme /Root/IncrBackupTable 2025-05-29T15:32:04.531229Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:808:2666] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:32:04.531249Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:808:2666] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" Options { ShowPrivateTable: true } 2025-05-29T15:32:04.531474Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:808:2666] Handle TEvDescribeSchemeResult Forward to# [1:592:2518] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 
ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } 
DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-05-29T15:32:04.531790Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037889, clientId# [1:818:2670], serverId# [1:819:2671], sessionId# [0:0:0] 2025-05-29T15:32:04.531901Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:65: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:820:2672] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:32:04.531930Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:131: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:820:2672] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:32:04.531968Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:227: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:820:2672] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-05-29T15:32:04.531994Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:139: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:820:2672] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] BodySize: 18 }] } 2025-05-29T15:32:04.532011Z node 1 :CHANGE_EXCHANGE 
DEBUG: change_sender_incr_restore.cpp:144: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:820:2672] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:32:04.532036Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:392: actor# [1:59:2106] Handle TEvGetProxyServicesRequest 2025-05-29T15:32:04.532047Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:40: [TableChangeSenderShard][0:0][72075186224037888][1:824:2672] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-05-29T15:32:04.532112Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:825:2676], serverId# [1:826:2677], sessionId# [0:0:0] 2025-05-29T15:32:04.573042Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][0:0][72075186224037888][1:824:2672] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-05-29T15:32:04.573074Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:154: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:820:2672] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:32:04.573101Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][0:0][72075186224037888][1:824:2672] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:32:04.573111Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:154: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:820:2672] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-05-29T15:32:04.573142Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:176: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:820:2672] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] Test command err: 2025-05-29T15:32:03.761327Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:32:03.761372Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:32:03.761389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/000ec1/r3tmp/tmpqLPK5C/pdisk_1.dat 2025-05-29T15:32:03.938418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-05-29T15:32:03.939505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:32:03.939551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-05-29T15:32:03.939610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:32:03.939621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:32:03.939827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-05-29T15:32:03.939843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-05-29T15:32:03.939874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:32:03.939880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-05-29T15:32:03.939885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:03.939890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:03.941528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:32:03.941546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-05-29T15:32:03.941551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:03.941603Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:32:03.941607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:32:03.941611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-05-29T15:32:03.942364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:03.942999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:03.943106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:03.944104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-05-29T15:32:03.944325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-05-29T15:32:03.944333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-05-29T15:32:03.944398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-05-29T15:32:03.957931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-05-29T15:32:03.957951Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:03.958016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage 2025-05-29T15:32:03.958026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:03.960721Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532723407492 != 1748532723407496 2025-05-29T15:32:04.002557Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:32:04.002809Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:32:04.002867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:04.002891Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:04.015158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:04.086599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-29T15:32:04.086657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-05-29T15:32:04.086668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-05-29T15:32:04.086763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:32:04.086775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-05-29T15:32:04.086806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-29T15:32:04.086818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-05-29T15:32:04.086973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-29T15:32:04.086978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-29T15:32:04.087005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-29T15:32:04.087008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:572:2500], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-05-29T15:32:04.087052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:32:04.087057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046644480] TDone opId# 1:0 ProgressState 2025-05-29T15:32:04.087066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:32:04.087069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:32:04.087072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1:0 progress is 1/1 2025-05-29T15:32:04.087074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:32:04.087078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-29T15:32:04.087081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: 
TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-29T15:32:04.087084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1:0 2025-05-29T15:32:04.087087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1:0 2025-05-29T15:32:04.087093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-05-29T15:32:04.087097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-05-29T15:32:04.087101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-05-29T15:32:04.087482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-05-29T15:3 ... veACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" 
BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-05-29T15:32:04.529639Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:403: actor# [1:59:2106] Handle TEvNavigate describe path /Root/IncrBackupTable 2025-05-29T15:32:04.530346Z node 1 :TX_PROXY DEBUG: describe.cpp:272: Actor# [1:820:2672] HANDLE EvNavigateScheme /Root/IncrBackupTable 2025-05-29T15:32:04.530420Z node 1 :TX_PROXY DEBUG: describe.cpp:356: Actor# [1:820:2672] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-29T15:32:04.530428Z node 1 :TX_PROXY DEBUG: describe.cpp:435: Actor# [1:820:2672] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" 2025-05-29T15:32:04.530540Z node 1 :TX_PROXY DEBUG: describe.cpp:448: Actor# [1:820:2672] Handle TEvDescribeSchemeResult Forward to# [1:592:2518] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 
ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-05-29T15:32:04.530689Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:65: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:822:2674] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 
DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:32:04.530714Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:131: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:822:2674] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-05-29T15:32:04.530782Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:227: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:822:2674] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-05-29T15:32:04.530799Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:176: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:822:2674] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData >> TCdcStreamWithRebootsTests::DisableStream[TabletReboots] >> TCdcStreamWithRebootsTests::SplitTable[PipeResets] |76.2%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |76.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... 
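
Editor's note (not part of the captured log): the schemeshard records earlier in this section show the publish/ack bookkeeping for txId 1003 — each TEvUpdateAck logs "Publication in-flight, count: N" (counting 5 down to 1), and the ack that drains the counter logs "Publication complete, notify & remove". Below is a minimal standalone C++ sketch of that counter pattern; the names and structure are illustrative stand-ins, not the actual YDB implementation in schemeshard__publish_to_scheme_board.cpp.

```cpp
// Standalone sketch (not YDB source) of the publish/ack bookkeeping seen in the
// schemeshard log above: each ack for a transaction decrements an in-flight
// counter, and the publication completes when the counter reaches zero.
#include <cstdio>
#include <map>

struct TPublicationTracker {
    std::map<long long, int> InFlight; // txId -> outstanding path publications

    void StartPublication(long long txId, int paths) {
        InFlight[txId] = paths;
    }

    // Returns true when the last ack arrives
    // ("Publication complete, notify & remove" in the log).
    bool AckPublish(long long txId) {
        auto it = InFlight.find(txId);
        if (it == InFlight.end())
            return false; // unknown transaction, ignore the ack
        // The log prints the count before decrementing (5, 4, 3, 2, 1).
        std::printf("Publication in-flight, count: %d, txId: %lld\n",
                    it->second, txId);
        if (--it->second > 0)
            return false;
        InFlight.erase(it);
        std::printf("Publication complete, notify & remove, txId: %lld\n", txId);
        return true;
    }
};

int main() {
    TPublicationTracker tracker;
    tracker.StartPublication(1003, 5);          // five paths published, as in the log
    for (int i = 0; i < 5; ++i)
        tracker.AckPublish(1003);               // fifth ack completes the publication
}
```

The DecrementPathDbRefCount lines in the log are separate per-path reference bookkeeping; the sketch models only the per-transaction ack counter.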
results_accumulator.log} >> TCdcStreamWithRebootsTests::DropStreamExplicitReady[PipeResets] >> TCdcStreamWithRebootsTests::RacySplitAndDropTable[TabletReboots] |76.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest |76.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx Test command err: Trying to start YDB, gRPC: 25268, MsgBus: 26069 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002602/r3tmp/tmpSHz9LM/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25268, node 1 TClient is connected to server localhost:26069 TClient is connected to server localhost:26069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... VERIFY failed (2025-05-29T15:31:57.762519Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A92685 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A89686 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C2B416 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260C0152 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260BFA52 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260E168C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260E168C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260E168C 8. /-S/util/thread/pool.h:71: Process @ 0x260E168C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A9A009 10. /-S/util/thread/factory.h:15: Execute @ 0x13A989F9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A989F9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A93E6C 13. ??:0: ?? @ 0x7F9F28C8EAC2 14. ??:0: ?? @ 0x7F9F28D2084F ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink Test command err: Trying to start YDB, gRPC: 26759, MsgBus: 13220 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002600/r3tmp/tmpy01o68/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26759, node 1 TClient is connected to server localhost:13220 TClient is connected to server localhost:13220 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... VERIFY failed (2025-05-29T15:31:57.743251Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A92685 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A89686 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C2B416 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260C0152 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260BFA52 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260E168C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260E168C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260E168C 8. /-S/util/thread/pool.h:71: Process @ 0x260E168C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A9A009 10. /-S/util/thread/factory.h:15: Execute @ 0x13A989F9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A989F9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A93E6C 13. ??:0: ?? @ 0x7F877D38BAC2 14. ??:0: ?? @ 0x7F877D41D84F >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTable[PipeResets] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink Test command err: Trying to start YDB, gRPC: 29613, MsgBus: 11930 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025be/r3tmp/tmpfkppnv/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29613, node 1 TClient is connected to server localhost:11930 TClient is connected to server localhost:11930 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... VERIFY failed (2025-05-29T15:31:58.649742Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A92685 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A89686 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C2B416 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260C0152 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260BFA52 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260E168C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260E168C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260E168C 8. /-S/util/thread/pool.h:71: Process @ 0x260E168C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A9A009 10. /-S/util/thread/factory.h:15: Execute @ 0x13A989F9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A989F9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A93E6C 13. ??:0: ?? @ 0x7F673FA0FAC2 14. ??:0: ?? @ 0x7F673FAA184F ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select Test command err: Trying to start YDB, gRPC: 4040, MsgBus: 29081 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025b6/r3tmp/tmpp2zhzV/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4040, node 1 TClient is connected to server localhost:29081 TClient is connected to server localhost:29081 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... VERIFY failed (2025-05-29T15:31:58.674476Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A92685 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A89686 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C2B416 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260C0152 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260BFA52 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260E168C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260E168C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260E168C 8. /-S/util/thread/pool.h:71: Process @ 0x260E168C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A9A009 10. /-S/util/thread/factory.h:15: Execute @ 0x13A989F9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A989F9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A93E6C 13. ??:0: ?? @ 0x7FC886AF3AC2 14. ??:0: ?? @ 0x7FC886B8584F ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink Test command err: Trying to start YDB, gRPC: 6340, MsgBus: 29424 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002632/r3tmp/tmpyR51Yi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6340, node 1 TClient is connected to server localhost:29424 TClient is connected to server localhost:29424 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... VERIFY failed (2025-05-29T15:31:57.825964Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A92685 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A89686 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C2B416 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260C0152 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260BFA52 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260E168C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260E168C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260E168C 8. /-S/util/thread/pool.h:71: Process @ 0x260E168C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A9A009 10. /-S/util/thread/factory.h:15: Execute @ 0x13A989F9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A989F9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A93E6C 13. ??:0: ?? @ 0x7F971B7D8AC2 14. ??:0: ?? @ 0x7F971B86A84F ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink Test command err: Trying to start YDB, gRPC: 17495, MsgBus: 6114 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025e6/r3tmp/tmpBZFI15/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17495, node 1 TClient is connected to server localhost:6114 TClient is connected to server localhost:6114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... VERIFY failed (2025-05-29T15:31:58.460047Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A92685 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A89686 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C2B416 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260C0152 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260BFA52 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260E168C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260E168C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260E168C 8. /-S/util/thread/pool.h:71: Process @ 0x260E168C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A9A009 10. /-S/util/thread/factory.h:15: Execute @ 0x13A989F9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A989F9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A93E6C 13. ??:0: ?? @ 0x7F65EB256AC2 14. ??:0: ?? @ 0x7F65EB2E884F ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink Test command err: Trying to start YDB, gRPC: 18389, MsgBus: 6543 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002655/r3tmp/tmpdU5I97/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18389, node 1 TClient is connected to server localhost:6543 TClient is connected to server localhost:6543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... VERIFY failed (2025-05-29T15:31:57.743240Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A92685 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A89686 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C2B416 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260C0152 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260BFA52 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260E168C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260E168C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260E168C 8. /-S/util/thread/pool.h:71: Process @ 0x260E168C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A9A009 10. /-S/util/thread/factory.h:15: Execute @ 0x13A989F9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A989F9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A93E6C 13. ??:0: ?? @ 0x7F5834DE0AC2 14. ??:0: ?? @ 0x7F5834E7284F ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink Test command err: Trying to start YDB, gRPC: 19939, MsgBus: 12047 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00260a/r3tmp/tmprGt9lX/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19939, node 1 TClient is connected to server localhost:12047 TClient is connected to server localhost:12047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... VERIFY failed (2025-05-29T15:31:57.775286Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A92685 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A89686 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C2B416 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260C0152 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260BFA52 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260E168C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260E168C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260E168C 8. /-S/util/thread/pool.h:71: Process @ 0x260E168C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A9A009 10. /-S/util/thread/factory.h:15: Execute @ 0x13A989F9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A989F9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A93E6C 13. ??:0: ?? @ 0x7FF0A2D1EAC2 14. ??:0: ?? @ 0x7FF0A2DB084F ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink Test command err: Trying to start YDB, gRPC: 22636, MsgBus: 14358 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002612/r3tmp/tmpoOgUU4/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22636, node 1 TClient is connected to server localhost:14358 TClient is connected to server localhost:14358 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... VERIFY failed (2025-05-29T15:31:57.811822Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A92685 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A89686 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C2B416 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260C0152 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260BFA52 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260E168C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260E168C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260E168C 8. /-S/util/thread/pool.h:71: Process @ 0x260E168C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A9A009 10. /-S/util/thread/factory.h:15: Execute @ 0x13A989F9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A989F9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A93E6C 13. ??:0: ?? @ 0x7FB7945A3AC2 14. ??:0: ?? @ 0x7FB79463584F ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink Test command err: Trying to start YDB, gRPC: 25704, MsgBus: 4192 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002622/r3tmp/tmppcGARM/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25704, node 1 TClient is connected to server localhost:4192 TClient is connected to server localhost:4192 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... VERIFY failed (2025-05-29T15:31:57.743488Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A92685 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A89686 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C2B416 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260C0152 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260BFA52 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260E168C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260E168C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260E168C 8. /-S/util/thread/pool.h:71: Process @ 0x260E168C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A9A009 10. /-S/util/thread/factory.h:15: Execute @ 0x13A989F9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A989F9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A93E6C 13. ??:0: ?? @ 0x7FFBCB68BAC2 14. ??:0: ?? @ 0x7FFBCB71D84F ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink Test command err: Trying to start YDB, gRPC: 23191, MsgBus: 14223 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025e1/r3tmp/tmpVgVoef/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23191, node 1 TClient is connected to server localhost:14223 TClient is connected to server localhost:14223 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... VERIFY failed (2025-05-29T15:31:58.477460Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A92685 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A89686 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C2B416 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260C0152 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260BFA52 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260E168C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260E168C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260E168C 8. /-S/util/thread/pool.h:71: Process @ 0x260E168C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A9A009 10. /-S/util/thread/factory.h:15: Execute @ 0x13A989F9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A989F9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A93E6C 13. ??:0: ?? @ 0x7FF21FD21AC2 14. ??:0: ?? @ 0x7FF21FDB384F ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink Test command err: Trying to start YDB, gRPC: 17170, MsgBus: 19978 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025a8/r3tmp/tmpx0azyh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17170, node 1 TClient is connected to server localhost:19978 TClient is connected to server localhost:19978 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... VERIFY failed (2025-05-29T15:31:59.674614Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A92685 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A89686 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C2B416 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260C0152 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260BFA52 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260E168C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260E168C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260E168C 8. /-S/util/thread/pool.h:71: Process @ 0x260E168C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A9A009 10. /-S/util/thread/factory.h:15: Execute @ 0x13A989F9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A989F9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A93E6C 13. ??:0: ?? @ 0x7FBC7A341AC2 14. ??:0: ?? @ 0x7FBC7A3D384F ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl Test command err: Trying to start YDB, gRPC: 2002, MsgBus: 4152 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0025da/r3tmp/tmpv2SGKV/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2002, node 1 TClient is connected to server localhost:4152 TClient is connected to server localhost:4152 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... VERIFY failed (2025-05-29T15:31:58.466210Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A92685 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A89686 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C2B416 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260C0152 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260BFA52 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260E168C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260E168C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260E168C 8. /-S/util/thread/pool.h:71: Process @ 0x260E168C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13A9A009 10. /-S/util/thread/factory.h:15: Execute @ 0x13A989F9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13A989F9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A93E6C 13. ??:0: ?? @ 0x7F756C40EAC2 14. ??:0: ?? @ 0x7F756C4A084F >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] |76.3%| [TA] $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |76.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableExplicitReady[TabletReboots] >> TCdcStreamWithRebootsTests::CreateStreamWithInitialScan[TabletReboots] >> TCdcStreamWithRebootsTests::SplitTableResolvedTimestamps[TabletReboots] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:30:28.165760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:28.165780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:28.165786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:28.165791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: 
OperationsProcessing config: using default configuration 2025-05-29T15:30:28.165804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:28.165809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:28.165817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:28.165831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:28.165913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:28.165979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:28.177432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:28.177447Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:28.177507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:28.179568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:28.179590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:28.179613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:28.181667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:28.181722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:28.181803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:28.181953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:28.182502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:28.182535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:28.182715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:28.182721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:28.182763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:28.182770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:28.182775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:28.182788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:28.183798Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:28.196124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:28.196172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.196206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:28.196234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:28.196241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.196686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:28.196701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:28.196725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.196731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:28.196734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:28.196737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 
1:0 2 -> 3 2025-05-29T15:30:28.197009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.197017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:28.197022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:28.197239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.197244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:28.197248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:28.197252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:28.197647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:28.197953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:28.197974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:28.198096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:28.198117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:28.198122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:28.198161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 
0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:07.516605Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:07.516649Z node 107 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 51us result status StatusSuccess 2025-05-29T15:32:07.516794Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 
TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:07.527091Z node 107 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:833:2676] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-05-29T15:32:07.527126Z node 107 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409549:2][107:786:2676] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-05-29T15:32:07.527167Z node 107 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:833:2676] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1748532727514035 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: 
[OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1748532727514035 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1748532727514035 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:32:07.528070Z node 107 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:833:2676] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-05-29T15:32:07.528090Z node 107 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409549:2][107:786:2676] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 }
|76.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest
>> TCdcStreamWithRebootsTests::RacySplitTableAndCreateStream[PipeResets]
>> TCdcStreamWithRebootsTests::DropStream[PipeResets]
>> TCdcStreamWithRebootsTests::InitialScan[PipeResets]
>> TCdcStreamWithRebootsTests::CreateStream[PipeResets]
>> TCdcStreamWithRebootsTests::CreateDropRecreate[TabletReboots]
>> TCdcStreamWithRebootsTests::DropStreamOnIndexTable[PipeResets]
|76.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest
|76.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest
|76.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest
>> TCdcStreamWithRebootsTests::DropStreamOnIndexTableExplicitReady[PipeResets] [GOOD]
>> TCdcStreamWithRebootsTests::InitialScan[PipeResets] [GOOD]
>> TSolomonReboots::CreateDropSolomonWithReboots [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::InitialScan[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:08.795487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488:
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:08.795510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:08.795516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:08.795521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:08.795535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:08.795539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:08.795550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:08.795564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:08.795662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:08.795747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:08.810982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:08.811000Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:08.811087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:08.813388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:08.813412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:08.813435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:08.816237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:08.816313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:08.816422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.816614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2025-05-29T15:32:08.817275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:08.817311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:08.817542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:08.817551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:08.817584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:08.817592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:08.817600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:08.817618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:08.818835Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:08.837786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:08.837832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.837870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:08.837900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:08.837907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.838341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.838356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: 
StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:08.838387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.838393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:08.838411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:08.838415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:08.838675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.838682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:08.838686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:08.838957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.838972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.838978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:08.838982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:08.839488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:08.839789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:08.839812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:08.839952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.839971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 
TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:08.839977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:08.840022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976715657, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:09.129633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-05-29T15:32:09.129642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true FAKE_COORDINATOR: Erasing txId 281474976715657 2025-05-29T15:32:09.129769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6285: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 134 } } CommitVersion { Step: 5000005 TxId: 281474976715657 } 2025-05-29T15:32:09.129775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409546, partId: 1 2025-05-29T15:32:09.129794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 134 } } CommitVersion { Step: 5000005 TxId: 281474976715657 } 2025-05-29T15:32:09.129810Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 134 } } CommitVersion { Step: 5000005 TxId: 281474976715657 } 2025-05-29T15:32:09.130190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 332 RawX2: 8589936910 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-05-29T15:32:09.130200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409546, partId: 1 2025-05-29T15:32:09.130216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944, message: Source { RawX1: 332 RawX2: 8589936910 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 
2025-05-29T15:32:09.130222Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:32:09.130235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 332 RawX2: 8589936910 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-05-29T15:32:09.130244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:09.130249Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:32:09.130254Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:32:09.130260Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976715657:1 129 -> 240 2025-05-29T15:32:09.130940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2025-05-29T15:32:09.130964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2025-05-29T15:32:09.130978Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:32:09.130994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:32:09.131055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:32:09.131062Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 281474976715657:1 ProgressState 2025-05-29T15:32:09.131073Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715657:1 progress is 3/3 2025-05-29T15:32:09.131077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:32:09.131083Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715657:1 progress is 3/3 2025-05-29T15:32:09.131087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:32:09.131092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2025-05-29T15:32:09.131097Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:32:09.131103Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:0 2025-05-29T15:32:09.131107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715657:0 2025-05-29T15:32:09.131116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:09.131124Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:1 2025-05-29T15:32:09.131128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715657:1 2025-05-29T15:32:09.131142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:09.131146Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:2 2025-05-29T15:32:09.131150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715657:2 2025-05-29T15:32:09.131155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:09.131654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:09.131663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:09.131715Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:09.131733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:09.131737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [2:763:2679] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:09.131800Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:09.131835Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 41us result status StatusSuccess 2025-05-29T15:32:09.131929Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 ScanProgress { ShardsTotal: 1 ShardsCompleted: 1 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |76.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamOnIndexTable[TabletReboots] >> TCdcStreamWithRebootsTests::CreateStreamWithAwsRegion[PipeResets] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableExplicitReady[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:03.326982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:03.327003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:03.327007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching 
config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:03.327010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:03.327020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:03.327022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:03.327028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:03.327036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:03.327107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:03.327175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:03.345414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:03.345432Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:03.345494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:03.347600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:03.347619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:03.348101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:03.352339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:03.352397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:03.354817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.355786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:03.356493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:03.356539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:03.358329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:03.358344Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:03.358372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:03.358381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:03.358396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:03.358417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:03.359548Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:03.374964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:03.376324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.376394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:03.376432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:03.376441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:03.377084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:03.377098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:03.377104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:03.377402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:03.377669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:03.377684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:03.378096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:03.378409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:03.378440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:03.378593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.378609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:03.378615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:03.379661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:09.081877Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.081883Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:09.081886Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2025-05-29T15:32:09.081888Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:09.081916Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.081936Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.081938Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:09.081941Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:09.081943Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:09.081948Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2025-05-29T15:32:09.082032Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.082037Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:09.082040Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2025-05-29T15:32:09.082459Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.082484Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:32:09.082503Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:32:09.082522Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply 
complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.082582Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.082596Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.082601Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:32:09.082612Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 4/4 2025-05-29T15:32:09.082616Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2025-05-29T15:32:09.082621Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 4/4 2025-05-29T15:32:09.082625Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2025-05-29T15:32:09.082630Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/4, is published: true 2025-05-29T15:32:09.082635Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2025-05-29T15:32:09.082644Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:32:09.082649Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:32:09.082666Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:09.082670Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:1 2025-05-29T15:32:09.082672Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:1 2025-05-29T15:32:09.082676Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:09.082679Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:2 2025-05-29T15:32:09.082681Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:2 2025-05-29T15:32:09.082685Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-29T15:32:09.082687Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:3 2025-05-29T15:32:09.082689Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:3 2025-05-29T15:32:09.082695Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-05-29T15:32:09.082828Z node 23 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:32:09.082834Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-05-29T15:32:09.082842Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-29T15:32:09.082849Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-29T15:32:09.082854Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:32:09.083229Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.083257Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.083268Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.083280Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.083286Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.083814Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:32:09.083856Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:09.083860Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:32:09.083898Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:09.083910Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:09.083913Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [23:823:2727] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:32:09.083958Z node 23 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:09.083982Z node 23 
:SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 31us result status StatusPathDoesNotExist 2025-05-29T15:32:09.084007Z node 23 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 5]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table/Index/indexImplTable" LastExistedPrefixPathId: 5 LastExistedPrefixDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeSyncIndexImplTable ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TCdcStreamWithRebootsTests::DropStreamCreatedWithInitialScan[TabletReboots]
|76.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest
>> TCdcStreamWithRebootsTests::CreateStreamWithVirtualTimestamps[PipeResets] [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess [FAIL]
Test command err:
2025-05-29T15:31:54.164016Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:31:54.164093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:31:54.164113Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002157/r3tmp/tmp29b6x1/pdisk_1.dat 2025-05-29T15:31:54.554403Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19765, node 1 2025-05-29T15:31:55.487820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:55.487838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:55.487841Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:55.487898Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:31:55.506768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:31:55.597696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:55.597729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:55.633635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11914 2025-05-29T15:31:56.024430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:31:56.749626Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:31:56.760705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:56.760745Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:56.829288Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:31:56.829882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:57.000474Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:57.003226Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:57.003392Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-05-29T15:31:57.003423Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:57.003434Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:57.003468Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:57.003482Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:57.003513Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:57.003539Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:57.155679Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:57.155732Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:57.167327Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:57.205954Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:57.214564Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:31:57.214597Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:31:57.225872Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:31:57.231252Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:31:57.231300Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:31:57.231307Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:31:57.231313Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:31:57.231319Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:31:57.231324Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:31:57.231336Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:31:57.232112Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:31:57.248963Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:57.248990Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:1863:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:57.250862Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1878:2608] 2025-05-29T15:31:57.253133Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1895:2618] 
2025-05-29T15:31:57.253334Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1895:2618], schemeshard id = 72075186224037897 2025-05-29T15:31:57.256672Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-05-29T15:31:57.262551Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:31:57.262575Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:31:57.262589Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-05-29T15:31:57.266674Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:31:57.268713Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:31:57.268750Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:31:57.366619Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:31:57.469820Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:31:57.543029Z node 2 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:31:58.091505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2220:3063], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:58.091544Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:58.123406Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-05-29T15:31:58.327590Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2523:3115], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:58.327655Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:58.328210Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2528:3119]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:31:58.328255Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-05-29T15:31:58.328268Z node 1 :STATISTICS DEBUG: service_impl.cpp:1219: ConnectToSA(), pipe client id = [1:2530:3121] 2025-05-29T15:31:58.328714Z node 1 :STATISTICS DEBUG: service_impl.cpp:1248: SyncNode(), pipe client id = [1:2530:3121] 2025-05-29T15:31:58.328940Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:2531:2982] 2025-05-29T15:31:58.329021Z node 1 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 1, client id = [1:2530:3121], server id = [2:2531:2982], tablet id = 72075186224037894, status = OK 2025-05-29T15:31:58.329067Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:133: [72075186224037894] EvConnectNode, pipe server id = [2:2531:2982], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-05-29T15:31:58.330113Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:314: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-05-29T15:31:58.330244Z node 1 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 1 2025-05-29T15:31:58.330260Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 1, ReplyToActorId = [1:2528:3119], StatRequests.size() = 1 2025-05-29T15:31:58.341612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2535:3125], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:58.341647Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:58.341754Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2540:3130], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:58.343426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-29T15:31:58.453886Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:217: [72075186224037894] EvFastPropagateCheck 2025-05-29T15:31:58.453913Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:357: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-05-29T15:31:58.547613Z node 1 :STATISTICS DEBUG: service_impl.cpp:1189: EvRequestTimeout, pipe client id = [1:2530:3121], schemeshard count = 1 2025-05-29T15:31:58.841279Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2542:3132], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-29T15:31:58.921480Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:2651:3200] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:31:58.924175Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2674:3216]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:31:58.924210Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-05-29T15:31:58.924218Z node 1 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 2, ReplyToActorId = [1:2674:3216], StatRequests.size() = 1 2025-05-29T15:31:58.969173Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:2663:3211], status: INTERNAL_ERROR, issues:
: Fatal: Default error
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:31:58.969724Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzU1ZTY1NzUtZWQwNzhhMDktOTI5OWUzYjgtNDBkN2M4NDc=, ActorId: [1:2533:3123], ActorState: ExecuteState, TraceId: 01jweaqtq7bdy4gramrwcn2dg0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
assertion failed at ydb/core/statistics/ut_common/ut_common.cpp:237, Ydb::StatusIds::StatusCode NKikimr::NStat::ExecuteYqlScript(TTestEnv &, const TString &, bool): (response.operation().status() == Ydb::StatusIds::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS) , with diff: (INT|SUCC)E(RNAL_ERROR|SS)
TBackTrace::Capture()+28 (0x137DB91C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x1398F6A9)
NKikimr::NStat::ExecuteYqlScript(NKikimr::NStat::TTestEnv&, TBasicString> const&, bool)+1783 (0x25E91E37)
NKikimr::NStat::CreateUniformTable(NKikimr::NStat::TTestEnv&, TBasicString> const&, TBasicString> const&)+877 (0x25E92B0D)
NKikimr::NStat::NTestSuiteStatisticsSaveLoad::TTestCaseForbidAccess::Execute_(NUnitTest::TTestContext&)+453 (0x136D6EC5)
NKikimr::NStat::NTestSuiteStatisticsSaveLoad::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136DA847)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1399155E)
NKikimr::NStat::NTestSuiteStatisticsSaveLoad::TCurrentTest::Execute()+425 (0x136DA0A9)
NUnitTest::TTestFactory::Execute()+803 (0x13991CD3)
NUnitTest::RunMain(int, char**)+3021 (0x1399FFED)
??+0 (0x7F775B4FFD90)
__libc_start_main+128 (0x7F775B4FFE40)
_start+41 (0x12830029)
>> TCdcStreamWithRebootsTests::CreateStreamExplicitReady[PipeResets] [GOOD]
>> TCdcStreamWithRebootsTests::DropStreamCreatedWithInitialScan[PipeResets] [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TSolomonReboots::CreateDropSolomonWithReboots [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:31:43.677062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:31:43.677083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:31:43.677087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:43.677091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:43.677095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:43.677098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:43.677104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:43.677116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:43.677187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:43.677253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:43.687034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:43.687054Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:43.687143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:43.689282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:43.689304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:43.689330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:43.691742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:43.691810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:43.691896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:43.692085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:43.692807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:43.692841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:43.693057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:43.693064Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:43.693102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:43.693107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:43.693112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:43.693125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:43.694075Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:43.712910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:43.712984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:43.713035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:43.713080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:43.713090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:43.713738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:43.713762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:43.713811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:43.713820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:43.713826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:43.713831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:43.714276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:43.714289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:43.714294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:43.714640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:43.714652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:43.714657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:43.714663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:43.715327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:43.715765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:43.715799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:43.715976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:43.716003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:43.716025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:43.716096Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
7594046678944 2025-05-29T15:32:09.265680Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [106:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-05-29T15:32:09.265683Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [106:206:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2025-05-29T15:32:09.265715Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.265720Z node 106 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:416: [72057594046678944] TDeleteParts opId# 1004:0 ProgressState 2025-05-29T15:32:09.265725Z node 106 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:32:09.265728Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:32:09.265731Z node 106 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 1/1 2025-05-29T15:32:09.265733Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:32:09.265735Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-05-29T15:32:09.265738Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-29T15:32:09.265741Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:32:09.265743Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:32:09.265763Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:09.265766Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2025-05-29T15:32:09.265769Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-05-29T15:32:09.265771Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-05-29T15:32:09.265824Z node 106 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.265830Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.265833Z node 106 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: 
Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:09.265836Z node 106 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:32:09.265840Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:32:09.265881Z node 106 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.265886Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.265889Z node 106 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:09.265891Z node 106 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:32:09.265893Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:32:09.265898Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-05-29T15:32:09.266204Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:32:09.266213Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:32:09.266463Z node 106 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:32:09.266518Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:09.266556Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:32:09.266588Z node 106 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-05-29T15:32:09.266606Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:32:09.266621Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard 
deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409546 2025-05-29T15:32:09.266838Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:32:09.266845Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:32:09.266852Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 Forgetting tablet 72075186233409547 2025-05-29T15:32:09.266910Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.266936Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.267333Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:32:09.267341Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:32:09.267361Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:32:09.267365Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:32:09.267403Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:32:09.267432Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:09.267436Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:32:09.267472Z node 106 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:09.267483Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:09.267486Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [106:449:2420] TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-05-29T15:32:09.267520Z node 106 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:32:09.267527Z node 106 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 2025-05-29T15:32:09.267565Z node 106 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme 
DoExecute, record: Path: "MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-05-29T15:32:09.267582Z node 106 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "MyRoot/Solomon" took 23us result status StatusPathDoesNotExist
2025-05-29T15:32:09.267604Z node 106 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithVirtualTimestamps[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:32:03.326995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:32:03.327022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:03.327027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:32:03.327031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:32:03.327038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:32:03.327041Z node 1 :FLAT_TX_SCHEMESHARD
NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:03.327047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:03.327058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:03.327140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:03.327203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:03.345415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:03.345433Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:03.345508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:03.347704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:03.347724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:03.348109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:03.352310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:03.352363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:03.354816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.355763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:03.356292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:03.356326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:03.358200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:03.358213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:03.358232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:03.358237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:03.358241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:03.358252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:03.359553Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:03.375306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:03.376316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.376367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:03.376417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:03.376425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:03.377067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:03.377078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:03.377082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:03.377348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377355Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:03.377616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:03.377629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:03.378282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:03.378645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:03.378666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:03.378814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.378829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:03.378833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:03.379688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.614334Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 1003:2 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-05-29T15:32:09.614375Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:2 128 -> 240 2025-05-29T15:32:09.614421Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:09.614436Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:09.615060Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:09.615166Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:09.615175Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:32:09.615207Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:32:09.615239Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:09.615244Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:32:09.615249Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2025-05-29T15:32:09.615353Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:09.615362Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:2 ProgressState 2025-05-29T15:32:09.615374Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:09.615378Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:09.615384Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:09.615388Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:09.615392Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: false 2025-05-29T15:32:09.615398Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 
2025-05-29T15:32:09.615403Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:09.615408Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:09.615420Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:09.615425Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:09.615428Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:09.615442Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:09.615446Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:09.615450Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:09.615460Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:09.615465Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:32:09.615470Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 4 2025-05-29T15:32:09.615476Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-05-29T15:32:09.615683Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:09.615700Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:09.615705Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:09.615710Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:32:09.615715Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:09.615999Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:09.616022Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:09.616028Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:09.616033Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:32:09.616038Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:09.616053Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:09.617081Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:09.617125Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:09.619135Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:09.619147Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:09.619220Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:09.619245Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:09.619251Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:661:2578] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:09.619334Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:09.619377Z node 26 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 51us result status StatusSuccess 2025-05-29T15:32:09.619481Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 
CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: true AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|76.4%| [TA] $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|76.4%| [TA] {RESULT} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamExplicitReady[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:32:03.326982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:32:03.327003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:03.327006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:32:03.327010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:32:03.327024Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:03.327027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:03.327033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:03.327041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:03.327112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:03.327173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:03.345420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:03.345438Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:03.345503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:03.347622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:03.347642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:03.348103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:03.352354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:03.352401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:03.354797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.355785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:03.356493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:03.356525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:03.358206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:03.358218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:03.358234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:03.358239Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:03.358243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:03.358254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:03.359258Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:03.374656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:03.376311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.376368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:03.376414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:03.376422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:03.377079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:03.377092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:03.377097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:03.377440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:03.377693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:03.377706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:03.378118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:03.378451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:03.378482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:03.378629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.378650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:03.378655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:03.379657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.749122Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 1003:2 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-05-29T15:32:09.749158Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:2 128 -> 240 2025-05-29T15:32:09.749178Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:09.749187Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:09.749783Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:09.749879Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:09.749886Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:32:09.749907Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:32:09.749934Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:09.749940Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:32:09.749944Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2025-05-29T15:32:09.750032Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:09.750038Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:2 ProgressState 2025-05-29T15:32:09.750045Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:09.750051Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:09.750057Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:09.750059Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:09.750064Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: false 2025-05-29T15:32:09.750067Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 
2025-05-29T15:32:09.750071Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:09.750074Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:09.750082Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:09.750084Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:09.750086Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:09.750098Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:09.750104Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:09.750108Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:09.750117Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:09.750122Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:32:09.750126Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 4 2025-05-29T15:32:09.750129Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-05-29T15:32:09.750311Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:09.750323Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:09.750326Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:09.750329Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:32:09.750332Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:09.750634Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:09.750647Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:09.750653Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:09.750655Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:32:09.750658Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:09.750669Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:09.751791Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:09.751826Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:09.753475Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:09.753483Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:09.753533Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:09.753547Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:09.753550Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:661:2578] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:09.753609Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:09.753644Z node 26 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 41us result status StatusSuccess 2025-05-29T15:32:09.753714Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 
CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::RacySplitTableAndCreateStream[TabletReboots] >> TCdcStreamWithRebootsTests::MergeTable[PipeResets] >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithVirtualTimestamps[TabletReboots] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamCreatedWithInitialScan[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:04.606083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:04.606104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:04.606110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:04.606116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:04.606128Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:04.606133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:04.606142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:04.606155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:04.606256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:04.606327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:04.621365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:04.621386Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:04.621488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:04.624056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:04.624084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:04.624113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:04.627013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:04.627089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:04.627203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:04.627387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:04.628088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:04.628130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:04.628372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:04.628381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:04.628408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:04.628415Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:04.628422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:04.628442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:04.629663Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:04.650158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:04.650233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:04.650293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:04.650338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:04.650349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:04.651206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:04.651236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:04.651298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:04.651310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:04.651315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:04.651321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:04.651816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:04.651829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:04.651835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:04.652243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:04.652255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:04.652262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:04.652269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:04.652973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:04.653395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:04.653439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:04.653655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:04.653681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:04.653688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:04.653744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
Id: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.926332Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:09.926337Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:32:09.926344Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:32:09.926394Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.926415Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:09.926419Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:09.926430Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-05-29T15:32:09.926505Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.926513Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:09.926517Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:32:09.926521Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:32:09.926531Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2025-05-29T15:32:09.926597Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.926608Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:09.926613Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2025-05-29T15:32:09.927319Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.927361Z node 22 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:32:09.927379Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.927445Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.927453Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:32:09.927469Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 4/4 2025-05-29T15:32:09.927479Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2025-05-29T15:32:09.927485Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 4/4 2025-05-29T15:32:09.927488Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2025-05-29T15:32:09.927493Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/4, is published: true 2025-05-29T15:32:09.927498Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2025-05-29T15:32:09.927504Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:32:09.927508Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:32:09.927534Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:09.927542Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:1 2025-05-29T15:32:09.927546Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:1 2025-05-29T15:32:09.927551Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:09.927556Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:2 2025-05-29T15:32:09.927560Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:2 2025-05-29T15:32:09.927565Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:32:09.927569Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:3 2025-05-29T15:32:09.927573Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:3 2025-05-29T15:32:09.927583Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:32:09.927643Z node 22 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.927666Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:32:09.927672Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:32:09.927681Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:32:09.927688Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:32:09.927694Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:32:09.927726Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:32:09.928137Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.928155Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.928166Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.928171Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:09.928649Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:32:09.928712Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:09.928721Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:32:09.928785Z node 22 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:09.928810Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:09.928816Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [22:757:2673] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:32:09.928896Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: 
false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:09.928931Z node 22 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 40us result status StatusPathDoesNotExist 2025-05-29T15:32:09.928968Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamExplicitReady[TabletReboots] >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithVirtualTimestamps[PipeResets] >> TCdcStreamWithRebootsTests::DropStreamExplicitReady[PipeResets] [GOOD] |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamExplicitReady[TabletReboots] >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableExplicitReady[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamExplicitReady[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:05.421397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:05.421412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:05.421416Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:05.421420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:05.421427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:05.421430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:05.421437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:05.421445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:05.421510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:05.421561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:05.431597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:05.431615Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:05.431690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:05.434077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:05.434103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:05.434131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:05.436706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:05.436776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:05.436888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:05.437043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:05.437659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:05.437693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:05.437922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:05.437933Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:05.437966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:05.437974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:05.437981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:05.438000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:05.438945Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:05.451169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:05.451217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.451256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:05.451285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:05.451292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.451756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:05.451771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:05.451802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.451807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:05.451810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:05.451814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:05.452072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.452078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:05.452081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:05.452331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.452336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.452339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:05.452343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:05.452784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:05.453143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:05.453176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:05.453317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:05.453336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:05.453341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:05.453380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
chemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:10.727241Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:10.727243Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:10.727245Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:32:10.727248Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:32:10.727281Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:10.727284Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:10.727286Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:10.727289Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:10.727344Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:10.727348Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:10.727351Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:32:10.727353Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:32:10.727358Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2025-05-29T15:32:10.727374Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:10.727377Z node 22 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:10.727381Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2025-05-29T15:32:10.727698Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.727720Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:32:10.728081Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.728130Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.728135Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:32:10.728142Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 3/3 2025-05-29T15:32:10.728145Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2025-05-29T15:32:10.728148Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 3/3 2025-05-29T15:32:10.728150Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2025-05-29T15:32:10.728153Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/3, is published: true 2025-05-29T15:32:10.728156Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2025-05-29T15:32:10.728159Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:32:10.728161Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:32:10.728179Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:10.728182Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:1 2025-05-29T15:32:10.728184Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:1 2025-05-29T15:32:10.728187Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:32:10.728190Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:2 2025-05-29T15:32:10.728192Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:2 2025-05-29T15:32:10.728197Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 
2025-05-29T15:32:10.728237Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:32:10.728256Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:32:10.728259Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:32:10.728265Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:32:10.728268Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:32:10.728271Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:32:10.728284Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:10.728301Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:10.728311Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:10.728321Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:10.728325Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:10.728642Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:32:10.728696Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:10.728700Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:32:10.728740Z node 22 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:10.728751Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:10.728754Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [22:755:2671] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:32:10.728799Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:10.728824Z node 22 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 31us result status StatusPathDoesNotExist 2025-05-29T15:32:10.728856Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::InitialScan[TabletReboots] |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableExplicitReady[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:04.924759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:04.924779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:04.924783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:04.924787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:04.924796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:04.924799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:04.924806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:04.924815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:04.924915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:04.924996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:04.934809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:04.934829Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:04.934911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:04.937129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:04.937151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:04.937176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:04.939411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:04.939470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:04.939563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:04.939726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:04.940236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:04.940270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:04.940459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:04.940467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:04.940491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:04.940496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:04.940501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:04.940514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:04.941486Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:04.954544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:04.954606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:04.954657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:04.954693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:04.954701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:04.955294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:04.955313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:04.955357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:04.955364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:04.955367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:04.955371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:04.955700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:04.955712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:04.955717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:04.956016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:04.956024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:04.956028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:04.956033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:04.956490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:04.956915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:04.956966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:04.957127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:04.957148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:04.957153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:04.957195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
: 72057594046678944, LocalPathId: 6] was 3 2025-05-29T15:32:11.180555Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-05-29T15:32:11.181102Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2025-05-29T15:32:11.181150Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:11.181156Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-05-29T15:32:11.181183Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-05-29T15:32:11.181214Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:11.181218Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 6 2025-05-29T15:32:11.181224Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 7 2025-05-29T15:32:11.181301Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944 2025-05-29T15:32:11.181310Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:3 ProgressState 2025-05-29T15:32:11.181321Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4 2025-05-29T15:32:11.181325Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:11.181330Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4 2025-05-29T15:32:11.181334Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:11.181339Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: false 2025-05-29T15:32:11.181343Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:11.181348Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:11.181352Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:11.181361Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:11.181365Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:11.181368Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:11.181373Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-05-29T15:32:11.181376Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:11.181379Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:11.181392Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:11.181396Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:3 2025-05-29T15:32:11.181399Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:3 2025-05-29T15:32:11.181406Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-05-29T15:32:11.181410Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:32:11.181414Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 4 2025-05-29T15:32:11.181417Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 7], 2 2025-05-29T15:32:11.181618Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:11.181632Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:11.181640Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:11.181645Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4 2025-05-29T15:32:11.181650Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-29T15:32:11.181836Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:11.181851Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard 
Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:11.181855Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:11.181859Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2025-05-29T15:32:11.181864Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-05-29T15:32:11.181875Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:11.183015Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:11.183061Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:11.184383Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:11.184392Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:11.184447Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:11.184462Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:11.184467Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:733:2639] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:11.184533Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:11.184570Z node 26 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 44us result status StatusSuccess 2025-05-29T15:32:11.184657Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 
1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableCreatedWithInitialScan[TabletReboots] |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::WithPqTransactions[PipeResets] |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::GetReadyStream[TabletReboots] |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStream[PipeResets] [GOOD] >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTable[PipeResets] [GOOD] |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamOnIndexTable[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStream[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:08.781510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 
1 2025-05-29T15:32:08.781533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:08.781539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:08.781544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:08.781556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:08.781561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:08.781570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:08.781587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:08.781686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:08.781756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:08.797094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:08.797109Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:08.797175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:08.799545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:08.799570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:08.799598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:08.802561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:08.802636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:08.802764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.802973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:08.803789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:08.803826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:08.804065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:08.804078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:08.804130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:08.804139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:08.804146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:08.804167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:08.805405Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:08.824885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:08.824947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.825000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:08.825050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:08.825062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.825635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.825654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:08.825701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-05-29T15:32:08.825712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:08.825717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:08.825722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:08.826107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.826122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:08.826128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:08.826483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.826498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.826505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:08.826511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:08.827258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:08.827641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:08.827674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:08.827862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.827886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:08.827893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:08.827945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... chemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:14.058908Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:14.058910Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:14.058914Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:32:14.058918Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:32:14.058965Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:14.058969Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:14.058971Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:14.058974Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:14.059033Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:14.059036Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:14.059039Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:32:14.059041Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:32:14.059046Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2025-05-29T15:32:14.059063Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 
18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004
2025-05-29T15:32:14.059066Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004
2025-05-29T15:32:14.059068Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true
2025-05-29T15:32:14.059332Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944
2025-05-29T15:32:14.059349Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3
2025-05-29T15:32:14.059756Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944
2025-05-29T15:32:14.059806Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944
2025-05-29T15:32:14.059811Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState
2025-05-29T15:32:14.059819Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 3/3
2025-05-29T15:32:14.059821Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3
2025-05-29T15:32:14.059825Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 3/3
2025-05-29T15:32:14.059827Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3
2025-05-29T15:32:14.059830Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/3, is published: true
2025-05-29T15:32:14.059833Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3
2025-05-29T15:32:14.059836Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0
2025-05-29T15:32:14.059839Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0
2025-05-29T15:32:14.059855Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4
2025-05-29T15:32:14.059858Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:1
2025-05-29T15:32:14.059860Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:1
2025-05-29T15:32:14.059863Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2
2025-05-29T15:32:14.059866Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:2
2025-05-29T15:32:14.059868Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:2
2025-05-29T15:32:14.059873Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1
2025-05-29T15:32:14.059914Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2
2025-05-29T15:32:14.059932Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-05-29T15:32:14.059936Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944
2025-05-29T15:32:14.059944Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1
2025-05-29T15:32:14.059950Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944
2025-05-29T15:32:14.059955Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-05-29T15:32:14.059977Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004
2025-05-29T15:32:14.059998Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004
2025-05-29T15:32:14.060009Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004
2025-05-29T15:32:14.060017Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004
2025-05-29T15:32:14.060020Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004
2025-05-29T15:32:14.060375Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 1004, wait until txId: 1004
TestWaitNotification wait txId: 1004
2025-05-29T15:32:14.060426Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion
2025-05-29T15:32:14.060431Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004
2025-05-29T15:32:14.060478Z node 22 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944
2025-05-29T15:32:14.060495Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult
2025-05-29T15:32:14.060499Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [22:755:2671]
TestWaitNotification: OK eventTxId 1004
2025-05-29T15:32:14.060543Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-05-29T15:32:14.060563Z node 22 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 25us result status StatusPathDoesNotExist
2025-05-29T15:32:14.060586Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTable[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:32:07.772689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:32:07.772715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:07.772720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:32:07.772726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:32:07.772745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:32:07.772749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:32:07.772759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:07.772776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:32:07.772892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:32:07.772991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:32:07.784169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true }
2025-05-29T15:32:07.784189Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:32:07.784259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062]
2025-05-29T15:32:07.786408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:32:07.786435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:32:07.786460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:32:07.789323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:32:07.789397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:32:07.789506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:32:07.789690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:32:07.790297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:32:07.790330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:32:07.790557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:32:07.790566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:32:07.790589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:32:07.790594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:32:07.790598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:32:07.790611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212]
IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212]
Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212]
2025-05-29T15:32:07.791696Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062]
2025-05-29T15:32:07.808232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:32:07.808306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:07.808382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:32:07.808436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:32:07.808448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:07.809242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:32:07.809266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:32:07.809319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:07.809328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:32:07.809334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:32:07.809339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:32:07.809743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:07.809754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:32:07.809759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:32:07.810140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:07.810152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:07.810162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:32:07.810175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:32:07.810833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:32:07.811392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:32:07.811429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062]
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:32:07.811626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:32:07.811649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:32:07.811656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:32:07.811709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ...
: 72057594046678944, LocalPathId: 6] was 3
2025-05-29T15:32:14.049942Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4
2025-05-29T15:32:14.050363Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944
2025-05-29T15:32:14.050418Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:32:14.050423Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 6]
2025-05-29T15:32:14.050446Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 7]
2025-05-29T15:32:14.050469Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:32:14.050473Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 6
2025-05-29T15:32:14.050476Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 7
2025-05-29T15:32:14.050531Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944
2025-05-29T15:32:14.050535Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:3 ProgressState
2025-05-29T15:32:14.050542Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4
2025-05-29T15:32:14.050545Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4
2025-05-29T15:32:14.050548Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4
2025-05-29T15:32:14.050550Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4
2025-05-29T15:32:14.050552Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: false
2025-05-29T15:32:14.050555Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4
2025-05-29T15:32:14.050559Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0
2025-05-29T15:32:14.050561Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0
2025-05-29T15:32:14.050570Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
2025-05-29T15:32:14.050572Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1
2025-05-29T15:32:14.050574Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1
2025-05-29T15:32:14.050577Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4
2025-05-29T15:32:14.050580Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2
2025-05-29T15:32:14.050581Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2
2025-05-29T15:32:14.050593Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4
2025-05-29T15:32:14.050598Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:3
2025-05-29T15:32:14.050600Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:3
2025-05-29T15:32:14.050606Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5
2025-05-29T15:32:14.050608Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0
2025-05-29T15:32:14.050611Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 4
2025-05-29T15:32:14.050613Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 7], 2
2025-05-29T15:32:14.050800Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003
2025-05-29T15:32:14.050811Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003
2025-05-29T15:32:14.050814Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003
2025-05-29T15:32:14.050818Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4
2025-05-29T15:32:14.050820Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3
2025-05-29T15:32:14.050956Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003
2025-05-29T15:32:14.050965Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003
2025-05-29T15:32:14.050967Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003
2025-05-29T15:32:14.050972Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2
2025-05-29T15:32:14.050975Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4
2025-05-29T15:32:14.050981Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0
2025-05-29T15:32:14.051824Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003
2025-05-29T15:32:14.051856Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003
TestModificationResult got TxId: 1003, wait until txId: 1003
TestWaitNotification wait txId: 1003
2025-05-29T15:32:14.052961Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion
2025-05-29T15:32:14.052968Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003
2025-05-29T15:32:14.053010Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944
2025-05-29T15:32:14.053020Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult
2025-05-29T15:32:14.053023Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:733:2639]
TestWaitNotification: OK eventTxId 1003
2025-05-29T15:32:14.053068Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-05-29T15:32:14.053097Z node 26 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 35us result status StatusSuccess
2025-05-29T15:32:14.053181Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TCdcStreamWithRebootsTests::InitialScan[TabletReboots] [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamOnIndexTable[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:32:08.838279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:32:08.838295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:08.838299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:32:08.838302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:32:08.838310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:32:08.838312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:32:08.838318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:08.838327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:32:08.838416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:32:08.838470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:32:08.849556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true }
2025-05-29T15:32:08.849576Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:32:08.849657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062]
2025-05-29T15:32:08.852057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:32:08.852082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:32:08.852105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:32:08.854469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:32:08.854556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:32:08.854662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:32:08.854858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:32:08.855472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:32:08.855505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:32:08.855693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:32:08.855701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:32:08.855725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:32:08.855731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:32:08.855736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:32:08.855752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212]
IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212]
Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212]
2025-05-29T15:32:08.856819Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062]
2025-05-29T15:32:08.876495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:32:08.876564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.876614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:32:08.876659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:32:08.876677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.877352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:32:08.877373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:32:08.877428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.877437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:32:08.877450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:32:08.877455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:32:08.877959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.877978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:32:08.877984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:32:08.878584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.878598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.878603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:32:08.878609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:32:08.879357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:32:08.879929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:32:08.879980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062]
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:32:08.880309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:32:08.880359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:32:08.880371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:32:08.880450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ...
72057594046678944, LocalPathId: 5] was 5
2025-05-29T15:32:14.375057Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 1004
2025-05-29T15:32:14.375062Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004
2025-05-29T15:32:14.375065Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6
2025-05-29T15:32:14.375067Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4
2025-05-29T15:32:14.375108Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004
2025-05-29T15:32:14.375131Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004
2025-05-29T15:32:14.375135Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004
2025-05-29T15:32:14.375141Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5
2025-05-29T15:32:14.375145Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4
2025-05-29T15:32:14.375153Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true
2025-05-29T15:32:14.375269Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004
2025-05-29T15:32:14.375276Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004
2025-05-29T15:32:14.375280Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true
2025-05-29T15:32:14.375755Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944
2025-05-29T15:32:14.375781Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3
2025-05-29T15:32:14.375798Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4
2025-05-29T15:32:14.375821Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944
2025-05-29T15:32:14.375893Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004
2025-05-29T15:32:14.375908Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944
2025-05-29T15:32:14.375913Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState
2025-05-29T15:32:14.375924Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 4/4
2025-05-29T15:32:14.375928Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4
2025-05-29T15:32:14.375932Z node 23 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 4/4
2025-05-29T15:32:14.375936Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4
2025-05-29T15:32:14.375940Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/4, is published: true
2025-05-29T15:32:14.375945Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4
2025-05-29T15:32:14.375950Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0
2025-05-29T15:32:14.375955Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0
2025-05-29T15:32:14.375977Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4
2025-05-29T15:32:14.375984Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:1
2025-05-29T15:32:14.375988Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:1
2025-05-29T15:32:14.375993Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
2025-05-29T15:32:14.375997Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:2
2025-05-29T15:32:14.376000Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:2
2025-05-29T15:32:14.376005Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2
2025-05-29T15:32:14.376009Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:3
2025-05-29T15:32:14.376012Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:3
2025-05-29T15:32:14.376021Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1
2025-05-29T15:32:14.376130Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-05-29T15:32:14.376137Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944
2025-05-29T15:32:14.376147Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1
2025-05-29T15:32:14.376152Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944
2025-05-29T15:32:14.376157Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3
2025-05-29T15:32:14.376508Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004
2025-05-29T15:32:14.376530Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004
2025-05-29T15:32:14.376542Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004
2025-05-29T15:32:14.376552Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004
2025-05-29T15:32:14.376558Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004
2025-05-29T15:32:14.376974Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 1004, wait until txId: 1004
TestWaitNotification wait txId: 1004
2025-05-29T15:32:14.377024Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion
2025-05-29T15:32:14.377032Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004
2025-05-29T15:32:14.377082Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944
2025-05-29T15:32:14.377096Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult
2025-05-29T15:32:14.377100Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [23:823:2727]
TestWaitNotification: OK eventTxId 1004
2025-05-29T15:32:14.377162Z node 23 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-05-29T15:32:14.377192Z node 23 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 39us result status StatusPathDoesNotExist
2025-05-29T15:32:14.377229Z node 23 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 5]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table/Index/indexImplTable" LastExistedPrefixPathId: 5 LastExistedPrefixDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeSyncIndexImplTable ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TCdcStreamWithRebootsTests::CreateStream[PipeResets] [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::InitialScan[TabletReboots] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:32:11.520004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:32:11.520025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:11.520031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:32:11.520035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:32:11.520047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:32:11.520051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:32:11.520059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:11.520072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:32:11.520162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:32:11.520239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:32:11.530053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true }
2025-05-29T15:32:11.530068Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:32:11.530122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062]
2025-05-29T15:32:11.531885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:32:11.531903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:32:11.531919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:32:11.533819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:32:11.533882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:32:11.533976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:32:11.534148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:32:11.534789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:32:11.534820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:32:11.535006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:32:11.535016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:32:11.535041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:32:11.535048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:32:11.535054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:32:11.535070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212]
IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212]
Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212]
2025-05-29T15:32:11.536117Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062]
2025-05-29T15:32:11.554102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:32:11.554153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:11.554199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:32:11.554228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:32:11.554235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:11.554734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:32:11.554771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:32:11.554809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:11.554816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:32:11.554819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:32:11.554822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:32:11.555155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:11.555162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:32:11.555166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:32:11.555399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:11.555404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:11.555407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:32:11.555411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:32:11.555817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:32:11.556132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:32:11.556155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062]
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:32:11.556273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:32:11.556289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:32:11.556293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:32:11.556330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ...
944, cookie: 281474976715657
2025-05-29T15:32:14.820144Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657
2025-05-29T15:32:14.820147Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976715657
2025-05-29T15:32:14.820150Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976715657, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5
2025-05-29T15:32:14.820152Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4
2025-05-29T15:32:14.820186Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657
2025-05-29T15:32:14.820191Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657
2025-05-29T15:32:14.820193Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976715657
2025-05-29T15:32:14.820196Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976715657, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5
2025-05-29T15:32:14.820198Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6
2025-05-29T15:32:14.820202Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true
2025-05-29T15:32:14.820366Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 332 RawX2: 60129544462 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2
2025-05-29T15:32:14.820372Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409546, partId: 1
2025-05-29T15:32:14.820383Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944, message: Source { RawX1: 332 RawX2: 60129544462 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2
2025-05-29T15:32:14.820387Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944
2025-05-29T15:32:14.820394Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts
operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 332 RawX2: 60129544462 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-05-29T15:32:14.820403Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:14.820409Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:32:14.820414Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:32:14.820419Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976715657:1 129 -> 240 2025-05-29T15:32:14.821000Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:32:14.821015Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2025-05-29T15:32:14.821022Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2025-05-29T15:32:14.821030Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:32:14.821073Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:32:14.821077Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 281474976715657:1 ProgressState 2025-05-29T15:32:14.821084Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715657:1 progress is 3/3 2025-05-29T15:32:14.821086Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:32:14.821089Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715657:1 progress is 3/3 2025-05-29T15:32:14.821091Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:32:14.821094Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2025-05-29T15:32:14.821097Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:32:14.821100Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:0 2025-05-29T15:32:14.821102Z node 14 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715657:0 2025-05-29T15:32:14.821108Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:14.821110Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:1 2025-05-29T15:32:14.821112Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715657:1 2025-05-29T15:32:14.821122Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:14.821127Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:2 2025-05-29T15:32:14.821129Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715657:2 2025-05-29T15:32:14.821132Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:14.821487Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:14.821495Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:14.821536Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:14.821548Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:14.821551Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [14:763:2679] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:14.821601Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:14.821622Z node 14 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 27us result status StatusSuccess 2025-05-29T15:32:14.821689Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: 
EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 ScanProgress { ShardsTotal: 1 ShardsCompleted: 1 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithInitialScan[TabletReboots] >> TCdcStreamWithRebootsTests::MergeTableResolvedTimestamps[TabletReboots] >> TCdcStreamWithRebootsTests::SplitTable[TabletReboots] >> TCdcStreamWithRebootsTests::DisableStream[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStream[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:08.735534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:08.735556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:08.735561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:08.735565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:08.735577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:08.735580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:08.735587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:08.735597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:08.735702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:08.735780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:08.746031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:08.746050Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:08.746113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:08.748144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:08.748169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:08.748190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:08.750590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:08.750644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:08.750725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.750939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:08.751722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:08.751762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:08.751965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:08.751973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:08.751995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:08.752000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:08.752005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:08.752019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:08.753182Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:08.769375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:08.769429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.769492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:08.769529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:08.769537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.770040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.770057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:08.770093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.770101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:08.770106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:08.770110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:08.770454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.770463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:08.770467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:08.770836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.770850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.770861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:08.770870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:08.771347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:08.771680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:08.771708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:08.771859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.771885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:08.771893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:08.771942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-05-29T15:32:14.993771Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 1003:2 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-05-29T15:32:14.993803Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:2 128 -> 240 2025-05-29T15:32:14.993822Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:14.993829Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:14.994365Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:14.994455Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:14.994461Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:32:14.994485Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:32:14.994508Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:14.994511Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:32:14.994515Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2025-05-29T15:32:14.994602Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:14.994609Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:2 ProgressState 2025-05-29T15:32:14.994616Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:14.994619Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:14.994622Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:14.994624Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:14.994630Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: false 2025-05-29T15:32:14.994633Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 
2025-05-29T15:32:14.994637Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:14.994641Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:14.994648Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:14.994651Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:14.994653Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:14.994663Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:14.994666Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:14.994668Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:14.994674Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:14.994677Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:32:14.994680Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 4 2025-05-29T15:32:14.994682Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-05-29T15:32:14.994849Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:14.994860Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:14.994864Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:14.994867Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:32:14.994870Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:14.995113Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:14.995121Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:14.995126Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:14.995129Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:32:14.995131Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:14.995138Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:14.995818Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:14.995843Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:14.996930Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:14.996939Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:14.996980Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:14.996990Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:14.996993Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:661:2578] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:14.997039Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:14.997065Z node 26 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 32us result status StatusSuccess 2025-05-29T15:32:14.997126Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 
CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::CreateDropRecreate[PipeResets] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DisableStream[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:05.109106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:05.109127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:05.109133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:05.109138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:05.109151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:05.109155Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:05.109165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:05.109179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:05.109275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:05.109348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:05.119667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:05.119684Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:05.119760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:05.122176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:05.122204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:05.122230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:05.124584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:05.124648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:05.124737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:05.124884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:05.125385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:05.125414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:05.125587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:05.125595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:05.125615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:05.125620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:05.125625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:05.125638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:05.126607Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:05.140975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:05.141032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.141078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:05.141113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:05.141122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.141615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:05.141636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:05.141676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.141683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:05.141686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:05.141690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:05.142018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.142027Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:05.142030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:05.142312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.142320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.142324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:05.142328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:05.142795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:05.143109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:05.143137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:05.143275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:05.143293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:05.143298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:05.143338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
IND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 127 } } CommitVersion { Step: 5000005 TxId: 1004 } 2025-05-29T15:32:15.350668Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 127 } } CommitVersion { Step: 5000005 TxId: 1004 } 2025-05-29T15:32:15.350756Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 331 RawX2: 180388628749 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:32:15.350763Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 1 2025-05-29T15:32:15.350776Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1004:1, at schemeshard: 72057594046678944, message: Source { RawX1: 331 RawX2: 180388628749 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:32:15.350781Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 1004:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:32:15.350789Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 1004:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 331 RawX2: 180388628749 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:32:15.350797Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.350801Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 1004:1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.350805Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1004:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:32:15.350810Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1004:1 129 -> 240 2025-05-29T15:32:15.350911Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:15.350922Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 
72057594046678944, cookie: 1004 2025-05-29T15:32:15.350930Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:15.350935Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2025-05-29T15:32:15.350940Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:15.350984Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:15.350993Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:15.350997Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:15.351001Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:15.351005Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:15.351013Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/2, is published: true 2025-05-29T15:32:15.351818Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.351841Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.351909Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.351916Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:1 ProgressState 2025-05-29T15:32:15.351926Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:1 progress is 2/2 2025-05-29T15:32:15.351930Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 2/2 2025-05-29T15:32:15.351935Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:1 progress is 2/2 2025-05-29T15:32:15.351938Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 2/2 2025-05-29T15:32:15.351942Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/2, is published: true 2025-05-29T15:32:15.351946Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 2/2 2025-05-29T15:32:15.351951Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:32:15.351957Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:32:15.351965Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:15.351969Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:1 2025-05-29T15:32:15.351972Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:1 2025-05-29T15:32:15.351983Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:15.352041Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:15.352084Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:32:15.352494Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:15.352503Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:32:15.352556Z node 42 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:15.352570Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:15.352574Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [42:712:2629] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:32:15.352637Z node 42 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:15.352661Z node 42 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 32us result status StatusSuccess 2025-05-29T15:32:15.352743Z node 42 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } ChildrenExist: true } Children { Name: 
"streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateDisabled SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::CreateStreamWithAwsRegion[PipeResets] [GOOD] |76.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithInitialScan[PipeResets] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithAwsRegion[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:09.685783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:09.685801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:09.685804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:09.685808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:09.685817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:09.685820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:09.685827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:09.685836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:09.685900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:09.685960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:09.696182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:09.696205Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:09.696275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:09.698286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:09.698312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:09.698333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:09.701322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:09.701412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:09.701534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:09.701761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:09.702521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:09.702561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:09.702824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:09.702836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:09.702872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:09.702882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:09.702889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:09.702909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:09.704070Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:09.725402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:09.725466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.725520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:09.725564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:09.725576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.726057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:09.726078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:09.726116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.726122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:09.726125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:09.726129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:09.726439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.726449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:09.726454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:09.726859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.726870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.726873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:09.726878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:09.727319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:09.727635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:09.727661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:09.727811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:09.727834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:09.727841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:09.727892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
emeshard: 72057594046678944 2025-05-29T15:32:15.938057Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 1003:2 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-05-29T15:32:15.938084Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:2 128 -> 240 2025-05-29T15:32:15.938103Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:15.938110Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:15.938557Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:15.938637Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:15.938643Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:32:15.938664Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:32:15.938686Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:15.938689Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:32:15.938692Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2025-05-29T15:32:15.938785Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:15.938793Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:2 ProgressState 2025-05-29T15:32:15.938803Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:15.938808Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:15.938813Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:15.938816Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:15.938824Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: false 2025-05-29T15:32:15.938829Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:15.938835Z node 
26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:15.938839Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:15.938849Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:15.938853Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:15.938857Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:15.938868Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:15.938873Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:15.938876Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:15.938885Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:15.938889Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:32:15.938893Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 4 2025-05-29T15:32:15.938896Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-05-29T15:32:15.939064Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:15.939078Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:15.939083Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:15.939087Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:32:15.939090Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:15.939333Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:15.939345Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:15.939350Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:15.939354Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:32:15.939357Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:15.939366Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:15.940061Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:15.940087Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:15.941247Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:15.941254Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:15.941303Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:15.941317Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:15.941321Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:661:2578] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:15.941366Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:15.941391Z node 26 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 30us result status StatusSuccess 2025-05-29T15:32:15.941450Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 
CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeNewAndOldImages PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatDynamoDBStreamsJson VirtualTimestamps: false AwsRegion: "ru-central1" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::Attributes[PipeResets] >> TSolomonReboots::CreateAlterSolomonWithReboots [GOOD] >> TCdcStreamWithRebootsTests::RacySplitAndDropTable[PipeResets] >> TCdcStreamWithRebootsTests::WithoutPqTransactions[TabletReboots] >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithVirtualTimestamps[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TSolomonReboots::CreateAlterSolomonWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:35.424364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:35.424387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:35.424392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:35.424397Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:35.424403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:35.424406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:35.424415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:35.424426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:35.424516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:35.424582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:35.437826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:35.437845Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:35.437934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:35.440668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:35.440694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:35.440723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:35.443649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:35.443741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:35.443858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:35.444120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:35.444900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:35.444940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:35.445192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:35.445202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:35.445232Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:35.445240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:35.445246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:35.445266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:35.446661Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:35.466917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:35.466984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.467042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:35.467086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:35.467095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.467739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:35.467762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:35.467812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.467821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:35.467827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:35.467832Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:35.468212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.468221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:35.468227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:35.468547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.468554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:35.468559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:35.468566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:35.469197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:35.469514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:35.469545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:35.469719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:35.469739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:35.469753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:35.469808Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
32:16.574707Z node 170 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:32:16.574716Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:32:16.574721Z node 170 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2025-05-29T15:32:16.574726Z node 170 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:32:16.574729Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:16.574783Z node 170 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:32:16.574790Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1006 2025-05-29T15:32:16.574793Z node 170 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2025-05-29T15:32:16.574795Z node 170 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:32:16.574798Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:32:16.574803Z node 170 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2025-05-29T15:32:16.575088Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:32:16.575096Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:32:16.575099Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:32:16.575102Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:32:16.575304Z node 170 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting 
tablet 72075186233409548 2025-05-29T15:32:16.575458Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-29T15:32:16.575501Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:16.575590Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-05-29T15:32:16.575604Z node 170 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-05-29T15:32:16.575628Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-05-29T15:32:16.575642Z node 170 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-05-29T15:32:16.575787Z node 170 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-05-29T15:32:16.575827Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:16.575858Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:32:16.575945Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:32:16.575962Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409549 2025-05-29T15:32:16.576116Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:32:16.576134Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2025-05-29T15:32:16.576207Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:32:16.576214Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:32:16.576222Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:32:16.576455Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:32:16.576464Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-29T15:32:16.576768Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:32:16.576779Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-05-29T15:32:16.576795Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:32:16.576798Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-29T15:32:16.576805Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:32:16.576809Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:32:16.576834Z node 170 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2025-05-29T15:32:16.576875Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2025-05-29T15:32:16.576880Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2025-05-29T15:32:16.576920Z node 170 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2025-05-29T15:32:16.576933Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2025-05-29T15:32:16.576937Z node 170 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [170:555:2508] TestWaitNotification: OK eventTxId 1006 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2025-05-29T15:32:16.576979Z node 170 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:32:16.576988Z node 170 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-05-29T15:32:16.576993Z node 170 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2025-05-29T15:32:16.576999Z node 170 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2025-05-29T15:32:16.577049Z node 170 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme 
DoExecute, record: Path: "MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:16.577068Z node 170 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "MyRoot/Solomon" took 26us result status StatusPathDoesNotExist 2025-05-29T15:32:16.577093Z node 170 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithVirtualTimestamps[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:10.887499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:10.887515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:10.887519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:10.887522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:10.887531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:10.887533Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:10.887539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:10.887548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:10.887606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:10.887659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:10.896431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:10.896444Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:10.896499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:10.898567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:10.898590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:10.898609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:10.900793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:10.900846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:10.900935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:10.901066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:10.901661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:10.901693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:10.901857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:10.901864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:10.901883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:10.901887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:10.901892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:10.901916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:10.902849Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:10.919198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:10.919259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.919305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:10.919343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:10.919353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.919940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:10.919963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:10.920008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.920017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:10.920022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:10.920025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:10.920546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.920562Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:10.920568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:10.920953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.920962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.920968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:10.920973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:10.921506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:10.921863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:10.921885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:10.922008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:10.922026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:10.922030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:10.922066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
d: 72057594046678944, LocalPathId: 6] was 3 2025-05-29T15:32:17.151306Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-05-29T15:32:17.151868Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2025-05-29T15:32:17.151919Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:17.151925Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-05-29T15:32:17.151954Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-05-29T15:32:17.151988Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:17.151993Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 6 2025-05-29T15:32:17.151998Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 7 2025-05-29T15:32:17.152075Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944 2025-05-29T15:32:17.152085Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:3 ProgressState 2025-05-29T15:32:17.152096Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4 2025-05-29T15:32:17.152100Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:17.152105Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4 2025-05-29T15:32:17.152108Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:17.152113Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: false 2025-05-29T15:32:17.152118Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:17.152123Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:17.152128Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:17.152138Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:17.152141Z node 26 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:17.152143Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:17.152147Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-05-29T15:32:17.152149Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:17.152151Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:17.152161Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:17.152164Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:3 2025-05-29T15:32:17.152165Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:3 2025-05-29T15:32:17.152172Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-05-29T15:32:17.152175Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:32:17.152177Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 4 2025-05-29T15:32:17.152180Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 7], 2 2025-05-29T15:32:17.152360Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:17.152373Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:17.152380Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:17.152386Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4 2025-05-29T15:32:17.152390Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-29T15:32:17.152560Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:17.152570Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:17.152572Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:17.152575Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2025-05-29T15:32:17.152578Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-05-29T15:32:17.152585Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:17.153593Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:17.153628Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:17.154660Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:17.154669Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:17.154713Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:17.154724Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:17.154727Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:733:2639] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:17.154809Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:17.154839Z node 26 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 38us result status StatusSuccess 2025-05-29T15:32:17.154910Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 
ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: true AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TReplicationWithRebootsTests::CreateInParallelWithInitialController [GOOD] |76.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::GetReadyStream[PipeResets] >> TCdcStreamWithRebootsTests::CreateStreamWithVirtualTimestamps[TabletReboots] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::CreateInParallelWithInitialController [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:45.811479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:45.811503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:45.811509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:45.811514Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:45.811527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:45.811531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:45.811539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:45.811553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:45.811671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:45.811746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:45.826325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:45.826347Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:45.826465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:45.829192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:45.829224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:45.829252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:45.832092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:45.832175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:45.832308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:45.832475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:45.833158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:45.833211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:45.833471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:45.833484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:45.833520Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:45.833529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:45.833536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:45.833556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:45.834857Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:45.852887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:45.852977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.853037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:45.853081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:45.853090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.853845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:45.853867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:45.853923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.853931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:45.853935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:45.853940Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:45.854289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.854298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:45.854302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:45.854534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.854541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:45.854545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:45.854550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:45.855090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:45.855385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:45.855415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:45.855582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:45.855607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:45.855615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:45.855671Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
ipe::TEvServerConnected 2025-05-29T15:32:18.186506Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:32:18.186508Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:32:18.186514Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [131:750:2635], Recipient [131:131:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:32:18.186517Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:32:18.186520Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 2025-05-29T15:32:18.186524Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [131:424:2378], Recipient [131:131:2154]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1004 2025-05-29T15:32:18.186527Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:32:18.186530Z node 131 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:18.186537Z node 131 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:18.186539Z node 131 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [131:746:2631] 2025-05-29T15:32:18.186546Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [131:424:2378], Recipient [131:131:2154]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1005 2025-05-29T15:32:18.186548Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:32:18.186551Z node 131 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-05-29T15:32:18.186560Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [131:751:2636], Recipient [131:131:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:32:18.186562Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:32:18.186564Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 2025-05-29T15:32:18.186566Z node 131 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-05-29T15:32:18.186569Z node 131 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [131:746:2631] 2025-05-29T15:32:18.186579Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [131:752:2637], Recipient [131:131:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:32:18.186581Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 
2025-05-29T15:32:18.186583Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 2025-05-29T15:32:18.186624Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [131:753:2638], Recipient [131:131:2154]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-05-29T15:32:18.186626Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:32:18.186633Z node 131 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:18.186665Z node 131 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication1" took 29us result status StatusSuccess 2025-05-29T15:32:18.186720Z node 131 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication1" PathDescription { Self { Name: "Replication1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication1" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 ControllerId: 72075186233409547 State { StandBy { } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:18.186808Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [131:754:2639], Recipient [131:131:2154]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-05-29T15:32:18.186813Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:32:18.186819Z 
node 131 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:18.186833Z node 131 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication2" took 14us result status StatusSuccess 2025-05-29T15:32:18.186858Z node 131 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication2" PathDescription { Self { Name: "Replication2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication2" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 ControllerId: 72075186233409548 State { StandBy { } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:18.186910Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [131:755:2640], Recipient [131:131:2154]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-05-29T15:32:18.186913Z node 131 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:32:18.186918Z node 131 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:18.186927Z node 131 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication3" took 9us result status StatusSuccess 2025-05-29T15:32:18.186949Z node 131 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication3" PathDescription { Self { Name: "Replication3" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication 
CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication3" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 LocalId: 6 } Version: 1 ControllerId: 72075186233409549 State { StandBy { } } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TConsistentOpsWithReboots::CopyWithData [GOOD] >> TConsistentOpsWithReboots::DropWithData [GOOD] >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithInitialScan[PipeResets] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CopyWithData [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:39.748235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:39.748261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:39.748267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:39.748273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:39.748279Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:39.748283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:39.748293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:39.748307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:39.748407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:39.748488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:39.763587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:39.763611Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:39.763713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:39.766580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:39.766610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:39.766643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:39.769745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:39.769833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:39.769948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:39.770157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:39.771024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:39.771063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:39.771258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:39.771264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:39.771282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:39.771287Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:39.771291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:39.771304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:39.772370Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:39.784953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:39.785017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:39.785061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:39.785099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:39.785107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:39.785635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:39.785650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:39.785686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:39.785691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:39.785694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:39.785697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:39.786015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:39.786029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:39.786034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:39.786341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:39.786348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:39.786351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:39.786364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:39.786832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:39.787142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:39.787170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:39.787293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:39.787310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:39.787325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:39.787370Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
sLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:19.399698Z node 161 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/src1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:19.399717Z node 161 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB/src1" took 20us result status StatusSuccess 2025-05-29T15:32:19.399770Z node 161 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/src1" PathDescription { Self { Name: "src1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "src1" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:19.399808Z node 161 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/src2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:19.399819Z node 161 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/DirB/src2" took 12us result status StatusSuccess 2025-05-29T15:32:19.399853Z node 161 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/src2" PathDescription { Self { Name: "src2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "src2" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:19.399885Z node 161 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/dst1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:19.399895Z node 161 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB/dst1" took 10us result status StatusSuccess 2025-05-29T15:32:19.399928Z node 161 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/dst1" PathDescription { Self { Name: "dst1" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "dst1" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:19.399974Z node 161 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/dst2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:19.399986Z node 161 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB/dst2" took 13us result status StatusSuccess 2025-05-29T15:32:19.400019Z node 161 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/dst2" PathDescription { Self { Name: "dst2" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "dst2" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" 
KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |76.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::DropWithData [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:33.989773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:33.989791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:33.989795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:33.989798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:33.989802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing 
config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:33.989804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:33.989810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:33.989819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:33.989912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:33.989970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:33.999783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:33.999802Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:33.999869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:34.002065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:34.002087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:34.002113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:34.004349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:34.004422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:34.004493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:34.004655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:34.005276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:34.005305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:34.005514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:34.005521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:34.005543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:34.005550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:34.005557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:34.005575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:34.006615Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:34.019652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:34.019719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.019769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:34.019806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:34.019815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.020468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:34.020493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:34.020539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.020547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:34.020550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:34.020554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:34.020943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:31:34.020952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:34.020955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:34.021232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.021240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.021244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:34.021248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:34.021690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:34.022022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:34.022049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:34.022189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:34.022206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:34.022221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:34.022270Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
tabletId 72075186233409548 2025-05-29T15:32:19.738693Z node 145 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1006 TestWaitNotification: OK eventTxId 1007 wait until 72075186233409546 is deleted wait until 72075186233409548 is deleted 2025-05-29T15:32:19.738793Z node 145 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:32:19.738803Z node 145 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409548 2025-05-29T15:32:19.738860Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:19.738889Z node 145 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 36us result status StatusSuccess 2025-05-29T15:32:19.738961Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 14 } ChildrenExist: true } Children { Name: "dst2" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "src2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:19.739015Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/src1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:19.739032Z node 145 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/DirB/src1" took 19us result status StatusPathDoesNotExist 2025-05-29T15:32:19.739045Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirB/src1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirB\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/DirB/src1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirB" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:32:19.739079Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/src2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:19.739100Z node 145 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB/src2" took 22us result status StatusSuccess 2025-05-29T15:32:19.739161Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/src2" PathDescription { Self { Name: "src2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "src2" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 
72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:19.739202Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/dst1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:19.739213Z node 145 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB/dst1" took 11us result status StatusPathDoesNotExist 2025-05-29T15:32:19.739223Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirB/dst1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirB\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/DirB/dst1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirB" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-05-29T15:32:19.739252Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/dst2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:19.739263Z node 145 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB/dst2" took 11us result status StatusSuccess 2025-05-29T15:32:19.739297Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/dst2" PathDescription { Self { Name: "dst2" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "dst2" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: 
"key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::SplitTableResolvedTimestamps[PipeResets] >> TCdcStreamWithRebootsTests::GetReadyStream[PipeResets] [GOOD] |76.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::GetReadyStream[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:18.560040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:18.560058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:18.560062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:18.560066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: 
OperationsProcessing config: using default configuration 2025-05-29T15:32:18.560075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:18.560078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:18.560085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:18.560094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:18.560171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:18.560224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:18.569597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:18.569612Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:18.569673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:18.571889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:18.571912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:18.571929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:18.573697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:18.573744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:18.573822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:18.573931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:18.574352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:18.574377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:18.574549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:18.574555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:18.574576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:18.574580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:18.574584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:18.574597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:18.575477Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:18.588214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:18.588268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:18.588316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:18.588346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:18.588354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:18.588822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:18.588839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:18.588874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:18.588880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:18.588884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:18.588888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 
1:0 2 -> 3 2025-05-29T15:32:18.589189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:18.589198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:18.589201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:18.589423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:18.589429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:18.589433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:18.589437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:18.589935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:18.590230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:18.590255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:18.590405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:18.590437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:18.590442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:18.590477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
HEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 163 } } CommitVersion { Step: 5000005 TxId: 1004 } 2025-05-29T15:32:21.308636Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 163 } } CommitVersion { Step: 5000005 TxId: 1004 } 2025-05-29T15:32:21.308818Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:21.308866Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1004:1, at schemeshard: 72057594046678944, message: Source { RawX1: 330 RawX2: 51539609868 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:32:21.308872Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 1004:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:32:21.308882Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 1004:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 330 RawX2: 51539609868 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:32:21.308891Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:21.308895Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 1004:1, at schemeshard: 72057594046678944 2025-05-29T15:32:21.308900Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1004:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:32:21.308906Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1004:1 129 -> 240 2025-05-29T15:32:21.308973Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:21.309036Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:21.309042Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:21.309047Z node 12 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2025-05-29T15:32:21.309052Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:21.309165Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:21.309172Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:21.309177Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:21.309182Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-05-29T15:32:21.309195Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2025-05-29T15:32:21.309953Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2025-05-29T15:32:21.309981Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2025-05-29T15:32:21.310050Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:1, at schemeshard: 72057594046678944 2025-05-29T15:32:21.310056Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:1 ProgressState 2025-05-29T15:32:21.310067Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:1 progress is 3/3 2025-05-29T15:32:21.310071Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2025-05-29T15:32:21.310077Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:1 progress is 3/3 2025-05-29T15:32:21.310080Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2025-05-29T15:32:21.310085Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/3, is published: true 2025-05-29T15:32:21.310090Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2025-05-29T15:32:21.310095Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:32:21.310099Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:32:21.310108Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:21.310112Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:1 2025-05-29T15:32:21.310116Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:1 2025-05-29T15:32:21.310128Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:21.310132Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:2 2025-05-29T15:32:21.310136Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:2 2025-05-29T15:32:21.310142Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:21.310187Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:21.310209Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:32:21.310662Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:21.310673Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:32:21.310729Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:21.310763Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:21.310769Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [12:714:2631] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:32:21.310835Z node 12 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:21.310863Z node 12 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 34us result status StatusSuccess 2025-05-29T15:32:21.310952Z node 12 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 
SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TCdcStreamWithRebootsTests::DropStreamOnIndexTableExplicitReady[TabletReboots]
>> TCdcStreamWithRebootsTests::WithoutPqTransactions[PipeResets]
>> TCdcStreamWithRebootsTests::CreateStreamWithInitialScan[PipeResets] [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithInitialScan[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:16.439739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:16.439756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:16.439760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:16.439763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:16.439772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type
TxMergeTablePartition, limit 10000 2025-05-29T15:32:16.439775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:16.439781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:16.439791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:16.439859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:16.439916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:16.450959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:16.450977Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:16.451060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:16.452899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:16.452918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:16.452938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:16.455059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:16.455120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:16.455201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:16.455344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:16.455822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:16.455859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:16.456038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:16.456050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:16.456079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:16.456087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:16.456094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:16.456111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:16.457002Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:16.469858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:16.469909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:16.469958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:16.469991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:16.469998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:16.470482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:16.470499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:16.470535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:16.470541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:16.470545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:16.470548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:16.470863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:32:16.470871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:16.470874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:16.471109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:16.471116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:16.471119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:16.471124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:16.471570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:16.471950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:16.471976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:16.472098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:16.472116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:16.472121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:16.472157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
Board DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:32:22.602333Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:32:22.602358Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:22.602365Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:32:22.602368Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2025-05-29T15:32:22.602459Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944 2025-05-29T15:32:22.602465Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:3 ProgressState 2025-05-29T15:32:22.602474Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 3/4 2025-05-29T15:32:22.602476Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/4 2025-05-29T15:32:22.602480Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1103: All parts have reached barrier, tx: 1003, done: 3, blocked: 1 2025-05-29T15:32:22.602485Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_cdc_stream.cpp:400: [72057594046678944] TDone opId# 1003:2HandleReply TEvCompleteBarrier 2025-05-29T15:32:22.602492Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 4/4 2025-05-29T15:32:22.602494Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:22.602497Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4 2025-05-29T15:32:22.602499Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:22.602502Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/4, is published: false 2025-05-29T15:32:22.602504Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: false 2025-05-29T15:32:22.602507Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:22.602511Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:22.602513Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:22.602521Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:22.602524Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:22.602526Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:22.602529Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:22.602531Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:22.602533Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:22.602544Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:22.602549Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:3 2025-05-29T15:32:22.602551Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:3 2025-05-29T15:32:22.602558Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:22.602561Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:32:22.602564Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 4 2025-05-29T15:32:22.602566Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-05-29T15:32:22.602720Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:22.602730Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:22.602733Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:22.602753Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:32:22.602759Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:22.603012Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 
2025-05-29T15:32:22.603023Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:22.603026Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:22.603029Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:32:22.603032Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:22.603040Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:22.603686Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:22.603809Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:22.604669Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:22.604676Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:22.604723Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:22.604733Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:22.604737Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:661:2578] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:22.604787Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:22.604815Z node 26 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 35us result status StatusSuccess 2025-05-29T15:32:22.604881Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateScan SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::Attributes[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::Attributes[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:16.749141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:16.749167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:16.749175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:16.749180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:16.749193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-05-29T15:32:16.749197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:16.749207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:16.749220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:16.749310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:16.749376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:16.763412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:16.763433Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:16.763526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:16.766040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:16.766065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:16.766090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:16.768611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:16.768677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:16.768795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:16.768958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:16.769534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:16.769566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:16.769775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:16.769785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:16.769814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:16.769821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, 
domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:16.769828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:16.769846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:16.771053Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:16.793029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:16.793090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:16.793134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:16.793176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:16.793186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:16.793719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:16.793742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:16.793787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:16.793796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:16.793801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:16.793806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:16.794189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:16.794200Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:16.794205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:16.794549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:16.794558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:16.794564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:16.794570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:16.795256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:16.795558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:16.795579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:16.795698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:16.795714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:16.795718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:16.795753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
schemeshard: 72057594046678944 2025-05-29T15:32:22.994398Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 1003:2 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-05-29T15:32:22.994438Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:2 128 -> 240 2025-05-29T15:32:22.994456Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:22.994462Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:22.995066Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:22.995137Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:22.995141Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:32:22.995161Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:32:22.995183Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:22.995186Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:32:22.995189Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2025-05-29T15:32:22.995246Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:22.995250Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:2 ProgressState 2025-05-29T15:32:22.995257Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:22.995260Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:22.995263Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:22.995265Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:22.995271Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: false 2025-05-29T15:32:22.995274Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:22.995277Z 
node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:22.995279Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:22.995285Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:22.995288Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:22.995290Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:22.995302Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:22.995305Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:22.995306Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:22.995312Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:22.995315Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:32:22.995318Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 4 2025-05-29T15:32:22.995320Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-05-29T15:32:22.995440Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:22.995451Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:22.995456Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:22.995459Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:32:22.995464Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:22.995688Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:22.995697Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:22.995702Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:22.995705Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:32:22.995707Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:22.995714Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:22.996152Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:22.996589Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:22.997469Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:22.997476Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:22.997517Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:22.997528Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:22.997532Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:665:2582] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:22.997592Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:22.997623Z node 26 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 38us result status StatusSuccess 2025-05-29T15:32:22.997692Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: 
"streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "key" Value: "value" } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false UserAttributes { Key: "key" Value: "value" } AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::SplitTable[PipeResets] [GOOD] >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableCreatedWithInitialScan[PipeResets] >> TCdcStreamWithRebootsTests::GetReadyStream[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::SplitTable[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:05.262722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:05.262761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:05.262768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:05.262773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:05.262784Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:05.262788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:05.262798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:05.262810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:05.262902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:05.262965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:05.277020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:05.277038Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:05.277135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:05.279741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:05.279768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:05.279795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:05.282414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:05.282486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:05.282597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:05.282756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:05.283232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:05.283258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:05.283401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:05.283407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:05.283426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:05.283431Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:05.283434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:05.283446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:05.284230Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:05.296316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:05.296367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.296414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:05.296443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:05.296451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.296899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:05.296915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:05.296948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.296955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:05.296958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:05.296961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:05.297225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.297231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:05.297234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:05.297586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.297602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.297608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:05.297616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:05.298338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:05.298724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:05.298773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:05.298958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:05.298982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:05.298989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:05.299039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
g { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:23.089616Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:23.089661Z node 28 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 51us result status StatusSuccess 2025-05-29T15:32:23.089801Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:23.089854Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:23.089873Z node 28 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 20us result status StatusSuccess 2025-05-29T15:32:23.089931Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 2 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TCdcStreamWithRebootsTests::MergeTable[TabletReboots]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::GetReadyStream[TabletReboots] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:32:13.087766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:32:13.087781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:13.087785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:32:13.087788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:32:13.087795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit
10000 2025-05-29T15:32:13.087797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:13.087803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:13.087810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:13.087883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:13.087944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:13.096633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:13.096646Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:13.096695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:13.098351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:13.098369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:13.098385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:13.100387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:13.100446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:13.100524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:13.100667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:13.101190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:13.101222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:13.101408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:13.101417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:13.101442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:13.101448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a 
bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:13.101454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:13.101470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:13.102322Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:13.114323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:13.114361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:13.114396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:13.114434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:13.114440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:13.114826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:13.114839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:13.114868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:13.114874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:13.114877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:13.114880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:13.115163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:13.115170Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:13.115173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:13.115390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:13.115396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:13.115398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:13.115402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:13.115843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:13.116111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:13.116132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:13.116238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:13.116254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:13.116258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:13.116287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
X_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 147 } } CommitVersion { Step: 5000005 TxId: 1004 } 2025-05-29T15:32:23.760429Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:23.760435Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:23.760438Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:23.760441Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2025-05-29T15:32:23.760445Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:23.760535Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:23.760543Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:23.760546Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:23.760549Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:23.760551Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-05-29T15:32:23.760557Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2025-05-29T15:32:23.760584Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 336 RawX2: 188978563345 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:32:23.760587Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 1 2025-05-29T15:32:23.760595Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1004:1, at schemeshard: 72057594046678944, message: Source { RawX1: 336 RawX2: 188978563345 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 
0 Generation: 2 2025-05-29T15:32:23.760599Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 1004:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:32:23.760604Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 1004:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 336 RawX2: 188978563345 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:32:23.760609Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:23.760612Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 1004:1, at schemeshard: 72057594046678944 2025-05-29T15:32:23.760615Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1004:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:32:23.760618Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1004:1 129 -> 240 2025-05-29T15:32:23.761289Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2025-05-29T15:32:23.761307Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:23.761315Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:23.761325Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2025-05-29T15:32:23.761364Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:1, at schemeshard: 72057594046678944 2025-05-29T15:32:23.761369Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:1 ProgressState 2025-05-29T15:32:23.761376Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:1 progress is 3/3 2025-05-29T15:32:23.761378Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2025-05-29T15:32:23.761381Z node 44 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:1 progress is 3/3 2025-05-29T15:32:23.761383Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2025-05-29T15:32:23.761386Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/3, is published: true 2025-05-29T15:32:23.761389Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2025-05-29T15:32:23.761392Z node 44 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:32:23.761395Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:32:23.761400Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:23.761403Z node 44 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:1 2025-05-29T15:32:23.761405Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:1 2025-05-29T15:32:23.761415Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:23.761418Z node 44 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:2 2025-05-29T15:32:23.761420Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:2 2025-05-29T15:32:23.761423Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestWaitNotification wait txId: 1004 2025-05-29T15:32:23.761793Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:23.761798Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:32:23.761838Z node 44 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:23.761849Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:23.761852Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [44:715:2631] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:32:23.761895Z node 44 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:23.761917Z node 44 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 29us result status StatusSuccess 2025-05-29T15:32:23.761989Z node 44 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 
72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|76.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithResolvedTimestamps[PipeResets]
>> TCdcStreamWithRebootsTests::CreateStreamWithResolvedTimestamps[TabletReboots]
>> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithInitialScan[PipeResets] [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithInitialScan[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:32:20.030336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:32:20.030350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:20.030353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:32:20.030356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default
configuration 2025-05-29T15:32:20.030363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:20.030366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:20.030371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:20.030379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:20.030453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:20.030502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:20.039176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:20.039189Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:20.039242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:20.040959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:20.040979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:20.040998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:20.043565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:20.043628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:20.043750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:20.043906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:20.044491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:20.044519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:20.044699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:20.044708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:20.044730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2025-05-29T15:32:20.044737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:20.044742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:20.044757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:20.045860Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:20.057987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:20.058034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:20.058067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:20.058094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:20.058101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:20.058506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:20.058521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:20.058553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:20.058558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:20.058561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:20.058564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:20.058823Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:20.058829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:20.058832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:20.059135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:20.059149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:20.059153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:20.059157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:20.059615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:20.059909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:20.059929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:20.060041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:20.060057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:20.060062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:20.060098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
T15:32:26.264501Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 6 2025-05-29T15:32:26.264506Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 7 2025-05-29T15:32:26.264534Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:4, at schemeshard: 72057594046678944 2025-05-29T15:32:26.264539Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:4 ProgressState 2025-05-29T15:32:26.264546Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:4 progress is 4/5 2025-05-29T15:32:26.264549Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/5 2025-05-29T15:32:26.264553Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1103: All parts have reached barrier, tx: 1003, done: 4, blocked: 1 2025-05-29T15:32:26.264560Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_cdc_stream.cpp:400: [72057594046678944] TDone opId# 1003:3HandleReply TEvCompleteBarrier 2025-05-29T15:32:26.264571Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:4 progress is 4/5 2025-05-29T15:32:26.264578Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/5 2025-05-29T15:32:26.264582Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 5/5 2025-05-29T15:32:26.264585Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 5/5 2025-05-29T15:32:26.264590Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/5, is published: false 2025-05-29T15:32:26.264594Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 5/5, is published: false 2025-05-29T15:32:26.264599Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 5/5 2025-05-29T15:32:26.264605Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:26.264609Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:26.264620Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:26.264625Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:26.264628Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:26.264634Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 
2025-05-29T15:32:26.264638Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:26.264641Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:26.264646Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-05-29T15:32:26.264650Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:3 2025-05-29T15:32:26.264653Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:3 2025-05-29T15:32:26.264670Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:26.264674Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:4 2025-05-29T15:32:26.264677Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:4 2025-05-29T15:32:26.264687Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-05-29T15:32:26.264691Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:32:26.264696Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 4 2025-05-29T15:32:26.264699Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 7], 2 2025-05-29T15:32:26.264979Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:26.264995Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:26.265001Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:26.265006Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4 2025-05-29T15:32:26.265010Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-29T15:32:26.265322Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:26.265333Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:26.265336Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:26.265339Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2025-05-29T15:32:26.265341Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-05-29T15:32:26.265349Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:26.265953Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:26.266509Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:26.267240Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:26.267247Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:26.267290Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:26.267300Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:26.267303Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:733:2639] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:26.267353Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:26.267391Z node 26 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 48us result status StatusSuccess 2025-05-29T15:32:26.267477Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateScan SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::MergeTableResolvedTimestamps[PipeResets] >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableCreatedWithInitialScan[PipeResets] [GOOD] >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableCreatedWithInitialScan[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:23.970668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:23.970684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:23.970688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:23.970691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: 
OperationsProcessing config: using default configuration 2025-05-29T15:32:23.970700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:23.970703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:23.970709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:23.970718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:23.970811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:23.970878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:23.980396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:23.980414Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:23.980490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:23.982458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:23.982478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:23.982497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:23.984466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:23.984515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:23.984620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:23.984774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:23.985285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:23.985311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:23.985470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:23.985476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:23.985494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:23.985498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:23.985502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:23.985514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:23.986348Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:24.000008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:24.000065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:24.000108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:24.000143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:24.000152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:24.000715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:24.000736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:24.000773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:24.000780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:24.000783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:24.000787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 
1:0 2 -> 3 2025-05-29T15:32:24.001170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:24.001179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:24.001183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:24.001463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:24.001472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:24.001476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:24.001480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:24.001918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:24.002210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:24.002233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:24.002361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:24.002378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:24.002384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:24.002438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
678944, txId: 1004 2025-05-29T15:32:29.217319Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2025-05-29T15:32:29.217321Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:29.217353Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.217393Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.217397Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:29.217399Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:29.217402Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:29.217407Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/5, is published: true 2025-05-29T15:32:29.217424Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.217426Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:29.217429Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/5, is published: true 2025-05-29T15:32:29.217782Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:29.218282Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:32:29.218300Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:29.218315Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:29.218320Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:32:29.218326Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 5/5 2025-05-29T15:32:29.218329Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 5/5 2025-05-29T15:32:29.218332Z node 22 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 5/5 2025-05-29T15:32:29.218334Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 5/5 2025-05-29T15:32:29.218337Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 5/5, is published: true 2025-05-29T15:32:29.218340Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 5/5 2025-05-29T15:32:29.218343Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:32:29.218348Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:32:29.218365Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:29.218368Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:1 2025-05-29T15:32:29.218370Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:1 2025-05-29T15:32:29.218374Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:29.218376Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:2 2025-05-29T15:32:29.218378Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:2 2025-05-29T15:32:29.218381Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:29.218384Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:3 2025-05-29T15:32:29.218386Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:3 2025-05-29T15:32:29.218389Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-29T15:32:29.218391Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:4 2025-05-29T15:32:29.218393Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:4 2025-05-29T15:32:29.218399Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-05-29T15:32:29.218474Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:32:29.218499Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:32:29.218502Z node 
22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-05-29T15:32:29.218509Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-29T15:32:29.218513Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-29T15:32:29.218516Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:32:29.218534Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.218550Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.218558Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.218570Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.218576Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.218579Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.218953Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:32:29.219012Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:29.219017Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:32:29.219054Z node 22 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:29.219065Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:29.219068Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [22:818:2723] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:32:29.219115Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:29.219136Z 
node 22 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 27us result status StatusPathDoesNotExist 2025-05-29T15:32:29.219161Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 5]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table/Index/indexImplTable" LastExistedPrefixPathId: 5 LastExistedPrefixDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeSyncIndexImplTable ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::DropStreamCreatedWithInitialScan[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:30:32.657161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:32.657180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:32.657185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:32.657190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:32.657202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:32.657205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:32.657211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:32.657223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:32.657314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:32.657382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:32.669705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:32.669733Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:32.669847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:32.673168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:32.673199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:32.673233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:32.676486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:32.676600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:32.676732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:32.676924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:32.677642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:32.677690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:32.677978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:32.677988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:32.678028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:32.678037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:32.678044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:32.678066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:32.679680Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:32.702490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:32.702590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:32.702683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:32.702756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:32.702770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:32.703627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:32.703659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:32.703714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:32.703725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:32.703731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:32.703738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:32.704202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:32.704213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:32.704219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:32.704598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:32.704610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:32.704617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:32.704627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:32.705429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:32.706006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:32.706058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:32.706331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:32.706363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:32.706372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:32.706456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 
SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:29.255169Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false 
ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:29.255201Z node 103 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 39us result status StatusSuccess 2025-05-29T15:32:29.255284Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 
1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::DropStreamOnIndexTable[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamCreatedWithInitialScan[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:09.758716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:09.758752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:09.758759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:09.758765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:09.758777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: 
type TxMergeTablePartition, limit 10000 2025-05-29T15:32:09.758783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:09.758794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:09.758804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:09.758887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:09.758954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:09.768879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:09.768902Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:09.768997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:09.771164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:09.771187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:09.771207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:09.773705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:09.773759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:09.773844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:09.773981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:09.774435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:09.774460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:09.774638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:09.774645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:09.774666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:09.774671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:09.774675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:09.774688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:09.775630Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:09.790572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:09.790633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.790674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:09.790706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:09.790715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.791304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:09.791329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:09.791374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.791383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:09.791387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:09.791402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:09.791805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:32:09.791814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:09.791817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:09.792086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.792094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.792097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:09.792101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:09.792622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:09.793057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:09.793091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:09.793321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:09.793350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:09.793358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:09.793424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
eshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:32:29.802998Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:32:29.803029Z node 82 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.803089Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.803092Z node 82 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:29.803095Z node 82 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:29.803098Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-05-29T15:32:29.803155Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.803159Z node 82 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:29.803161Z node 82 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:32:29.803164Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:32:29.803171Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2025-05-29T15:32:29.803185Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.803188Z node 82 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:29.803190Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2025-05-29T15:32:29.803829Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:29.803852Z node 82 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:29.803895Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:29.803900Z node 82 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:32:29.803907Z node 82 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 4/4 2025-05-29T15:32:29.803911Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2025-05-29T15:32:29.803916Z node 82 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 4/4 2025-05-29T15:32:29.803920Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2025-05-29T15:32:29.803924Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/4, is published: true 2025-05-29T15:32:29.803928Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2025-05-29T15:32:29.803932Z node 82 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:32:29.803937Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:32:29.803955Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:29.803959Z node 82 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:1 2025-05-29T15:32:29.803961Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:1 2025-05-29T15:32:29.803964Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:29.803966Z node 82 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:2 2025-05-29T15:32:29.803968Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:2 2025-05-29T15:32:29.803971Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:32:29.803976Z node 82 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:3 2025-05-29T15:32:29.803978Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:3 2025-05-29T15:32:29.803983Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:32:29.804067Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:32:29.804077Z node 82 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:32:29.804103Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.804123Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.804137Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:32:29.804143Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:32:29.804152Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:32:29.804158Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:32:29.804163Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:32:29.804189Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.804196Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.804199Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:29.804555Z node 82 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:32:29.804592Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:29.804596Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:32:29.804631Z node 82 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:29.804641Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:29.804644Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [82:754:2670] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:32:29.804688Z node 82 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: 
false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:29.804706Z node 82 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 24us result status StatusPathDoesNotExist 2025-05-29T15:32:29.804729Z node 82 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::DropStream[TabletReboots] >> TCdcStreamWithRebootsTests::DropStreamExplicitReady[TabletReboots] [GOOD] >> TCdcStreamWithRebootsTests::Attributes[TabletReboots] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamOnIndexTable[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:09.637028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:09.637045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:09.637048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:09.637052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:09.637064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-05-29T15:32:09.637069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:09.637076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:09.637086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:09.637154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:09.637212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:09.646636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:09.646651Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:09.646710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:09.648380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:09.648399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:09.648415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:09.650089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:09.650138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:09.650206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:09.650322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:09.650795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:09.650821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:09.650965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:09.650971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:09.651003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:09.651014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:09.651021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:09.651038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:09.651864Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:09.664691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:09.664739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.664778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:09.664809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:09.664820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.665295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:09.665313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:09.665355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.665362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:09.665366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:09.665369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:09.665711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:32:09.665723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:09.665728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:09.666009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.666015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:09.666019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:09.666023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:09.666467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:09.666796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:09.666820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:09.666953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:09.666969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:09.666974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:09.667009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:30.143431Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.143435Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:30.143438Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2025-05-29T15:32:30.143440Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:30.143512Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.143532Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.143535Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:30.143537Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:30.143540Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:30.143546Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2025-05-29T15:32:30.143570Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.143573Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:30.143577Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2025-05-29T15:32:30.143972Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:30.144017Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:32:30.144027Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:32:30.144057Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply 
complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:30.144073Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:30.144078Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:32:30.144087Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 4/4 2025-05-29T15:32:30.144089Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2025-05-29T15:32:30.144093Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 4/4 2025-05-29T15:32:30.144095Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2025-05-29T15:32:30.144098Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/4, is published: true 2025-05-29T15:32:30.144102Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2025-05-29T15:32:30.144105Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:32:30.144109Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:32:30.144132Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:30.144136Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:1 2025-05-29T15:32:30.144138Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:1 2025-05-29T15:32:30.144142Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:30.144144Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:2 2025-05-29T15:32:30.144146Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:2 2025-05-29T15:32:30.144149Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-29T15:32:30.144152Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:3 2025-05-29T15:32:30.144154Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:3 2025-05-29T15:32:30.144162Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-05-29T15:32:30.144237Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:32:30.144241Z node 84 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-05-29T15:32:30.144251Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-29T15:32:30.144257Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-29T15:32:30.144262Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:32:30.144683Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.144701Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.144709Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.144720Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.144727Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.144731Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.145062Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:32:30.145123Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:30.145128Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:32:30.145172Z node 84 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:30.145186Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:30.145189Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [84:817:2721] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:32:30.145241Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:30.145270Z 
node 84 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 38us result status StatusPathDoesNotExist 2025-05-29T15:32:30.145313Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 5]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table/Index/indexImplTable" LastExistedPrefixPathId: 5 LastExistedPrefixDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeSyncIndexImplTable ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamExplicitReady[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:11.203390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:11.203408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:11.203412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:11.203415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:11.203424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:11.203426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2025-05-29T15:32:11.203432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:11.203442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:11.203517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:11.203569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:11.213238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:11.213252Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:11.213322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:11.215155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:11.215174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:11.215193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:11.217428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:11.217483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:11.217568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:11.217717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:11.218329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:11.218359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:11.218549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:11.218558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:11.218577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:11.218582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-05-29T15:32:11.218586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:11.218599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:11.219673Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:11.232263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:11.232315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:11.232354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:11.232388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:11.232396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:11.232894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:11.232921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:11.232963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:11.232970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:11.232974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:11.232977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:11.233355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:11.233365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at 
schemeshard: 72057594046678944 2025-05-29T15:32:11.233369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:11.233631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:11.233638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:11.233645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:11.233651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:11.234111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:11.234477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:11.234498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:11.234614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:11.234630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:11.234635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:11.234668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.507930Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:30.507933Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:32:30.507935Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:32:30.507982Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.508015Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.508019Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:30.508021Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:30.508024Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:30.508065Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.508069Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:30.508072Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:32:30.508075Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:32:30.508079Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2025-05-29T15:32:30.508092Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.508094Z node 79 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:30.508097Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2025-05-29T15:32:30.508439Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:30.508458Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:32:30.508476Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:30.508516Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:30.508521Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:32:30.508527Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 3/3 2025-05-29T15:32:30.508530Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2025-05-29T15:32:30.508533Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 3/3 2025-05-29T15:32:30.508535Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2025-05-29T15:32:30.508538Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/3, is published: true 2025-05-29T15:32:30.508541Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2025-05-29T15:32:30.508544Z node 79 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:32:30.508547Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:32:30.508560Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:30.508565Z node 79 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:1 2025-05-29T15:32:30.508567Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:1 2025-05-29T15:32:30.508570Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:32:30.508573Z node 79 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:2 2025-05-29T15:32:30.508575Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:2 2025-05-29T15:32:30.508581Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 
2025-05-29T15:32:30.508610Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:32:30.508613Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:32:30.508620Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:32:30.508623Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:32:30.508626Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:32:30.509064Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.509082Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:32:30.509107Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.509114Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.509132Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.509136Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:30.509427Z node 79 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:32:30.509470Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:30.509474Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:32:30.509511Z node 79 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:30.509523Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:30.509526Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [79:743:2659] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:32:30.509569Z node 79 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:30.509587Z node 79 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 23us result status StatusPathDoesNotExist 2025-05-29T15:32:30.509608Z node 79 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TCdcStreamWithRebootsTests::CreateStreamWithAwsRegion[TabletReboots]
>> TCdcStreamWithRebootsTests::CreateStreamWithResolvedTimestamps[PipeResets] [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithResolvedTimestamps[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:25.273039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:25.273060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:25.273066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:25.273071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:25.273083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424:
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:25.273086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:25.273093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:25.273102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:25.273175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:25.273230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:25.282533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:25.282550Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:25.282620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:25.284514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:25.284538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:25.284559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:25.286361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:25.286410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:25.286500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:25.286645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:25.287163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:25.287190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:25.287354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:25.287361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:25.287380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:25.287385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:25.287389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:25.287401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:25.288227Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:25.305900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:25.305956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:25.305998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:25.306036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:25.306046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:25.306589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:25.306609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:25.306660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:25.306669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:25.306675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:25.306680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:25.307066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:25.307077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:25.307083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:25.307401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:25.307412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:25.307417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:25.307422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:25.308104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:25.308507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:25.308538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:25.308708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:25.308735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:25.308743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:25.308790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
dyForNotifications: 0, at schemeshard: 72057594046678944 2025-05-29T15:32:31.470908Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 1003:2 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-05-29T15:32:31.470947Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:2 128 -> 240 2025-05-29T15:32:31.470971Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:31.470981Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:31.471523Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:31.471589Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:31.471595Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:32:31.471622Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:32:31.471654Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:31.471659Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:32:31.471665Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [26:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2025-05-29T15:32:31.471700Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:31.471706Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:2 ProgressState 2025-05-29T15:32:31.471716Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:31.471720Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:31.471724Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:31.471728Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:31.471736Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: false 2025-05-29T15:32:31.471741Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 
2025-05-29T15:32:31.471746Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:31.471750Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:31.471761Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:31.471766Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:31.471769Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:31.471785Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:31.471789Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:31.471792Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:31.471801Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:31.471806Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:32:31.471809Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 4 2025-05-29T15:32:31.471812Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-05-29T15:32:31.472092Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:31.472108Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:31.472114Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:31.472118Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:32:31.472123Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:31.472281Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:31.472293Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:31.472301Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:31.472305Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:32:31.472309Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:31.472319Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:31.473328Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:31.473362Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:31.474958Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:31.474968Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:31.475024Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:31.475039Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:31.475044Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:662:2579] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:31.475106Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:31.475142Z node 26 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 47us result status StatusSuccess 2025-05-29T15:32:31.475240Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 
CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TCdcStreamWithRebootsTests::RacySplitAndDropTable[PipeResets] [GOOD]
>> TCdcStreamWithRebootsTests::DisableStream[PipeResets]
>> TCdcStreamWithRebootsTests::CreateDropRecreate[PipeResets] [GOOD]
>> TCdcStreamWithRebootsTests::DropStreamOnIndexTableCreatedWithInitialScan[TabletReboots] [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::RacySplitAndDropTable[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:17.002558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:17.002573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:17.002577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:17.002580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config:
using default configuration 2025-05-29T15:32:17.002589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:17.002591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:17.002597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:17.002607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:17.002674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:17.002721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:17.011644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:17.011657Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:17.011718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:17.013303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:17.013320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:17.013335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:17.015067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:17.015112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:17.015182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:17.015339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:17.015901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:17.015931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:17.016089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:17.016095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:17.016114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2025-05-29T15:32:17.016118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:17.016122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:17.016134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:17.017121Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:17.032790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:17.032847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:17.032890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:17.032927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:17.032937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:17.033441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:17.033461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:17.033500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:17.033507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:17.033512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:17.033517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:17.033887Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:17.033897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:17.033901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:17.034155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:17.034162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:17.034167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:17.034173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:17.034805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:17.035164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:17.035188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:17.035338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:17.035358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:17.035364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:17.035405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
FO: schemeshard__operation_drop_table.cpp:414: TDropTable TProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:32.553858Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:32.553874Z node 64 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 3/3 2025-05-29T15:32:32.553877Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 3/3 2025-05-29T15:32:32.553880Z node 64 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 3/3 2025-05-29T15:32:32.553882Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 3/3 2025-05-29T15:32:32.553885Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 3/3, is published: true 2025-05-29T15:32:32.553888Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 3/3 2025-05-29T15:32:32.553891Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:0 2025-05-29T15:32:32.553894Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:0 2025-05-29T15:32:32.553908Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:32.553911Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:1 2025-05-29T15:32:32.553913Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:1 2025-05-29T15:32:32.553916Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:32:32.553919Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:2 2025-05-29T15:32:32.553921Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:2 2025-05-29T15:32:32.553925Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:32:32.553989Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:32:32.554025Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:32:32.554029Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:32:32.554034Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 4] was 1 2025-05-29T15:32:32.554038Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:32:32.554041Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:32:32.554083Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:32:32.554791Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:32:32.555147Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 698 RawX2: 274877909553 } TabletId: 72075186233409549 State: 4 2025-05-29T15:32:32.555160Z node 64 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409549, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:32:32.555200Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 701 RawX2: 274877909554 } TabletId: 72075186233409550 State: 4 2025-05-29T15:32:32.555205Z node 64 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409550, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:32:32.555537Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:32:32.555585Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:32:32.555603Z node 64 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-05-29T15:32:32.555633Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:32:32.555665Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409549 2025-05-29T15:32:32.555951Z node 64 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 2025-05-29T15:32:32.556047Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-05-29T15:32:32.556070Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 
72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409550 2025-05-29T15:32:32.556323Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:32:32.556329Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:32:32.556335Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:32:32.556645Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:32:32.556654Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-29T15:32:32.556821Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:5 2025-05-29T15:32:32.556827Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-05-29T15:32:32.556848Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1004 2025-05-29T15:32:32.556883Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:32.556888Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 TestWaitNotification wait txId: 1005 2025-05-29T15:32:32.556897Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-05-29T15:32:32.556900Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-05-29T15:32:32.556937Z node 64 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:32.556951Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:32.556955Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [64:1000:2885] 2025-05-29T15:32:32.556969Z node 64 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-05-29T15:32:32.556976Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-05-29T15:32:32.556978Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [64:1000:2885] TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 2025-05-29T15:32:32.557024Z node 64 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: 
Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:32.557042Z node 64 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 25us result status StatusPathDoesNotExist 2025-05-29T15:32:32.557065Z node 64 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableCreatedWithInitialScan[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:11.963849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:11.963868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:11.963873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:11.963878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:11.963889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:11.963893Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:11.963900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:11.963910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:11.963989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:11.964058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:11.977227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:11.977243Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:11.977327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:11.979767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:11.979788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:11.979808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:11.982385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:11.982463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:11.982558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:11.982775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:11.983379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:11.983411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:11.983599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:11.983611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:11.983634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:11.983641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:11.983651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:11.983668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:11.984720Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:12.004378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:12.004426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:12.004464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:12.004492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:12.004498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:12.004972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:12.004993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:12.005036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:12.005045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:12.005050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:12.005055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:12.005427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:12.005439Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:12.005445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:12.005759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:12.005768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:12.005771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:12.005776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:12.006426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:12.006799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:12.006830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:12.006995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:12.007018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:12.007026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:12.007073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
678944, txId: 1004 2025-05-29T15:32:32.925334Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2025-05-29T15:32:32.925336Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:32.925372Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:32.925391Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:32.925394Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:32.925399Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:32.925401Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:32.925406Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/5, is published: true 2025-05-29T15:32:32.925443Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:32.925446Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:32.925448Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/5, is published: true 2025-05-29T15:32:32.926013Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:32.926045Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:32:32.926059Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:32:32.926077Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:32.926148Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:32.926153Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:32:32.926163Z node 85 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 5/5 2025-05-29T15:32:32.926166Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 5/5 2025-05-29T15:32:32.926169Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 5/5 2025-05-29T15:32:32.926172Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 5/5 2025-05-29T15:32:32.926175Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 5/5, is published: true 2025-05-29T15:32:32.926178Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 5/5 2025-05-29T15:32:32.926182Z node 85 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:32:32.926185Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:32:32.926202Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:32.926206Z node 85 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:1 2025-05-29T15:32:32.926212Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:1 2025-05-29T15:32:32.926215Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:32.926218Z node 85 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:2 2025-05-29T15:32:32.926220Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:2 2025-05-29T15:32:32.926224Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:32.926226Z node 85 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:3 2025-05-29T15:32:32.926228Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:3 2025-05-29T15:32:32.926232Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-29T15:32:32.926235Z node 85 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:4 2025-05-29T15:32:32.926237Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:4 2025-05-29T15:32:32.926243Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-05-29T15:32:32.926301Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 
2025-05-29T15:32:32.926305Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-05-29T15:32:32.926313Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-29T15:32:32.926317Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-29T15:32:32.926320Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:32:32.926823Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:32.926843Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:32.926852Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:32.926860Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:32.926869Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:32.926873Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:32.927206Z node 85 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:32:32.927260Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:32.927264Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:32:32.927303Z node 85 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:32.927315Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:32.927318Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [85:819:2723] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:32:32.927366Z node 85 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 
72057594046678944 2025-05-29T15:32:32.927391Z node 85 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 33us result status StatusPathDoesNotExist 2025-05-29T15:32:32.927417Z node 85 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 5]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table/Index/indexImplTable" LastExistedPrefixPathId: 5 LastExistedPrefixDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeSyncIndexImplTable ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateDropRecreate[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:15.824661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:15.824678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:15.824682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:15.824685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:15.824694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:15.824697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:15.824703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:15.824713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:15.824782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:15.824830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:15.833890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:15.833905Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:15.833973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:15.835851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:15.835870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:15.835886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:15.837790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:15.837837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:15.837914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.838038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:15.838499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:15.838525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:15.838685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:15.838691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:15.838712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:15.838717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-05-29T15:32:15.838720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:15.838733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:15.839763Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:15.852711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:15.852761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.852796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:15.852825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:15.852832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.853377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.853401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:15.853444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.853453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:15.853458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:15.853463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:15.853912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.853926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts 
operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:15.853934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:15.854286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.854296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.854301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:15.854307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:15.854987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:15.855502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:15.855533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:15.855696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.855721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:15.855728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:15.855788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
eshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:0 2025-05-29T15:32:32.990088Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:0 2025-05-29T15:32:32.990094Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-05-29T15:32:32.990097Z node 70 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:1 2025-05-29T15:32:32.990099Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:1 2025-05-29T15:32:32.990110Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:32.990112Z node 70 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:2 2025-05-29T15:32:32.990114Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:2 2025-05-29T15:32:32.990122Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-05-29T15:32:32.990124Z node 70 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1005, publications: 2, subscribers: 0 2025-05-29T15:32:32.990127Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 6], 4 2025-05-29T15:32:32.990129Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 7], 2 2025-05-29T15:32:32.990233Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 274137603, Sender [70:207:2208], Recipient [70:126:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 6] Version: 4 } 2025-05-29T15:32:32.990238Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:32:32.990248Z node 70 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:32:32.990254Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:32:32.990257Z node 70 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:32:32.990260Z node 70 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4 2025-05-29T15:32:32.990262Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason 
remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-29T15:32:32.990271Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:32:32.990573Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 274137603, Sender [70:207:2208], Recipient [70:126:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 7] Version: 2 } 2025-05-29T15:32:32.990580Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:32:32.990588Z node 70 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:32:32.990596Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:32:32.990598Z node 70 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:32:32.990601Z node 70 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2025-05-29T15:32:32.990606Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-05-29T15:32:32.990617Z node 70 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2025-05-29T15:32:32.990620Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:32:32.990917Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:32:32.991400Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:32:32.991412Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:32:32.991467Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:32:32.991471Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestWaitNotification wait txId: 1005 2025-05-29T15:32:32.993095Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-05-29T15:32:32.993105Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-05-29T15:32:32.993158Z node 70 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [70:983:2856], Recipient [70:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:32:32.993165Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:32:32.993169Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:32:32.993191Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [70:407:2379], Recipient [70:126:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1005 2025-05-29T15:32:32.993196Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:32:32.993207Z node 70 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-05-29T15:32:32.993223Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-05-29T15:32:32.993228Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [70:981:2854] 2025-05-29T15:32:32.993250Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [70:983:2856], Recipient [70:126:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:32:32.993255Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:32:32.993259Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1005 2025-05-29T15:32:32.993313Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [70:984:2857], Recipient [70:126:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2025-05-29T15:32:32.993321Z node 70 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:32:32.993332Z node 70 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:32.993367Z node 70 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 33us result status StatusSuccess 2025-05-29T15:32:32.993462Z node 70 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 
0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409550 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpSystemView::FailNavigate >> KqpSystemView::QueryStatsSimple >> KqpSysColV1::SelectRange >> KqpSysColV1::InnerJoinSelectAsterisk |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpOlapIndexes::IndexesInLocalMetadata [GOOD] >> TCdcStreamWithRebootsTests::MergeTable[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::IndexesInLocalMetadata [GOOD] Test command err: Trying to start YDB, gRPC: 29012, MsgBus: 9936 2025-05-29T15:30:40.727172Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890507048045139:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:30:40.727268Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/002684/r3tmp/tmpDPxGIo/pdisk_1.dat 2025-05-29T15:30:41.057570Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890507048044973:2079] 1748532640721682 != 1748532640721685 2025-05-29T15:30:41.058026Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:41.092109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:41.092139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:41.093286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29012, node 1 2025-05-29T15:30:41.380234Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:41.380248Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:41.380252Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:41.380308Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9936 TClient is connected to server localhost:9936 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:30:41.982130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:30:42.070671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-05-29T15:30:42.147698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890515637980321:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:30:42.157114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890515637980320:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:30:42.157182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890515637980320:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:30:42.157253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890515637980320:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:30:42.157279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890515637980320:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:30:42.157298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890515637980320:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:30:42.157317Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890515637980320:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:30:42.157339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890515637980320:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:30:42.157362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890515637980320:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:30:42.157381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890515637980320:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:30:42.157402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890515637980320:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:30:42.157420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890515637980320:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:30:42.157441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890515637980320:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:30:42.163243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890515637980321:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:30:42.163335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890515637980321:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:30:42.163355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890515637980321:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:30:42.163376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890515637980321:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:30:42.163395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890515637980321:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:30:42.163414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037890;self_id=[1:7509890515637980321:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:30:42.163442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890515637980321:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:30:42.163463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890515637980321:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:30:42.163481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890515637980321:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:30:42.163501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890515637980321:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:30:42.163533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037890;self_id=[1:7509890515637980321:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:30:42.164383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:30:42.164395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:30:42.164408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:30:42.164413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:30:42.164434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:30:42.164439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:30:42.164453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute; ... 
[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=816;no_data=0;
2025-05-29T15:32:31.948263Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532751826, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=819;no_data=0;
2025-05-29T15:32:32.048653Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532752000, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=822;no_data=0;
2025-05-29T15:32:32.138165Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532752029, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=825;no_data=0;
2025-05-29T15:32:32.236887Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532752127, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=828;no_data=0;
2025-05-29T15:32:32.345806Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532752232, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=831;no_data=0;
2025-05-29T15:32:32.438376Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532752316, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=834;no_data=0;
2025-05-29T15:32:32.531942Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532752428, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=837;no_data=0;
2025-05-29T15:32:32.622459Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532752519, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=840;no_data=0;
2025-05-29T15:32:32.714105Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532752610, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=843;no_data=0;
2025-05-29T15:32:32.805112Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532752701, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=846;no_data=0;
2025-05-29T15:32:32.896396Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532752792, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=849;no_data=0;
2025-05-29T15:32:33.010603Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532752883, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=852;no_data=0;
2025-05-29T15:32:33.098653Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532753000, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=855;no_data=0;
2025-05-29T15:32:33.200518Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532753086, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=858;no_data=0;
2025-05-29T15:32:33.296446Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532753191, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=861;no_data=0;
2025-05-29T15:32:33.391992Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532753289, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=864;no_data=0;
2025-05-29T15:32:33.483403Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532753373, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=867;no_data=0;
2025-05-29T15:32:33.578211Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532753471, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=870;no_data=0;
2025-05-29T15:32:33.672967Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532753569, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=873;no_data=0;
2025-05-29T15:32:33.778859Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532753660, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=876;no_data=0;
2025-05-29T15:32:33.875902Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532753758, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=879;no_data=0;
2025-05-29T15:32:33.992857Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532753856, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=882;no_data=0;
2025-05-29T15:32:34.080439Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532754000, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=885;no_data=0;
2025-05-29T15:32:34.170710Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532754073, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=888;no_data=0;
2025-05-29T15:32:34.267632Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532754157, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=891;no_data=0;
2025-05-29T15:32:34.378547Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532754255, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=894;no_data=0;
2025-05-29T15:32:34.507265Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532754360, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=897;no_data=0;
2025-05-29T15:32:34.631297Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1748532754472, txId: 18446744073709551615] shutting down
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:381;result=[[1u]];expected=[[1u;]];
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=indexes_ut.cpp:383;skip=0;check=900;no_data=0;
>> KqpSysColV1::StreamInnerJoinSelectAsterisk
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::MergeTable[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:32:10.512858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:32:10.512878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:10.512884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:32:10.512889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:32:10.512900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:32:10.512904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:10.512913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:10.512926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:10.513017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:10.513089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:10.522427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:10.522443Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:10.522499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:10.524767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:10.524792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:10.524815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:10.527431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:10.527505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:10.527605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:10.527777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:10.528435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:10.528465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:10.528633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:10.528639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:10.528657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:10.528662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, 
domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:10.528666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:10.528678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:10.529761Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:10.546179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:10.546227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.546262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:10.546291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:10.546298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.546796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:10.546810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:10.546846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.546852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:10.546855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:10.546858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:10.547126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.547132Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:10.547135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:10.547339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.547344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.547347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:10.547351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:10.547746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:10.548027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:10.548049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:10.548166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:10.548181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:10.548186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:10.548218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:34.588344Z node 38 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:34.588386Z node 38 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 50us result status StatusSuccess 2025-05-29T15:32:34.588532Z node 38 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:34.588603Z node 38 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:34.588626Z node 38 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 25us result status StatusSuccess 2025-05-29T15:32:34.588692Z node 38 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 2 
PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 2 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "\001\000\004\000\000\000\377\377\377\177" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "\001\000\004\000\000\000\377\377\377\177" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 2 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TCdcStreamWithRebootsTests::DisableStream[PipeResets] [GOOD]
>> KqpSysColV0::SelectRowById
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DisableStream[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:32:32.761392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:32:32.761416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:32.761423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:32:32.761428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:32:32.761442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type
TxMergeTablePartition, limit 10000 2025-05-29T15:32:32.761447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:32.761457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:32.761471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:32.761584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:32.761670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:32.772185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:32.772203Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:32.772275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:32.774161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:32.774180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:32.774202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:32.776460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:32.776530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:32.776627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:32.776817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:32.777388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:32.777420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:32.777611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:32.777617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:32.777639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:32.777644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:32.777649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:32.777662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:32.778696Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:32.792709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:32.792768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:32.792810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:32.792843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:32.792850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:32.793418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:32.793435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:32.793471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:32.793477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:32.793481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:32.793484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:32.793864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:32:32.793873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:32.793877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:32.794171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:32.794179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:32.794182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:32.794187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:32.794664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:32.795075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:32.795101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:32.795233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:32.795252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:32.795258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:32.795298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
X_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 165 } } CommitVersion { Step: 5000005 TxId: 1004 } 2025-05-29T15:32:35.542786Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 165 } } CommitVersion { Step: 5000005 TxId: 1004 } 2025-05-29T15:32:35.542970Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 330 RawX2: 51539609868 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:32:35.542982Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 1 2025-05-29T15:32:35.543002Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 1004:1, at schemeshard: 72057594046678944, message: Source { RawX1: 330 RawX2: 51539609868 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:32:35.543011Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 1004:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:32:35.543021Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 1004:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 330 RawX2: 51539609868 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2025-05-29T15:32:35.543032Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:35.543037Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 1004:1, at schemeshard: 72057594046678944 2025-05-29T15:32:35.543042Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 1004:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:32:35.543051Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1004:1 129 -> 240 2025-05-29T15:32:35.543235Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:35.543257Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 
72057594046678944, cookie: 1004 2025-05-29T15:32:35.543263Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:35.543270Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2025-05-29T15:32:35.543278Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:35.543360Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:35.543380Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:35.543384Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:35.543389Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:35.543395Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:35.543406Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/2, is published: true 2025-05-29T15:32:35.544270Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2025-05-29T15:32:35.544306Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2025-05-29T15:32:35.544475Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:1, at schemeshard: 72057594046678944 2025-05-29T15:32:35.544491Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:1 ProgressState 2025-05-29T15:32:35.544507Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:1 progress is 2/2 2025-05-29T15:32:35.544514Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 2/2 2025-05-29T15:32:35.544522Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:1 progress is 2/2 2025-05-29T15:32:35.544529Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 2/2 2025-05-29T15:32:35.544535Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/2, is published: true 2025-05-29T15:32:35.544542Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 2/2 2025-05-29T15:32:35.544550Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:32:35.544556Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:32:35.544572Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:35.544578Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:1 2025-05-29T15:32:35.544582Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:1 2025-05-29T15:32:35.544600Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:35.545086Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:35.545133Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:32:35.545758Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:35.545773Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:32:35.545863Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:35.545892Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:35.545899Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [12:706:2623] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:32:35.545999Z node 12 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:35.546042Z node 12 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 55us result status StatusSuccess 2025-05-29T15:32:35.546159Z node 12 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } ChildrenExist: true } Children { Name: 
"streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateDisabled SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpSysColV1::SelectRowAsterisk >> KqpSysColV1::StreamInnerJoinTables >> TCdcStreamWithRebootsTests::WithPqTransactions[PipeResets] [GOOD] >> TConsistentOpsWithReboots::CreateIndexedTableWithReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::WithPqTransactions[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:12.296431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:12.296449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:12.296453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:12.296456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:12.296464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-05-29T15:32:12.296467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:12.296472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:12.296481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:12.296547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:12.296595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:12.305491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:12.305507Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:12.305564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:12.307621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:12.307645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:12.307664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:12.310121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:12.310184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:12.310281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:12.310452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:12.311076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:12.311104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:12.311268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:12.311275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:12.311302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:12.311309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:12.311315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:12.311331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:12.312255Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:12.325481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:12.325526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:12.325562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:12.325592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:12.325599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:12.326016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:12.326032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:12.326062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:12.326067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:12.326070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:12.326073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:12.326368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:32:12.326375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:12.326380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:12.326634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:12.326650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:12.326653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:12.326657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:12.327133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:12.327422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:12.327442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:12.327559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:12.327574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:12.327579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:12.327615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
meshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976715657, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:36.295223Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-05-29T15:32:36.295228Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true 2025-05-29T15:32:36.295386Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6285: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 141 } } CommitVersion { Step: 5000005 TxId: 281474976715657 } 2025-05-29T15:32:36.295392Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409546, partId: 1 2025-05-29T15:32:36.295408Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 141 } } CommitVersion { Step: 5000005 TxId: 281474976715657 } 2025-05-29T15:32:36.295418Z node 38 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 141 } } CommitVersion { Step: 5000005 TxId: 281474976715657 } 2025-05-29T15:32:36.295630Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 330 RawX2: 163208759564 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-05-29T15:32:36.295637Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409546, partId: 1 2025-05-29T15:32:36.295652Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944, message: Source { RawX1: 330 RawX2: 163208759564 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-05-29T15:32:36.295658Z node 38 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:32:36.295666Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at 
tablet: 72057594046678944 message: Source { RawX1: 330 RawX2: 163208759564 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-05-29T15:32:36.295675Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:36.295679Z node 38 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:32:36.295682Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:32:36.295688Z node 38 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976715657:1 129 -> 240 2025-05-29T15:32:36.296308Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2025-05-29T15:32:36.296438Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2025-05-29T15:32:36.296456Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:32:36.296504Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:32:36.296566Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:32:36.296572Z node 38 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 281474976715657:1 ProgressState 2025-05-29T15:32:36.296580Z node 38 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715657:1 progress is 3/3 2025-05-29T15:32:36.296582Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:32:36.296585Z node 38 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715657:1 progress is 3/3 2025-05-29T15:32:36.296587Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:32:36.296590Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2025-05-29T15:32:36.296593Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:32:36.296597Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:0 2025-05-29T15:32:36.296599Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 
281474976715657:0 2025-05-29T15:32:36.296606Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:36.296609Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:1 2025-05-29T15:32:36.296611Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715657:1 2025-05-29T15:32:36.296621Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:36.296623Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:2 2025-05-29T15:32:36.296625Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715657:2 2025-05-29T15:32:36.296628Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:36.296984Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:36.296989Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:36.297026Z node 38 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:36.297037Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:36.297040Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [38:703:2631] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:36.663005Z node 38 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:36.663063Z node 38 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 68us result status StatusSuccess 2025-05-29T15:32:36.663160Z node 38 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 
5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 ScanProgress { ShardsTotal: 1 ShardsCompleted: 1 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::CreateStream[TabletReboots] [GOOD] >> TConsistentOpsWithReboots::DropIndexedTableAndForceDropSimultaneously [GOOD] >> KqpSysColV1::InnerJoinSelect ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStream[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:03.326983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:03.327003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:03.327006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:03.327010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:03.327023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:03.327026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:03.327031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:03.327038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:03.327106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:03.327175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:03.345401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:03.345421Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:03.345494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:03.347497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:03.347517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:03.348099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:03.352301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:03.352350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:03.354790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.355754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:03.356345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:03.356377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:03.358226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:03.358237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:03.358252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:03.358258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-05-29T15:32:03.358261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:03.358273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:03.359401Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:03.379540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:03.379598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.379636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:03.379681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:03.379690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.380232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.380251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:03.380291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.380297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:03.380300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:03.380303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:03.380622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.380630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts 
operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:03.380633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:03.380881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.380887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.380890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:03.380894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:03.381288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:03.381568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:03.381590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:03.381707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.381722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:03.381726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:03.381770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
8944 2025-05-29T15:32:37.524231Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 1003:2 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-05-29T15:32:37.524267Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:2 128 -> 240 2025-05-29T15:32:37.524288Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:37.524297Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:37.524825Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:37.524855Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:37.524858Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:32:37.524882Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:32:37.524905Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:37.524909Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [139:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:32:37.524912Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [139:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2025-05-29T15:32:37.524968Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:37.524975Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:2 ProgressState 2025-05-29T15:32:37.524983Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:37.524986Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:37.524990Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:37.524992Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:37.524995Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: false 2025-05-29T15:32:37.524998Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:37.525002Z node 
139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:37.525005Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:37.525013Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:37.525018Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:37.525021Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:37.525037Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:37.525041Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:37.525045Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:37.525066Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:37.525071Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:32:37.525074Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 4 2025-05-29T15:32:37.525076Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-05-29T15:32:37.525265Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:37.525276Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:37.525279Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:37.525283Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:32:37.525289Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:37.525460Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:37.525468Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:37.525471Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:37.525474Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:32:37.525476Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:37.525482Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:37.526180Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:37.526295Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:37.527351Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:37.527360Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:37.527404Z node 139 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:37.527417Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:37.527421Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [139:659:2576] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:37.527469Z node 139 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:37.527498Z node 139 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 36us result status StatusSuccess 2025-05-29T15:32:37.527568Z node 139 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 
ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CreateIndexedTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:42.551938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:42.551954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:42.551957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:42.551961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:42.551964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:42.551966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:42.551972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:42.551981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:42.552038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:42.552086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:42.561310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:42.561326Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:42.561399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:42.563793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:42.563838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:42.563876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:42.566402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:42.566479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:42.566568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:42.566755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:42.567392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:42.567425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:42.567655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:42.567667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:42.567691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:42.567698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:42.567705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:42.567722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:42.568763Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:42.584831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:42.584886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.584930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:42.584963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:42.584971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.585530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:42.585550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:42.585589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.585597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:42.585601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:42.585606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:42.585999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.586009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:42.586014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:42.586326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.586334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:42.586339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:42.586345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:42.586914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:42.587520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:42.587585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:42.587863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:42.587907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:42.587935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:42.588035Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
rokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:36.959372Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValues/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:36.959385Z node 223 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB/Table1/UserDefinedIndexByValues/indexImplTable" took 14us result status StatusSuccess 2025-05-29T15:32:36.959418Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValues/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { 
SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:36.959449Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:36.959461Z node 223 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1" took 13us result status StatusSuccess 2025-05-29T15:32:36.959514Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1" PathDescription { Self { Name: "UserDefinedIndexByValue0CoveringValue1" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 9 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndexByValue0CoveringValue1" LocalPathId: 9 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "value1" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 9 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:36.959555Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:36.959580Z node 223 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1/indexImplTable" took 26us result status StatusSuccess 2025-05-29T15:32:36.959612Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 9 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 
NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTable[TabletReboots] [GOOD] >> KqpSystemView::NodesRange2 |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTable[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:03.326985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:03.327003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:03.327007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:03.327010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 
2025-05-29T15:32:03.327016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:03.327019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:03.327025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:03.327035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:03.327107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:03.327172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:03.345411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:03.345427Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:03.345494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:03.347497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:03.347517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:03.348079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:03.352306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:03.352361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:03.354795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.355759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:03.356287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:03.356326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:03.358190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:03.358201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:03.358222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
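The trace lines in these dumps share one fixed layout: ISO-8601 timestamp, node id, component and severity, source file:line, then the message. Below is a minimal parsing sketch for working with such dumps offline (Python; the regex, field names, and helper functions are illustrative assumptions made here, not part of YDB or its test tooling — non-matching lines, such as FAKE_COORDINATOR or Leader-election lines, simply yield None):

import re
from collections import Counter

# Layout assumed from the trace lines in this dump, e.g.:
#   2025-05-29T15:31:42.567705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
LOG_RE = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
    r"node (?P<node>\d+) "
    r":(?P<component>\w+) (?P<severity>DEBUG|INFO|NOTICE|WARN|ERROR): "  # ERROR not seen above; included as a guess
    r"(?P<loc>\S+:\d+): "
    r"(?P<msg>.*)"
)

def parse(line):
    """Return a dict of fields for one trace line, or None if it does not match."""
    m = LOG_RE.match(line)
    return m.groupdict() if m else None

def severity_histogram(path):
    """Example use: count parsed events per severity in a dump file."""
    hist = Counter()
    with open(path) as f:
        for line in f:
            rec = parse(line.strip())
            if rec:
                hist[rec["severity"]] += 1
    return hist

A histogram like this makes it easy to spot, for instance, the WARN entries ("Operation part proposed ok, but propose itself is undo unsafe", "NotifyTxCompletion, unknown transaction") among the bulk of DEBUG/INFO progress records in the traces that follow.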
2025-05-29T15:32:03.358228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:03.358233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:03.358245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:03.359287Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:03.373296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:03.376314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.376374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:03.376418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:03.376426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:03.377068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:03.377081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:03.377086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:03.377547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:03.377810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.377818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:03.377822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:03.378268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:03.378625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:03.378657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:03.378803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.378823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:03.378827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:03.379674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
5:32:38.393158Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-05-29T15:32:38.393625Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2025-05-29T15:32:38.393742Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:38.393749Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-05-29T15:32:38.393773Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-05-29T15:32:38.393797Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:38.393801Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [142:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 6 2025-05-29T15:32:38.393805Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [142:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 7 2025-05-29T15:32:38.393857Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944 2025-05-29T15:32:38.393862Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:3 ProgressState 2025-05-29T15:32:38.393870Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4 2025-05-29T15:32:38.393873Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:38.393877Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4 2025-05-29T15:32:38.393879Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:38.393882Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: false 2025-05-29T15:32:38.393885Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:38.393889Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:38.393892Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:38.393899Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:38.393902Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:38.393905Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:38.393908Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-05-29T15:32:38.393912Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:38.393916Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:38.393936Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:38.393941Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:3 2025-05-29T15:32:38.393945Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:3 2025-05-29T15:32:38.393953Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-05-29T15:32:38.393956Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:32:38.393959Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 4 2025-05-29T15:32:38.393961Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 7], 2 2025-05-29T15:32:38.394083Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:38.394092Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:38.394095Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:38.394099Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4 2025-05-29T15:32:38.394101Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-29T15:32:38.394263Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:38.394272Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:38.394275Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:38.394278Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2025-05-29T15:32:38.394280Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-05-29T15:32:38.394288Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:38.395132Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:38.395160Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:38.396465Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:38.396474Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:38.396533Z node 142 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:38.396545Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:38.396548Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [142:737:2643] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:38.396597Z node 142 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:38.396627Z node 142 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 37us result status StatusSuccess 2025-05-29T15:32:38.396697Z node 142 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::DropIndexedTableAndForceDropSimultaneously [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:31:34.759252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:31:34.759269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:34.759273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:31:34.759277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:31:34.759281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:31:34.759284Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:31:34.759289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:31:34.759298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:31:34.759364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:31:34.759418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:31:34.768607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:31:34.768625Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:34.768695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:31:34.770695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:31:34.770715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:31:34.770756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:31:34.773112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:31:34.773188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:31:34.773266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:34.773424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:31:34.773947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:34.773977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:31:34.774171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:31:34.774179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:31:34.774203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:31:34.774208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:31:34.774212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:31:34.774226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:31:34.775251Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:31:34.788341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:31:34.788418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.788473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:31:34.788505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:31:34.788512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.789155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:34.789176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:31:34.789226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.789233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:31:34.789236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:31:34.789240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:31:34.789556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.789564Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:31:34.789567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:31:34.789796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.789802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:31:34.789806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:34.789810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:31:34.790198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:31:34.790479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:31:34.790505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:31:34.790637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:31:34.790652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:31:34.790665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:31:34.790710Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
progress is 1/1 2025-05-29T15:32:37.495019Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:32:37.495022Z node 254 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 1/1 2025-05-29T15:32:37.495024Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:32:37.495028Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2025-05-29T15:32:37.495031Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-05-29T15:32:37.495034Z node 254 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:0 2025-05-29T15:32:37.495037Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:0 2025-05-29T15:32:37.495047Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-29T15:32:37.495052Z node 254 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1005, publications: 2, subscribers: 0 2025-05-29T15:32:37.495055Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-29T15:32:37.495058Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-05-29T15:32:37.495183Z node 254 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:32:37.495199Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:32:37.495205Z node 254 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:32:37.495210Z node 254 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-29T15:32:37.495217Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:32:37.495379Z node 254 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:32:37.495390Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:32:37.495393Z node 254 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:32:37.495396Z node 254 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-29T15:32:37.495398Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-05-29T15:32:37.495408Z node 254 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2025-05-29T15:32:37.495433Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:32:37.495437Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:32:37.495443Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:32:37.496069Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:32:37.496259Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:32:37.496271Z node 254 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1004 2025-05-29T15:32:37.496318Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:37.496324Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 TestWaitNotification wait txId: 1005 2025-05-29T15:32:37.496337Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-05-29T15:32:37.496340Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-05-29T15:32:37.496404Z node 254 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:37.496427Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:37.496434Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [254:833:2772] 2025-05-29T15:32:37.496448Z node 254 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-05-29T15:32:37.496459Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-05-29T15:32:37.496461Z node 254 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [254:833:2772] TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted wait until 72075186233409554 is deleted wait until 72075186233409555 is deleted 2025-05-29T15:32:37.496521Z node 254 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-05-29T15:32:37.496529Z node 254 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-05-29T15:32:37.496535Z node 254 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2025-05-29T15:32:37.496540Z node 254 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2025-05-29T15:32:37.496546Z node 254 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2025-05-29T15:32:37.496551Z node 254 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2025-05-29T15:32:37.496557Z node 254 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 2025-05-29T15:32:37.496561Z node 254 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409553 2025-05-29T15:32:37.496567Z node 254 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409554 2025-05-29T15:32:37.496573Z node 254 :HIVE INFO: tablet_helpers.cpp:1476: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409555 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 Deleted tabletId 72075186233409554 Deleted tabletId 72075186233409555 2025-05-29T15:32:37.496656Z node 254 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:37.496690Z node 254 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 45us result status StatusSuccess 2025-05-29T15:32:37.496773Z node 254 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpSystemView::PartitionStatsRange3 >> KqpSysColV1::SelectRowById |76.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest |76.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions >> TCdcStreamWithRebootsTests::RacySplitTableAndCreateStream[PipeResets] [GOOD] >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableExplicitReady[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::RacySplitTableAndCreateStream[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:08.765110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:08.765133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:08.765138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: 
StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:08.765143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:08.765156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:08.765160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:08.765168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:08.765181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:08.765287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:08.765375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:08.780411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:08.780433Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:08.780536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:08.783149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:08.783185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:08.783211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:08.785818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:08.785894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:08.786011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.786188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:08.786898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:08.786934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:08.787161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:08.787172Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:08.787207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:08.787214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:08.787220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:08.787238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:08.788616Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:08.810827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:08.810899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.810951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:08.810996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:08.811008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.811567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.811591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:08.811636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.811645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:08.811651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:08.811655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:08.812018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.812030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:08.812035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:08.812374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.812385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.812390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:08.812397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:08.813059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:08.813444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:08.813480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:08.813669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.813694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:08.813703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:08.813757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
004, publications: 2, subscribers: 1 2025-05-29T15:32:41.959460Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 4 2025-05-29T15:32:41.959464Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-05-29T15:32:41.959629Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:41.959642Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:41.959647Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:41.959652Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:32:41.959657Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:41.959885Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:41.959897Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:41.959901Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:41.959905Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:32:41.959910Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:41.959920Z node 52 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 1 2025-05-29T15:32:41.959925Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [52:404:2376] 2025-05-29T15:32:41.960619Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:41.960950Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:41.960993Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:41.960999Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [52:410:2382] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 2025-05-29T15:32:41.962231Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:32:41.962289Z node 52 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 66us result status StatusSuccess 2025-05-29T15:32:41.962476Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\002\000\000\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:41.962533Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:41.962549Z node 52 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 18us result status StatusSuccess 2025-05-29T15:32:41.962604Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1004 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409550 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409549 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409550 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::CreateStreamWithInitialScan[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect Test command err: Trying to start YDB, gRPC: 22226, MsgBus: 22429 2025-05-29T15:32:38.014486Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891013912013433:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:38.014516Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001af8/r3tmp/tmpQ1F3oJ/pdisk_1.dat 2025-05-29T15:32:38.060001Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:38.060077Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891013912013413:2079] 1748532758014335 != 1748532758014338 TServer::EnableGrpc on GrpcPort 22226, node 1 2025-05-29T15:32:38.069158Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:38.069172Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:38.069175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:38.069225Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22429 TClient is connected to server localhost:22429 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:38.113708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:38.124565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:38.139031Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:38.139055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:38.140215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:38.184102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:38.205218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:38.215346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:38.297537Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891013912015047:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:38.297560Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:38.329692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-05-29T15:32:38.335900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-05-29T15:32:38.343847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-05-29T15:32:38.350782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-05-29T15:32:38.358078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-05-29T15:32:38.364661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-05-29T15:32:38.372077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-05-29T15:32:38.387936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891013912015701:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:38.387952Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:38.387990Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891013912015706:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:38.388611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-05-29T15:32:38.392251Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891013912015708:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking }
2025-05-29T15:32:38.472296Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891013912015759:3395] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:32:38.544013Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891013912015775:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:32:38.544116Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjEyMWFkMWEtMzZiOTY3YjgtYThjYzhjM2UtMWNlYjYwOWY=, ActorId: [1:7509891013912015044:2401], ActorState: ExecuteState, TraceId: 01jweas1vk87vf6drteqe1a3dt, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:32:38.544726Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC
8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479
10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC
13. ??:0: ?? @ 0x7FB35917AAC2
14. ??:0: ?? @ 0x7FB35920C84F
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowById
Test command err: Trying to start YDB, gRPC: 18417, MsgBus: 17888
2025-05-29T15:32:36.033052Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891004887984643:2265];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:32:36.033094Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b02/r3tmp/tmp9sgqsK/pdisk_1.dat
2025-05-29T15:32:36.071688Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891004887984416:2079] 1748532756032308 != 1748532756032311
2025-05-29T15:32:36.072190Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 18417, node 1
2025-05-29T15:32:36.085048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:32:36.085061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:32:36.085063Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:32:36.085110Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:17888
TClient is connected to server localhost:17888
WaitRootIsUp 'Root'...
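The fatal issues and the VERIFY above all terminate in NKikimr::NKqp::AssertSuccessResult (ydb/core/kqp/ut/common/kqp_ut_common.h:375), whose failing condition the trace prints verbatim: (result.IsSuccess()). A minimal self-contained sketch of that contract, assuming only the signature and condition shown in the trace; the TStatus stand-in here is illustrative, not the real NYdb::TStatus:

    #include <cstdio>
    #include <cstdlib>
    #include <string>

    // Stand-in for NYdb::TStatus; the real class carries a status code
    // plus a tree of issues like the "<main>: Fatal: ..." lines above.
    struct TStatus {
        bool Success = false;
        std::string Issues;                 // flattened issue text
        bool IsSuccess() const { return Success; }
    };

    // Sketch of the helper named in the trace: on failure it aborts the
    // process (VERIFY-style), since it may run outside the unittest thread
    // where a thrown exception would not reach the test framework.
    void AssertSuccessResult(const TStatus& result) {
        if (!result.IsSuccess()) {
            std::fprintf(stderr, "assertion failed: (result.IsSuccess())\n%s\n",
                         result.Issues.c_str());
            std::abort();                   // mirrors VERIFY failed -> abort
        }
    }

    int main() {
        TStatus bad{false, "<main>: Fatal: Execution, code: 1060"};
        AssertSuccessResult(bad);           // terminates, like the test above
    }

Aborting rather than throwing is what turns one failed precondition in CreateSampleTables into the VERIFY failed line and the full stack dump recorded in this log.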
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:36.155510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:36.155541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:36.156345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:32:36.156661Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:32:36.159046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:36.218626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:36.234969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:36.244624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:36.289360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891004887986054:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:36.289393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:36.322571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:32:36.328401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:32:36.341828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:32:36.348773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:32:36.355349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:32:36.362689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:32:36.370122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:32:36.385515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891004887986708:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:36.385536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891004887986713:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:36.385543Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:36.386094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:32:36.390189Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891004887986715:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:32:36.459523Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891004887986766:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:32:36.534910Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891004887986782:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:32:36.534984Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTVmNDUzMi02ZTllMzdkLTdmYzdlZTFjLTQ2OWVjYWU2, ActorId: [1:7509891004887986051:2401], ActorState: ExecuteState, TraceId: 01jwearzx124b4fnfd961c96d6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:32:36.535611Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC
8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479
10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC
13. ??:0: ?? @ 0x7F9969AF0AC2
14. ??:0: ?? @ 0x7F9969B8284F
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk
Test command err: Trying to start YDB, gRPC: 13699, MsgBus: 15799
2025-05-29T15:32:36.386117Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891005436252020:2065];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:32:36.386130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001afb/r3tmp/tmpbQrRfd/pdisk_1.dat
2025-05-29T15:32:36.427906Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891005436251996:2079] 1748532756385984 != 1748532756385987
2025-05-29T15:32:36.429295Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 13699, node 1
2025-05-29T15:32:36.438636Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:32:36.438644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:32:36.438645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:32:36.438673Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:15799
TClient is connected to server localhost:15799
WaitRootIsUp 'Root'...
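Frames 5 through 12 of both traces show the same detail: the test body (CreateSampleTables) runs on a util/thread/pool worker via library/cpp/threading/future, so the abort fires "in non-unittest thread" and the unittest registar can only surface it through RaiseError ("requirement UnittestThread failed"). A rough illustration of the underlying problem, using std::async instead of the util/ thread pool; the names and messages here are invented for the sketch:

    #include <future>
    #include <stdexcept>
    #include <cstdio>

    // A failing step executed off the main thread, like CreateSampleTables
    // in the log above. Throwing (instead of aborting) lets the failure be
    // captured by the future and re-raised where the caller can see it.
    void CreateSampleTablesStep() {                  // illustrative name
        throw std::runtime_error("query compilation failed: index out of range");
    }

    int main() {
        // std::async stores the exception inside the shared state...
        auto fut = std::async(std::launch::async, CreateSampleTablesStep);
        try {
            fut.get();                               // ...and get() rethrows it here
        } catch (const std::exception& e) {
            // The owning thread can now fail just this test instead of the
            // whole process dying, which is the case RaiseError guards against.
            std::fprintf(stderr, "test failed: %s\n", e.what());
            return 1;
        }
    }

When the worker-thread failure is a hard VERIFY, as in this log, no such marshalling is possible and the entire test binary aborts with the stack dump above.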
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:36.480643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:36.492890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:36.511342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:36.511362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:36.512486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:36.552112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:36.566357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:36.574845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:36.656178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891005436253631:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:36.656202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:36.683051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-05-29T15:32:36.689368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-05-29T15:32:36.743467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-05-29T15:32:36.754878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-05-29T15:32:36.761843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-05-29T15:32:36.768661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-05-29T15:32:36.775794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-05-29T15:32:36.784477Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891005436254287:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:36.784491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:36.784498Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891005436254292:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:36.785125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-05-29T15:32:36.789353Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891005436254294:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking }
2025-05-29T15:32:36.850014Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891005436254345:3398] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:32:36.913887Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891005436254361:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:32:36.913961Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjcxZDc3MDEtMWZhYjliNDMtMjNhOTMxYTQtODU3NTFkN2Q=, ActorId: [1:7509891005436253628:2401], ActorState: ExecuteState, TraceId: 01jweas09gdb3v7srdeqqnshzy, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:32:36.914566Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC
8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479
10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC
13. ??:0: ?? @ 0x7FF83BB2EAC2
14. ??:0: ?? @ 0x7FF83BBC084F
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailNavigate
Test command err: Trying to start YDB, gRPC: 24246, MsgBus: 27515
2025-05-29T15:32:34.251989Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890997274701836:2199];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:32:34.252019Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b16/r3tmp/tmpExlXTJ/pdisk_1.dat
2025-05-29T15:32:34.346340Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890997274701674:2079] 1748532754250371 != 1748532754250374
2025-05-29T15:32:34.347319Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 24246, node 1
2025-05-29T15:32:34.360864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:32:34.360907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:32:34.362399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:32:34.393387Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:32:34.393399Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:32:34.393484Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:32:34.393525Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:27515
TClient is connected to server localhost:27515
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:34.513296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.529551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.530667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.551160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.563052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.572242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.622028Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890997274703314:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:34.622051Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:34.766510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.773742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.781211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.795183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.849746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.858133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.865258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.881452Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890997274703968:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:34.881475Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:34.881479Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890997274703973:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:34.882090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480 2025-05-29T15:32:34.885304Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890997274703975:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-05-29T15:32:34.942938Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890997274704026:3403] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:35.078474Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890997274704042:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:35.078594Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NjJkNzliZDItNGUxNDE5OTgtNjlhNDM3NWEtZjdiNzMxMzk=, ActorId: [1:7509890997274703311:2402], ActorState: ExecuteState, TraceId: 01jwearye13qvs0tsh0qgpetq0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:35.079301Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:32:39.252388Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7509890997274701836:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:39.252426Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7FBBC0DD9AC2 14. ??:0: ?? @ 0x7FBBC0E6B84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange3 Test command err: Trying to start YDB, gRPC: 7833, MsgBus: 8078 2025-05-29T15:32:39.930929Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891018885798384:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:39.930943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001aec/r3tmp/tmpAIQnUw/pdisk_1.dat 2025-05-29T15:32:39.984259Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891018885798365:2079] 1748532759930813 != 1748532759930816 2025-05-29T15:32:39.985607Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7833, node 1 2025-05-29T15:32:39.997540Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:39.997550Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:39.997552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:39.997591Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8078 TClient is connected to server localhost:8078 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:40.051391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:40.059856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:40.065525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:40.065555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:40.066753Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:40.119964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:40.141758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:40.153025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:40.222848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891023180767300:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:40.222890Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:40.256622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:40.263354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:40.276265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:40.283196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:40.338061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:40.346228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:40.360408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:40.376066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891023180767954:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:40.376077Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891023180767959:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:40.376088Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:40.376700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:40.380252Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891023180767961:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:32:40.474619Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891023180768012:3396] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:40.554837Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891023180768028:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:40.554974Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Njg3ZWE5MjctZGI1YTUzNGUtY2Y1NTNiOTYtNjQyOTM3YTg=, ActorId: [1:7509891023180767282:2401], ActorState: ExecuteState, TraceId: 01jweas3sq4twtfma0dgjnx2jd, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:40.555665Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F3D893CEAC2 14. ??:0: ?? @ 0x7F3D8946084F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk Test command err: Trying to start YDB, gRPC: 26574, MsgBus: 23730 2025-05-29T15:32:34.503114Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890998663568889:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:34.503130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b15/r3tmp/tmpDXpAlb/pdisk_1.dat 2025-05-29T15:32:34.549239Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:34.549282Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890998663568868:2079] 1748532754503001 != 1748532754503004 TServer::EnableGrpc on GrpcPort 26574, node 1 2025-05-29T15:32:34.562167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:34.562176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:34.562178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:34.562210Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23730 TClient is connected to server localhost:23730 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:34.616950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.627562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.628051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:34.628074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:34.629045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:34.686441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.704029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.713939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.778787Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890998663570503:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:34.778814Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:34.812262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.818857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.829962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.836432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.843704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.898535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.907126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.922310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890998663571158:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:34.922328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:34.922374Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890998663571163:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:34.922965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:34.927190Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890998663571165:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:32:35.021465Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891002958538512:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:35.097254Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891002958538528:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:35.097336Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzQ0M2I4ZDgtNWMzYjQ0YmItNzEyNDkxNWEtZjFhZDFkZjk=, ActorId: [1:7509890998663570500:2401], ActorState: ExecuteState, TraceId: 01jwearyfa74se5nbzq7hqpwk9, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:35.097878Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:32:39.503612Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7509890998663568889:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:39.503647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F62E2FE5AC2 14. ??:0: ?? @ 0x7F62E307784F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableExplicitReady[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:22.063081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:22.063098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:22.063102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:22.063105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 
2025-05-29T15:32:22.063115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:22.063117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:22.063123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:22.063132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:22.063191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:22.063240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:22.072265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:22.072279Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:22.072334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:22.074005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:22.074028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:22.074044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:22.075725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:22.075771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:22.075841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:22.075963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:22.076405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:22.076429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:22.076576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:22.076582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:22.076600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2025-05-29T15:32:22.076604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:22.076608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:22.076620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:22.077409Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:22.089420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:22.089474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:22.089513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:22.089541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:22.089548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:22.089959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:22.089980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:22.090017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:22.090023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:22.090027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:22.090030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:22.090327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:22.090335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:22.090338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:22.090580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:22.090587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:22.090591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:22.090595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:22.091029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:22.091317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:22.091338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:22.091452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:22.091468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:22.091473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:22.091505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:42.541408Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:42.541412Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:42.541414Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2025-05-29T15:32:42.541418Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:42.541460Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:42.541475Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:42.541478Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:42.541480Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:42.541483Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:42.541488Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2025-05-29T15:32:42.541507Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:42.541510Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:42.541512Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2025-05-29T15:32:42.541807Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:42.541824Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:32:42.541832Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:32:42.541855Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:42.541867Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:42.541871Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:32:42.541878Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 4/4 2025-05-29T15:32:42.541880Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2025-05-29T15:32:42.541883Z node 84 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 4/4 2025-05-29T15:32:42.541885Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2025-05-29T15:32:42.541890Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/4, is published: true 2025-05-29T15:32:42.541894Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2025-05-29T15:32:42.541897Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:32:42.541899Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:32:42.541913Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:42.541916Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:1 2025-05-29T15:32:42.541918Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:1 2025-05-29T15:32:42.541921Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:42.541924Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:2 2025-05-29T15:32:42.541926Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:2 2025-05-29T15:32:42.541929Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-29T15:32:42.541931Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:3 2025-05-29T15:32:42.541933Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:3 2025-05-29T15:32:42.541938Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-05-29T15:32:42.541990Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:32:42.541993Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-05-29T15:32:42.542000Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-29T15:32:42.542005Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-29T15:32:42.542010Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-29T15:32:42.542359Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:42.542375Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:42.542383Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:42.542392Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:42.542401Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:42.542404Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:42.542665Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:32:42.542715Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:42.542719Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:32:42.542779Z node 84 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:42.542793Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:42.542796Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [84:817:2721] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:32:42.542843Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:42.542864Z node 84 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 27us result status StatusPathDoesNotExist 2025-05-29T15:32:42.542885Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 5]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table/Index/indexImplTable" LastExistedPrefixPathId: 5 LastExistedPrefixDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeSyncIndexImplTable ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk Test command err: Trying to start YDB, gRPC: 64103, MsgBus: 4088 2025-05-29T15:32:35.429997Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890999442655499:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:35.430027Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b03/r3tmp/tmp8mxrqI/pdisk_1.dat 2025-05-29T15:32:35.472765Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890999442655479:2079] 1748532755429877 != 1748532755429880 2025-05-29T15:32:35.473983Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64103, node 1 2025-05-29T15:32:35.483560Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:35.483571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:35.483573Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:35.483604Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4088 TClient is connected to server localhost:4088 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:35.525841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:35.538270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:35.555298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:35.555318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:35.556434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:35.597938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:35.617215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:35.629132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:35.692179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890999442657115:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:35.692201Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:35.720170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:35.774752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:35.828963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:35.883513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:35.894283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:35.908335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:35.915463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:35.930849Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890999442657774:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:35.930872Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:35.930875Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890999442657779:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:35.931443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:35.935395Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890999442657781:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:32:36.012324Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891003737625128:3398] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:36.107891Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891003737625144:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:36.107959Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDc5YTcwMTMtMzg3NzkxYzAtZjVmNDA1MDItOWRhNjA4ZQ==, ActorId: [1:7509890999442657097:2401], ActorState: ExecuteState, TraceId: 01jwearzetf5nsyts6m08hp8q6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:36.108544Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:32:40.430555Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7509890999442655499:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:40.430607Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F50CF15CAC2 14. ??:0: ?? @ 0x7F50CF1EE84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsSimple Test command err: Trying to start YDB, gRPC: 32388, MsgBus: 23580 2025-05-29T15:32:34.316180Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890995780713181:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:34.316205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:32:34.318496Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509890994875522443:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:34.318509Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:32:34.318848Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509890996891405027:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:34.318947Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b2e/r3tmp/tmpGPjPSq/pdisk_1.dat 2025-05-29T15:32:34.377811Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32388, node 1 2025-05-29T15:32:34.393389Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:34.393403Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:34.393412Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:34.393442Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:32:34.416330Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:34.416401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:34.417858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:34.449392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:34.449430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:34.449510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:34.449529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:34.450812Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:32:34.450832Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-05-29T15:32:34.451097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:34.451165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23580 TClient is connected to server localhost:23580 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:34.513069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.527363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:32:34.595075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.611547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.623044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.651119Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890995780714985:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:34.651161Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:34.766583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.778689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.837490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.848651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.859639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.874284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.887649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:34.904109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890995780715822:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:34.904128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890995780715827:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:34.904132Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:34.904734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:34.914375Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890995780715829:2406], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:34.974667Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890995780715904:4105] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:35.077911Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890995780715922:2410], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:35.078058Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=M2I0YzZiNDAtYWUxZWViOTgtMzQxNTVlZjItNjJiYTE2ZjI=, ActorId: [1:7509890995780714967:2358], ActorState: ExecuteState, TraceId: 01jwearyeq7r1072mcmwbyahjz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:35.078703Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:32:39.316531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7509890995780713181:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:39.316578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-05-29T15:32:39.319042Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7509890994875522443:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:39.319071Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-05-29T15:32:39.319074Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7509890996891405027:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:39.319101Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F44E0889AC2 14. ??:0: ?? 
@ 0x7F44E091B84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 Test command err: Trying to start YDB, gRPC: 26386, MsgBus: 61491 2025-05-29T15:32:39.148256Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891018908620444:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:39.148278Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:32:39.153091Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509891019217603639:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:39.153428Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:32:39.153730Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509891017589096847:2222];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:39.153754Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509891016731380337:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:39.153800Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:32:39.153876Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:32:39.154172Z node 5 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7509891018861971957:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:39.154193Z node 5 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001aee/r3tmp/tmpCSMarf/pdisk_1.dat 2025-05-29T15:32:39.227197Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26386, node 1 2025-05-29T15:32:39.239979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:39.239989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:39.239991Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:39.240025Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:32:39.248941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:39.248961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-05-29T15:32:39.250433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61491 TClient is connected to server localhost:61491 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:32:39.280851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:39.280868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:39.280903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:39.280910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:39.281427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:39.281441Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:39.281526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:39.281546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:39.281880Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:32:39.281890Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-05-29T15:32:39.281892Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-05-29T15:32:39.281894Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-05-29T15:32:39.282151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:39.282199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:39.282214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:39.282231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:32:39.287687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:39.297987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:39.361989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:39.424520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:39.437637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:39.499335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891018908622122:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:39.499357Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:39.529841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:39.542055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:39.556631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:39.615162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:39.626352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:39.641060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:39.654124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:39.670401Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891018908622883:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:39.670419Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:39.670421Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891018908622888:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:39.670962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:39.673561Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891018908622890:2391], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976720669 completed, doublechecking } 2025-05-29T15:32:39.756578Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891018908622962:3958] txid# 281474976720670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:39.837862Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891018908622980:2395], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:39.837987Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWEyYTAwOWItNzU5ODk0YjEtZTZlNTJjMWMtYzM5NTVkN2E=, ActorId: [1:7509891018908622104:2349], ActorState: ExecuteState, TraceId: 01jweas33pdfrhm6zsyfmvxdyk, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:39.838659Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7FCD1848EAC2 14. ??:0: ?? @ 0x7FCD1852084F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinTables Test command err: Trying to start YDB, gRPC: 20900, MsgBus: 27115 2025-05-29T15:32:36.886015Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891003946024701:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:36.886031Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001afa/r3tmp/tmp3EDUry/pdisk_1.dat 2025-05-29T15:32:36.932847Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:36.932932Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891003946024681:2079] 1748532756885884 != 1748532756885887 TServer::EnableGrpc on GrpcPort 20900, node 1 2025-05-29T15:32:36.945536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:36.945545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:36.945547Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:36.945576Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27115 TClient is connected to server localhost:27115 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:37.014314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:37.014339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:37.015396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:37.015974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:37.026324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:37.090964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:37.107235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:37.120629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:37.187585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891008240993611:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:37.187611Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:37.225474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:37.231858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:37.286329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:37.341027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:37.395690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:37.406104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:37.420948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:37.436617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891008240994267:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:37.436639Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891008240994272:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:37.436651Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:37.437581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:37.440891Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891008240994274:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:37.532037Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891008240994325:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:37.636220Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891008240994341:2474], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:37.636318Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Yjc0OGZjNTYtYzQ4NTM3ODgtZmJjMTU3ZDUtZThiY2E2MzI=, ActorId: [1:7509891008240993593:2401], ActorState: ExecuteState, TraceId: 01jweas0xw0bgvydr9p29frjp8, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:37.636937Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F1F3820CAC2 14. ??:0: ?? @ 0x7F1F3829E84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange Test command err: Trying to start YDB, gRPC: 1095, MsgBus: 15480 2025-05-29T15:32:34.279085Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890994755580445:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:34.279099Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001b1f/r3tmp/tmpWoMqjR/pdisk_1.dat 2025-05-29T15:32:34.344013Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890994755580424:2079] 1748532754278951 != 1748532754278954 2025-05-29T15:32:34.345950Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1095, node 1 2025-05-29T15:32:34.393450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:34.393468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:34.393480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:34.393535Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:32:34.403339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:34.403368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:34.404400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15480 TClient is connected to server localhost:15480 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:34.511675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.527320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.551034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.566560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.578788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:34.622139Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890994755582056:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:34.622156Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:34.766518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-05-29T15:32:34.773868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-05-29T15:32:34.781248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-05-29T15:32:34.795501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-05-29T15:32:34.811147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-05-29T15:32:34.823759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-05-29T15:32:34.837302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-05-29T15:32:34.858353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890994755582707:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:34.858401Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:34.858430Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890994755582712:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:34.859194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480
2025-05-29T15:32:34.864564Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890994755582714:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking }
2025-05-29T15:32:34.937113Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890994755582765:3398] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:32:35.079021Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890994755582781:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:32:35.079118Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDFiMDVjM2MtN2JmZjg5N2QtZGIzMDFmNGYtMzAwMGE3OTM=, ActorId: [1:7509890994755582053:2401], ActorState: ExecuteState, TraceId: 01jwearyda3fwtdhnpdn5r4cb9, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:32:35.079679Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
2025-05-29T15:32:39.279509Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7509890994755580445:2062];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:32:39.279559Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC
8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479
10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC
13. ??:0: ?? @ 0x7F2DBEADAAC2
14. ??:0: ?? @ 0x7F2DBEB6C84F
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowById
Test command err:
Trying to start YDB, gRPC: 6995, MsgBus: 62312
2025-05-29T15:32:40.013239Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891022717558884:2061];send_to=[0:7307199536658146131:7762515];
2025-05-29T15:32:40.013267Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ae8/r3tmp/tmpNkgqkq/pdisk_1.dat
2025-05-29T15:32:40.054697Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891022717558864:2079] 1748532760013083 != 1748532760013086
2025-05-29T15:32:40.055741Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 6995, node 1
2025-05-29T15:32:40.065926Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-29T15:32:40.065940Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-05-29T15:32:40.065942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe)
2025-05-29T15:32:40.065981Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration
TClient is connected to server localhost:62312
TClient is connected to server localhost:62312
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-29T15:32:40.136698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-29T15:32:40.136731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-29T15:32:40.137737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-29T15:32:40.138339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:32:40.143215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:32:40.160091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:32:40.179325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:32:40.189849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-05-29T15:32:40.321244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891022717560497:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:40.321267Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:40.362972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-05-29T15:32:40.418541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-05-29T15:32:40.429967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-05-29T15:32:40.436966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-05-29T15:32:40.443853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-05-29T15:32:40.451413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-05-29T15:32:40.464809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
2025-05-29T15:32:40.481431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891022717561149:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:40.481457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:40.481463Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891022717561154:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-05-29T15:32:40.482008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480
2025-05-29T15:32:40.484985Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891022717561156:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking }
2025-05-29T15:32:40.561482Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891022717561207:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-29T15:32:40.639232Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891022717561223:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
2025-05-29T15:32:40.639314Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzNmYWI1MDQtMzBiMGRiMTYtNzZjY2VkYzItNjEzOWYxNzA=, ActorId: [1:7509891022717560494:2401], ActorState: ExecuteState, TraceId: 01jweas3x19awb2r0zsn4jscz1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id:
VERIFY failed (2025-05-29T15:32:40.639937Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC
8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479
10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC
13. ??:0: ?? @ 0x7F7ED7EA3AC2
14. ??:0: ?? @ 0x7F7ED7F3584F
>> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableExplicitReady[TabletReboots] [GOOD]
>> KqpSystemView::NodesSimple
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithInitialScan[TabletReboots] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:32:08.677049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:32:08.677069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:08.677075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:32:08.677079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:32:08.677090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:32:08.677094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:32:08.677103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:08.677113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:32:08.677213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:32:08.677275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:32:08.690495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true }
2025-05-29T15:32:08.690515Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:32:08.690588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062]
2025-05-29T15:32:08.692612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:32:08.692633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:32:08.692666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:32:08.695176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:32:08.695255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:32:08.695363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:32:08.695558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:32:08.696203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:32:08.696236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:32:08.696471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:32:08.696483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:32:08.696516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:32:08.696524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:32:08.696530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:32:08.696548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212]
IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212]
Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212]
2025-05-29T15:32:08.697588Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062]
2025-05-29T15:32:08.715053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:32:08.715106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.715149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:32:08.715184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:32:08.715192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.715698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:32:08.715721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:32:08.715761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.715774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:32:08.715779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:32:08.715782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:32:08.716093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.716102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:32:08.716105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:32:08.716426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.716436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.716441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:32:08.716448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:32:08.716987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:32:08.717315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:32:08.717340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062]
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:32:08.717489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:32:08.717506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:32:08.717512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:32:08.717554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ...
xId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4]
2025-05-29T15:32:43.181089Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5]
2025-05-29T15:32:43.181113Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:32:43.181117Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [141:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 4
2025-05-29T15:32:43.181121Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [141:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 5
2025-05-29T15:32:43.181207Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944
2025-05-29T15:32:43.181215Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:3 ProgressState
2025-05-29T15:32:43.181224Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 3/4
2025-05-29T15:32:43.181227Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/4
2025-05-29T15:32:43.181231Z node 141 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1103: All parts have reached barrier, tx: 1003, done: 3, blocked: 1
2025-05-29T15:32:43.181235Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_cdc_stream.cpp:400: [72057594046678944] TDone opId# 1003:2HandleReply TEvCompleteBarrier
2025-05-29T15:32:43.181247Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 4/4
2025-05-29T15:32:43.181249Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4
2025-05-29T15:32:43.181252Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4
2025-05-29T15:32:43.181254Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4
2025-05-29T15:32:43.181257Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/4, is published: false
2025-05-29T15:32:43.181259Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: false
2025-05-29T15:32:43.181262Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4
2025-05-29T15:32:43.181266Z node 141 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0
2025-05-29T15:32:43.181269Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0
2025-05-29T15:32:43.181276Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5
2025-05-29T15:32:43.181280Z node 141 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1
2025-05-29T15:32:43.181282Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1
2025-05-29T15:32:43.181285Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4
2025-05-29T15:32:43.181287Z node 141 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2
2025-05-29T15:32:43.181289Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2
2025-05-29T15:32:43.181299Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4
2025-05-29T15:32:43.181314Z node 141 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:3
2025-05-29T15:32:43.181317Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:3
2025-05-29T15:32:43.181323Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5
2025-05-29T15:32:43.181327Z node 141 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0
2025-05-29T15:32:43.181329Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 4
2025-05-29T15:32:43.181331Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 2
2025-05-29T15:32:43.181484Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003
2025-05-29T15:32:43.181493Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003
2025-05-29T15:32:43.181508Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003
2025-05-29T15:32:43.181511Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4
2025-05-29T15:32:43.181513Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
2025-05-29T15:32:43.181654Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003
2025-05-29T15:32:43.181663Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003
2025-05-29T15:32:43.181666Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003
2025-05-29T15:32:43.181668Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2
2025-05-29T15:32:43.181671Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4
2025-05-29T15:32:43.181678Z node 141 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0
2025-05-29T15:32:43.182329Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003
2025-05-29T15:32:43.182374Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003
TestModificationResult got TxId: 1003, wait until txId: 1003
TestWaitNotification wait txId: 1003
2025-05-29T15:32:43.183830Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion
2025-05-29T15:32:43.183839Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003
2025-05-29T15:32:43.183888Z node 141 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944
2025-05-29T15:32:43.183901Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult
2025-05-29T15:32:43.183905Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [141:665:2582]
TestWaitNotification: OK eventTxId 1003
2025-05-29T15:32:43.183959Z node 141 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-05-29T15:32:43.183993Z node 141 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 42us result status StatusSuccess
2025-05-29T15:32:43.184077Z node 141 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateScan SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> KqpSysColV1::InnerJoinTables
>> KqpSysColV0::SelectRange
>> KqpSystemView::PartitionStatsRange1
>> KqpSystemView::QueryStatsScan
>> KqpSysColV0::InnerJoinSelect
>> KqpSystemView::PartitionStatsParametricRanges
|76.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableExplicitReady[TabletReboots] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:32:08.580898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:32:08.580918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:08.580923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:32:08.580926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:32:08.580934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:32:08.580936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:32:08.580942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:08.580955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:32:08.581034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:32:08.581094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:32:08.591277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true }
2025-05-29T15:32:08.591293Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded
2025-05-29T15:32:08.591376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062]
2025-05-29T15:32:08.593911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete
2025-05-29T15:32:08.593939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute
2025-05-29T15:32:08.593968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-29T15:32:08.597050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete
2025-05-29T15:32:08.597122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0
2025-05-29T15:32:08.597234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-29T15:32:08.597418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-29T15:32:08.598135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:32:08.598179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop
2025-05-29T15:32:08.598452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:32:08.598464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:32:08.598494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute
2025-05-29T15:32:08.598501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-29T15:32:08.598508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete
2025-05-29T15:32:08.598537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212]
IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212]
Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212]
2025-05-29T15:32:08.599962Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062]
2025-05-29T15:32:08.614383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-05-29T15:32:08.614453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.614488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-05-29T15:32:08.614519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-05-29T15:32:08.614527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.614995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-05-29T15:32:08.615015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-05-29T15:32:08.615051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.615062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-05-29T15:32:08.615068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-05-29T15:32:08.615071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3
2025-05-29T15:32:08.615411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.615418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-05-29T15:32:08.615421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128
2025-05-29T15:32:08.615673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.615680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-05-29T15:32:08.615684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:32:08.615688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-05-29T15:32:08.616163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-05-29T15:32:08.616483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-05-29T15:32:08.616504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062]
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-05-29T15:32:08.616618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-05-29T15:32:08.616634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-05-29T15:32:08.616640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-05-29T15:32:08.616674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ...
5:32:43.481399Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4
2025-05-29T15:32:43.481831Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944
2025-05-29T15:32:43.481948Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-29T15:32:43.481957Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 6]
2025-05-29T15:32:43.481988Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 7]
2025-05-29T15:32:43.482013Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-29T15:32:43.482017Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [142:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 6
2025-05-29T15:32:43.482022Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [142:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 7
2025-05-29T15:32:43.482091Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944
2025-05-29T15:32:43.482097Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:3 ProgressState
2025-05-29T15:32:43.482107Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4
2025-05-29T15:32:43.482112Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4
2025-05-29T15:32:43.482117Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4
2025-05-29T15:32:43.482120Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4
2025-05-29T15:32:43.482125Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: false
2025-05-29T15:32:43.482130Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4
2025-05-29T15:32:43.482136Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0
2025-05-29T15:32:43.482141Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0
2025-05-29T15:32:43.482152Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
2025-05-29T15:32:43.482157Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1
2025-05-29T15:32:43.482161Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1
2025-05-29T15:32:43.482166Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4
2025-05-29T15:32:43.482170Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2
2025-05-29T15:32:43.482174Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2
2025-05-29T15:32:43.482191Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4
2025-05-29T15:32:43.482194Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:3
2025-05-29T15:32:43.482196Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:3
2025-05-29T15:32:43.482202Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5
2025-05-29T15:32:43.482205Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0
2025-05-29T15:32:43.482208Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 4
2025-05-29T15:32:43.482210Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 7], 2
2025-05-29T15:32:43.482343Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003
2025-05-29T15:32:43.482353Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003
2025-05-29T15:32:43.482356Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003
2025-05-29T15:32:43.482360Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4
2025-05-29T15:32:43.482365Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3
2025-05-29T15:32:43.482584Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003
2025-05-29T15:32:43.482601Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003
2025-05-29T15:32:43.482605Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003
2025-05-29T15:32:43.482609Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2
2025-05-29T15:32:43.482614Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4
2025-05-29T15:32:43.482628Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0
2025-05-29T15:32:43.483721Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003
2025-05-29T15:32:43.483766Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003
TestModificationResult got TxId: 1003, wait until txId: 1003
TestWaitNotification wait txId: 1003
2025-05-29T15:32:43.485532Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion
2025-05-29T15:32:43.485545Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003
2025-05-29T15:32:43.485609Z node 142 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944
2025-05-29T15:32:43.485628Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult
2025-05-29T15:32:43.485632Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [142:737:2643]
TestWaitNotification: OK eventTxId 1003
2025-05-29T15:32:43.485701Z node 142 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-05-29T15:32:43.485740Z node 142 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 47us result status StatusSuccess
2025-05-29T15:32:43.485838Z node 142 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0
UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |76.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest |76.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest |76.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Join |76.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete >> KqpSysColV1::StreamSelectRange >> TCdcStreamWithRebootsTests::CreateStreamExplicitReady[TabletReboots] [GOOD] >> KqpSysColV1::StreamSelectRowAsterisk >> KqpSystemView::PartitionStatsRange2 |76.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinTables >> KqpSysColV1::StreamSelectRowById |76.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithVirtualTimestamps[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions Test command err: Trying to start YDB, gRPC: 17347, MsgBus: 10997 2025-05-29T15:32:42.018005Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891030554666610:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:42.018025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001add/r3tmp/tmpvtPdOv/pdisk_1.dat 2025-05-29T15:32:42.072860Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:42.072930Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891030554666591:2079] 1748532762017854 != 1748532762017857 TServer::EnableGrpc on GrpcPort 17347, node 1 2025-05-29T15:32:42.080403Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:42.080418Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:42.080420Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:42.080466Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10997 TClient is connected to server localhost:10997 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:32:42.120349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:42.120393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:42.121469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:42.148648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:42.156445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:32:42.157603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:42.219447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:42.235853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:42.244997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... 
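The WorkloadService entries just below record a create-if-missing race on the default resource pool: the fetch fails with NOT_FOUND on a fresh database, the pool is created on demand, and a concurrent creator's "path exist, request accepts it" error is treated as success. A hedged sketch of that pattern, with invented names and stubbed results:

#include <string>

enum class EStatus { Ok, NotFound, AlreadyExists };

// Stubs standing in for the real fetch/create calls (hypothetical).
static EStatus FetchPool(const std::string&)  { return EStatus::NotFound; }
static EStatus CreatePool(const std::string&) { return EStatus::AlreadyExists; }

void EnsureDefaultPool() {
    if (FetchPool("default") == EStatus::NotFound &&
        CreatePool("default") == EStatus::AlreadyExists) {
        // benign: another session won the race; proceed as if we created it
    }
}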
2025-05-29T15:32:42.343150Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891030554668237:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:42.343177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:42.372417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:42.379545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:42.390005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:42.444938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:42.453276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:42.467756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:42.481705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-05-29T15:32:42.497475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891030554668892:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:42.497499Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:42.497510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891030554668897:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:42.498144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480 2025-05-29T15:32:42.501481Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891030554668899:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-05-29T15:32:42.592221Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891030554668950:3402] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:42.662938Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891030554668966:2475], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:42.663060Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmFlMGY5NmMtYjMyYzFjOGQtNzk0MTNjYzgtZTE5YjUzYjY=, ActorId: [1:7509891030554668219:2402], ActorState: ExecuteState, TraceId: 01jweas5w1ddj1r27r1jg1p786, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:42.663676Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F2B8CEFBAC2 14. ??:0: ?? @ 0x7F2B8CF8D84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamExplicitReady[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:10.802887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:10.802906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:10.802911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:10.802917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:10.802930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:10.802934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:10.802944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:10.802957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:10.803045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:10.803112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:10.812056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:10.812071Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:10.812127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:10.813798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:10.813814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:10.813831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:10.815560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:10.815605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:10.815674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:10.815814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:10.816232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:10.816254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:10.816400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:10.816406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:10.816425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:10.816429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:10.816433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 
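The TTxServerlessStorageBilling entry above ("unable to make a bill, domain is not a serverless db") suggests a simple guard: only serverless databases are metered. A simplified, hypothetical shape of that check; the function and parameter names are assumptions, only the log text is from the trace:

#include <iostream>
#include <string>

void MaybeBill(bool isServerlessDomain, const std::string& domainId) {
    if (!isServerlessDomain) {
        std::cout << "unable to make a bill, domain is not a serverless db, "
                     "domainId: " << domainId << "\n";
        return;  // the tx completes without producing a billing record
    }
    // ... otherwise meter storage/compute usage and emit a bill ...
}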
2025-05-29T15:32:10.816444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:10.817185Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:10.828805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:10.828856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.828896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:10.828927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:10.828933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.829363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:10.829379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:10.829409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.829414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:10.829418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:10.829421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:10.829681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.829687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:10.829691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 
-> 128 2025-05-29T15:32:10.829902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.829908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.829911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:10.829915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:10.830337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:10.830650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:10.830671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:10.830800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:10.830816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:10.830821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:10.830854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
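The FAKE_COORDINATOR lines above trace the propose/plan handshake: a proposed tx queues at the coordinator, the step counter advances to the tx's MinStep ("advance: minStep5000001 State->FrontStep: 0"), and the plan is sent to every affected tablet at that step. A sketch of that bookkeeping with invented names, inferred from the trace only:

#include <cstdint>
#include <vector>

struct TFakeCoordinator {
    uint64_t FrontStep = 0;          // "State->FrontStep: 0"
    std::vector<uint64_t> Pending;   // "Add transaction: 1 at step: 5000001"

    void Propose(uint64_t txId) { Pending.push_back(txId); }

    // Advance to the tx's MinStep and plan everything queued so far.
    uint64_t Advance(uint64_t minStep) {
        if (FrontStep < minStep) FrontStep = minStep;
        // ... "Send Plan to tablet ... for txId ... at step ..." per tablet ...
        Pending.clear();
        return FrontStep;
    }
};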
8944 2025-05-29T15:32:45.044345Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 1003:2 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-05-29T15:32:45.044375Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:2 128 -> 240 2025-05-29T15:32:45.044395Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:45.044402Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:45.044847Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:45.044877Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:45.044881Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:32:45.044904Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:32:45.044927Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:45.044931Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [139:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:32:45.044934Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [139:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2025-05-29T15:32:45.044982Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:45.044990Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:2 ProgressState 2025-05-29T15:32:45.044997Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:45.045000Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:45.045004Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:45.045006Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:45.045008Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: false 2025-05-29T15:32:45.045011Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:45.045015Z node 
139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:45.045018Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:45.045025Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:45.045028Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:45.045031Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:45.045041Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:45.045044Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:45.045046Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:45.045052Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:45.045054Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:32:45.045057Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 4 2025-05-29T15:32:45.045060Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-05-29T15:32:45.045243Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:45.045254Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:45.045257Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:45.045261Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:32:45.045266Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:45.045434Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:45.045441Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:45.045444Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:45.045447Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:32:45.045449Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:45.045456Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:45.046079Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:45.046174Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:45.047268Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:45.047277Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:45.047328Z node 139 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:45.047345Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:45.047350Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [139:659:2576] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:45.047416Z node 139 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:45.047453Z node 139 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 44us result status StatusSuccess 2025-05-29T15:32:45.047525Z node 139 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 
ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |76.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_reboots/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithVirtualTimestamps[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:10.476009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:10.476028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:10.476034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:10.476038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:10.476049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing 
config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:10.476053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:10.476061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:10.476074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:10.476159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:10.476234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:10.489348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:10.489364Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:10.489436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:10.491815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:10.491838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:10.491856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:10.494440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:10.494503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:10.494597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:10.494791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:10.495434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:10.495467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:10.495668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:10.495681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:10.495702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:10.495709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:10.495716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:10.495733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:10.496859Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:10.511496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:10.511542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.511574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:10.511601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:10.511607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.512028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:10.512041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:10.512071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.512077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:10.512080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:10.512083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:10.512359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-29T15:32:10.512365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:10.512368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:10.512571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.512576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.512580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:10.512584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:10.512979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:10.513276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:10.513295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:10.513410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:10.513425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:10.513430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:10.513461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
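These traces repeatedly show the scheme-board publication countdown: "Publication still in progress, publications: 2, subscribers: 0", then one TEvUpdateAck per path ("Publication in-flight, count: 2" -> "count: 1"), and finally "Publication complete, notify & remove" at zero, which satisfies TestWaitNotification waiters. A minimal sketch with hypothetical field names:

#include <cstdint>
#include <functional>

struct TTxPublication {
    uint32_t InFlight = 0;                // paths whose version is not yet acked
    std::function<void()> NotifyWaiters;  // e.g. satisfy TestWaitNotification

    // Called once per TEvUpdateAck; waiters fire only when the count hits zero.
    void AckPublish() {
        if (InFlight > 0 && --InFlight == 0 && NotifyWaiters) {
            NotifyWaiters();
        }
    }
};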
15:32:45.353624Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-05-29T15:32:45.354150Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2025-05-29T15:32:45.354265Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:45.354273Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-05-29T15:32:45.354303Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-05-29T15:32:45.354334Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:45.354340Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [142:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 6 2025-05-29T15:32:45.354345Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [142:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 7 2025-05-29T15:32:45.354412Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944 2025-05-29T15:32:45.354419Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:3 ProgressState 2025-05-29T15:32:45.354429Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4 2025-05-29T15:32:45.354434Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:45.354441Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 4/4 2025-05-29T15:32:45.354444Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:45.354461Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: false 2025-05-29T15:32:45.354466Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-05-29T15:32:45.354472Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:45.354476Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:45.354486Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:45.354492Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:45.354495Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:45.354500Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-05-29T15:32:45.354504Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:45.354507Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:45.354521Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:45.354525Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:3 2025-05-29T15:32:45.354528Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:3 2025-05-29T15:32:45.354537Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-05-29T15:32:45.354542Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:32:45.354546Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 4 2025-05-29T15:32:45.354549Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 7], 2 2025-05-29T15:32:45.354710Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:45.354724Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:45.354729Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:45.354734Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4 2025-05-29T15:32:45.354759Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-29T15:32:45.354980Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:45.354993Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:45.354998Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:45.355003Z node 142 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2025-05-29T15:32:45.355007Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-05-29T15:32:45.355018Z node 142 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:45.355916Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:45.355950Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:45.357400Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:45.357409Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:45.357464Z node 142 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:45.357479Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:45.357483Z node 142 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [142:737:2643] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:45.357565Z node 142 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:45.357603Z node 142 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 46us result status StatusSuccess 2025-05-29T15:32:45.357696Z node 142 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: true AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |76.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsFollower >> KqpSystemView::ReadSuccess >> KqpSystemView::PartitionStatsRanges >> KqpSystemView::FailResolve >> KqpSysColV0::UpdateAndDelete >> TCdcStreamWithRebootsTests::WithoutPqTransactions[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelect Test command err: Trying to start YDB, gRPC: 64013, MsgBus: 64387 2025-05-29T15:32:44.342659Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891038473954670:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:44.343040Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ac4/r3tmp/tmp7nt8vh/pdisk_1.dat 2025-05-29T15:32:44.395557Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:44.395620Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891038473954649:2079] 1748532764342514 != 1748532764342517 TServer::EnableGrpc on GrpcPort 64013, node 1 2025-05-29T15:32:44.408101Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:44.408115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:44.408117Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:44.408164Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64387 TClient is connected to server localhost:64387 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:44.471968Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:44.471997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:44.473029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:44.473214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.484300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.502842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:32:44.527517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.540221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.633755Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891038473956280:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.633780Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.662105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.668818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.679407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.693426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.700314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.714278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.728449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.745056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891038473956933:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.745091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.745093Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891038473956938:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.745762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:44.748200Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891038473956940:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:44.846673Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891038473956991:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:44.922600Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891038473957007:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:44.922715Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTIwZjFlNzAtYWQyNzA2ZGYtMmNlOWI3ZWItNzVmZWRlMjU=, ActorId: [1:7509891038473956277:2401], ActorState: ExecuteState, TraceId: 01jweas828enrw24dqw83fzvkc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:44.923343Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F0DFF38EAC2 14. ??:0: ?? @ 0x7F0DFF42084F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsParametricRanges Test command err: Trying to start YDB, gRPC: 27872, MsgBus: 14124 2025-05-29T15:32:44.367025Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891038194973036:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:44.367078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001abc/r3tmp/tmpiG4HWg/pdisk_1.dat 2025-05-29T15:32:44.424599Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:44.424683Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891038194973012:2079] 1748532764366833 != 1748532764366836 TServer::EnableGrpc on GrpcPort 27872, node 1 2025-05-29T15:32:44.431649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:44.431658Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:44.431660Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:44.431682Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14124 TClient is connected to server localhost:14124 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-29T15:32:44.468402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:44.468437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:44.469572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:44.476926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.487724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.552406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.574182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.585658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.693648Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891038194974646:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.693672Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.725352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.732300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.742104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.796931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.805078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.819907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.833800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.849498Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891038194975301:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.849531Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.849556Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891038194975306:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.850196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:44.853118Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891038194975308:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:32:44.938915Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891038194975359:3397] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:45.029071Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891038194975375:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:45.029164Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OGJhNzUyMTctYjJiMjNhZjEtM2I5ODUxOWUtNTdjNDA5Mzc=, ActorId: [1:7509891038194974643:2401], ActorState: ExecuteState, TraceId: 01jweas85hfs5pqnketmc4hrpc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:45.029800Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F3579796AC2 14. ??:0: ?? @ 0x7F357982884F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange Test command err: Trying to start YDB, gRPC: 15624, MsgBus: 2835 2025-05-29T15:32:44.320576Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891040542211259:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:44.320600Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ac8/r3tmp/tmpymfFuf/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15624, node 1 2025-05-29T15:32:44.388740Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:44.388937Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891040542211236:2079] 1748532764320424 != 1748532764320427 2025-05-29T15:32:44.392200Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:44.392212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:44.392214Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:44.392245Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2835 2025-05-29T15:32:44.423481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:44.423507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:44.424579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2835 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:44.464279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.468011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.532524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.553349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.564192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.661957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891040542212874:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.661996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.694034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.700266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.714263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.728425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.742349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.756017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.763028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.779463Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891040542213525:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.779477Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891040542213530:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.779487Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.780068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:44.783152Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891040542213532:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:44.834817Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891040542213583:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:44.912152Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891040542213599:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:44.912262Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWUxOThlYWQtMzI1M2FkNzEtMjdjNTM3MWQtYmMwNTRlNzA=, ActorId: [1:7509891040542212856:2401], ActorState: ExecuteState, TraceId: 01jweas83bds4d8cdcmwyzk3az, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:44.912872Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F0B4F518AC2 14. ??:0: ?? @ 0x7F0B4F5AA84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables Test command err: Trying to start YDB, gRPC: 4797, MsgBus: 15192 2025-05-29T15:32:44.283370Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891037391037862:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:44.283388Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ad2/r3tmp/tmp4bkYfX/pdisk_1.dat 2025-05-29T15:32:44.336552Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891037391037840:2079] 1748532764283222 != 1748532764283225 2025-05-29T15:32:44.339049Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4797, node 1 2025-05-29T15:32:44.347915Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:44.347933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:44.347936Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:44.347980Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15192 TClient is connected to server localhost:15192 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:44.413343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:44.413370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:44.414470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:44.415034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.424830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.486463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.507407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.519172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.611557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891037391039474:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.611584Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.648347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.655405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.665430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.679400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.686048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.693073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.700102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.716091Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891037391040127:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.716137Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.716146Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891037391040132:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.716775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:44.720277Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891037391040134:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:44.790022Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891037391040185:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:44.864297Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891037391040201:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:44.864417Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NGM3MTgzZmYtOGVhZjY0ZjgtOWQ4NTBiZDctYzU5OTQ5Mw==, ActorId: [1:7509891037391039456:2401], ActorState: ExecuteState, TraceId: 01jweas81bd3wy9q65ykbq3w03, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:44.865010Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F7DC8FBAAC2 14. ??:0: ?? @ 0x7F7DC904C84F >> KqpSystemView::PartitionStatsFollower [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesSimple Test command err: Trying to start YDB, gRPC: 22152, MsgBus: 20954 2025-05-29T15:32:43.909624Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891034795504958:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:43.909958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:32:43.912493Z node 2 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7509891033805955307:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:43.912643Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509891033368663530:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:43.912668Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:32:43.912733Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ad9/r3tmp/tmp94fGJ8/pdisk_1.dat 2025-05-29T15:32:43.974386Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22152, node 1 2025-05-29T15:32:43.984562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:43.984574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:43.984575Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:43.984605Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20954 
2025-05-29T15:32:44.010047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:44.010081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:44.011496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20954 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:44.039062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:44.039081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:44.039126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:44.039133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:44.040613Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-05-29T15:32:44.040622Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:32:44.040864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:44.040924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:44.041211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.051003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.115559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:32:44.133517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.146333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.222233Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891039090474062:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.222263Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.257619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.270495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.331246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.345108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.358299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.372663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.387017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.449086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891039090474902:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.449111Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.449115Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891039090474907:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.449757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:44.455125Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891039090474909:2406], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:44.545145Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891039090474992:4111] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:44.664980Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891039090475010:2410], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:44.665077Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NGViMWZjNWItMWMxMDhhY2ItYWQ0ZjY1ZjQtMTdkYjlmM2M=, ActorId: [1:7509891039090474044:2358], ActorState: ExecuteState, TraceId: 01jweas7s0e89gwb0n6y99gv5p, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:44.665550Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC
8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479
10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC
13. ??:0: ?? @ 0x7FB4A35F0AC2
14. ??:0: ?? @ 0x7FB4A368284F
>> KqpSysColV1::StreamInnerJoinSelect
>> KqpSystemView::PartitionStatsSimple
>> KqpSysColV0::InnerJoinSelectAsterisk
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest
>> TCdcStreamWithRebootsTests::WithoutPqTransactions[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:22.680027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:22.680046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:22.680052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:22.680057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:22.680070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:22.680074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:32:22.680084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:22.680095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:22.680192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:22.680260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:22.694289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:22.694306Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:22.694384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:22.696832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:22.696858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:22.696881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:22.699436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:22.699506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:22.699613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:22.699773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:22.700378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:22.700413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:22.700622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:22.700632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:22.700660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:22.700667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:22.700674Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:22.700693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:22.701800Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:22.716475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:22.716526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:22.716562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:22.716589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:22.716596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:22.717035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:22.717050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:22.717084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:22.717090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:22.717094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:22.717097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:22.717407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:22.717414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-05-29T15:32:22.717417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:22.717662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:22.717668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:22.717671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:22.717676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:22.718107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:22.718408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:22.718438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:22.718555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:22.718570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:22.718576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:22.718609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
944, LocalPathId: 4] was 4 2025-05-29T15:32:46.725808Z node 38 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2025-05-29T15:32:46.725818Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2025-05-29T15:32:46.725822Z node 38 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976715657 2025-05-29T15:32:46.725825Z node 38 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976715657, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:46.725829Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-05-29T15:32:46.725836Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true FAKE_COORDINATOR: Erasing txId 281474976715657 2025-05-29T15:32:46.726058Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6285: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 135 } } CommitVersion { Step: 5000005 TxId: 281474976715657 } 2025-05-29T15:32:46.726065Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409546, partId: 1 2025-05-29T15:32:46.726083Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 135 } } CommitVersion { Step: 5000005 TxId: 281474976715657 } 2025-05-29T15:32:46.726096Z node 38 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 135 } } CommitVersion { Step: 5000005 TxId: 281474976715657 } 2025-05-29T15:32:46.726480Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 330 RawX2: 163208759564 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-05-29T15:32:46.726488Z 
node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409546, partId: 1 2025-05-29T15:32:46.726504Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944, message: Source { RawX1: 330 RawX2: 163208759564 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-05-29T15:32:46.726512Z node 38 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:32:46.726521Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 330 RawX2: 163208759564 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-05-29T15:32:46.726529Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:46.726533Z node 38 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:32:46.726537Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:32:46.726541Z node 38 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976715657:1 129 -> 240 2025-05-29T15:32:46.726755Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2025-05-29T15:32:46.727092Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2025-05-29T15:32:46.727116Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:32:46.727350Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:32:46.727406Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:32:46.727412Z node 38 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 281474976715657:1 ProgressState 2025-05-29T15:32:46.727421Z node 38 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715657:1 progress is 3/3 2025-05-29T15:32:46.727424Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:32:46.727428Z node 38 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715657:1 progress is 3/3 2025-05-29T15:32:46.727430Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:32:46.727433Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2025-05-29T15:32:46.727436Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:32:46.727441Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:0 2025-05-29T15:32:46.727444Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715657:0 2025-05-29T15:32:46.727449Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:46.727452Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:1 2025-05-29T15:32:46.727453Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715657:1 2025-05-29T15:32:46.727462Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:46.727464Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:2 2025-05-29T15:32:46.727466Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715657:2 2025-05-29T15:32:46.727469Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:47.053222Z node 38 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:47.053289Z node 38 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 84us result status StatusSuccess 2025-05-29T15:32:47.053378Z node 38 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true 
CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 ScanProgress { ShardsTotal: 1 ShardsCompleted: 1 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|76.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest
>> KqpSystemView::QueryStatsScan
Test command err:
Trying to start YDB, gRPC: 61207, MsgBus: 64680 2025-05-29T15:32:44.348893Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891040962493687:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:44.348922Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ac6/r3tmp/tmpOUWRdi/pdisk_1.dat 2025-05-29T15:32:44.399238Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:44.399309Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891040962493668:2079] 1748532764348783 != 1748532764348786 TServer::EnableGrpc on GrpcPort 61207, node 1 2025-05-29T15:32:44.409706Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:44.409719Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:44.409722Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:44.409762Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64680 TClient is connected to server localhost:64680 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:32:44.451447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:44.451469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:44.452490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:44.477209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.484352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.545768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.605669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.617463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.681315Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891040962495313:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.681344Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.714971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.721411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.727738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.735041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.742354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.756138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.763376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.779862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891040962495964:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.779903Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.779923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891040962495969:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.780469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:44.783610Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891040962495971:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:44.870215Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891040962496022:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:44.956789Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891040962496038:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:44.956883Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTRmODk4ZjMtNTZiMjAyYzItMjk1OTY5MWYtMzBkYTJlNmM=, ActorId: [1:7509891040962495295:2401], ActorState: ExecuteState, TraceId: 01jweas83bbj06qvwv5fjbhgjb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:44.957483Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC
8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479
10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC
13. ??:0: ?? @ 0x7FFB93C6DAC2
14. ??:0: ?? @ 0x7FFB93CFF84F
|76.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest
>> KqpSystemView::Join
Test command err:
Trying to start YDB, gRPC: 27265, MsgBus: 12906 2025-05-29T15:32:44.430096Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891039965260901:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:44.430129Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ab8/r3tmp/tmpsGFA21/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27265, node 1 2025-05-29T15:32:44.486485Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:44.486616Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891039965260881:2079] 1748532764429972 != 1748532764429975 2025-05-29T15:32:44.490980Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:44.490995Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:44.490997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:44.491031Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12906 TClient is connected to server localhost:12906 2025-05-29T15:32:44.532617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:44.532638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'...
TClient::Ls request: Root 2025-05-29T15:32:44.533706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:44.555934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.564112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.580745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.597678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.610168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.785175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891039965262516:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.785198Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.817629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.872613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.882177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.896725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.910708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.924281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.938752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.954149Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891039965263171:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.954176Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.954179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891039965263176:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.954800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:44.958454Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891039965263178:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:45.041266Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891044260230525:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:45.122578Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891044260230541:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:45.122663Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDUxMzc4NTAtMjU5OWZiZjktZWI1MWRjMWMtZWY3MTA2Y2Q=, ActorId: [1:7509891039965262498:2401], ActorState: ExecuteState, TraceId: 01jweas88saxhtcay0va9j6cm8, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:45.123241Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5
1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6
2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886
3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752
4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12
5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC
6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC
7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC
8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC
9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479
10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69
11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69
12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC
13. ??:0: ?? @ 0x7F0AAAD3FAC2
14. ??:0: ?? @ 0x7F0AAADD184F
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest
>> KqpSystemView::PartitionStatsRange1
Test command err:
Trying to start YDB, gRPC: 30796, MsgBus: 17434 2025-05-29T15:32:44.273670Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891039123265070:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:44.273695Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ad8/r3tmp/tmpr0WCqn/pdisk_1.dat 2025-05-29T15:32:44.328991Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891039123265051:2079] 1748532764273535 != 1748532764273538 2025-05-29T15:32:44.330939Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30796, node 1 2025-05-29T15:32:44.340023Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:44.340034Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:44.340036Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:44.340071Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17434 2025-05-29T15:32:44.375882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:44.375909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:44.376991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17434 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:44.405281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.410493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.475448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.497694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.507766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.620472Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891039123266685:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.620499Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.661765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.716878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.728169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.742224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.756142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.770630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.784327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.800746Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891039123267338:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.800769Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.800776Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891039123267343:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.801454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:44.804183Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891039123267345:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:44.884684Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891039123267396:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:44.949775Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891039123267412:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:44.949855Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTNmZGMwYjEtN2QwYzI5NGEtNTgzZjY0MjctMjVhMzFkY2U=, ActorId: [1:7509891039123266682:2401], ActorState: ExecuteState, TraceId: 01jweas8407abv20wj3j20beq2, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:44.950466Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F2125675AC2 14. ??:0: ?? @ 0x7F212570784F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRange Test command err: Trying to start YDB, gRPC: 11851, MsgBus: 22557 2025-05-29T15:32:44.819181Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891037680660683:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:44.819204Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ab5/r3tmp/tmpPbgAEd/pdisk_1.dat 2025-05-29T15:32:44.862347Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891037680660662:2079] 1748532764819001 != 1748532764819004 2025-05-29T15:32:44.864280Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11851, node 1 2025-05-29T15:32:44.875439Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:44.875452Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:44.875455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:44.875503Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22557 TClient is connected to server localhost:22557 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:44.943676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:44.943705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:44.944312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.944803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:32:44.949521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.964317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.979962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.988988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.111534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891041975629594:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.111559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.140565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.195757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.250300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.260388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.315346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.323007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.337740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.354394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891041975630252:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.354429Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891041975630257:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.354439Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.355152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:45.357215Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891041975630259:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:45.414640Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891041975630310:3399] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:45.508144Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891041975630326:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:45.508250Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmQ3YWUyODQtZDg3OTc2OTUtYjJhMjMzYjEtNDU3ZThhMg==, ActorId: [1:7509891041975629591:2401], ActorState: ExecuteState, TraceId: 01jweas8n90fzkdaq3d7qjanpw, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:45.508981Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F22E26D9AC2 14. ??:0: ?? @ 0x7F22E276B84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete Test command err: Trying to start YDB, gRPC: 27221, MsgBus: 22137 2025-05-29T15:32:44.562062Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891040642681569:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:44.562127Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ab7/r3tmp/tmpULCmBm/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27221, node 1 2025-05-29T15:32:44.617342Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:44.617434Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891040642681550:2079] 1748532764561957 != 1748532764561960 2025-05-29T15:32:44.621449Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:44.621461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:44.621463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:44.621499Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22137 TClient is connected to server localhost:22137 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-29T15:32:44.664421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:44.664445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:44.665536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:44.692741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.702226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.762803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.783430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.793293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:44.872498Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891040642683187:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.872523Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.903286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.909810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.916771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.923867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.931102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.945157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.952108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:44.968297Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891040642683839:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.968322Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.968327Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891040642683844:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:44.968931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:44.972316Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891040642683846:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:45.024225Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891044937651193:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:45.089402Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891044937651209:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:45.089497Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NTEzZjVlMWItMjJmOTU5YjYtNmE4OWJkZTQtYzA1OTg3ODM=, ActorId: [1:7509891040642683184:2401], ActorState: ExecuteState, TraceId: 01jweas89840eb13abmgnkc3jc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:45.090089Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F3D8C204AC2 14. ??:0: ?? @ 0x7F3D8C29684F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById Test command err: Trying to start YDB, gRPC: 26185, MsgBus: 7904 2025-05-29T15:32:45.385661Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891042836453905:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:45.385693Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001aa5/r3tmp/tmpKKbZE5/pdisk_1.dat 2025-05-29T15:32:45.445665Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:45.445827Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891042836453884:2079] 1748532765385511 != 1748532765385514 TServer::EnableGrpc on GrpcPort 26185, node 1 2025-05-29T15:32:45.457941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:45.457954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:45.457956Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:45.457991Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7904 TClient is connected to server localhost:7904 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:45.525559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:45.525580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:45.526699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:45.527666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.533030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.549607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.569734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.580155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.737840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891042836455518:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.737858Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.769525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.775903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.784788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.792187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.806067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.813124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.819825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.836742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891042836456171:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.836767Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.836823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891042836456176:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.837461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:45.840015Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891042836456178:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:45.902075Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891042836456229:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:45.995694Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891042836456245:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:45.995795Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmNjMjIwNzMtOTllMjA3MzUtZWYzZjBmMTQtOTU0NmUyZDE=, ActorId: [1:7509891042836455515:2401], ActorState: ExecuteState, TraceId: 01jweas94ce412x6vafn0pqybh, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:45.996390Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7FB0A80E8AC2 14. ??:0: ?? @ 0x7FB0A817A84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowAsterisk Test command err: Trying to start YDB, gRPC: 27997, MsgBus: 24562 2025-05-29T15:32:45.269892Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891044096813502:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:45.269913Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ab0/r3tmp/tmpKj2LLT/pdisk_1.dat 2025-05-29T15:32:45.324519Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:45.324556Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891044096813480:2079] 1748532765269754 != 1748532765269757 TServer::EnableGrpc on GrpcPort 27997, node 1 2025-05-29T15:32:45.339404Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:45.339415Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:45.339417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:45.339449Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24562 TClient is connected to server localhost:24562 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:45.389297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.397102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.404771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:45.404805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:45.405932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:45.457547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.477065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.487985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.609253Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891044096815113:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.609286Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.641876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.648325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.658862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.713378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.722242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.736318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.750525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.766497Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891044096815767:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.766521Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.766523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891044096815772:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.767210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:45.770127Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891044096815774:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:32:45.843596Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891044096815825:3394] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:45.918399Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891044096815841:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:45.918502Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZmJhYzllNjItOWY3YzVlYjUtZmYwMzBiNDAtYzVjODNhNGM=, ActorId: [1:7509891044096815095:2401], ActorState: ExecuteState, TraceId: 01jweas926frkggmemdydmtgf4, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:45.919271Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F3F91153AC2 14. ??:0: ?? @ 0x7F3F911E584F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess Test command err: Trying to start YDB, gRPC: 17507, MsgBus: 28820 2025-05-29T15:32:46.656772Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891048848835488:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:46.657032Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001a99/r3tmp/tmp5LfrIU/pdisk_1.dat 2025-05-29T15:32:46.712364Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:46.712431Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891048848835467:2079] 1748532766656639 != 1748532766656642 TServer::EnableGrpc on GrpcPort 17507, node 1 2025-05-29T15:32:46.723562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:46.723573Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:46.723574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:46.723604Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28820 TClient is connected to server localhost:28820 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:46.786788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:46.786832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:46.787875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:46.788040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:46.794774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:46.856622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:46.874546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:46.930897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:46.955630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891048848837105:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:46.955653Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:46.989259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:46.995714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.002615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.009730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.017112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.031504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.045489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.061829Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891053143805056:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.061858Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.061875Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891053143805061:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.062717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:47.065604Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891053143805063:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:47.149110Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891053143805114:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:47.231510Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891053143805130:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:47.231616Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MmNiOWYxMjItNGQ2OWRjNzYtMmJjZmFhNWMtZTcxZmU2NGE=, ActorId: [1:7509891048848837102:2401], ActorState: ExecuteState, TraceId: 01jweasaan8ftn7vvqgafk4fra, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:47.232246Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F481E5E6AC2 14. ??:0: ?? @ 0x7F481E67884F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinTables Test command err: Trying to start YDB, gRPC: 8232, MsgBus: 19690 2025-05-29T15:32:45.330115Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891044047573147:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:45.330464Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001aab/r3tmp/tmpyg3bUT/pdisk_1.dat 2025-05-29T15:32:45.378423Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891044047573127:2079] 1748532765329961 != 1748532765329964 2025-05-29T15:32:45.379891Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8232, node 1 2025-05-29T15:32:45.390501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:45.390510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:45.390511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:45.390542Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19690 TClient is connected to server localhost:19690 WaitRootIsUp 'Root'... 
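The root-cause line repeated in every failure here is `yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1`, surfaced as INTERNAL_ERROR by kqp_compile_actor.cpp:564, meaning the query never compiles. The message has the `file:line: text` shape that Arcadia ensure-style macros produce; below is a sketch of the kind of guard that would yield it, with all names hypothetical and nothing copied from yql_expr.h.

```cpp
#include <util/generic/vector.h>
#include <util/generic/yexception.h>

// Hypothetical expression-node type, illustrating only the guard shape.
struct TExprNodeSketch {
    TVector<TExprNodeSketch*> Children;

    TExprNodeSketch* Child(size_t index) const {
        // Y_ENSURE throws a yexception prefixed with the throwing file
        // and line, which matches the "<file>:<line>: index out of range"
        // text in the issues above.
        Y_ENSURE(index < Children.size(), "index out of range");
        return Children[index];
    }
};
```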
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:45.459425Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:45.459447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:45.459993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.460514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:32:45.471738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.532854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.552847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.561743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.641724Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891044047574758:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.641753Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.676069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.682791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.693837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.700788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.755671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.764482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.777960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.794355Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891044047575412:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.794376Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.794378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891044047575417:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.795114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:45.798189Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891044047575419:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:45.877165Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891044047575470:3394] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:45.982611Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891044047575486:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:45.982704Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NDU4NjUxNGMtNmIxMmRhNWYtYTNjMDA0NzEtNTk0ZjkyYTQ=, ActorId: [1:7509891044047574740:2401], ActorState: ExecuteState, TraceId: 01jweas932ckj39pyh1qzx53mz, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:45.983298Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F769B9FEAC2 14. ??:0: ?? @ 0x7F769BA9084F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsFollower [FAIL] Test command err: Trying to start YDB, gRPC: 3582, MsgBus: 11425 2025-05-29T15:32:46.282931Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891046939960749:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:46.282955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001aa0/r3tmp/tmpzK3Zgj/pdisk_1.dat 2025-05-29T15:32:46.330191Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:46.330673Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891046939960730:2079] 1748532766282801 != 1748532766282804 TServer::EnableGrpc on GrpcPort 3582, node 1 2025-05-29T15:32:46.342082Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:46.342093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:46.342095Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:46.342133Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11425 TClient is connected to server localhost:11425 WaitRootIsUp 'Root'... 
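A note on the warning blocks that precede each failure: the `Resource pool default not found or you don't have access permissions`, `Transaction ... completed, doublechecking`, and `path exist, request accepts it` sequence recurs at every server start in this excerpt. The log itself shows the workload service lazily fetching `/Root/.metadata/workload_manager/pools/default`, creating it on NOT_FOUND, losing the create race to a concurrent actor (StatusAlreadyExists), and scheduling a verification retry. This appears to be routine startup noise and is distinct from the compile error that actually fails these tests.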
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:46.412731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:46.412765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:46.413445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:32:46.413731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:32:46.600287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891046939961390:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:46.600310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:46.628634Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:7509891046939961414:2298], Recipient [1:7509891046939961175:2197]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:32:46.628653Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:32:46.628657Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:32:46.628670Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [1:7509891046939961410:2295], Recipient [1:7509891046939961175:2197]: {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:32:46.628673Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:32:46.638213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:32:46.638319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:426: TCreateTable Propose, path: /Root/Followers, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:32:46.638349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:433: TCreateTable Propose, path: /Root/Followers, opId: 281474976715658:0, schema: Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false, at schemeshard: 72057594046644480 2025-05-29T15:32:46.638470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Followers, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-05-29T15:32:46.638490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-05-29T15:32:46.638495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-29T15:32:46.638533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, 
LocalPathId: 2] was 2 2025-05-29T15:32:46.638543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976715658:0 1 -> 2 2025-05-29T15:32:46.638552Z node 1 :SYSTEM_VIEWS TRACE: partition_stats.cpp:83: TEvSysView::TEvSetPartitioning: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] path /Root/Followers ShardIndices size 1 2025-05-29T15:32:46.638733Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_create_table.cpp:744: TCreateTable Propose creating new table opId# 281474976715658:0 path# /Root/Followers pathId# [OwnerId: 72057594046644480, LocalPathId: 2] schemeshard# 72057594046644480 tx# WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } FailOnExist: false 2025-05-29T15:32:46.638831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:32:46.638842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:32:46.638857Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:32:46.638872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-29T15:32:46.638879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-05-29T15:32:46.639062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-05-29T15:32:46.639089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Followers 2025-05-29T15:32:46.639102Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:32:46.639111Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:275: Activate send for 281474976715658:0 2025-05-29T15:32:46.639163Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 2146435072, Sender [1:7509891046939961175:2197], Recipient [1:7509891046939961175:2197]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-29T15:32:46.639171Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4894: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-29T15:32:46.639179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-29T15:32:46.639186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-29T15:32:46.639221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-05-29T15:32:46.639239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-29T15:32:46.6 ... BUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2025-05-29T15:32:46.666598Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:32:46.666601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2025-05-29T15:32:46.666602Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:32:46.666604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2025-05-29T15:32:46.666605Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:32:46.666607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2025-05-29T15:32:46.666608Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:32:46.666610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2025-05-29T15:32:46.666611Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:32:46.666618Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:630: Send to actor: [1:7509891046939961531:2344] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 at schemeshard: 72057594046644480 2025-05-29T15:32:46.666656Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891046939961531:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:32:46.666723Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [1:7509891046939961560:2378], Recipient [1:7509891046939961175:2197]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:32:46.666734Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:32:46.666749Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480 2025-05-29T15:32:46.721686Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:7509891046939961586:2403], Recipient [1:7509891046939961175:2197]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:32:46.721699Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:32:46.721701Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:32:46.721706Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [1:7509891046939961582:2400], Recipient [1:7509891046939961175:2197]: {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-05-29T15:32:46.721707Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:32:46.722352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } TxId: 281474976715660 TabletId: 72057594046644480 Owner: "metadata@system" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:32:46.722422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2025-05-29T15:32:46.722459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92, at schemeshard: 72057594046644480 2025-05-29T15:32:46.722529Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-29T15:32:46.722662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 281474976715660, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" TxId: 281474976715660 SchemeshardId: 72057594046644480 PathId: 6 PathCreateTxId: 281474976715659, at schemeshard: 72057594046644480 2025-05-29T15:32:46.722694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-05-29T15:32:46.722703Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-29T15:32:46.723070Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891046939961582:2400] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:46.723155Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [1:7509891046939961586:2403], Recipient [1:7509891046939961175:2197]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:32:46.723165Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:32:46.723167Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046644480 2025-05-29T15:32:46.735782Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891046939961598:2348], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:46.735879Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NWZiYzUwYS1lYjBlNTI4MS0zNTQ2OWVjZS03OTMyNDgyNQ==, ActorId: [1:7509891046939961386:2326], ActorState: ExecuteState, TraceId: 01jweas9y03p6gt0b8dthjx27a, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:32:47.333046Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7509891046939961175:2197]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:32:47.333077Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4885: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-05-29T15:32:47.333082Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124999, Sender [1:7509891046939961175:2197], Recipient [1:7509891046939961175:2197]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:32:47.333084Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4884: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-05-29T15:32:48.000215Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:593: Handle TEvPrivate::TEvProcessInterval: service id# [1:7509891046939960803:2113], interval end# 2025-05-29T15:32:48.000000Z, event interval end# 2025-05-29T15:32:48.000000Z 2025-05-29T15:32:48.000249Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# [1:7509891046939960803:2113], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-05-29T15:32:48.000287Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:593: Handle TEvPrivate::TEvProcessInterval: service id# [1:7509891046939960719:2068], interval end# 2025-05-29T15:32:48.000000Z, event interval end# 2025-05-29T15:32:48.000000Z 2025-05-29T15:32:48.000290Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# [1:7509891046939960719:2068], query logs count# 0, processor ids count# 0, processor id to database count# 0 assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13A8032B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13C37798 2. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 3. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:699: Execute_ @ 0x1397B95F 4. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:16: operator() @ 0x139809E6 5. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13C3964D 6. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:16: Execute @ 0x139803A3 7. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13C39DC2 8. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13C4B96C 9. ??:0: ?? @ 0x7F6249DC5D8F 10. ??:0: ?? @ 0x7F6249DC5E3F 11. ??:0: ?? @ 0x129B9028 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange2 Test command err: Trying to start YDB, gRPC: 5577, MsgBus: 14598 2025-05-29T15:32:45.283589Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891044875028140:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:45.283622Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001aac/r3tmp/tmp0iWT2J/pdisk_1.dat 2025-05-29T15:32:45.351899Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:45.351959Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891044875028118:2079] 1748532765283458 != 1748532765283461 TServer::EnableGrpc on GrpcPort 5577, node 1 2025-05-29T15:32:45.363969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:45.363979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:45.363980Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:45.364012Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14598 2025-05-29T15:32:45.385892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:45.385920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:45.387082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14598 WaitRootIsUp 'Root'... 
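The PartitionStatsFollower run above proposes `/Root/Followers` with `FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false }`. Expressed as the DDL a test would normally issue, under the assumption (not confirmed by this log) that this follower group corresponds to three read replicas in any availability zone, i.e. `READ_REPLICAS_SETTINGS = "ANY_AZ:3"`:

```cpp
#include <ydb/public/sdk/cpp/client/ydb_table/table.h> // SDK path assumed

// Assumption: FollowerCount: 3 with RequireAllDataCenters: false maps to
// READ_REPLICAS_SETTINGS = "ANY_AZ:3"; the proto in the log is the only
// confirmed part.
NYdb::TStatus CreateFollowersTable(NYdb::NTable::TSession& session) {
    return session.ExecuteSchemeQuery(R"(
        CREATE TABLE `/Root/Followers` (
            Key Uint64,
            Value String,
            PRIMARY KEY (Key)
        ) WITH (
            READ_REPLICAS_SETTINGS = "ANY_AZ:3"
        );
    )").GetValueSync();
}
```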
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:45.427193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.437379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.453128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.472450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.483204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:45.628414Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891044875029758:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.628444Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.666224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.673517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.687168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.693870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.700827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.708072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.722332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:45.738490Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891044875030413:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.738522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.738542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891044875030418:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:45.739146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:45.741922Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891044875030420:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:45.821038Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891044875030471:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:45.900677Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891044875030487:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:45.900777Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NGQzOWNmMDMtZmZiNWY1NTgtMWNiOWMwMjAtNzYyYzFhYjY=, ActorId: [1:7509891044875029740:2401], ActorState: ExecuteState, TraceId: 01jweas91a8gqzj1r849b0tent, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:45.901384Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F4244675AC2 14. ??:0: ?? @ 0x7F424470784F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRanges Test command err: Trying to start YDB, gRPC: 30634, MsgBus: 27078 2025-05-29T15:32:46.770840Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891048128645834:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:46.770882Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001a98/r3tmp/tmpW8mEN1/pdisk_1.dat 2025-05-29T15:32:46.817139Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891048128645814:2079] 1748532766770667 != 1748532766770670 2025-05-29T15:32:46.818666Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30634, node 1 2025-05-29T15:32:46.829322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:46.829335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:46.829337Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:46.829382Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27078 TClient is connected to server localhost:27078 WaitRootIsUp 'Root'... 
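Most of the traces report `VERIFY failed ... assertion failed in non-unittest thread`: frames 5-8 show `CreateSampleTables` executing as `NThreading::Async` work on a thread pool, so the failed assertion panics the entire test binary instead of failing a single test (only KqpSystemView::PartitionStatsFollower, which asserts on the test thread via kqp_sys_view_ut.cpp:699, is reported as [FAIL]). A minimal sketch of that shape, assuming the stock Arcadia helpers the frames name; the lambda body is illustrative.

```cpp
#include <library/cpp/threading/future/async.h>
#include <util/system/yassert.h>
#include <util/thread/pool.h>

int main() {
    TThreadPool pool;
    pool.Start(1);
    // Frames 5-8 above: work scheduled via NThreading::Async runs on a
    // pool thread; a failed VERIFY there cannot be attributed to a test,
    // so the registar panics the whole process ("in non-unittest thread").
    auto done = NThreading::Async([] {
        Y_ABORT_UNLESS(false, "simulated CreateSampleTables failure");
    }, pool);
    done.GetValueSync(); // never returns normally: the process aborts first
}
```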
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:46.896616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:46.896644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:46.897594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:46.897673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:46.904249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:46.966659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:46.983413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:46.993171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:47.047687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891052423614742:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.047721Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.078431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.084444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.139428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.194084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.205930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.212784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.220022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.235709Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891052423615397:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.235735Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.235736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891052423615402:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.236296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:47.240293Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891052423615404:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:47.295201Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891052423615455:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:47.393030Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891052423615471:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:47.393138Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTMxMGVhMzktZDdhYmM4MjMtOTQxMGZmYzEtZjhhMTJlODQ=, ActorId: [1:7509891052423614739:2401], ActorState: ExecuteState, TraceId: 01jweasag36yh2sjcn3j7nyb8p, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:47.393687Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7FE1D4FC1AC2 14. ??:0: ?? @ 0x7FE1D505384F >> TCdcStreamWithRebootsTests::DropStream[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete Test command err: Trying to start YDB, gRPC: 65414, MsgBus: 15173 2025-05-29T15:32:47.186940Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891053465737844:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:47.186964Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001a94/r3tmp/tmp2NRFKB/pdisk_1.dat 2025-05-29T15:32:47.239958Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:47.240055Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891053465737819:2079] 1748532767186819 != 1748532767186822 TServer::EnableGrpc on GrpcPort 65414, node 1 2025-05-29T15:32:47.250819Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:47.250835Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:47.250837Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:47.250881Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15173 2025-05-29T15:32:47.288197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:47.288221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:15173 2025-05-29T15:32:47.289435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:47.317052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:47.321806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:47.387289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:47.408280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:47.418528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:47.537436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891053465739455:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.537463Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.579751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.587653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.598027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.612531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.626725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.640704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.654923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.671030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891053465740107:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.671056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.671117Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891053465740112:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.671720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:47.674463Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891053465740114:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:47.767176Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891053465740165:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:47.835342Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891053465740181:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:47.835414Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NzU1YjQ1NGEtYjFmNGY2MWItYzk5OTk4ODUtNjI2MGFkMWY=, ActorId: [1:7509891053465739437:2401], ActorState: ExecuteState, TraceId: 01jweasaxpdtfjrzhhpr2zfzr7, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:47.836070Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7FE0670FAAC2 14. ??:0: ?? @ 0x7FE06718C84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStream[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:30.684862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:30.684879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:30.684882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:30.684886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:30.684893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:30.684895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:30.684901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:30.684910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:30.684972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:30.685024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:30.693500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:30.693513Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:30.693574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:30.695168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:30.695187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:30.695202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:30.696688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:30.696727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:30.696797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:30.696904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:30.697263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:30.697284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:30.697416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:30.697422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:30.697438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:30.697442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:30.697446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:30.697458Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:30.698159Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:30.709485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:30.709531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:30.709567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:30.709596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:30.709603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:30.710039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:30.710058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:30.710091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:30.710098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:30.710101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:30.710104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:30.710471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:30.710480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:30.710484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 
2025-05-29T15:32:30.710783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:30.710793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:30.710796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:30.710800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:30.711209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:30.711474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:30.711497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:30.711604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:30.711620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:30.711624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:30.711657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:50.096797Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:50.096799Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-29T15:32:50.096802Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-29T15:32:50.096841Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:50.096870Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:50.096873Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:50.096875Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:32:50.096877Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:32:50.096915Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:50.096918Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:50.096920Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-29T15:32:50.096922Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-29T15:32:50.096927Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2025-05-29T15:32:50.096939Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:32:50.096941Z node 79 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:32:50.096943Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2025-05-29T15:32:50.097255Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:50.097271Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:3 2025-05-29T15:32:50.097282Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:50.097315Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-05-29T15:32:50.097320Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-05-29T15:32:50.097327Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 3/3 2025-05-29T15:32:50.097331Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2025-05-29T15:32:50.097334Z node 79 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1004:0 progress is 3/3 2025-05-29T15:32:50.097336Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2025-05-29T15:32:50.097339Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/3, is published: true 2025-05-29T15:32:50.097342Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2025-05-29T15:32:50.097345Z node 79 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:0 2025-05-29T15:32:50.097347Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:0 2025-05-29T15:32:50.097359Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:50.097362Z node 79 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:1 2025-05-29T15:32:50.097364Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:1 2025-05-29T15:32:50.097368Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:32:50.097370Z node 79 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1004:2 2025-05-29T15:32:50.097372Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1004:2 2025-05-29T15:32:50.097377Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 
2025-05-29T15:32:50.097405Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:32:50.097409Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:32:50.097414Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:32:50.097417Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-29T15:32:50.097420Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:32:50.097808Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:50.097823Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:32:50.097847Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:50.097853Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:50.097870Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:50.097874Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:32:50.098143Z node 79 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-05-29T15:32:50.098187Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:32:50.098191Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-29T15:32:50.098226Z node 79 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:32:50.098235Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:32:50.098238Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [79:743:2659] TestWaitNotification: OK eventTxId 1004 2025-05-29T15:32:50.098282Z node 79 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:50.098300Z node 79 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 24us result status StatusPathDoesNotExist 2025-05-29T15:32:50.098322Z node 79 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailResolve Test command err: Trying to start YDB, gRPC: 5840, MsgBus: 26488 2025-05-29T15:32:47.165019Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891052447233341:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:47.165037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001a8e/r3tmp/tmpUtg5K4/pdisk_1.dat 2025-05-29T15:32:47.220507Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:47.220627Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891052447233319:2079] 1748532767164885 != 1748532767164888 TServer::EnableGrpc on GrpcPort 5840, node 1 2025-05-29T15:32:47.229044Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:47.229056Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:47.229059Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:47.229100Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26488 2025-05-29T15:32:47.267448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:47.267484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:26488 2025-05-29T15:32:47.268479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:47.297918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:47.310295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:47.325620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:47.341808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:47.353975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:47.501746Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891052447234955:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.501776Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.541084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.548221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.556077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.570552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.626313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.640724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.655019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:47.671306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891052447235608:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.671330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.671332Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891052447235613:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:47.672005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:47.674013Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891052447235615:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:47.749255Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891052447235666:3397] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:47.822587Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891052447235682:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:47.822719Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODY1MTI1MjYtN2E4OGVmNWYtZmM5MzhhMDctNDNhNDMxMzU=, ActorId: [1:7509891052447234952:2401], ActorState: ExecuteState, TraceId: 01jweasaxp8ma0z7w9109h7d1p, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:47.823445Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F81DB484AC2 14. ??:0: ?? @ 0x7F81DB51684F >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithInitialScan[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithInitialScan[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:15.371475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:15.371492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:15.371495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:15.371499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:15.371508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:15.371510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-05-29T15:32:15.371516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:15.371524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:15.371583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:15.371636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:15.380995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:15.381011Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:15.381069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:15.383098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:15.383116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:15.383131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:15.385006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:15.385052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:15.385123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.385241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:15.385631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:15.385653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:15.385795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:15.385802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:15.385820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:15.385824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:15.385828Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:15.385840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:15.386688Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:15.399249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:15.399301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.399340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:15.399371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:15.399379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.399837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.399853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:15.399884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.399890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:15.399893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:15.399896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:15.400154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.400161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-05-29T15:32:15.400164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:15.400358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.400364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.400367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:15.400371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:15.400795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:15.401067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:15.401086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:15.401206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.401221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:15.401226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:15.401260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
hemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [144:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 6 2025-05-29T15:32:50.799247Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [144:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 7 2025-05-29T15:32:50.799343Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:4, at schemeshard: 72057594046678944 2025-05-29T15:32:50.799348Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:4 ProgressState 2025-05-29T15:32:50.799358Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:4 progress is 4/5 2025-05-29T15:32:50.799360Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/5 2025-05-29T15:32:50.799364Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1103: All parts have reached barrier, tx: 1003, done: 4, blocked: 1 2025-05-29T15:32:50.799370Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_cdc_stream.cpp:400: [72057594046678944] TDone opId# 1003:3HandleReply TEvCompleteBarrier 2025-05-29T15:32:50.799381Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:4 progress is 4/5 2025-05-29T15:32:50.799384Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 4/5 2025-05-29T15:32:50.799387Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:3 progress is 5/5 2025-05-29T15:32:50.799390Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 5/5 2025-05-29T15:32:50.799395Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/5, is published: false 2025-05-29T15:32:50.799399Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 5/5, is published: false 2025-05-29T15:32:50.799405Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 5/5 2025-05-29T15:32:50.799408Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:50.799411Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:50.799417Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:50.799421Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:50.799423Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:50.799426Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:50.799428Z node 144 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:50.799429Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:50.799432Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-05-29T15:32:50.799434Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:3 2025-05-29T15:32:50.799436Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:3 2025-05-29T15:32:50.799448Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:50.799451Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:4 2025-05-29T15:32:50.799454Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:4 2025-05-29T15:32:50.799459Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-05-29T15:32:50.799462Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:32:50.799464Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 4 2025-05-29T15:32:50.799466Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 7], 2 2025-05-29T15:32:50.799596Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:50.799605Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:50.799608Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:50.799611Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4 2025-05-29T15:32:50.799614Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-29T15:32:50.799832Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:50.799843Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:50.799846Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:50.799849Z node 144 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2025-05-29T15:32:50.799852Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-05-29T15:32:50.799861Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:50.800543Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:50.800905Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:50.801912Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:50.801921Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:50.801980Z node 144 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:50.801994Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:50.801998Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [144:734:2640] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:50.802045Z node 144 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:50.802077Z node 144 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 38us result status StatusSuccess 2025-05-29T15:32:50.802149Z node 144 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateScan SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpSystemView::NodesRange1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelectAsterisk Test command err: Trying to start YDB, gRPC: 18798, MsgBus: 8571 2025-05-29T15:32:49.246085Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891060730984031:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:49.246108Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001a66/r3tmp/tmp6bfvvI/pdisk_1.dat 2025-05-29T15:32:49.294017Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891060730984007:2079] 1748532769245906 != 1748532769245909 2025-05-29T15:32:49.294696Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18798, node 1 2025-05-29T15:32:49.304859Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:49.304870Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:49.304871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:49.304907Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8571 TClient is connected to server localhost:8571 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:49.374937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:49.374959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:49.375640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.375892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-29T15:32:49.388801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:49.448646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:49.462829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:49.474984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:49.546073Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891060730985640:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:49.546093Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:49.575093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.629398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.641886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.648233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.655398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.662633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.669315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.678438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891060730986295:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:49.678459Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:49.678468Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891060730986300:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:49.678921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:49.683193Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891060730986302:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:49.777179Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891060730986353:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:49.844754Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891060730986369:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:49.844835Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODE5NWUwNWEtOGNmMWM4NjItNjE4ZGQ4ZGUtZTNlYTZhMDk=, ActorId: [1:7509891060730985637:2401], ActorState: ExecuteState, TraceId: 01jweascwe4550605pkn7n9cv3, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:49.845367Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7FFA1ACCEAC2 14. ??:0: ?? @ 0x7FFA1AD6084F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelect Test command err: Trying to start YDB, gRPC: 9870, MsgBus: 7490 2025-05-29T15:32:49.279003Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891062394190646:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:49.279283Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001a7a/r3tmp/tmpPakfNR/pdisk_1.dat 2025-05-29T15:32:49.325070Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:49.325330Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891062394190626:2079] 1748532769278846 != 1748532769278849 TServer::EnableGrpc on GrpcPort 9870, node 1 2025-05-29T15:32:49.337057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:49.337068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:49.337070Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:49.337104Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7490 TClient is connected to server localhost:7490 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:49.406983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:49.407007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:49.408002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:49.408502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:49.418610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:49.479287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:49.495619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:49.505076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:49.566718Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891062394192258:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:49.566759Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:49.593431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.599385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.654017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.662960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.717491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.726293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.740778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.756275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891062394192914:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:49.756298Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:49.756318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891062394192919:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:49.756997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:49.760518Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891062394192921:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:32:49.860301Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891062394192972:3395] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:49.926186Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891062394192988:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:49.926273Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTNhMDZkNDUtZGYyNjBjZTUtMTg4ZjRhNzEtNTUwNWYxNGI=, ActorId: [1:7509891062394192240:2401], ActorState: ExecuteState, TraceId: 01jweascyv5v096f6604ktyxxb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:49.926889Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F41A3078AC2 14. ??:0: ?? @ 0x7F41A310A84F ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsSimple Test command err: Trying to start YDB, gRPC: 22214, MsgBus: 11838 2025-05-29T15:32:49.516428Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891061544588571:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:49.516442Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001a24/r3tmp/tmp69G2Up/pdisk_1.dat 2025-05-29T15:32:49.559615Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509891061544588550:2079] 1748532769516297 != 1748532769516300 2025-05-29T15:32:49.561540Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22214, node 1 2025-05-29T15:32:49.570500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:49.570511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:49.570513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:32:49.570545Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11838 TClient is connected to server localhost:11838 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:32:49.613444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:49.625178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:49.641420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:49.641443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:49.642557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:49.684607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:49.703775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:49.714221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:49.792149Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891061544590182:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:49.792177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:49.822237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.828627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.837857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.892217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.946435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.956804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.963886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:49.972791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891061544590838:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:49.972808Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891061544590843:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:49.972810Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:49.973370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:49.976869Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891061544590845:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-05-29T15:32:50.046334Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891065839558192:3394] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:50.117891Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891065839558208:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:50.117981Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZDM5Zjg4YWQtYjFiMmQ3NmQtYzdiMDA1MDYtM2ViNjRmMWQ=, ActorId: [1:7509891061544590164:2401], ActorState: ExecuteState, TraceId: 01jweasd5ma523wmm62mctqjek, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:50.118668Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7F0506015AC2 14. ??:0: ?? @ 0x7F05060A784F >> TCdcStreamWithRebootsTests::CreateStreamWithVirtualTimestamps[TabletReboots] [GOOD] >> YdbSdkSessionsPool1Session::CustomPlan/0 >> YdbSdkSessionsPool::StressTestSync/1 >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool >> YdbSdkSessionsPool::WaitQueue/1 >> YdbSdkSessionsPool1Session::RunSmallPlan/0 >> YdbSdkSessionsPool::PeriodicTask/0 >> YdbSdkSessionsPool::StressTestSync/0 >> YdbSdkSessions::TestSessionPool >> YdbSdkSessions::TestMultipleSessions >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] >> YdbSdkSessionsPool::WaitQueue/0 >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly >> YdbSdkSessions::MultiThreadSync >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithVirtualTimestamps[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:19.150471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 
600.000000s, MaxRate# 1 2025-05-29T15:32:19.150487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:19.150490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:19.150493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:19.150501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:19.150503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:19.150509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:19.150518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:19.150579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:19.150627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:19.159348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:19.159363Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:19.159425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:19.161290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:19.161309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:19.161325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:19.163349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:19.163398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:19.163470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:19.163620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:19.164097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:19.164123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:19.164260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:19.164265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:19.164285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:19.164290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:19.164294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:19.164305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:19.165082Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:19.177277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:19.177321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:19.177353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:19.177380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:19.177387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:19.177798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:19.177814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:19.177845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-05-29T15:32:19.177851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:19.177854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:19.177858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:19.178133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:19.178139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:19.178144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:19.178361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:19.178367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:19.178370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:19.178374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:19.178808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:19.179111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:19.179130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:19.179238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:19.179253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:19.179257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:19.179288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 78944 2025-05-29T15:32:53.374719Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 1003:2 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-05-29T15:32:53.374792Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:2 128 -> 240 2025-05-29T15:32:53.374829Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:53.374844Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:53.375694Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:53.375743Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:53.375751Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:32:53.375797Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:32:53.375832Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:53.375837Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [139:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:32:53.375841Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [139:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2025-05-29T15:32:53.375926Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:32:53.375937Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:2 ProgressState 2025-05-29T15:32:53.375947Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:53.375951Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:53.375955Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:32:53.375957Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:53.375961Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: false 
2025-05-29T15:32:53.375965Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:32:53.375969Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:32:53.375972Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:32:53.375982Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:32:53.375985Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:32:53.375988Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:32:53.376001Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:32:53.376005Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:32:53.376007Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:32:53.376013Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:32:53.376016Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:32:53.376019Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 4 2025-05-29T15:32:53.376021Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-05-29T15:32:53.376265Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:53.376278Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:53.376282Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:53.376286Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:32:53.376304Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:32:53.376536Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:53.376546Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:32:53.376549Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:32:53.376552Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:32:53.376554Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:32:53.376563Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:32:53.377458Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:32:53.377609Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:32:53.379215Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:32:53.379228Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:32:53.379299Z node 139 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:32:53.379318Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:32:53.379323Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [139:659:2576] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:32:53.379395Z node 139 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:32:53.379439Z node 139 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 55us result status StatusSuccess 2025-05-29T15:32:53.379548Z node 139 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" 
ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: true AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> YdbSdkSessions::MultiThreadSync [GOOD] >> YdbSdkSessions::SessionsServerLimit [SKIPPED] >> YdbSdkSessions::TestMultipleSessions [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError >> YdbSdkSessions::TestSessionPool [GOOD] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> HttpRequest::ProbeServerless [FAIL] |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimit [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:543: Enable after accepting a pull request with merging configs >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-fifo] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> 
YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:243: Test is failing right now ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange1 Test command err: Trying to start YDB, gRPC: 27644, MsgBus: 17578 2025-05-29T15:32:52.034069Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509891075407505956:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:52.034232Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:32:52.038942Z node 3 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7509891072118095445:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:52.039056Z node 3 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:32:52.043597Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509891075168187798:2286];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:32:52.043906Z node 5 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7509891075546011495:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001a1d/r3tmp/tmp8jM6zq/pdisk_1.dat 2025-05-29T15:32:52.064505Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:32:52.064634Z node 5 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-29T15:32:52.091355Z node 2 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:32:52.132779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:52.132805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:52.134194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27644, node 1 2025-05-29T15:32:52.143347Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:52.143662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:32:52.143673Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:32:52.143676Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-05-29T15:32:52.143740Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17578 2025-05-29T15:32:52.164844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:52.164863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:52.164902Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:52.164907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:52.165236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:52.165252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:52.165635Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-05-29T15:32:52.165648Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-05-29T15:32:52.165651Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:32:52.165883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:52.165933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:52.165961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:32:52.166840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:32:52.166859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:32:52.167261Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-05-29T15:32:52.167446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17578 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:32:52.203459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:52.208807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:52.272949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:52.335659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:52.348872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:32:52.380709Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891075407507514:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:52.380731Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:52.412638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-05-29T15:32:52.424495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-05-29T15:32:52.435978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-05-29T15:32:52.450322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2025-05-29T15:32:52.464182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 2025-05-29T15:32:52.478118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720667:0, at schemeshard: 72057594046644480 2025-05-29T15:32:52.492507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720668:0, at schemeshard: 72057594046644480 2025-05-29T15:32:52.508014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891075407508270:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:52.508031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509891075407508275:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:52.508037Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:32:52.508570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720669:3, at schemeshard: 72057594046644480 2025-05-29T15:32:52.512232Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509891075407508277:2391], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720669 completed, doublechecking } 2025-05-29T15:32:52.586361Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509891075407508356:3988] txid# 281474976720670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:32:52.666779Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509891075407508374:2395], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:52.666880Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTVhMmIxNzctNTdkNmEwNTYtM2FmMzE5YjgtNjM1OTY1Zjk=, ActorId: [1:7509891075407507511:2349], ActorState: ExecuteState, TraceId: 01jweasfmv8tcvhs1vax2jk5bm, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:32:52.667456Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9EAF5 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A95AF6 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C37886 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x13979752 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D6A12 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F7FBC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F7FBC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F7FBC 8. /-S/util/thread/pool.h:71: Process @ 0x260F7FBC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA6479 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA4E69 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA4E69 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13AA02DC 13. ??:0: ?? @ 0x7FE532602AC2 14. ??:0: ?? @ 0x7FE53269484F |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSessionPool [GOOD] |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] |77.0%| [TA] $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] |77.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[kick_tablets] |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-std] |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless [FAIL] Test command err: 2025-05-29T15:30:28.312962Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:30:28.313019Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:30:28.313044Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00153e/r3tmp/tmp87aDzb/pdisk_1.dat 2025-05-29T15:30:28.428528Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10532, node 1 2025-05-29T15:30:28.557784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:30:28.557810Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:30:28.557815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:30:28.557944Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:30:28.558680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:30:28.636414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:28.636463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:28.649451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24026 2025-05-29T15:30:28.991557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:30:29.783263Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:30:29.790164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:29.790197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:29.834474Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:30:29.835203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:29.987549Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:29.987760Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:29.987920Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-05-29T15:30:29.987961Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:29.987985Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:29.988043Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:29.988064Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:29.988104Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:29.988123Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:30:30.156174Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:30:30.156219Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:30:30.170265Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:30:30.229306Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:30.244585Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:30:30.244624Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:30:30.277568Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:30:30.277619Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:30:30.277646Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:30:30.277652Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:30:30.277659Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:30:30.277664Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:30:30.277669Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:30:30.277678Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:30:30.277809Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:30:30.298757Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:30:30.298795Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:1859:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:30:30.303864Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1871:2604] 2025-05-29T15:30:30.304945Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1889:2614] 
2025-05-29T15:30:30.305076Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1889:2614], schemeshard id = 72075186224037897 2025-05-29T15:30:30.316135Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-05-29T15:30:30.338188Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:30:30.338210Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:30:30.338221Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-05-29T15:30:30.342456Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:30:30.344682Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:30:30.344726Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:30:30.459181Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:30:30.557383Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:30:30.632471Z node 2 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:30:31.275893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-29T15:30:31.852265Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:31.950893Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7809: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-05-29T15:30:31.950918Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7825: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-05-29T15:30:31.950931Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:2562:2935], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-05-29T15:30:31.952541Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:2564:2937] 2025-05-29T15:30:31.952752Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2564:2937], schemeshard id = 72075186224037899 2025-05-29T15:30:32.939891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2687:3228], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:32.939928Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:30:32.956484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-05-29T15:30:33.030715Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037905;self_id=[2:2830:3068];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:30:33.030805Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037905;self_id=[2:2830:3068];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:30:33.030860Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037905;self_id=[2:2830:3068];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register ... ode_id=2&id=ZmE2MzZlMjEtY2VlNjhmMjAtNzEwZWVjZjEtNTljYTU0Nzk=, ActorId: [2:11815:8809], ActorState: ExecuteState, TraceId: 01jweasgq19q7pqgq9zm36e142, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:32:53.618753Z node 2 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=ZmE2MzZlMjEtY2VlNjhmMjAtNzEwZWVjZjEtNTljYTU0Nzk=, TxId: 2025-05-29T15:32:53.618766Z node 2 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=ZmE2MzZlMjEtY2VlNjhmMjAtNzEwZWVjZjEtNTljYTU0Nzk=, TxId: 2025-05-29T15:32:53.733664Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-05-29T15:32:53.734582Z node 2 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:32:53.739988Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:11835:8828]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:32:53.740069Z node 2 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2025-05-29T15:32:53.740079Z node 2 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 22, ReplyToActorId = [2:11835:8828], StatRequests.size() = 1 2025-05-29T15:32:53.750425Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:11831:8824], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:53.751282Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=Mjc0OGMwMGYtOGFjMGUwN2YtYTM1MWNjN2MtODFlZjI2ZTQ=, ActorId: [2:11828:8821], ActorState: ExecuteState, TraceId: 01jweasgv64h3gnqvvwaw1trw1, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:32:53.751524Z node 2 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=Mjc0OGMwMGYtOGFjMGUwN2YtYTM1MWNjN2MtODFlZjI2ZTQ=, TxId: 2025-05-29T15:32:53.751541Z node 2 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=Mjc0OGMwMGYtOGFjMGUwN2YtYTM1MWNjN2MtODFlZjI2ZTQ=, TxId: 2025-05-29T15:32:54.004985Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 3 is different from the current 0 2025-05-29T15:32:54.005022Z node 2 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-05-29T15:32:54.088178Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-05-29T15:32:54.089209Z node 2 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:32:54.093894Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:11856:8848]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:32:54.093970Z node 2 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2025-05-29T15:32:54.093980Z node 2 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 23, ReplyToActorId = [2:11856:8848], StatRequests.size() = 1 2025-05-29T15:32:54.100944Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:11852:8844], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:54.101568Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=Y2NiM2YxZmMtMmEyMTYzNmItMzlkNmJhYWUtNDYyNjQ3MDc=, ActorId: [2:11849:8841], ActorState: ExecuteState, TraceId: 01jweash69fyfmtbf3a1v16avy, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:32:54.101758Z node 2 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=Y2NiM2YxZmMtMmEyMTYzNmItMzlkNmJhYWUtNDYyNjQ3MDc=, TxId: 2025-05-29T15:32:54.101765Z node 2 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=Y2NiM2YxZmMtMmEyMTYzNmItMzlkNmJhYWUtNDYyNjQ3MDc=, TxId: 2025-05-29T15:32:54.449797Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:330: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-05-29T15:32:54.449880Z node 2 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 2 2025-05-29T15:32:54.700749Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-05-29T15:32:54.701643Z node 2 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:32:54.706965Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:11889:8873]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:32:54.707043Z node 2 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2025-05-29T15:32:54.707053Z node 2 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 24, ReplyToActorId = [2:11889:8873], StatRequests.size() = 1 2025-05-29T15:32:54.717142Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:11885:8869], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:32:54.717899Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=MjdmMzI1ZDgtZWM4ZDY1ODctYWE3NzUwMDItNzUxOTkxMWM=, ActorId: [2:11882:8866], ActorState: ExecuteState, TraceId: 01jweashsd306s1q8yz5mnmzvv, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:32:54.718157Z node 2 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=MjdmMzI1ZDgtZWM4ZDY1ODctYWE3NzUwMDItNzUxOTkxMWM=, TxId: 2025-05-29T15:32:54.718173Z node 2 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=MjdmMzI1ZDgtZWM4ZDY1ODctYWE3NzUwMDItNzUxOTkxMWM=, TxId: 2025-05-29T15:32:54.718559Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-05-29T15:32:54.730477Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-05-29T15:32:54.730507Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId= dlڀ x, ActorId=[1:6356:4046] 2025-05-29T15:32:54.730926Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:11895:6270]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-05-29T15:32:54.731004Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-05-29T15:32:54.731012Z node 1 :STATISTICS DEBUG: service_impl.cpp:812: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-05-29T15:32:54.731060Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-05-29T15:32:54.731071Z node 1 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-05-29T15:32:54.731082Z node 1 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-05-29T15:32:54.734081Z node 1 :STATISTICS ERROR: service_impl.cpp:691: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-05-29T15:32:54.734161Z node 1 :STATISTICS DEBUG: service_impl.cpp:1152: TEvLoadStatisticsQueryResponse, request id = 1 Answer: 'Error occurred while loading statistics.' strings contains assertion failed at ydb/core/statistics/service/ut/ut_http_request.cpp:69, void NKikimr::NStat::ProbeTest(bool): "Error occurred while loading statistics." 
does not contain "/Root/Database/Table1[Value]=", TBackTrace::Capture()+28 (0x137F904C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x139AC3D9) NKikimr::NStat::ProbeTest(bool)+5323 (0x136F671B) NKikimr::NStat::NTestSuiteHttpRequest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136FA107) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x139AE28E) NKikimr::NStat::NTestSuiteHttpRequest::TCurrentTest::Execute()+429 (0x136F9ACD) NUnitTest::TTestFactory::Execute()+803 (0x139AEA03) NUnitTest::RunMain(int, char**)+3021 (0x139BCD1D) ??+0 (0x7F8D73497D90) __libc_start_main+128 (0x7F8D73497E40) _start+41 (0x1283B029) |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-std] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-std] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-fifo] [FAIL] |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> TCdcStreamWithRebootsTests::CreateStreamWithResolvedTimestamps[TabletReboots] [GOOD] |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_2/logfile_a5xamdwm.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_3/logfile_7t4jc4zq.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_4/logfile_ms0ve3kb.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_5/logfile_blse73pz.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_6/logfile_qh3tjcw9.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_7/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_7/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_7/logfile_txemm4ob.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_8/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_8/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_8/logfile_m2oyespo.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 353250 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 353272 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 353281 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 353283 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 353284 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 353291 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 353292 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002857/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-fifo/cluster/node_1/logfile_4x92nsdi.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 353248 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> 
test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-fifo]
>> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[kick_tablets] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithResolvedTimestamps[TabletReboots] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:32:25.635629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:32:25.635649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:25.635653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:32:25.635658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:32:25.635668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:32:25.635671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:32:25.635679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:25.635688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-29T15:32:25.635774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-29T15:32:25.635848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute
2025-05-29T15:32:25.648951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true }
2025-05-29T15:32:25.648968Z node 1 :IMPORT WARN:
schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:25.649050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:25.651407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:25.651429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:25.651450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:25.653785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:25.653840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:25.653928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:25.654083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:25.654671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:25.654699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:25.654908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:25.654918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:25.654939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:25.654946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:25.654950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:25.654965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:25.655929Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:25.673831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { 
Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:25.673878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:25.673911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:25.673938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:25.673945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:25.674441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:25.674457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:25.674490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:25.674496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:25.674499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:25.674502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:25.674807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:25.674814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:25.674817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:25.675059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:25.675065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:25.675068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:25.675072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:25.675475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:25.675778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:25.675797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:25.675909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:25.675923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:25.675930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:25.675961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 4 2025-05-29T15:33:00.560263Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 1003:2 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-05-29T15:33:00.560304Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:2 128 -> 240 2025-05-29T15:33:00.560333Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:33:00.560344Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:33:00.561008Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:33:00.561060Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:33:00.561067Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:33:00.561103Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:33:00.561136Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-05-29T15:33:00.561142Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [141:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:33:00.561147Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [141:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2025-05-29T15:33:00.561220Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:33:00.561230Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:2 ProgressState 2025-05-29T15:33:00.561241Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:33:00.561246Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:33:00.561251Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:33:00.561255Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:33:00.561259Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: false 2025-05-29T15:33:00.561264Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:33:00.561269Z node 141 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:33:00.561274Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:33:00.561285Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:33:00.561289Z node 141 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:33:00.561292Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:33:00.561307Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:33:00.561312Z node 141 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:33:00.561315Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:33:00.561324Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:33:00.561329Z node 141 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:33:00.561333Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, 
[OwnerId: 72057594046678944, LocalPathId: 4], 4 2025-05-29T15:33:00.561337Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-05-29T15:33:00.561592Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:33:00.561608Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:33:00.561613Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:33:00.561619Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:33:00.561626Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:33:00.561984Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:33:00.562003Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:33:00.562008Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:33:00.562012Z node 141 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:33:00.562017Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:33:00.562032Z node 141 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:33:00.562872Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:33:00.563907Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:33:00.565002Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:33:00.565013Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests 
-- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:33:00.565080Z node 141 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:33:00.565099Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:33:00.565104Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [141:658:2575] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:33:00.565176Z node 141 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:33:00.565218Z node 141 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 53us result status StatusSuccess 2025-05-29T15:33:00.565313Z node 141 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-fifo] >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] |77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[kick_tablets] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_2/logfile_5wswbine.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_3/logfile_rrj6_yux.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_4/logfile_6vxxo_71.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_5/logfile_8axobxe0.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_6/logfile_imz19tkc.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_7/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_7/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file 
<_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_7/logfile_mepdlo3h.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_8/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_8/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_8/logfile_twey1f3k.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 355982 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 355983 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 356001 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 356006 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 356007 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 356008 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 356009 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback 
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00284f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.kick_tablets/cluster/node_1/logfile_qbnv026n.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 355981 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [FAIL] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_ends_request_after_kill ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:30:25.752166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:25.752189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:25.752193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:25.752198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:25.752211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:25.752214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:25.752220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:25.752233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:25.752319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:25.752385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:25.763140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:25.763169Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:25.763279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:25.765708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:25.765736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:25.765770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:25.768296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:25.768393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:25.768534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:25.768704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:25.769393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:25.769471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:25.769791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:25.769805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:25.769848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:25.769857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:25.769865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:25.769888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] 
Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:25.771519Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:25.793265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:25.793361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:25.793438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:25.793501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:25.793514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:25.795338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:25.795380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:25.795431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:25.795444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:25.795450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:25.795456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:25.796697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:25.796718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:25.796725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:25.797206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:25.797220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-05-29T15:30:25.797226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:25.797233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:25.798263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:25.798769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:25.798810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:25.799013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:25.799041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:25.799049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:25.799146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
eToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 
RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:33:01.171775Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:33:01.171811Z node 118 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 42us result status StatusSuccess 2025-05-29T15:33:01.171915Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 
1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:33:01.182228Z node 118 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1075:2860] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-05-29T15:33:01.182263Z node 118 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409550:2][118:1035:2860] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-05-29T15:33:01.182297Z node 118 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1075:2860] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1748532781167178 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 
72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1748532781167178 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1748532781167178 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:33:01.183031Z node 118 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1075:2860] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-05-29T15:33:01.183053Z node 118 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409550:2][118:1035:2860] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } |77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-std] [FAIL] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [FAIL] |77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_2/logfile_q3y8k86a.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_3/logfile_iob_b2lg.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_4/logfile_5urjs6dq.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_5/logfile_w20jkezl.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_6/logfile_6msz5skg.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_7/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_7/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_7/logfile_czd612eq.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_8/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_8/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_8/logfile_mbhv8cu7.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 358513 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 358538 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 358544 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 358555 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 358562 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 358565 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 358576 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/00283b/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-std/cluster/node_1/logfile_g4nx5_e_.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 358512 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD]
>> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD]
>> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED]
>> YdbSdkSessionsPool::StressTestSync/1 [GOOD]
>> YdbSdkSessionsPool::StressTestSync/0 [FAIL]
>> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-std] [FAIL]
>> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-fifo]
>> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD]
>> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient
>> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-std] [FAIL]
>> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [FAIL]
|77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-std] [FAIL]
Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_2/logfile_2fkw30cw.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1:
ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_3/logfile_eib4peem.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_4/logfile_x2v1hs5d.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_5/logfile_tewfnvvk.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_6/logfile_l32fufuq.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_7/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_7/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_7/logfile_leuskcrt.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_8/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_8/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_8/logfile_tt_mx8xw.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 361994 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 361995 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 361996 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 361997 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 362004 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 362014 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 362015 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00282f/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-std/cluster/node_1/logfile_gkmv1v_u.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 361993 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: 
unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_1/logfile_fa_du_kw.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 363672 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_2/logfile_j3hg41s3.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: 
ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_3/logfile_ri3_u3z8.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_4/logfile_bkdhan8t.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_5/logfile_zvvvl_kr.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_6/logfile_5_w8y4gw.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_7/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_7/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_7/logfile_553reolp.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_8/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_8/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00282a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v0-std/cluster/node_8/logfile_1i6dce1z.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 363682 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback 
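Every warning in these stderr blocks ends with the interpreter's hint "Enable tracemalloc to get the object allocation traceback". As background, here is a minimal, self-contained Python sketch of what that hint means; it is illustrative only, and the leak_handle helper and /tmp path are hypothetical, not taken from the YDB test suite. The same diagnostics can be switched on for these tests without code changes by running with PYTHONTRACEMALLOC=25 or python -X tracemalloc=25.

    # Minimal sketch: with tracemalloc tracing, CPython appends an
    # "Object allocated at (most recent call last)" traceback to each
    # ResourceWarning, pointing at the open() call that leaked the handle.
    import gc
    import tracemalloc
    import warnings

    tracemalloc.start(25)                  # keep up to 25 frames per allocation
    warnings.simplefilter("always", ResourceWarning)

    def leak_handle(path: str) -> None:    # hypothetical helper, not YDB code
        f = open(path, "wb")               # opened but never closed: the leak
        del f                              # last reference dropped without close()

    leak_handle("/tmp/leaky.log")          # hypothetical path
    gc.collect()                           # the ResourceWarning fires here, now
                                           # carrying the allocation traceback

Without tracemalloc the warning only names the leaked object, as seen throughout this log; with it, the extra traceback points straight at the line in the test harness that opened the file.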
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 363726 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 363753 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 363783 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 363791 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 363796 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 363802 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback
|77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test
|77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test
|77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test
>> TCdcStreamWithRebootsTests::RacySplitAndDropTable[TabletReboots] [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/0 [FAIL]
Test command err: ydb/public/sdk/cpp/tests/integration/sessions_pool/main.cpp:269: Expected equality of these values:
  Client->GetCurrentPoolSize()
    Which is: 0
  activeSessionsLimit
    Which is: 1
|77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest
>> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD]
>> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-fifo]
|77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest
>> YdbSdkSessionsPool::StressTestSync/1 [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED]
Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:534: Enable after interactive tx support
>> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-fifo] [FAIL]
>> TCdcStreamWithRebootsTests::Attributes[TabletReboots] [GOOD]
>> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-fifo] [FAIL]
>> TCdcStreamWithRebootsTests::CreateStreamWithAwsRegion[TabletReboots] [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-fifo] [FAIL]
Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002826/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter
name='/home/runner/.ya/build/build_root/ciyv/002826/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002826/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-fifo/cluster/node_1/logfile_7w6cwyr7.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 367066 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback
[the same unclosed-file ResourceWarnings repeat for the stdout, stderr, and logfile handles of cluster nodes 2-8, and subprocess.py:1129 likewise reports subprocesses 367068, 367075, 367076, 367098, 367099, 367100, and 367101 as still running]
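Note: the "Enable tracemalloc" hint repeated above is CPython's standard suggestion for unclosed-file ResourceWarnings. A minimal sketch of how one might surface the allocation traceback when re-running a test locally; the frame depth and the demo path are illustrative, not taken from the harness:

    # Either export PYTHONTRACEMALLOC=25 before launching the test process,
    # or enable tracing programmatically before the cluster starts.
    import gc
    import tracemalloc
    import warnings

    tracemalloc.start(25)                # record up to 25 frames per allocation
    warnings.simplefilter("always", ResourceWarning)

    def leak_handle(path):
        open(path, "wb")                 # handle is never closed -> ResourceWarning at GC time

    leak_handle("/tmp/leak_demo.bin")    # hypothetical path, for illustration only
    gc.collect()                         # the warning emitted during collection now carries an
                                         # "Object allocated at ..." traceback into leak_handle()

With tracing enabled, the sys:1 warnings above would additionally show the allocation traceback for the line in the test harness that opened each node's stdout/stderr/logfile handle without closing it.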
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-fifo] [FAIL]
Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002821/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback
[the same unclosed-file ResourceWarnings repeat for the stdout, stderr, and logfile handles of cluster nodes 1-8, and subprocess.py:1129 reports subprocesses 368845, 368846, 368848, 368851, 368854, 368856, 368878, and 368886 as still running]
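Note: the "subprocess N is still running" warnings above come from Popen.__del__, which fires when a child-process object is garbage-collected before anyone reaped it with wait(). A sketch of the terminate-and-reap pattern that avoids the warning, assuming the harness keeps one Popen per cluster node; stop_node is a hypothetical helper, not part of the test code:

    import subprocess

    def stop_node(proc: subprocess.Popen, timeout: float = 10.0) -> int:
        # Terminate a node process and reap it so Popen.__del__ has nothing to warn about.
        proc.terminate()                   # ask the node to exit gracefully
        try:
            return proc.wait(timeout=timeout)
        except subprocess.TimeoutExpired:
            proc.kill()                    # escalate after the grace period
            return proc.wait()             # always reap, even after kill()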
>> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-std]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithAwsRegion[TabletReboots] [GOOD]
Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:31.397266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:31.397281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:31.397285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:32:31.397288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:31.397295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:31.397298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:31.397303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:31.397311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:31.397370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:31.397417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:31.406184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:31.406201Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:31.406266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:31.408003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:31.408020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:31.408035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:31.409766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:31.409810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:31.409874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:31.410199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:31.410642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:31.410665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:31.410823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:31.410830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-05-29T15:32:31.410847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:31.410851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:31.410855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:31.410866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:31.411657Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:31.423627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:31.423669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:31.423701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:31.423728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:31.423733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:31.424158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:31.424177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:31.424209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:31.424215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:31.424218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:31.424221Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:31.424519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:31.424526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:31.424530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:31.424752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:31.424758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:31.424761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:31.424765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:31.425172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:31.425432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:31.425452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:31.425564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:31.425580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:31.425584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:31.425628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 1003:2 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-05-29T15:33:06.706609Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:2 128 -> 240 2025-05-29T15:33:06.706637Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:33:06.706646Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:33:06.707480Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:33:06.707532Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:33:06.707539Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:33:06.707579Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:33:06.707613Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:33:06.707618Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [139:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:33:06.707624Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [139:206:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2025-05-29T15:33:06.707710Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:33:06.707719Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:2 ProgressState 2025-05-29T15:33:06.707731Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:33:06.707735Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:33:06.707741Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:33:06.707744Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:33:06.707748Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: false 2025-05-29T15:33:06.707753Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:33:06.707759Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:33:06.707764Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:33:06.707775Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:33:06.707779Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:33:06.707783Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:33:06.707797Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:33:06.707801Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:33:06.707804Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:33:06.707813Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:33:06.707818Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:33:06.707823Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 4 2025-05-29T15:33:06.707827Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-05-29T15:33:06.708081Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:33:06.708096Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:33:06.708101Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:33:06.708107Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:33:06.708116Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:33:06.708354Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:33:06.708370Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:33:06.708375Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:33:06.708379Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:33:06.708384Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:33:06.708397Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:33:06.709544Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:33:06.709721Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:33:06.712172Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:33:06.712192Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:33:06.712264Z node 139 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:33:06.712295Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:33:06.712301Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [139:659:2576] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:33:06.712385Z node 139 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:33:06.712443Z node 139 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 67us result status StatusSuccess 2025-05-29T15:33:06.712533Z node 139 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: 
true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeNewAndOldImages PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatDynamoDBStreamsJson VirtualTimestamps: false AwsRegion: "ru-central1" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::RacySplitAndDropTable[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:05.951881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:05.951898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:05.951901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:05.951905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:05.951913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:05.951916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing 
config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:05.951922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:05.951931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:05.951991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:05.952038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:05.961644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:05.961658Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:05.961723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:05.963537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:05.963557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:05.963573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:05.965404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:05.965451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:05.965523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:05.965639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:05.966083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:05.966108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:05.966265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:05.966271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:05.966291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:05.966295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2025-05-29T15:32:05.966300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:05.966312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:05.967144Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:05.979506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:05.979556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.979595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:05.979630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:05.979637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.980055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:05.980072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:05.980103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.980108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:05.980112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:05.980115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:05.980397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.980403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 
ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:05.980406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:05.980608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.980614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:05.980617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:05.980622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:05.981040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:05.981305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:05.981326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:05.981445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:05.981459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:05.981465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:05.981498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
ProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:33:05.539925Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:33:05.539974Z node 232 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 3/3 2025-05-29T15:33:05.539979Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 3/3 2025-05-29T15:33:05.539985Z node 232 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1005:0 progress is 3/3 2025-05-29T15:33:05.539989Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 3/3 2025-05-29T15:33:05.539994Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1005, ready parts: 3/3, is published: true 2025-05-29T15:33:05.539998Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1005 ready parts: 3/3 2025-05-29T15:33:05.540005Z node 232 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:0 2025-05-29T15:33:05.540009Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:0 2025-05-29T15:33:05.540035Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:33:05.540040Z node 232 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:1 2025-05-29T15:33:05.540044Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:1 2025-05-29T15:33:05.540050Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-29T15:33:05.540054Z node 232 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:2 2025-05-29T15:33:05.540057Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:2 2025-05-29T15:33:05.540066Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-29T15:33:05.540203Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:33:05.540211Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-29T15:33:05.540223Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-29T15:33:05.540230Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 
72057594046678944 2025-05-29T15:33:05.540236Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:33:05.540460Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:33:05.540797Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:33:05.542145Z node 232 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-29T15:33:05.542866Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 699 RawX2: 996432415280 } TabletId: 72075186233409549 State: 4 2025-05-29T15:33:05.542891Z node 232 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409549, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:33:05.542949Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 700 RawX2: 996432415281 } TabletId: 72075186233409550 State: 4 2025-05-29T15:33:05.542956Z node 232 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409550, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:33:05.548826Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:33:05.548941Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:33:05.549006Z node 232 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-05-29T15:33:05.551120Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-29T15:33:05.551228Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409549 2025-05-29T15:33:05.551907Z node 232 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 2025-05-29T15:33:05.552047Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-05-29T15:33:05.552100Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 
3] was 1 Forgetting tablet 72075186233409550 2025-05-29T15:33:05.552653Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-29T15:33:05.552666Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-05-29T15:33:05.552682Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-29T15:33:05.553477Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:4 2025-05-29T15:33:05.553492Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-29T15:33:05.554054Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:5 2025-05-29T15:33:05.554075Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-05-29T15:33:05.554548Z node 232 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1004 2025-05-29T15:33:05.554636Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-29T15:33:05.554658Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 TestWaitNotification wait txId: 1005 2025-05-29T15:33:05.554675Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-05-29T15:33:05.554679Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-05-29T15:33:05.554789Z node 232 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-29T15:33:05.554818Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:33:05.554823Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [232:1001:2886] 2025-05-29T15:33:05.555053Z node 232 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-05-29T15:33:05.555088Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-05-29T15:33:05.555094Z node 232 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [232:1001:2886] TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 2025-05-29T15:33:05.555188Z node 232 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:33:05.555225Z node 232 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 47us result status StatusPathDoesNotExist 2025-05-29T15:33:05.555261Z node 232 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1157" Path: "/MyRoot/Table/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::Attributes[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:30.978819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:30.978837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:30.978841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:30.978844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:30.978854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, 
limit 10000 2025-05-29T15:32:30.978857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:30.978863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:30.978873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:30.978956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:30.979010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:30.988746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:30.988761Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:30.988823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:30.990554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:30.990573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:30.990589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:30.992825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:30.992886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:30.992965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:30.993174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:30.993720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:30.993744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:30.993893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:30.993899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:30.993918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:30.993922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to 
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:30.993926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:30.993939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:30.995202Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:31.008780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:31.008835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:31.008877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:31.008908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:31.008916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:31.009350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:31.009366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:31.009399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:31.009405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:31.009408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:31.009412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:31.009682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:31.009688Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:31.009691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:31.009887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:31.009893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:31.009897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:31.009901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:31.010327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:31.010650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:31.010671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:31.010828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:31.010845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:31.010851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:31.010887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
49Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:629: NPQState::TPropose operationId# 1003:2 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-05-29T15:33:06.624098Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1003:2 128 -> 240 2025-05-29T15:33:06.624129Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:33:06.624143Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:33:06.625492Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:33:06.625557Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:33:06.625564Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-29T15:33:06.625600Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-29T15:33:06.625639Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:33:06.625648Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [140:207:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-05-29T15:33:06.625654Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [140:207:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2025-05-29T15:33:06.625666Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2025-05-29T15:33:06.625673Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 1003:2 ProgressState 2025-05-29T15:33:06.625685Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:33:06.625690Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:33:06.625695Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#1003:2 progress is 3/3 2025-05-29T15:33:06.625699Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:33:06.625703Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: false 2025-05-29T15:33:06.625709Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-05-29T15:33:06.625714Z node 140 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:0 2025-05-29T15:33:06.625719Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:0 2025-05-29T15:33:06.625730Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:33:06.625735Z node 140 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:1 2025-05-29T15:33:06.625738Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:1 2025-05-29T15:33:06.625757Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:33:06.625762Z node 140 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1003:2 2025-05-29T15:33:06.625765Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1003:2 2025-05-29T15:33:06.625776Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-05-29T15:33:06.625781Z node 140 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-05-29T15:33:06.625786Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 4 2025-05-29T15:33:06.625790Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-05-29T15:33:06.626391Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:33:06.626429Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:33:06.626437Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:33:06.626444Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:33:06.626450Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:33:06.626579Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:33:06.626591Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-05-29T15:33:06.626595Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-05-29T15:33:06.626599Z node 140 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:33:06.626604Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:33:06.626612Z node 140 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-05-29T15:33:06.629113Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-05-29T15:33:06.629185Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:33:06.630766Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:33:06.630777Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:33:06.630844Z node 140 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:33:06.630863Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:33:06.630868Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [140:662:2579] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:33:06.630942Z node 140 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:33:06.630993Z node 140 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 62us result status StatusSuccess 2025-05-29T15:33:06.631110Z node 140 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 
72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "key" Value: "value" } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false UserAttributes { Key: "key" Value: "value" } AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |77.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-std] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_ends_request_after_kill [FAIL] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [FAIL] |77.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-std] |77.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_ends_request_after_kill [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_1/logfile_smlvh4dt.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback 
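The warning block above, and the near-identical blocks that follow for the remaining nodes, are CPython's standard leak diagnostics: an _io.BufferedWriter or _io.BufferedRandom finalized while still open, and, further down, subprocess.Popen objects garbage-collected while their child processes are still running. The sketch below is a minimal, self-contained reproduction of both patterns and their fixes; it is not taken from the YDB test harness, and the run_node helper, the sleep command, and the file names are purely illustrative. With tracemalloc tracing enabled, each ResourceWarning is printed together with the allocation traceback that the messages above say is missing.

    import subprocess
    import tracemalloc
    import warnings

    # "Enable tracemalloc to get the object allocation traceback", as the
    # warnings above suggest; keep up to 25 frames per allocation.
    tracemalloc.start(25)
    warnings.simplefilter("always", ResourceWarning)

    def run_node(stdout_path, stderr_path):
        # Hypothetical stand-in for a per-node process launcher: one child
        # process with dedicated stdout/stderr log files, as in the paths above.
        out = open(stdout_path, "wb")
        err = open(stderr_path, "wb")
        proc = subprocess.Popen(["sleep", "60"], stdout=out, stderr=err)
        return proc, out, err

    proc, out, err = run_node("node_1.stdout", "node_1.stderr")
    try:
        pass  # exercise the node here
    finally:
        proc.terminate()
        proc.wait()   # without wait(), Popen.__del__ emits "subprocess N is still running"
        out.close()   # without close(), the file finalizer emits "unclosed file <_io.BufferedWriter ...>"
        err.close()

Running the same code under python -X tracemalloc=25 has the same effect as the tracemalloc.start(25) call without touching the test code; either way the "Enable tracemalloc" hint in the output is replaced by the actual allocation site of each leaked file and process object.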
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 372430 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_2/logfile_0_rdmz_l.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_3/logfile_vd4ysgfn.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_4/logfile_2l50bd2u.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_5/logfile_aa173jml.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_6/logfile_ay099kbr.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_7/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_7/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_7/logfile_yhjjd39d.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_8/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_8/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002815/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_ends_request_after_kill/cluster/node_8/logfile_8_aephu1.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 372434 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 372435 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 372436 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 372437 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 372440 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 372442 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 372450 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |77.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] [FAIL] |77.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.3%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-fifo] [FAIL] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-fifo] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_2/logfile_6d7n_cqp.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_3/logfile_9gig60e4.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_4/logfile_146hprva.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_5/logfile_8bor4kt1.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_6/logfile_4xq547ym.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_7/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_7/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_7/logfile_kljx5tcc.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_8/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_8/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_8/logfile_mu6p464x.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 375585 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 375586 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 375587 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 375588 is still 
running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 375589 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 375590 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 375591 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002810/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v1-fifo/cluster/node_1/logfile_9sqxl4rw.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 375584 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_2/logfile_og18btgh.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_3/logfile_pxbqdw57.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_4/logfile_0difcxs1.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_5/logfile_gx3giycf.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_6/logfile_8myorjkh.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_7/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_7/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_7/logfile_ri03omii.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_8/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_8/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_8/logfile_pebf72hq.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 378153 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 378161 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 378166 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 378194 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 378198 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 378201 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 378205 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/00280a/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_recompiles_requests.py.TestSqsRecompilesRequestsForOtherQueue.test_recompiles_queries.tables_format_v1-fifo/cluster/node_1/logfile_ziwb1jmj.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 378149 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-fifo] [FAIL] |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_1/logfile_6z3odo6l.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 380057 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_2/logfile_977knjs1.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_3/logfile_gnknmg3o.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_4/logfile_cva3pqt0.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_5/stderr'> ResourceWarning: Enable tracemalloc to get 
the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_5/logfile_nyz89733.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_6/logfile_xm9vui5e.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_7/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_7/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_7/logfile_erh77wk1.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_8/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_8/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002805/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-fifo/cluster/node_8/logfile_dkyvjd20.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 380058 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 380080 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 380084 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 380100 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 380145 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 380148 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 380164 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] >> TCdcStreamWithRebootsTests::SplitTableResolvedTimestamps[PipeResets] [GOOD] |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> YdbSdkSessionsPool::PeriodicTask/0 [GOOD] >> YdbSdkSessionsPool::PeriodicTask/1 >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::SplitTableResolvedTimestamps[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] 
sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:21.216641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:21.216656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:21.216660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:21.216662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:21.216670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:21.216672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:21.216677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:21.216685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:21.216741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:21.216786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:21.225801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:21.225815Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:21.225887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:21.227516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:21.227533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:21.227547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:21.229225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:21.229266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with 
owners number: 0 2025-05-29T15:32:21.229331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:21.229458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:21.229861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:21.229882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:21.230012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:21.230018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:21.230033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:21.230038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:21.230041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:21.230053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:21.230872Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:21.243208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:21.243252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:21.243285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:21.243315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:21.243321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2025-05-29T15:32:21.243717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:21.243729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:21.243757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:21.243762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:21.243765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:21.243768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:21.244124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:21.244141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:21.244147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:21.244446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:21.244453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:21.244457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:21.244461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:21.244896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:21.245219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:21.245245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:21.245373Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:21.245389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:21.245394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:21.245425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... pactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TablePartitions { EndOfRangeKeyPrefix: "" 
IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:33:13.280445Z node 60 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:33:13.280530Z node 60 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 104us result status StatusSuccess 2025-05-29T15:33:13.280781Z node 60 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 
ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:33:13.280912Z node 60 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 
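[Editor's note on the ResourceWarning chatter repeated throughout this run: these lines are emitted by CPython itself at garbage-collection or interpreter shutdown when `subprocess.Popen` handles and the per-node stdout/stderr/log files are collected while still open. The interpreter's one-line hint is literal: with tracemalloc enabled, each warning carries the allocation traceback instead. Below is a minimal sketch of both remedies, assuming only the Python standard library; the node command and file paths are hypothetical placeholders, not YDB test-harness APIs.]

```python
# Minimal sketch of the two fixes the warnings above hint at, stdlib only.
# start_node_command and the /tmp paths are hypothetical stand-ins, not
# anything from the YDB harness.
import subprocess
import tracemalloc
import warnings

# 1. "Enable tracemalloc to get the object allocation traceback":
#    start tracemalloc before the leaky code runs (or export
#    PYTHONTRACEMALLOC=1 for the whole process) and CPython attaches the
#    allocation site to each ResourceWarning instead of the bare hint.
tracemalloc.start(25)                       # keep up to 25 frames per allocation
warnings.simplefilter("always", ResourceWarning)

# 2. "unclosed file" / "subprocess ... is still running": own every handle
#    with a context manager so the files are closed and the child is reaped
#    before the test object is garbage-collected.
start_node_command = ["sleep", "1"]         # hypothetical node binary
with open("/tmp/node_1.stdout", "wb") as out, \
     open("/tmp/node_1.stderr", "wb") as err:
    with subprocess.Popen(start_node_command, stdout=out, stderr=err) as proc:
        proc.wait(timeout=60)               # reap the child; no "still running"
# Exiting the with-blocks closes all handles, so no BufferedWriter /
# BufferedRandom warnings are raised at interpreter shutdown.
```

[In CI these warnings could also be promoted to failures with `python -W error::ResourceWarning` or pytest's `filterwarnings = error::ResourceWarning`, though whether that is desirable for this suite is a judgment call; the sketch above only shows how to make the existing warnings actionable.]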
2025-05-29T15:33:13.280948Z node 60 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 41us result status StatusSuccess 2025-05-29T15:33:13.281045Z node 60 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 2 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test >> data_correctness.py::TestDataCorrectness::test >> tier_delete.py::TestTierDelete::test_delete_s3_ttl >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-std] [FAIL] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [FAIL] >> ttl_unavailable_s3.py::TestUnavailableS3::test |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> unstable_connection.py::TestUnstableConnection::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to 
get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_1/logfile_8ci9_zp4.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 387510 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_2/logfile_mnqt8h9b.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_3/logfile_dxmeq57r.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_4/logfile_i10m9wq4.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_5/logfile_z6jm575_.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_6/logfile_x3gbu_el.log'> ResourceWarning: Enable tracemalloc to get the object 
allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_7/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_7/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_7/logfile_880tki90.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_8/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_8/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027f1/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.stop_node-std/cluster/node_8/logfile__o22ewsd.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 387529 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 387601 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 387635 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 387664 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 387674 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 387768 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 
387771 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> HttpRequest::Probe [FAIL] |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-std] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-std] >> test_ping.py::TestPing::test_error_on_cgi_parameters >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-std] [FAIL] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v1-std] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] [FAIL] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-std] [FAIL] |77.5%| [TA] $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} |77.5%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_2/logfile_p30rn5pc.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_3/logfile_w18eic8u.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_4/logfile_fat5a8p6.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_5/logfile_bpzym9at.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_6/logfile_bclhjmcs.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_7/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_7/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_7/logfile_m44luuxg.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_8/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_8/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_8/logfile_83ur10p6.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 382738 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 382739 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 382740 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 382741 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 382742 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 382743 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 382744 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fc/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_sqs_writes_through_proxy_on_each_node.tables_format_v0-std/cluster/node_1/logfile_452jxcih.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 382737 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_2/logfile_re_no161.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_3/logfile_uy888inj.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_4/logfile_egzl575z.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_5/logfile_jhegb5z8.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_6/logfile_6x42n3hl.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_7/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_7/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_7/logfile_qwmmp9w6.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_8/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_8/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_8/logfile_ymg122mr.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 382783 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 382785 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 382786 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 382788 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 382789 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 382790 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 382791 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fe/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_reassign_master.stop_node/cluster/node_1/logfile_zoidkk6g.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback 
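The unclosed-file warnings above, and the "subprocess ... is still running" warnings that follow, are ordinary CPython ResourceWarnings: per-node stdout/stderr/log file objects were opened and node processes spawned, but neither was closed nor reaped before interpreter shutdown. A minimal sketch of the pattern and its usual fix follows (hypothetical code, not the actual test harness; the binary name and paths are placeholders):

import subprocess
import tracemalloc
import warnings

warnings.simplefilter("always", ResourceWarning)
tracemalloc.start(25)  # what "Enable tracemalloc to get the object allocation
                       # traceback" refers to; PYTHONTRACEMALLOC=25 is equivalent

def run_node(binary, stdout_path, stderr_path):
    # Leaking these file objects and never reaping the child process is exactly
    # what produces the "unclosed file" and "subprocess N is still running"
    # warnings; the with/finally below releases both deterministically.
    with open(stdout_path, "wb") as out, open(stderr_path, "wb") as err:
        proc = subprocess.Popen([binary], stdout=out, stderr=err)
        try:
            proc.wait(timeout=60)
        finally:
            if proc.poll() is None:
                proc.kill()
                proc.wait()

In CI the same allocation-traceback detail is available without code changes by exporting PYTHONTRACEMALLOC=25 for the test run.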
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 382775 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_1/logfile_hn_j7uhn.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 383018 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_2/logfile_z_n1bchp.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_3/logfile_unuctxqm.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_4/logfile_vz19y6rp.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_5/logfile_hkif_pxq.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_6/stdout'> ResourceWarning: Enable 
tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_6/logfile_9r9lslyk.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_7/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_7/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_7/logfile_nln9b39p.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_8/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_8/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/0027fa/ydb/tests/functional/sqs/multinode/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_multinode_cluster.py.TestSqsMultinodeCluster.test_has_messages_counters.kick_tablets-std/cluster/node_8/logfile_13ndl_8q.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 383045 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: 
subprocess 383093 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 383168 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 383189 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 383248 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 383268 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 383330 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_queue_counters.py::TestSqsGettingCounters::test_receive_attempts_are_counted_separately_for_messages_in_one_batch >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-std] |77.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-std] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[fifo] |77.5%| [TA] $(B)/ydb/tests/functional/sqs/multinode/test-results/py3test/{meta.json ... results_accumulator.log} >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] |77.5%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/test-results/py3test/{meta.json ... results_accumulator.log} >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-invalid] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[fifo] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-std] [FAIL] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::Probe [FAIL] Test command err: 2025-05-29T15:27:08.774499Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:27:08.774546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:27:08.774565Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001544/r3tmp/tmpu263vZ/pdisk_1.dat 2025-05-29T15:27:08.950629Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6153, node 1 2025-05-29T15:27:09.059026Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:27:09.059049Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:27:09.059054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:27:09.059160Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:27:09.059754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:27:09.138926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:09.138981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:09.159526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24814 2025-05-29T15:27:09.551872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:27:10.384541Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:27:10.394016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:10.394054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:10.439234Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:27:10.440085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:10.592051Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:10.592369Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:10.592708Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
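Since the verdict markers are buried in this warning noise, a small triage helper can be useful; the sketch below is hypothetical tooling, not part of the ya toolchain. It scans a flat log like this one for the ">> name [FAIL]" / "[GOOD]" markers and tallies the unclosed-file warnings:

import re
from collections import Counter

VERDICT = re.compile(r">> (\S+) \[(FAIL|GOOD|TIMEOUT|SKIPPED)\]")

def summarize(log_path):
    verdicts = Counter()
    unclosed = 0
    with open(log_path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            unclosed += line.count("ResourceWarning: unclosed file")
            for name, verdict in VERDICT.findall(line):
                verdicts[verdict] += 1
                if verdict == "FAIL":
                    print("FAILED:", name)
    print(dict(verdicts), "unclosed-file warnings:", unclosed)

Calling summarize("run.log") on this output would list the SQS multinode and statistics failures recorded above and below.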
2025-05-29T15:27:10.592830Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:10.592941Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:10.592988Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:10.593051Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:10.593079Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:10.593119Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:27:10.746875Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:27:10.746912Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:27:10.759514Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:27:10.798309Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:27:10.809275Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:27:10.809310Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:27:10.822694Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:27:10.823035Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:27:10.823066Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:27:10.823073Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:27:10.823080Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:27:10.823087Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:27:10.823093Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:27:10.823101Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:27:10.823457Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:27:10.839546Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:27:10.839580Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:1865:2600], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:27:10.842533Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1875:2608] 2025-05-29T15:27:10.843905Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2623] 
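For context on the INTERNAL_ERROR loop further down: the query the statistics aggregator keeps retrying is the parameterized UPSERT INTO `.metadata/_statistics` quoted (flattened) before each compile failure. A standalone reproduction with the YDB Python SDK could look like the following sketch. The endpoint, database, and parameter values are assumptions, as are the List<Uint32>/List<String> element types — the captured log shows only bare "List", the element types apparently lost in capture:

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root/Database")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

QUERY = """
DECLARE $owner_id AS Uint64;
DECLARE $local_path_id AS Uint64;
DECLARE $stat_type AS Uint32;
DECLARE $column_tags AS List<Uint32>;  -- bare "List" in the captured log
DECLARE $data AS List<String>;         -- bare "List" in the captured log
UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
       ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);
"""

def upsert_stats(session):
    prepared = session.prepare(QUERY)
    session.transaction(ydb.SerializableReadWrite()).execute(
        prepared,
        {
            "$owner_id": 72075186224037897,  # schemeshard id seen in this trace
            "$local_path_id": 4,             # LocalPathId seen in this trace
            "$stat_type": 2,
            "$column_tags": [1, 2],
            "$data": [b"stub0", b"stub1"],   # placeholder payloads
        },
        commit_tx=True,
    )

pool.retry_operation_sync(upsert_stats)

The failure itself ("yql/essentials/ast/yql_expr.h:1874: index out of range") is reported by the compile actor before any parameters are bound, so reproducing it only requires submitting the query text.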
2025-05-29T15:27:10.844005Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2623], schemeshard id = 72075186224037897 2025-05-29T15:27:10.846719Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-05-29T15:27:10.851732Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:27:10.851754Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:27:10.851766Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-05-29T15:27:10.857426Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:27:10.859680Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:27:10.859722Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:27:10.983284Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:27:11.158196Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:27:11.222832Z node 2 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:27:12.017256Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2219:3062], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:12.017303Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:27:12.022542Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-05-29T15:27:12.096450Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2369:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:27:12.096507Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2369:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:27:12.096558Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2369:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:27:12.096575Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2369:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:27:12.096596Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2369:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:27:12.096611Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2369:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:27:12.096624Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2369:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:27:12.096640Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2369:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_reg ... ZWQ5Y2Y2Ny0yMmIzYjlhYi1jNjg1ZjUxZS1mMWY2YmE2NQ==, ActorId: [2:19173:12147], ActorState: ExecuteState, TraceId: 01jweat4gn7tvvvkgzevfafmea, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:33:13.893343Z node 2 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=ZWQ5Y2Y2Ny0yMmIzYjlhYi1jNjg1ZjUxZS1mMWY2YmE2NQ==, TxId: 2025-05-29T15:33:13.893357Z node 2 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=ZWQ5Y2Y2Ny0yMmIzYjlhYi1jNjg1ZjUxZS1mMWY2YmE2NQ==, TxId: 2025-05-29T15:33:14.033862Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Database 2025-05-29T15:33:14.034669Z node 2 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:33:14.040795Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:19195:12168]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:33:14.040873Z node 2 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2025-05-29T15:33:14.040881Z node 2 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 21, ReplyToActorId = [2:19195:12168], StatRequests.size() = 1 2025-05-29T15:33:14.051877Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:19191:12164], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:33:14.052774Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=MTFmOWZjM2EtZDdlNGU1ZmItMTM3MWRiZmEtNmUwNzQ4ZTM=, ActorId: [2:19188:12161], ActorState: ExecuteState, TraceId: 01jweat4nj5fe60w1tqjkhy9xa, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:33:14.053008Z node 2 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=MTFmOWZjM2EtZDdlNGU1ZmItMTM3MWRiZmEtNmUwNzQ4ZTM=, TxId: 2025-05-29T15:33:14.053020Z node 2 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=MTFmOWZjM2EtZDdlNGU1ZmItMTM3MWRiZmEtNmUwNzQ4ZTM=, TxId: 2025-05-29T15:33:14.294350Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 3 is different from the current 0 2025-05-29T15:33:14.294389Z node 2 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-05-29T15:33:14.336127Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Database 2025-05-29T15:33:14.337048Z node 2 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:33:14.342435Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:19218:12180]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:33:14.342542Z node 2 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2025-05-29T15:33:14.342551Z node 2 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 22, ReplyToActorId = [2:19218:12180], StatRequests.size() = 1 2025-05-29T15:33:14.352511Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:19214:12176], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:33:14.353498Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=ZGRiOGUyYjItYjQ0YjY3NC02Y2M1OGFkYy1lZjhlNjkyNA==, ActorId: [2:19211:12173], ActorState: ExecuteState, TraceId: 01jweat4z19z8emj1raq4036ga, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:33:14.353939Z node 2 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=ZGRiOGUyYjItYjQ0YjY3NC02Y2M1OGFkYy1lZjhlNjkyNA==, TxId: 2025-05-29T15:33:14.353978Z node 2 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=ZGRiOGUyYjItYjQ0YjY3NC02Y2M1OGFkYy1lZjhlNjkyNA==, TxId: 2025-05-29T15:33:14.674305Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:330: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-05-29T15:33:14.674389Z node 2 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 2 2025-05-29T15:33:14.991049Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Database 2025-05-29T15:33:14.991893Z node 2 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:33:14.996893Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:19256:12206]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:33:14.996959Z node 2 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2025-05-29T15:33:14.996967Z node 2 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 23, ReplyToActorId = [2:19256:12206], StatRequests.size() = 1 2025-05-29T15:33:15.006681Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:19252:12202], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:33:15.007540Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=NGRkNTBmZmYtYTEyYmFlM2ItZWE0ZmIwYzQtNzFkNjE3, ActorId: [2:19249:12199], ActorState: ExecuteState, TraceId: 01jweat5kg4vb3ycjs9p24a738, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:33:15.007786Z node 2 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=NGRkNTBmZmYtYTEyYmFlM2ItZWE0ZmIwYzQtNzFkNjE3, TxId: 2025-05-29T15:33:15.007798Z node 2 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=NGRkNTBmZmYtYTEyYmFlM2ItZWE0ZmIwYzQtNzFkNjE3, TxId: 2025-05-29T15:33:15.008154Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-05-29T15:33:15.023330Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-05-29T15:33:15.023364Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId= O]4, ActorId=[1:5806:3856] 2025-05-29T15:33:15.024298Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:19262:10419]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-05-29T15:33:15.024376Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-05-29T15:33:15.024382Z node 1 :STATISTICS DEBUG: service_impl.cpp:812: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-05-29T15:33:15.025105Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-05-29T15:33:15.025124Z node 1 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-05-29T15:33:15.025135Z node 1 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-05-29T15:33:15.027628Z node 1 :STATISTICS ERROR: service_impl.cpp:691: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-05-29T15:33:15.027724Z node 1 :STATISTICS DEBUG: service_impl.cpp:1152: TEvLoadStatisticsQueryResponse, request id = 1 Answer: 'Error occurred while loading statistics.' strings contains assertion failed at ydb/core/statistics/service/ut/ut_http_request.cpp:69, void NKikimr::NStat::ProbeTest(bool): "Error occurred while loading statistics." 
does not contain "/Root/Database/Table1[Value]=", TBackTrace::Capture()+28 (0x137F904C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x139AC3D9) NKikimr::NStat::ProbeTest(bool)+5323 (0x136F671B) NKikimr::NStat::NTestSuiteHttpRequest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136FA107) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x139AE28E) NKikimr::NStat::NTestSuiteHttpRequest::TCurrentTest::Execute()+429 (0x136F9ACD) NUnitTest::TTestFactory::Execute()+803 (0x139AEA03) NUnitTest::RunMain(int, char**)+3021 (0x139BCD1D) ??+0 (0x7F06B4F2FD90) __libc_start_main+128 (0x7F06B4F2FE40) _start+41 (0x1283B029) >> test_ping.py::TestPing::test_error_on_cgi_parameters [FAIL] >> test_ping.py::TestPing::test_error_on_non_ping_path >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v1-std] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v0] >> test_ping.py::TestPing::test_error_on_non_ping_path [FAIL] >> test_ping.py::TestPing::test_ping >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v1] >> test_queue_counters.py::TestSqsGettingCounters::test_receive_attempts_are_counted_separately_for_messages_in_one_batch [FAIL] >> test_queue_counters.py::TestSqsGettingCounters::test_receive_message_immediate_duration_counter >> test_ping.py::TestPing::test_ping [FAIL] >> test_queue_counters.py::TestSqsGettingCounters::test_receive_message_immediate_duration_counter [FAIL] >> test_queue_counters.py::TestSqsGettingCounters::test_sqs_action_counters >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-std] [FAIL] >> test_queue_counters.py::TestSqsGettingCounters::test_action_duration_being_not_immediate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_ping.py::TestPing::test_ping [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00278a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_ping.py.TestPing.test_error_on_cgi_parameters/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00278a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_ping.py.TestPing.test_error_on_cgi_parameters/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00278a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_ping.py.TestPing.test_error_on_cgi_parameters/cluster/node_1/logfile_gw1cklbx.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 395006 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-std] [FAIL] >> 
>> test_ping.py::TestPing::test_error_on_cgi_parameters [FAIL]
>> test_ping.py::TestPing::test_error_on_non_ping_path
>> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v1-std] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v0]
>> test_ping.py::TestPing::test_error_on_non_ping_path [FAIL]
>> test_ping.py::TestPing::test_ping
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v1]
>> test_queue_counters.py::TestSqsGettingCounters::test_receive_attempts_are_counted_separately_for_messages_in_one_batch [FAIL]
>> test_queue_counters.py::TestSqsGettingCounters::test_receive_message_immediate_duration_counter
>> test_ping.py::TestPing::test_ping [FAIL]
>> test_queue_counters.py::TestSqsGettingCounters::test_receive_message_immediate_duration_counter [FAIL]
>> test_queue_counters.py::TestSqsGettingCounters::test_sqs_action_counters
>> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-std] [FAIL]
>> test_queue_counters.py::TestSqsGettingCounters::test_action_duration_being_not_immediate
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_ping.py::TestPing::test_ping [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00278a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_ping.py.TestPing.test_error_on_cgi_parameters/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00278a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_ping.py.TestPing.test_error_on_cgi_parameters/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00278a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_ping.py.TestPing.test_error_on_cgi_parameters/cluster/node_1/logfile_gw1cklbx.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 395006 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_queues_managing.py::TestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-std] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-fifo]
>> test_queue_counters.py::TestSqsGettingCounters::test_sqs_action_counters [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-fifo] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-std]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-std] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_sqs_action_counters [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002781/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk30/testing_out_stuff/test_queue_counters.py.TestSqsGettingCounters.test_receive_attempts_are_counted_separately_for_messages_in_one_batch/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002781/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk30/testing_out_stuff/test_queue_counters.py.TestSqsGettingCounters.test_receive_attempts_are_counted_separately_for_messages_in_one_batch/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002781/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk30/testing_out_stuff/test_queue_counters.py.TestSqsGettingCounters.test_receive_attempts_are_counted_separately_for_messages_in_one_batch/cluster/node_1/logfile_6k3zi88z.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 396320 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
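Every one of these "Test command err" blocks is the same pattern: the test harness left a node's stdout/stderr/logfile objects and a child process alive at interpreter shutdown, so CPython emits a ResourceWarning per unclosed file plus "subprocess ... is still running". A hedged sketch of how to act on the repeated "Enable tracemalloc" hint and of what the fix looks like; the file names and the sleep command are placeholders, not the harness's real code:

    # With tracemalloc active, each ResourceWarning is extended with the stack
    # that allocated the leaked object, which points at whichever fixture
    # opened the node's streams without closing them.
    import subprocess
    import tracemalloc
    import warnings

    tracemalloc.start(25)            # keep up to 25 frames per allocation
    warnings.simplefilter("always", ResourceWarning)

    # Closing the files and wait()-ing the child removes both kinds of warning.
    with open("/tmp/node_stdout", "wb") as out, open("/tmp/node_stderr", "wb") as err:
        proc = subprocess.Popen(["sleep", "0"], stdout=out, stderr=err)
        proc.wait()                  # avoids "subprocess ... is still running"

Running the same tests with PYTHONTRACEMALLOC=1 in the environment achieves the first half without touching the code.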
>> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_and_create_queue[std]
>> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-fifo] [FAIL]
>> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-std]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-std] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00277e/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk46/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_request_to_deleted_queue.tables_format_v0-std/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00277e/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk46/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_request_to_deleted_queue.tables_format_v0-std/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00277e/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk46/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_request_to_deleted_queue.tables_format_v0-std/cluster/node_1/logfile_2blqjllg.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 397308 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_queues_managing.py::TestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v0] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v1]
>> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-std] [FAIL]
>> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-invalid] [FAIL]
>> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-no]
>> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[fifo] [FAIL]
>> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[std]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v1] [FAIL]
>> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false]
>> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-no] [FAIL]
>> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-empty]
>> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[std] [FAIL]
>> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[fifo]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-std] [FAIL]
Test command err:
ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002783/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesInCompatibilityMode.test_set_queue_attributes_no_validation.tables_format_v1-std/cluster/node_1/stdout'>
  (key, value)
ResourceWarning: Enable tracemalloc to get the object allocation traceback
ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002783/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesInCompatibilityMode.test_set_queue_attributes_no_validation.tables_format_v1-std/cluster/node_1/stderr'>
  (key, value)
ResourceWarning: Enable tracemalloc to get the object allocation traceback
ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002783/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesInCompatibilityMode.test_set_queue_attributes_no_validation.tables_format_v1-std/cluster/node_1/logfile_rv41hv3q.log'>
  (key, value)
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 395007 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002783/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesValidation.test_create_queue_with_custom_attributes.tables_format_v0-fifo/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002783/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesValidation.test_create_queue_with_custom_attributes.tables_format_v0-fifo/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002783/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesValidation.test_create_queue_with_custom_attributes.tables_format_v0-fifo/cluster/node_1/logfile_dvfr0ian.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 399407 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[fifo] [FAIL]
>> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[std]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v1] [FAIL]
Test command err:
ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002786/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_untag_queue.tables_format_v1-std/cluster/node_1/stdout'>
  (key, value)
ResourceWarning: Enable tracemalloc to get the object allocation traceback
ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002786/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_untag_queue.tables_format_v1-std/cluster/node_1/stderr'>
  (key, value)
ResourceWarning: Enable tracemalloc to get the object allocation traceback
ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002786/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_untag_queue.tables_format_v1-std/cluster/node_1/logfile_5ypfm9ny.log'>
  (key, value)
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 395192 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002786/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_create_fifo_queue_wo_postfix.tables_format_v0/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002786/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_create_fifo_queue_wo_postfix.tables_format_v0/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002786/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_create_fifo_queue_wo_postfix.tables_format_v0/cluster/node_1/logfile_npd6yth7.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 399853 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_queue_counters.py::TestSqsGettingCounters::test_action_duration_being_not_immediate [FAIL]
>> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_reading_from_empty_queue
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[fifo] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[std]
>> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[std] [FAIL]
>> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_reading_from_empty_queue [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue[tables_format_v1-fifo]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[std] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-fifo]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[std] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002777/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithPath.test_read_message.fifo/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002777/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithPath.test_read_message.fifo/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002777/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithPath.test_read_message.fifo/cluster/node_1/logfile_0jotva5p.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 399405 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] [GOOD]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-fifo] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_reading_from_empty_queue [FAIL]
Test command err:
ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002780/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesValidation.test_set_queue_attributes.tables_format_v1-std/cluster/node_1/stdout'>
  (key, value)
ResourceWarning: Enable tracemalloc to get the object allocation traceback
ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002780/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesValidation.test_set_queue_attributes.tables_format_v1-std/cluster/node_1/stderr'>
  (key, value)
ResourceWarning: Enable tracemalloc to get the object allocation traceback
ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002780/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesValidation.test_set_queue_attributes.tables_format_v1-std/cluster/node_1/logfile_anrdp2tb.log'>
  (key, value)
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 396792 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002780/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_queue_counters.py.TestSqsGettingCounters.test_action_duration_being_not_immediate/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002780/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_queue_counters.py.TestSqsGettingCounters.test_action_duration_being_not_immediate/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002780/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_queue_counters.py.TestSqsGettingCounters.test_action_duration_being_not_immediate/cluster/node_1/logfile_hewg29hq.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 401364 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_and_create_queue[std] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue[tables_format_v0-fifo]
>> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true]
|77.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-fifo] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00277b/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk51/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_and_create_queue.fifo/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00277b/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk51/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_and_create_queue.fifo/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00277b/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk51/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_and_create_queue.fifo/cluster/node_1/logfile_zof6c5sb.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00277b/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk51/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_and_create_queue.fifo/cluster/slot_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00277b/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk51/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_and_create_queue.fifo/cluster/slot_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00277b/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk51/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_and_create_queue.fifo/cluster/slot_1/logfile_8vsps35u.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 397581 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 401367 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue[tables_format_v0-fifo] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue[tables_format_v0-std]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_ya_count_queues[tables_format_v0]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-std] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-fifo]
>> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue[tables_format_v0-std] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v1] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-fifo]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-fifo] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-std]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue[tables_format_v0-std] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002772/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk40/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_delete_and_create_queue.std/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002772/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk40/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_delete_and_create_queue.std/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002772/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk40/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_delete_and_create_queue.std/cluster/node_1/logfile_5vqq8dcs.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 403193 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-fifo] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-std]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-std] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-std] [FAIL]
>> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[fifo]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-std] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002788/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk54/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_purge_queue.tables_format_v0-std/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002788/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk54/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_purge_queue.tables_format_v0-std/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002788/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk54/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_purge_queue.tables_format_v0-std/cluster/node_1/logfile_ri1309ja.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002788/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk54/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_purge_queue.tables_format_v0-std/cluster/slot_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002788/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk54/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_purge_queue.tables_format_v0-std/cluster/slot_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002788/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk54/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_purge_queue.tables_format_v0-std/cluster/slot_1/logfile_51gzk8nt.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 395005 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 398801 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v0]
>> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue[tables_format_v1-fifo] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-std] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002774/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk48/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_fifo_queue_wo_postfix.tables_format_v1/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002774/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk48/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_fifo_queue_wo_postfix.tables_format_v1/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002774/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk48/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_fifo_queue_wo_postfix.tables_format_v1/cluster/node_1/logfile_b153b9v6.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002774/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk48/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_fifo_queue_wo_postfix.tables_format_v1/cluster/slot_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002774/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk48/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_fifo_queue_wo_postfix.tables_format_v1/cluster/slot_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002774/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk48/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_fifo_queue_wo_postfix.tables_format_v1/cluster/slot_1/logfile_tehuccg8.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 400704 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 404419 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[std]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue[tables_format_v1-std]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue[tables_format_v1-std] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v0]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v0] [FAIL]
|77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [FAIL]
>> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false]
>> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-empty] [FAIL]
>> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-invalid]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_ya_count_queues[tables_format_v0] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_ya_count_queues[tables_format_v1]
>> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-invalid] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v0] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002770/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk41/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_delete_queue.tables_format_v1-fifo/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002770/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk41/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_delete_queue.tables_format_v1-fifo/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002770/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk41/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_delete_queue.tables_format_v1-fifo/cluster/node_1/logfile_luxbpfe1.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 405974 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_queues_managing.py::TestQueuesManagingWithPath::test_ya_count_queues[tables_format_v1] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v0]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v1]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-invalid] [FAIL]
Test command err:
ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002779/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithPath.test_invalid_token.tables_format_v1-invalid/cluster/node_1/stdout'>
  (key, value)
ResourceWarning: Enable tracemalloc to get the object allocation traceback
ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002779/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithPath.test_invalid_token.tables_format_v1-invalid/cluster/node_1/stderr'>
  (key, value)
ResourceWarning: Enable tracemalloc to get the object allocation traceback
ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002779/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithPath.test_invalid_token.tables_format_v1-invalid/cluster/node_1/logfile_fjo2lo_d.log'>
  (key, value)
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 399289 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002779/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithTenant.test_invalid_token.tables_format_v0-empty/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002779/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithTenant.test_invalid_token.tables_format_v0-empty/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002779/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithTenant.test_invalid_token.tables_format_v0-empty/cluster/node_1/logfile_8pq5h1o_.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 404912 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002779/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithTenant.test_invalid_token.tables_format_v0-empty/cluster/slot_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002779/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithTenant.test_invalid_token.tables_format_v0-empty/cluster/slot_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002779/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithTenant.test_invalid_token.tables_format_v0-empty/cluster/slot_1/logfile_uy3c3wk1.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 407167 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_counters.py::TestSqsCountersFeatures::test_disables_user_counters
>> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC]
>> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v0] [FAIL]
>> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v1]
>> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v1] [FAIL]
>> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v0]
|77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [FAIL]
>> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v0] [FAIL]
>> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v1]
>> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v1] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v0]
|77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [FAIL]
|77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [FAIL]
|77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v1] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002767/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_acl.py.TestSqsACLWithPath.test_apply_permissions.tables_format_v0/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002767/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_acl.py.TestSqsACLWithPath.test_apply_permissions.tables_format_v0/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002767/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_acl.py.TestSqsACLWithPath.test_apply_permissions.tables_format_v0/cluster/node_1/logfile_e84mf_2n.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 408887 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] [FAIL]
>> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v1] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue[tables_format_v0-fifo]
>> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[fifo] [FAIL]
>> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[std]
>> YdbSdkSessionsPool::PeriodicTask/1 [GOOD]
>> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] [FAIL]
>> test_counters.py::TestSqsCountersFeatures::test_disables_user_counters [FAIL]
>> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v0]
>> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[std] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue[tables_format_v0-fifo] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue[tables_format_v0-std]
>> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[std] [FAIL]
>> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[fifo]
>> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v0]
>> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-fifo]
>> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] [GOOD]
>> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v0] [FAIL]
>> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v1]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue[tables_format_v0-std] [FAIL]
>> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true]
>> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC]
>> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[fifo] [FAIL]
>> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v1] [FAIL]
>> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v0]
>> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true]
>> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [FAIL]
>> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v0]
>> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v0] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue[tables_format_v0-std] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002763/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk42/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_delete_queue_batch.tables_format_v1/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002763/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk42/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_delete_queue_batch.tables_format_v1/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002763/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk42/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_delete_queue_batch.tables_format_v1/cluster/node_1/logfile_1y3hybg7.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 412496 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
|77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[fifo] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00276a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk20/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_double_create_old.fifo/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00276a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk20/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_double_create_old.fifo/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00276a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk20/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_double_create_old.fifo/cluster/node_1/logfile_stxpu5ib.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00276a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk20/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_double_create_old.fifo/cluster/slot_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00276a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk20/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_double_create_old.fifo/cluster/slot_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00276a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk20/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_double_create_old.fifo/cluster/slot_1/logfile_cmakg0it.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 408796 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 412718 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-fifo]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v0] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002761/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_counters.py.TestSqsCountersFeatures.test_disables_user_counters/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002761/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_counters.py.TestSqsCountersFeatures.test_disables_user_counters/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002761/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_counters.py.TestSqsCountersFeatures.test_disables_user_counters/cluster/node_1/logfile_rwwah7pd.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 412921 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v0] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v1]
|77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::PeriodicTask/1 [GOOD]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v1] [FAIL]
>> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_and_create_queue[fifo]
|77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test
>> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_and_create_queue[fifo] [FAIL]
|77.6%| [TA] $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log}
|77.7%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log}
|77.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [FAIL]
>> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v0] [FAIL]
>> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v1]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_and_create_queue[fifo] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00275e/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_create_queue_with_invalid_name.tables_format_v0/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00275e/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_create_queue_with_invalid_name.tables_format_v0/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00275e/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_create_queue_with_invalid_name.tables_format_v0/cluster/node_1/logfile_9uv5_ycr.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 415058 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
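With this many interleaved ">> <test>" start records and "[FAIL]/[GOOD]" completions, a quick tally is easier than reading the stream. A small hedged helper; the log file name is a placeholder and the record shape is inferred from the lines in this run, not an official ya interface:

    # Count terminal test states in a captured run log.
    import re
    from collections import Counter

    STATUS_RE = re.compile(r">> (?P<test>\S+) \[(?P<status>[A-Z]+)\]")

    def summarize(log_path: str) -> Counter:
        counts: Counter = Counter()
        with open(log_path, encoding="utf-8", errors="replace") as fh:
            for line in fh:
                match = STATUS_RE.search(line)
                if match:            # start records carry no [...] status and are skipped
                    counts[match.group("status")] += 1
        return counts

    # e.g. summarize("test_run.log") might give Counter({'FAIL': 60, 'GOOD': 3})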
test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[std] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v0] [FAIL] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v1] |77.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-fifo] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-200] [FAIL] Test command err: ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002765/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_format_without_version.py.TestQueueWithoutVersionWithTenant.test_common.std/cluster/node_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002765/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_format_without_version.py.TestQueueWithoutVersionWithTenant.test_common.std/cluster/node_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002765/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_format_without_version.py.TestQueueWithoutVersionWithTenant.test_common.std/cluster/node_1/logfile_ssjbbmgm.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002765/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_format_without_version.py.TestQueueWithoutVersionWithTenant.test_common.std/cluster/slot_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002765/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_format_without_version.py.TestQueueWithoutVersionWithTenant.test_common.std/cluster/slot_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002765/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_format_without_version.py.TestQueueWithoutVersionWithTenant.test_common.std/cluster/slot_1/logfile_v5w30c9x.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 409389 is still running 
ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 413764 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002765/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_garbage_collection.py.TestSqsGarbageCollection.test_cleanups_deduplication_table.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002765/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_garbage_collection.py.TestSqsGarbageCollection.test_cleanups_deduplication_table.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002765/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_garbage_collection.py.TestSqsGarbageCollection.test_cleanups_deduplication_table.tables_format_v0/cluster/node_1/logfile_faq71vum.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 417283 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v1] [FAIL] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v0] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-empty] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v0] [FAIL] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00275b/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesValidation.test_set_queue_attributes.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00275b/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesValidation.test_set_queue_attributes.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/00275b/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesValidation.test_set_queue_attributes.tables_format_v0-fifo/cluster/node_1/logfile_vhnu5syf.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 417303 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |77.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [FAIL] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v1] [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[fifo] >> TCdcStreamWithRebootsTests::SplitTable[TabletReboots] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v0] [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002757/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_account_actions.py.TestAccountActionsWithPath.test_manage_account.with_queues-tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002757/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_account_actions.py.TestAccountActionsWithPath.test_manage_account.with_queues-tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002757/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_account_actions.py.TestAccountActionsWithPath.test_manage_account.with_queues-tables_format_v0/cluster/node_1/logfile_iusd5ty2.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 418202 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v0] [FAIL] Test command err: ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00276c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_ya_count_queues.tables_format_v0/cluster/node_1/stdout'> (key, value)
ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00276c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_ya_count_queues.tables_format_v0/cluster/node_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00276c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_ya_count_queues.tables_format_v0/cluster/node_1/logfile_7t_0zqw1.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 407005 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00276c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_fifo_queue_wo_postfix.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00276c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_fifo_queue_wo_postfix.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00276c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_fifo_queue_wo_postfix.tables_format_v0/cluster/node_1/logfile_cz05wkj0.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 411385 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00276c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_fifo_queue_wo_postfix.tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00276c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_fifo_queue_wo_postfix.tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom
name='/home/runner/.ya/build/build_root/ciyv/00276c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_fifo_queue_wo_postfix.tables_format_v0/cluster/slot_1/logfile_aoq_u7hy.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 415855 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::SplitTable[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:15.494060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:15.494079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:15.494083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:15.494087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:15.494096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:15.494099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:15.494106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:15.494114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:15.494185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:15.494238Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:15.504110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:15.504129Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:15.504226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:15.506589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:15.506613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:15.506632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:15.509136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:15.509207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:15.509311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.509473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:15.510091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:15.510123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:15.510335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:15.510348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:15.510375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:15.510383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:15.510390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:15.510419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:15.511576Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 
is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:15.526304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:15.526367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.526420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:15.526452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:15.526459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.526932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.526950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:15.526995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.527003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:15.527008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:15.527012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:15.527329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.527337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:15.527340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:15.527576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.527582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.527586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-05-29T15:32:15.527590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:15.528050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:15.528360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:15.528380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:15.528492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.528508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:15.528513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:15.528544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
pactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:33:39.487122Z node 106 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:33:39.487247Z node 106 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 134us result status StatusSuccess 2025-05-29T15:33:39.487489Z node 106 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:33:39.487629Z node 106 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:33:39.487669Z node 106 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 50us result status StatusSuccess 2025-05-29T15:33:39.487806Z node 106 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 2 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] [FAIL] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [FAIL] |77.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v1] [FAIL] >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue[tables_format_v1-std] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-fifo] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[std] [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_empty_tables_format >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-std] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v0] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v1]
>> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_empty_tables_format [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_incorrect_tables_format >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v0] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v1] [FAIL] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-empty] [FAIL] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-invalid] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_incorrect_tables_format [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_unsupported_tables_format >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_unsupported_tables_format >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002755/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk49/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_queue.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002755/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk49/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_queue.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002755/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk49/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_queue.tables_format_v1-fifo/cluster/node_1/logfile_zdqby6cg.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002755/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk49/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_queue.tables_format_v1-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002755/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk49/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_queue.tables_format_v1-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002755/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk49/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_queue.tables_format_v1-fifo/cluster/slot_1/logfile_92qof5l_.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 419165 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 422120 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-invalid] [FAIL] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-no] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002750/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_create_queue.tables_format_v1-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002750/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_create_queue.tables_format_v1-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002750/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_create_queue.tables_format_v1-std/cluster/node_1/logfile_ozofzu6b.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 421335 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_unsupported_tables_format [FAIL] |77.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-no] [FAIL] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-empty] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_unsupported_tables_format [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter
name='/home/runner/.ya/build/build_root/ciyv/00274e/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithPath.test_create_queue.std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00274e/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithPath.test_create_queue.std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00274e/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithPath.test_create_queue.std/cluster/node_1/logfile_eh9k68b2.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 421631 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-empty] [FAIL] >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[fifo] [FAIL] >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-empty] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00274c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithPath.test_invalid_token.tables_format_v0-empty/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00274c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithPath.test_invalid_token.tables_format_v0-empty/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00274c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithPath.test_invalid_token.tables_format_v0-empty/cluster/node_1/logfile_o2q7h9wu.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 422481 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[std] [FAIL] >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[fifo] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v1-std] >> 
test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v0] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[std] [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[fifo] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-fifo] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue[tables_format_v0-fifo] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue[tables_format_v0-std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[fifo] [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[std] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue[tables_format_v0-std] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue[tables_format_v1-fifo] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[std] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue[tables_format_v1-fifo] [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[fifo] [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[std] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v0] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002747/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk21/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_read_message.std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002747/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk21/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_read_message.std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom
name='/home/runner/.ya/build/build_root/ciyv/002747/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk21/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_read_message.std/cluster/node_1/logfile_bh00dtw0.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002747/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk21/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_read_message.std/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002747/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk21/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_read_message.std/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002747/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk21/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_read_message.std/cluster/slot_1/logfile_3d7r4g2v.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 423554 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 427252 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue[tables_format_v1-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00273e/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_create_queue.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00273e/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_create_queue.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00273e/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_create_queue.tables_format_v0-fifo/cluster/node_1/logfile_sj03io42.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 427127 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[std] [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v1] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v0] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[fifo] [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[std] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v0] [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_unsupported_tables_format [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[fifo] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[std] [FAIL] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] [FAIL] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[fifo] [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[std] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v1-std] [FAIL] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002745/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk55/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_purge_queue_batch.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002745/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk55/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_purge_queue_batch.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002745/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk55/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_purge_queue_batch.tables_format_v0/cluster/node_1/logfile_2289sei9.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/002745/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk55/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_purge_queue_batch.tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002745/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk55/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_purge_queue_batch.tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002745/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk55/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_purge_queue_batch.tables_format_v0/cluster/slot_1/logfile_w6q1uzq5.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 424889 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 428987 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[fifo] [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[std] [FAIL] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v0-fifo] [FAIL] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v0-std] >> ColumnStatistics::CountMinSketchStatistics [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00273c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithPath.test_double_create.fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00273c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithPath.test_double_create.fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00273c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithPath.test_double_create.fifo/cluster/node_1/logfile_xbnyg23n.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: 
subprocess 428198 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[std] [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_empty_tables_format >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v0-std] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002740/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_create_queue_with_unsupported_tables_format/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002740/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_create_queue_with_unsupported_tables_format/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002740/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_create_queue_with_unsupported_tables_format/cluster/node_1/logfile_yab28ktj.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002740/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_create_queue_with_unsupported_tables_format/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002740/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_create_queue_with_unsupported_tables_format/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002740/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_create_queue_with_unsupported_tables_format/cluster/slot_1/logfile_5gm279m2.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 426920 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 431783 is still running ResourceWarning: Enable 
tracemalloc to get the object allocation traceback >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-fifo] [FAIL] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_empty_tables_format [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_incorrect_tables_format >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_duplicates >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-std] [FAIL] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-fifo] |77.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00273a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk32/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_invalid_tag_queue.tables_format_v1-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00273a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk32/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_invalid_tag_queue.tables_format_v1-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00273a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk32/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_invalid_tag_queue.tables_format_v1-std/cluster/node_1/logfile_fkwqjttu.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 431867 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[fifo] [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_incorrect_tables_format [FAIL] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-fifo] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[fifo] [FAIL] Test command err: ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/002753/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_counters.py.TestSqsCountersFeatures.test_updates_status_code_counters_when_parsing_errors_occur.tables_format_v1/cluster/node_1/stdout'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002753/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_counters.py.TestSqsCountersFeatures.test_updates_status_code_counters_when_parsing_errors_occur.tables_format_v1/cluster/node_1/stderr'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002753/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_counters.py.TestSqsCountersFeatures.test_updates_status_code_counters_when_parsing_errors_occur.tables_format_v1/cluster/node_1/logfile_voc2wydi.log'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 421296 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002753/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_format_without_version.py.TestQueueWithoutVersionWithPath.test_common.fifo/cluster/node_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002753/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_format_without_version.py.TestQueueWithoutVersionWithPath.test_common.fifo/cluster/node_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002753/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_format_without_version.py.TestQueueWithoutVersionWithPath.test_common.fifo/cluster/node_1/logfile_jojajd0n.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 424857 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002753/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_format_without_version.py.TestQueueWithoutVersionWithTenant.test_common.fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object 
allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002753/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_format_without_version.py.TestQueueWithoutVersionWithTenant.test_common.fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002753/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_format_without_version.py.TestQueueWithoutVersionWithTenant.test_common.fifo/cluster/node_1/logfile_fv8wbmqn.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 430197 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002753/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_format_without_version.py.TestQueueWithoutVersionWithTenant.test_common.fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002753/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_format_without_version.py.TestQueueWithoutVersionWithTenant.test_common.fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002753/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_format_without_version.py.TestQueueWithoutVersionWithTenant.test_common.fifo/cluster/slot_1/logfile_n9ved4l_.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 433999 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_incorrect_tables_format [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00274a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_create_queue.fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00274a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_create_queue.fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/00274a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_create_queue.fifo/cluster/node_1/logfile_gxbkqr6q.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00274a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_create_queue.fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00274a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_create_queue.fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00274a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithTenant.test_create_queue.fifo/cluster/slot_1/logfile_j98eq_1m.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 423084 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 426631 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002736/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesInCompatibilityMode.test_set_queue_attributes_no_validation.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002736/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesInCompatibilityMode.test_set_queue_attributes_no_validation.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002736/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesInCompatibilityMode.test_set_queue_attributes_no_validation.tables_format_v0-fifo/cluster/node_1/logfile_i5s57xgn.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback 
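------- [Editor's note] The recurring "Enable tracemalloc to get the object allocation traceback" hint refers to the stdlib tracemalloc module: when allocation tracking is active, CPython attaches the allocation site of the leaked object to the ResourceWarning, showing exactly where each unclosed file was opened. A minimal sketch (the -X option and the environment variable are standard CPython switches; the pytest invocation is illustrative):

# From the command line:
#   python -X tracemalloc=10 -m pytest ydb/tests/functional/sqs/common
#   PYTHONTRACEMALLOC=10 python -m pytest ydb/tests/functional/sqs/common
# Or programmatically, before the cluster fixtures start:
import tracemalloc
tracemalloc.start(10)  # keep up to 10 frames per allocation traceback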
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 433123 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v1] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v1] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v0] [FAIL] >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v0] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v1] >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v1] [FAIL] >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v0] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_action_which_does_not_requere_existing_queue >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v1] [FAIL] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v0] [FAIL] >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v1] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v0-std] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [FAIL] >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v1] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002743/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk50/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_queue_generates_event.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002743/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk50/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_queue_generates_event.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002743/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk50/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_queue_generates_event.tables_format_v1/cluster/node_1/logfile_9rfv1qcy.log'> ResourceWarning: Enable tracemalloc to get the object 
allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002743/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk50/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_queue_generates_event.tables_format_v1/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002743/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk50/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_queue_generates_event.tables_format_v1/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002743/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk50/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_create_queue_generates_event.tables_format_v1/cluster/slot_1/logfile_hzlvvpkc.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 426358 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 429429 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_duplicates [FAIL] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_reading_deleting ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchStatistics [FAIL] Test command err: 2025-05-29T15:31:34.955664Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:31:34.955703Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:31:34.955719Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00150a/r3tmp/tmpYEQ5DA/pdisk_1.dat 2025-05-29T15:31:35.048233Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16858, node 1 2025-05-29T15:31:35.149970Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:35.149987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:35.149991Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:35.150032Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:31:35.150513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:31:35.225865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:35.225903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:35.237647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20336 2025-05-29T15:31:35.568450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:31:36.261409Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:31:36.267680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:36.267711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:36.321119Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:31:36.321632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:36.463401Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:36.463545Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:36.463684Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-05-29T15:31:36.463712Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:36.463723Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:36.463764Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:36.463784Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:36.463801Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:36.463832Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:36.614793Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:36.614834Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:36.626156Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:36.657973Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:36.665076Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:31:36.665102Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:31:36.671067Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:31:36.671251Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:31:36.671267Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:31:36.671272Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:31:36.671276Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:31:36.671280Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:31:36.671284Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:31:36.671289Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:31:36.671602Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:31:36.687163Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:36.687193Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:1863:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:36.688098Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1874:2605] 2025-05-29T15:31:36.689118Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2622] 
2025-05-29T15:31:36.689392Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1905:2622], schemeshard id = 72075186224037897 2025-05-29T15:31:36.689945Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-05-29T15:31:36.693380Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:31:36.693394Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:31:36.693404Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-05-29T15:31:36.695710Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:31:36.697058Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:31:36.697086Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:31:36.798108Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:31:36.880034Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:31:36.933643Z node 2 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:31:37.437964Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2212:3057], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:37.438008Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:37.442416Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-05-29T15:31:37.472484Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2296:2838];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:31:37.472548Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2296:2838];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:31:37.472588Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2296:2838];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:31:37.472608Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2296:2838];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:31:37.472623Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2296:2838];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:31:37.472642Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2296:2838];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:31:37.472655Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2296:2838];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:31:37.472670Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2296:2838];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_re ... status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:33:46.854697Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=ODFmYTI1Y2ItMzVlODA5NS01YWU1NGYxOS1lZWZiM2Ey, ActorId: [2:7433:5441], ActorState: ExecuteState, TraceId: 01jweav4pn0bzaf51bn3tdg2a7, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:33:46.854950Z node 2 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=ODFmYTI1Y2ItMzVlODA5NS01YWU1NGYxOS1lZWZiM2Ey, TxId: 2025-05-29T15:33:46.854965Z node 2 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=ODFmYTI1Y2ItMzVlODA5NS01YWU1NGYxOS1lZWZiM2Ey, TxId: 2025-05-29T15:33:47.358411Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. Database: /Root/Database 2025-05-29T15:33:47.359250Z node 2 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-05-29T15:33:47.363291Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 8 ], ReplyToActorId[ [2:7471:5474]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:33:47.363360Z node 2 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 8 ] 2025-05-29T15:33:47.363368Z node 2 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 8, ReplyToActorId = [2:7471:5474], StatRequests.size() = 1 2025-05-29T15:33:47.376587Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7467:5470], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:33:47.377546Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=Mjk0ZDE3ZWQtNWRlZDI0LWQwNmU3MWM3LWIzODE5ZDEy, ActorId: [2:7464:5467], ActorState: ExecuteState, TraceId: 01jweav56z98k1f5pvxtwq6xc3, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:33:47.377784Z node 2 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=Mjk0ZDE3ZWQtNWRlZDI0LWQwNmU3MWM3LWIzODE5ZDEy, TxId: 2025-05-29T15:33:47.377797Z node 2 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=Mjk0ZDE3ZWQtNWRlZDI0LWQwNmU3MWM3LWIzODE5ZDEy, TxId: 2025-05-29T15:33:47.378140Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-05-29T15:33:47.392123Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-05-29T15:33:47.392153Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-05-29T15:33:49.551480Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:33:49.551517Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:33:49.551529Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:802: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-05-29T15:33:49.551536Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:732: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-05-29T15:33:49.552540Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-05-29T15:33:49.565040Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-05-29T15:33:49.565229Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-05-29T15:33:49.565250Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-05-29T15:33:49.565590Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-05-29T15:33:49.577390Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-05-29T15:33:49.577478Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-05-29T15:33:49.577701Z node 2 :STATISTICS DEBUG: service_impl.cpp:1086: EvClientConnected, node id = 2, client id = [2:7573:5523], server id = [2:7574:5524], tablet id = 72075186224037899, status = OK 2025-05-29T15:33:49.577956Z node 2 :STATISTICS DEBUG: service_impl.cpp:1055: TEvStatisticsRequest send, client id = [2:7573:5523], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-05-29T15:33:49.579054Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-05-29T15:33:49.579074Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-05-29T15:33:49.579172Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-05-29T15:33:49.579206Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-05-29T15:33:49.579281Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-05-29T15:33:49.579828Z node 2 :STATISTICS DEBUG: service_impl.cpp:1121: EvClientDestroyed, node id = 2, client id = [2:7573:5523], server id = [2:7574:5524], tablet id = 72075186224037899 2025-05-29T15:33:49.579840Z node 2 :STATISTICS DEBUG: service_impl.cpp:1139: Skip EvClientDestroyed 2025-05-29T15:33:49.580104Z node 2 :STATISTICS DEBUG: query_actor.cpp:197: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-05-29T15:33:49.586547Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 9 ], ReplyToActorId[ [2:7589:5538]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:33:49.586623Z node 2 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 9 ] 2025-05-29T15:33:49.586631Z node 2 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 9, ReplyToActorId = [2:7589:5538], StatRequests.size() = 1 2025-05-29T15:33:49.596222Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [2:7585:5534], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:33:49.596918Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=2&id=ZTRiYmM0YjktNTA1MDI1ZjgtY2RjOWZhMWItNGEzMzVjNGU=, ActorId: [2:7582:5531], ActorState: ExecuteState, TraceId: 01jweav7cc2vdb8dvmxdzm95wj, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 2025-05-29T15:33:49.597117Z node 2 :STATISTICS DEBUG: query_actor.cpp:240: [TQueryBase] TEvDataQueryResult INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=ZTRiYmM0YjktNTA1MDI1ZjgtY2RjOWZhMWItNGEzMzVjNGU=, TxId: 2025-05-29T15:33:49.597129Z node 2 :STATISTICS WARN: query_actor.cpp:372: [TQueryBase] Finish with INTERNAL_ERROR, Issues: {
: Fatal: Execution, code: 1060 subissue: {
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 } }, SessionId: ydb://session/3?node_id=2&id=ZTRiYmM0YjktNTA1MDI1ZjgtY2RjOWZhMWItNGEzMzVjNGU=, TxId: 2025-05-29T15:33:49.597625Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:7593:5558]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-05-29T15:33:49.597676Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-05-29T15:33:49.597683Z node 1 :STATISTICS DEBUG: service_impl.cpp:812: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-05-29T15:33:49.598185Z node 1 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-05-29T15:33:49.598199Z node 1 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-05-29T15:33:49.598208Z node 1 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-05-29T15:33:49.600560Z node 1 :STATISTICS ERROR: service_impl.cpp:691: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-05-29T15:33:49.600636Z node 1 :STATISTICS DEBUG: service_impl.cpp:1152: TEvLoadStatisticsQueryResponse, request id = 1 assertion failed at ydb/core/statistics/service/ut/ut_column_statistics.cpp:50, void NKikimr::NStat::CheckColumnStatistics(TTestActorRuntime &, const TPathId &, const TActorId &, const std::vector &): (stat.Success) TBackTrace::Capture()+28 (0x137F904C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x139AC3D9) NKikimr::NStat::CheckColumnStatistics(NActors::TTestActorRuntime&, NKikimr::TPathId const&, NActors::TActorId const&, std::__y1::vector> const&)+2379 (0x136EE4CB) NKikimr::NStat::NTestSuiteColumnStatistics::TTestCaseCountMinSketchStatistics::Execute_(NUnitTest::TTestContext&)+1372 (0x136EF68C) NKikimr::NStat::NTestSuiteColumnStatistics::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x136F2A77) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x139AE28E) NKikimr::NStat::NTestSuiteColumnStatistics::TCurrentTest::Execute()+419 (0x136F2433) NUnitTest::TTestFactory::Execute()+803 (0x139AEA03) NUnitTest::RunMain(int, char**)+3021 (0x139BCD1D) ??+0 (0x7FC6D102ED90) __libc_start_main+128 (0x7FC6D102EE40) _start+41 (0x1283B029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002738/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_acl.py.TestSqsACLWithTenant.test_apply_permissions.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002738/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_acl.py.TestSqsACLWithTenant.test_apply_permissions.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get 
the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002738/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_acl.py.TestSqsACLWithTenant.test_apply_permissions.tables_format_v0/cluster/node_1/logfile_y62_igrn.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002738/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_acl.py.TestSqsACLWithTenant.test_apply_permissions.tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002738/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_acl.py.TestSqsACLWithTenant.test_apply_permissions.tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002738/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_acl.py.TestSqsACLWithTenant.test_apply_permissions.tables_format_v0/cluster/slot_1/logfile_vyamt0t0.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 432478 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 435703 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |77.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-fifo] |77.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_reading_deleting [FAIL] >> test_queue_counters.py::TestSqsGettingCounters::test_purge_queue_counters >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v0] >> test_queue_counters.py::TestSqsGettingCounters::test_purge_queue_counters [FAIL] >> TCdcStreamWithRebootsTests::MergeTableResolvedTimestamps[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_purge_queue_counters [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002734/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_queue_counters.py.TestSqsGettingCounters.test_counters_when_sending_duplicates/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/002734/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_queue_counters.py.TestSqsGettingCounters.test_counters_when_sending_duplicates/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002734/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_queue_counters.py.TestSqsGettingCounters.test_counters_when_sending_duplicates/cluster/node_1/logfile_vvvrnd7u.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 437282 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::MergeTableResolvedTimestamps[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:27.658563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:27.658583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:27.658589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:27.658594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:27.658606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:27.658610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:27.658618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:27.658630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] 
Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:27.658713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:27.658799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:27.672870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:27.672887Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:27.672959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:27.675096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:27.675125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:27.675150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:27.677752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:27.677825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:27.677926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:27.678082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:27.678724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:27.678772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:27.678978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:27.678988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:27.679015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:27.679022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:27.679029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:27.679047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] 
recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:27.680218Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:27.700673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:27.700733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:27.700780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:27.700819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:27.700829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:27.701430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:27.701449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:27.701492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:27.701500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:27.701505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:27.701510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:27.701913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:27.701925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:27.701930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:27.702283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:27.702293Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:27.702298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:27.702304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:27.703011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:27.703473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:27.703504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:27.703680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:27.703704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:27.703710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:27.703756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
"Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:33:55.391853Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:33:55.391953Z node 94 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 120us result status StatusSuccess 2025-05-29T15:33:55.392193Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:33:55.392303Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:33:55.392336Z node 94 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 35us result status StatusSuccess 2025-05-29T15:33:55.392440Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 2 
PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 2 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "\001\000\004\000\000\000\377\377\377\177" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "\001\000\004\000\000\000\377\377\377\177" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 2 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_action_which_does_not_requere_existing_queue [FAIL] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_that_queue_can_be_created_despite_lack_of_throttling_budget >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v0] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v0-std] [FAIL] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v1-fifo] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_that_queue_can_be_created_despite_lack_of_throttling_budget [FAIL] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_throttling_on_nonexistent_queue >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-30] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v1-fifo] [FAIL] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v1-std] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_throttling_on_nonexistent_queue [FAIL] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v0-fifo] |77.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v1-std] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_throttling_on_nonexistent_queue [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/00272e/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk59/testing_out_stuff/test_throttling.py.TestSqsThrottlingOnNonexistentQueue.test_action_which_does_not_requere_existing_queue/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00272e/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk59/testing_out_stuff/test_throttling.py.TestSqsThrottlingOnNonexistentQueue.test_action_which_does_not_requere_existing_queue/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00272e/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk59/testing_out_stuff/test_throttling.py.TestSqsThrottlingOnNonexistentQueue.test_action_which_does_not_requere_existing_queue/cluster/node_1/logfile_1l1x4_c1.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 438882 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-fifo] [FAIL] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-std] >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v0] [FAIL] >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00272c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk34/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_tag_queue.tables_format_v0-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00272c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk34/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_tag_queue.tables_format_v0-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00272c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk34/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_tag_queue.tables_format_v0-std/cluster/node_1/logfile_tx6lw7e_.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 439199 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-std] >> 
test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-fifo] [FAIL] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-std] [FAIL] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-fifo] >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v1] [FAIL] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[queue] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-std] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-fifo] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-std] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-fifo] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-fifo] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00272a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesValidation.test_create_queue_with_custom_attributes.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00272a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesValidation.test_create_queue_with_custom_attributes.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00272a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesValidation.test_create_queue_with_custom_attributes.tables_format_v1-fifo/cluster/node_1/logfile_0_9iqn24.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 440116 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |77.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [FAIL] >> 
test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] |77.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002730/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk57/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_request_to_deleted_queue.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002730/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk57/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_request_to_deleted_queue.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002730/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk57/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_request_to_deleted_queue.tables_format_v0-fifo/cluster/node_1/logfile_gxsb6gxn.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002730/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk57/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_request_to_deleted_queue.tables_format_v0-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002730/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk57/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_request_to_deleted_queue.tables_format_v0-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002730/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk57/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_request_to_deleted_queue.tables_format_v0-fifo/cluster/slot_1/logfile_soy5vg86.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 438376 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 441241 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-30] [FAIL] >> 
test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-200] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v0-fifo] [FAIL] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v0-std] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-200] [FAIL] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-no] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-30] |77.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v0-std] [FAIL] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v1-fifo] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-30] [FAIL] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-fifo] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v1-fifo] [FAIL] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-fifo] [FAIL] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[queue] [FAIL] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[user] >> TCdcStreamWithRebootsTests::CreateDropRecreate[TabletReboots] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-std] [FAIL] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-fifo] |77.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v1] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v1-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002721/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk31/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_invalid_tag_queue.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002721/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk31/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_invalid_tag_queue.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/002721/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk31/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_invalid_tag_queue.tables_format_v0-fifo/cluster/node_1/logfile_1g7ujt_o.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 443181 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[user] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v0] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002723/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_garbage_collection.py.TestSqsGarbageCollection.test_cleanups_reads_table.tables_format_v0-30/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002723/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_garbage_collection.py.TestSqsGarbageCollection.test_cleanups_reads_table.tables_format_v0-30/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002723/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_garbage_collection.py.TestSqsGarbageCollection.test_cleanups_reads_table.tables_format_v0-30/cluster/node_1/logfile_u981wuuc.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 443005 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-fifo] [FAIL] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-std] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v0] [FAIL] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v1-fifo] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v0] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v1] >> 
test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-std] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v1] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-fifo] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-std] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v1] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v1] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[user] [FAIL] Test command err: ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002728/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_counters.py.TestSqsCountersExportDelay.test_export_delay.tables_format_v0/cluster/node_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002728/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_counters.py.TestSqsCountersExportDelay.test_export_delay.tables_format_v0/cluster/node_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002728/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_counters.py.TestSqsCountersExportDelay.test_export_delay.tables_format_v0/cluster/node_1/logfile_2nergtgo.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 440196 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002728/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_counters.py.TestSqsCountersFeatures.test_aggregates_transaction_counters.queue/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002728/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_counters.py.TestSqsCountersFeatures.test_aggregates_transaction_counters.queue/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/002728/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_counters.py.TestSqsCountersFeatures.test_aggregates_transaction_counters.queue/cluster/node_1/logfile_w1dl28kd.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 444231 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v1-fifo] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-fifo] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-fifo] [FAIL] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_delete_message] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002732/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk56/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_queues_count_over_limit.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002732/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk56/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_queues_count_over_limit.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002732/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk56/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_queues_count_over_limit.tables_format_v1/cluster/node_1/logfile_d30h8vkp.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002732/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk56/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_queues_count_over_limit.tables_format_v1/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002732/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk56/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_queues_count_over_limit.tables_format_v1/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/002732/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk56/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_queues_count_over_limit.tables_format_v1/cluster/slot_1/logfile_sffl1klg.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 438130 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 440802 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00271c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk45/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPathTestQueuesManagingWithPath.test_remove_queue_generates_event.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00271c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk45/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPathTestQueuesManagingWithPath.test_remove_queue_generates_event.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00271c/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk45/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPathTestQueuesManagingWithPath.test_remove_queue_generates_event.tables_format_v0/cluster/node_1/logfile_eai9pdx8.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 444657 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00271f/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesValidation.test_create_queue_with_default_attributes.tables_format_v0-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00271f/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesValidation.test_create_queue_with_default_attributes.tables_format_v0-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object 
allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00271f/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_queue_attributes_validation.py.TestQueueAttributesValidation.test_create_queue_with_default_attributes.tables_format_v0-std/cluster/node_1/logfile_jrabef7a.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 444034 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002725/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk53/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_queue_batch.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002725/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk53/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_queue_batch.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002725/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk53/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_queue_batch.tables_format_v0/cluster/node_1/logfile_khx4hz1a.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002725/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk53/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_queue_batch.tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002725/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk53/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_queue_batch.tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002725/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk53/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_queue_batch.tables_format_v0/cluster/slot_1/logfile_6zcdzgpz.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 442222 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 445242 is still 
>> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v0-fifo] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v0] >> TCdcStreamWithRebootsTests::WithPqTransactions[TabletReboots] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-std] |77.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [FAIL] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [FAIL] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v1-fifo] [FAIL] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v1-std] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateDropRecreate[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:08.739387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:08.739409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:08.739415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:08.739420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:08.739437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:08.739443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:08.739458Z node 1 :FLAT_TX_SCHEMESHARD
NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:08.739474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:08.739573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:08.739644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:08.754437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:08.754454Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:08.754555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:08.757063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:08.757086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:08.757114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:08.760058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:08.760139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:08.760270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.760452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:08.761136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:08.761173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:08.761405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:08.761417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:08.761452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:08.761461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:08.761468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:08.761487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:08.762847Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:08.784380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:08.784434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.784489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:08.784526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:08.784538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.785095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.785114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:08.785162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.785169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:08.785172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:08.785176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:08.785524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.785533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:08.785537Z node 
1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:08.785839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.785850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.785859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:08.785864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:08.786311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:08.786694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:08.786725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:08.786900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.786920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:08.786925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:08.786963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
rts is done, operation id: 1005:0 2025-05-29T15:34:02.340735Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:0 2025-05-29T15:34:02.340762Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-05-29T15:34:02.340767Z node 331 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:1 2025-05-29T15:34:02.340770Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:1 2025-05-29T15:34:02.340786Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:34:02.340790Z node 331 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 1005:2 2025-05-29T15:34:02.340793Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 1005:2 2025-05-29T15:34:02.340801Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-05-29T15:34:02.340806Z node 331 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:982: Publication still in progress, tx: 1005, publications: 2, subscribers: 0 2025-05-29T15:34:02.340810Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 6], 4 2025-05-29T15:34:02.340813Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 7], 2 2025-05-29T15:34:02.341066Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 274137603, Sender [331:207:2208], Recipient [331:126:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 6] Version: 4 } 2025-05-29T15:34:02.341078Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:34:02.341095Z node 331 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:34:02.341109Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:34:02.341113Z node 331 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:34:02.341118Z node 331 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4 2025-05-29T15:34:02.341125Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 6] was 3 2025-05-29T15:34:02.341141Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:34:02.341584Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 274137603, Sender [331:207:2208], Recipient [331:126:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 7] Version: 2 } 2025-05-29T15:34:02.341597Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4961: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-05-29T15:34:02.341610Z node 331 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:34:02.341625Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2025-05-29T15:34:02.341630Z node 331 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2025-05-29T15:34:02.341636Z node 331 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2025-05-29T15:34:02.341640Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-05-29T15:34:02.341657Z node 331 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2025-05-29T15:34:02.341663Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:165: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-05-29T15:34:02.342727Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:34:02.342802Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:34:02.342808Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-05-29T15:34:02.345883Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-05-29T15:34:02.345902Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:207: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestWaitNotification wait txId: 1005 2025-05-29T15:34:02.349390Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-05-29T15:34:02.349405Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-05-29T15:34:02.349476Z node 331 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [331:978:2851], Recipient [331:126:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:34:02.349487Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:34:02.349492Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046678944 2025-05-29T15:34:02.349527Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271124996, Sender [331:407:2379], Recipient [331:126:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1005 2025-05-29T15:34:02.349532Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4890: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-05-29T15:34:02.349548Z node 331 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-05-29T15:34:02.349585Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-05-29T15:34:02.349591Z node 331 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [331:976:2849] 2025-05-29T15:34:02.349620Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 269877764, Sender [331:978:2851], Recipient [331:126:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:34:02.349626Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4975: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-05-29T15:34:02.349630Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5801: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1005 2025-05-29T15:34:02.349702Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122945, Sender [331:979:2852], Recipient [331:126:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2025-05-29T15:34:02.349708Z node 331 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4889: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-05-29T15:34:02.349720Z node 331 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:34:02.349774Z node 331 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 52us result status StatusSuccess 2025-05-29T15:34:02.349869Z node 331 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409550 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue[tables_format_v1-fifo] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue[tables_format_v1-std] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-std] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v1-std] [FAIL] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v0-fifo] >> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v1] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::WithPqTransactions[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:03.327001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1,
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:03.327024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:03.327030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:03.327035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:03.327044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:03.327048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:03.327057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:03.327069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:03.327163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:03.327236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:03.345437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:03.345457Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:03.345530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:03.347601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:03.347623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:03.348113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:03.352650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:03.352701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:03.354809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.355721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:03.356193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:03.356239Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:03.358205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:03.358214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:03.358235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:03.358240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:03.358245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:03.358259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:03.359664Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:03.382664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:03.382753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.382806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:03.382859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:03.382871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.383495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.383524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:03.383579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 
1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.383589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:03.383595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:03.383600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:03.384032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.384045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:03.384051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:03.384420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.384431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:03.384437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:03.384444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:03.385154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:03.385552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:03.385590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:03.385785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:03.385809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:03.385817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:03.385887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... ublish, at schemeshard: 72057594046678944, txId: 281474976715657, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:34:05.403738Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-05-29T15:34:05.403751Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true 2025-05-29T15:34:05.403851Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6285: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 321 } } CommitVersion { Step: 5000005 TxId: 281474976715657 } 2025-05-29T15:34:05.403861Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409546, partId: 1 2025-05-29T15:34:05.403889Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 321 } } CommitVersion { Step: 5000005 TxId: 281474976715657 } 2025-05-29T15:34:05.403908Z node 179 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 321 } } CommitVersion { Step: 5000005 TxId: 281474976715657 } 2025-05-29T15:34:05.404018Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 332 RawX2: 768799148302 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-05-29T15:34:05.404027Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409546, partId: 1 2025-05-29T15:34:05.404045Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944, message: Source { RawX1: 332 RawX2: 768799148302 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-05-29T15:34:05.404055Z node 179 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:34:05.404064Z node 179 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 332 RawX2: 768799148302 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-05-29T15:34:05.404076Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:34:05.404081Z node 179 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:34:05.404086Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:34:05.404095Z node 179 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976715657:1 129 -> 240 2025-05-29T15:34:05.405923Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2025-05-29T15:34:05.405976Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2025-05-29T15:34:05.406146Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:34:05.406181Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:34:05.406284Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:34:05.406297Z node 179 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 281474976715657:1 ProgressState 2025-05-29T15:34:05.406320Z node 179 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715657:1 progress is 3/3 2025-05-29T15:34:05.406325Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:34:05.406331Z node 179 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715657:1 progress is 3/3 2025-05-29T15:34:05.406335Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:34:05.406340Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2025-05-29T15:34:05.406346Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:34:05.406351Z node 179 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all 
the parts is done, operation id: 281474976715657:0 2025-05-29T15:34:05.406357Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715657:0 2025-05-29T15:34:05.406373Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:34:05.406378Z node 179 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:1 2025-05-29T15:34:05.406382Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715657:1 2025-05-29T15:34:05.406401Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:34:05.406405Z node 179 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:2 2025-05-29T15:34:05.406408Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715657:2 2025-05-29T15:34:05.406413Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-05-29T15:34:05.407672Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:210: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-29T15:34:05.407692Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:256: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-29T15:34:05.407772Z node 179 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-29T15:34:05.407794Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-29T15:34:05.407800Z node 179 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [179:718:2634] TestWaitNotification: OK eventTxId 1003 2025-05-29T15:34:05.786170Z node 179 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:34:05.786268Z node 179 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 193us result status StatusSuccess 2025-05-29T15:34:05.786411Z node 179 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } 
ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 ScanProgress { ShardsTotal: 1 ShardsCompleted: 1 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue[tables_format_v1-std] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v0] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v0-fifo] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v0] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v1] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-no] [FAIL] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-empty] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] >> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v0] [FAIL] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_delete_message] [FAIL] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v1] [FAIL] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-empty] [FAIL] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-invalid] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v0-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter
name='/home/runner/.ya/build/build_root/ciyv/002718/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk33/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_list_queue_tags.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002718/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk33/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_list_queue_tags.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002718/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk33/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_list_queue_tags.tables_format_v1-fifo/cluster/node_1/logfile_xk0l6vl5.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 448360 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_change_visibility] [FAIL] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_delete_message] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002714/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk43/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_purge_queue.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002714/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk43/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_purge_queue.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002714/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk43/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_purge_queue.tables_format_v1-fifo/cluster/node_1/logfile_jw1lj0xi.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 448851 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-invalid] [FAIL] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-no]
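The "unclosed file" and "subprocess NNN is still running" warnings in these blocks share one root cause: the harness's Popen objects and their redirected stdout/stderr handles are left to the garbage collector instead of being released on teardown (subprocess.py:1129 is Popen.__del__ complaining about an unreaped child). Below is a minimal sketch of the cleanup pattern that silences both classes of warning, assuming a fixture that owns a cluster node process and its output files; the function names are illustrative, not the actual harness API, while Popen, wait, kill, and TimeoutExpired are the standard subprocess interfaces.

    import subprocess

    def start_node(binary, stdout_path, stderr_path):
        # Open the output files explicitly so teardown can close them;
        # Popen writes to these handles but never closes them itself.
        out = open(stdout_path, "wb")
        err = open(stderr_path, "wb")
        proc = subprocess.Popen([binary], stdout=out, stderr=err)
        return proc, out, err

    def stop_node(proc, out, err, timeout=30):
        proc.terminate()
        try:
            # Reaping the child is what prevents the "still running" warning.
            proc.wait(timeout=timeout)
        except subprocess.TimeoutExpired:
            proc.kill()
            proc.wait()
        # Closing the handles prevents the BufferedWriter/BufferedRandom warnings.
        out.close()
        err.close()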
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002711/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk44/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_purge_queue_batch.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002711/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk44/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_purge_queue_batch.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002711/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk44/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithPath.test_purge_queue_batch.tables_format_v1/cluster/node_1/logfile_8wufth12.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 448880 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_delete_message] [FAIL] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[fifo] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v0-fifo] [FAIL] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v0-std] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-no] [FAIL] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-std] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-fifo] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v0-std] [FAIL] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v1-fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-fifo] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-std] |77.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v1-fifo] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-no] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter
name='/home/runner/.ya/build/build_root/ciyv/00271a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithTenant.test_invalid_token.tables_format_v0-no/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00271a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithTenant.test_invalid_token.tables_format_v0-no/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00271a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithTenant.test_invalid_token.tables_format_v0-no/cluster/node_1/logfile_3nwmznwe.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00271a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithTenant.test_invalid_token.tables_format_v0-no/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00271a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithTenant.test_invalid_token.tables_format_v0-no/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00271a/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_acl.py.TestSqsWithForceAuthorizationWithTenant.test_invalid_token.tables_format_v0-no/cluster/slot_1/logfile_sivlwd90.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 447237 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 450255 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-std] [FAIL] >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v0] [FAIL] >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v1-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00270b/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_untag_queue.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: 
ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00270b/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_untag_queue.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00270b/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_queue_tags.py.TestQueueTags.test_untag_queue.tables_format_v0-fifo/cluster/node_1/logfile_aszcw8bu.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 450923 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002715/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk52/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_queue.tables_format_v0-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002715/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk52/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_queue.tables_format_v0-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002715/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk52/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_queue.tables_format_v0-std/cluster/node_1/logfile_2tqnrr8p.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002715/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk52/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_queue.tables_format_v0-std/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002715/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk52/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_queue.tables_format_v0-std/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/002715/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk52/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_delete_queue.tables_format_v0-std/cluster/slot_1/logfile_35evzquk.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 448455 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 452075 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-std] [FAIL] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-fifo] >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v1] [FAIL] >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[queue] |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test >> TCdcStreamWithRebootsTests::MergeTable[TabletReboots] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-fifo] [FAIL] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-std] >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[queue] [FAIL] >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[user] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v0] [FAIL] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v1] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-std] [FAIL] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[user] [FAIL] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[fifo] [FAIL] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v1] [FAIL] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-std] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v0] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_change_visibility] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[user] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002704/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_counters.py.TestSqsCountersFeatures.test_creates_counter.tables_format_v0/cluster/node_1/stdout'> 
ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002704/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_counters.py.TestSqsCountersFeatures.test_creates_counter.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002704/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_counters.py.TestSqsCountersFeatures.test_creates_counter.tables_format_v0/cluster/node_1/logfile_ge1ldls6.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 452766 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::MergeTable[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:24.398430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:24.398446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:24.398450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:24.398453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:24.398461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:24.398463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:24.398469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:24.398476Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:24.398535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:24.398596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:24.406962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:24.406975Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:24.407026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:24.408968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:24.408985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:24.409002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:24.411280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:24.411326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:24.411407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:24.411563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:24.412052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:24.412074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:24.412210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:24.412215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:24.412231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:24.412235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:24.412239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:24.412250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 
0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:24.413080Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:24.424958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:24.424999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:24.425032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:24.425059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:24.425065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:24.425490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:24.425504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:24.425534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:24.425539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:24.425542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:24.425545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:24.425909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:24.425920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:24.425924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:24.426224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:24.426231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:24.426234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:24.426238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:24.426660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:24.427056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:24.427082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:24.427198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:24.427216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:24.427220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:24.427251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
"Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:34:10.760801Z node 134 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:34:10.760895Z node 134 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 122us result status StatusSuccess 2025-05-29T15:34:10.761156Z node 134 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:34:10.761268Z node 134 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:34:10.761302Z node 134 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 36us result status StatusSuccess 2025-05-29T15:34:10.761422Z node 134 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 2 
PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 2 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "\001\000\004\000\000\000\377\377\377\177" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "\001\000\004\000\000\000\377\377\377\177" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 2 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v0] [FAIL] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v0] [FAIL] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[fifo] [FAIL] Test command err: ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00270f/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_garbage_collection.py.TestSqsGarbageCollection.test_visibility_change_cleanups_proper_receive_attempt_id.tables_format_v0-with_delete_message/cluster/node_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00270f/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_garbage_collection.py.TestSqsGarbageCollection.test_visibility_change_cleanups_proper_receive_attempt_id.tables_format_v0-with_delete_message/cluster/node_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00270f/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_garbage_collection.py.TestSqsGarbageCollection.test_visibility_change_cleanups_proper_receive_attempt_id.tables_format_v0-with_delete_message/cluster/node_1/logfile_jtnb1prz.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback 
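
The stderr dumps above repeat two Python diagnostics. "ResourceWarning: unclosed file <_io.BufferedWriter ...>" is raised when a buffered file object (here, the per-node stdout/stderr/logfile handles) is garbage-collected without ever being closed, and the accompanying hint "Enable tracemalloc to get the object allocation traceback" refers to Python's tracemalloc module: when it is active, CPython annotates the warning with the stack that allocated the leaked object, which is usually enough to find the open() call that is missing a close() or a with-block. A minimal, self-contained sketch of both mechanisms follows; it is illustrative only, not taken from the YDB test harness, and the path and function names are made up.

    import gc
    import tracemalloc
    import warnings

    tracemalloc.start(25)   # keep up to 25 frames per allocation;
                            # the env var PYTHONTRACEMALLOC=25 has the same effect
    warnings.simplefilter("always", ResourceWarning)  # ResourceWarning is ignored by default

    def leak_handle(path):
        f = open(path, "wb")        # an _io.BufferedWriter, like the node handles above
        f.write(b"log line\n")
        # no f.close(); using "with open(path, 'wb') as f:" would avoid the warning

    leak_handle("/tmp/example.log")  # hypothetical path
    gc.collect()                     # on CPython the warning already fires when f is
                                     # finalized at function return; collect() is
                                     # belt-and-braces for non-refcounting interpreters

With tracemalloc started, the one-line warning gains an "Object allocated at (most recent call last)" traceback pointing at the open() inside leak_handle.
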
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 450000 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00270f/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithPath.test_create_queue.fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00270f/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithPath.test_create_queue.fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00270f/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_multiplexing_tables_format.py.TestMultiplexingTablesFormatWithPath.test_create_queue.fifo/cluster/node_1/logfile_rma8j48a.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 454126 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_change_visibility] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002702/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_garbage_collection.py.TestSqsGarbageCollection.test_removes_messages_by_retention_time.tables_format_v0-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002702/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_garbage_collection.py.TestSqsGarbageCollection.test_removes_messages_by_retention_time.tables_format_v0-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002702/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_garbage_collection.py.TestSqsGarbageCollection.test_removes_messages_by_retention_time.tables_format_v0-std/cluster/node_1/logfile_jma6tz5s.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 452803 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v1] [FAIL] |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/olap/scenario/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v1] [FAIL] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002709/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_account_actions.py.TestAccountActionsWithTenant.test_manage_account.with_queues-tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002709/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_account_actions.py.TestAccountActionsWithTenant.test_manage_account.with_queues-tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002709/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_account_actions.py.TestAccountActionsWithTenant.test_manage_account.with_queues-tables_format_v0/cluster/node_1/logfile_zahciegy.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002709/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_account_actions.py.TestAccountActionsWithTenant.test_manage_account.with_queues-tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002709/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_account_actions.py.TestAccountActionsWithTenant.test_manage_account.with_queues-tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002709/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_account_actions.py.TestAccountActionsWithTenant.test_manage_account.with_queues-tables_format_v0/cluster/slot_1/logfile_kr780rnq.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 451282 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 453960 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_simple.py::TestSimple::test[alter_table] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> 
test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002706/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk58/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_request_to_deleted_queue.tables_format_v1-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002706/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk58/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_request_to_deleted_queue.tables_format_v1-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002706/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk58/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_request_to_deleted_queue.tables_format_v1-std/cluster/node_1/logfile_9w9tv38q.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002706/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk58/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_request_to_deleted_queue.tables_format_v1-std/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002706/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk58/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_request_to_deleted_queue.tables_format_v1-std/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002706/ydb/tests/functional/sqs/common/test-results/py3test/testing_out_stuff/chunk58/testing_out_stuff/test_queues_managing.py.TestQueuesManagingWithTenant.test_request_to_deleted_queue.tables_format_v1-std/cluster/slot_1/logfile_1fr39g9z.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 451733 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 454127 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_dml.py::TestDML::test_dml[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> test_dml.py::TestDML::test_dml[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] >> test_dml.py::TestDML::test_dml[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] >> test_dml.py::TestDML::test_dml[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> 
test_dml.py::TestDML::test_dml[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Date-pk_types18-all_types18-index18-Date--] |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |78.0%| [TA] $(B)/ydb/tests/functional/sqs/common/test-results/py3test/{meta.json ... results_accumulator.log} |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [FAIL] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can >> test_simple.py::TestSimple::test[alter_table] [GOOD] >> test_dml.py::TestDML::test_dml[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [FAIL] >> test_simple.py::TestSimple::test[alter_tablestore] |78.1%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/common/test-results/py3test/{meta.json ... results_accumulator.log} |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [FAIL] >> test_dml.py::TestDML::test_dml[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [FAIL] >> test_dml.py::TestDML::test_dml[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test_simple.py::TestSimple::test[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test[table] >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] >> test_dml.py::TestDML::test_dml[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [FAIL] >> test_dml.py::TestDML::test_dml[table_ttl_Date-pk_types18-all_types18-index18-Date--] [FAIL] >> test_dml.py::TestDML::test_dml[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> test_dml.py::TestDML::test_dml[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [FAIL] |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD] >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] >> test_simple.py::TestSimple::test[table] [GOOD] >> test_simple.py::TestSimple::test[tablestores] |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_simple.py::TestSimple::test[tablestores] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_table] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test_multi[table] [GOOD] >> test_simple.py::TestSimple::test_multi[tablestores] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE 
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:30:19.247210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:19.247232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:19.247237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:19.247241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:19.247253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:19.247255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:19.247262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:19.247273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:19.247349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:19.247409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:19.258362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:19.258385Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:19.258466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:19.261280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:19.261307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: 
TTxUpgradeSchema.Execute 2025-05-29T15:30:19.261338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:19.263643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:19.263702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:19.263796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:19.264048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:19.264922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:19.264981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:19.265279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:19.265290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:19.265326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:19.265335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:19.265341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:19.265360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:19.266953Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:19.286772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:19.286862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.286933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 0 2025-05-29T15:30:19.286987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:19.286999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.287840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:19.287863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:19.287904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.287912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:19.287916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:19.287919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:19.288270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.288279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:19.288282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:19.288562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.288570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:19.288573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:19.288578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:19.289054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:19.289397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:19.289428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation 
RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:19.289615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:19.289632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:19.289638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:19.289693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... iorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 
UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:34:20.738899Z node 164 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:34:20.738979Z node 164 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 90us result status StatusSuccess 2025-05-29T15:34:20.739192Z node 164 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 
NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } 
DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:34:20.749600Z node 164 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409551:2][72075186233409546][164:1173:2953] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-05-29T15:34:20.749655Z node 164 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409551:2][164:1143:2953] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-05-29T15:34:20.749706Z node 164 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409551:2][72075186233409546][164:1173:2953] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1748532860725466 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1748532860725466 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1748532860725466 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:34:20.754786Z node 164 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409551:2][72075186233409546][164:1173:2953] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-05-29T15:34:20.754833Z node 164 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409551:2][164:1143:2953] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 }
>> test_dml.py::TestDML::test_dml[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [FAIL]
|78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test
>> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is
[0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:30:32.530538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:32.530568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:32.530574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:32.530580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:32.530596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:32.530601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:32.530612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:32.530644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:32.530768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:32.530856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:32.548979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:32.548998Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:32.549086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:32.551881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:32.551914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:32.551944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:32.555465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2025-05-29T15:30:32.555546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:32.555679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:32.555851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:32.556642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:32.556704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:32.557010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:32.557022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:32.557059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:32.557082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:32.557089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:32.557109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:32.558837Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:32.580109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:32.580191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:32.580254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:32.580303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:32.580316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:32.580954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:32.581003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:32.581048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:32.581059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:32.581065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:32.581072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:32.581569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:32.581582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:32.581588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:32.581928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:32.581941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:32.581948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:32.581956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:32.582678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:32.583125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:32.583165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send 
Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:32.583352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:32.583380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:32.583387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:32.583461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... eToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { 
SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:34:22.626557Z node 180 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:34:22.626657Z node 180 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 118us result status StatusSuccess 2025-05-29T15:34:22.627053Z node 180 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:34:22.637504Z node 180 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409548:2][72075186233409550][180:1011:2795] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-05-29T15:34:22.637544Z node 180 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409548:2][72075186233409551][180:1012:2795] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-05-29T15:34:22.637560Z node 180 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][180:954:2795] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-05-29T15:34:22.637579Z node 180 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][180:954:2795] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-05-29T15:34:22.637628Z node 180 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409548:2][72075186233409550][180:1011:2795] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1748532862622920 Step: 
5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1748532862622920 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:34:22.637796Z node 180 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409548:2][72075186233409551][180:1012:2795] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 1748532862622920 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:34:22.639060Z node 180 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409548:2][72075186233409550][180:1011:2795] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2025-05-29T15:34:22.639094Z node 180 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][180:954:2795] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-05-29T15:34:22.639750Z node 180 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409548:2][72075186233409551][180:1012:2795] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-05-29T15:34:22.640022Z node 180 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][180:954:2795] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 }
|78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test
|78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
>> test_dml.py::TestDML::test_dml[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [FAIL]
>> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown
|78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
>> test_dml.py::TestDML::test_dml[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [FAIL]
|78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test
>> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [FAIL]
|78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test
>> test_dml.py::TestDML::test_dml[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC]
>> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can
|78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
>> test_dml.py::TestDML::test_dml[table_ttl_Date-pk_types18-all_types18-index18-Date--] [FAIL]
|78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
>> test_dml.py::TestDML::test_dml[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [FAIL]
>> test_dml.py::TestDML::test_dml[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [FAIL]
|78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
|78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test
>> TCdcStreamWithRebootsTests::WithoutPqTransactions[TabletReboots] [GOOD]
>> test_dml.py::TestDML::test_dml[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC]
>> test_dml.py::TestDML::test_dml[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [FAIL]
>> test_dml.py::TestDML::test_dml[table_all_types-pk_types12-all_types12-index12---] [FAIL]
|78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test
>> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [FAIL]
|78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test
|78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
>> test_dml.py::TestDML::test_dml[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [FAIL]
>> test_dml.py::TestDML::test_dml[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC]
>> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown
|78.2%| [TA] $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log}
|78.2%| [TA] {RESULT} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::WithoutPqTransactions[TabletReboots] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:17.364701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:17.364717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:17.364721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:17.364725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:17.364732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:17.364735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:17.364740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:17.364749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:17.364812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:17.364868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:17.373791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:17.373806Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:17.373871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns#
, AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:17.375606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:17.375626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:17.375642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:17.377967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:17.378034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:17.378135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:17.378294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:17.378956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:17.378990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:17.379189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:17.379200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:17.379225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:17.379231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:17.379238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:17.379255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:17.380183Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:17.392780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:17.392827Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:17.392865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:17.392893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:17.392899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:17.393293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:17.393309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:17.393341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:17.393346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:17.393349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:17.393352Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:17.393586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:17.393592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:17.393595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:17.393798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:17.393803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:17.393806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:17.393810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:17.394197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-05-29T15:32:17.394471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:17.394491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:17.394601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:17.394616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:17.394620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:17.394651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 86233409546 CpuTimeUsec: 334 } } CommitVersion { Step: 5000005 TxId: 281474976715657 } 2025-05-29T15:34:30.442428Z node 187 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 334 } } CommitVersion { Step: 5000005 TxId: 281474976715657 } 2025-05-29T15:34:30.442493Z node 187 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2025-05-29T15:34:30.442505Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2025-05-29T15:34:30.442510Z node 187 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976715657 2025-05-29T15:34:30.442516Z node 187 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976715657, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2025-05-29T15:34:30.442523Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-29T15:34:30.442669Z node 187 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2025-05-29T15:34:30.442688Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2025-05-29T15:34:30.442693Z node 187 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976715657 2025-05-29T15:34:30.442698Z node 187 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976715657, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-05-29T15:34:30.442702Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-05-29T15:34:30.442715Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true 2025-05-29T15:34:30.442776Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5512: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 803158886665 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-05-29T15:34:30.442786Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1764: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409546, partId: 1 2025-05-29T15:34:30.442803Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 803158886665 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-05-29T15:34:30.442809Z node 187 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1005: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-05-29T15:34:30.442818Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1009: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 803158886665 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-05-29T15:34:30.442830Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:655: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-29T15:34:30.442834Z node 187 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:674: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:34:30.442839Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:686: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:34:30.442846Z node 187 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2495: Change state for txid 281474976715657:1 129 -> 240 2025-05-29T15:34:30.450748Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:34:30.451131Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2025-05-29T15:34:30.451176Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2025-05-29T15:34:30.451199Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:34:30.451373Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2025-05-29T15:34:30.451387Z node 187 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 281474976715657:1 ProgressState 2025-05-29T15:34:30.451412Z node 187 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715657:1 progress is 3/3 2025-05-29T15:34:30.451418Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:34:30.451425Z node 187 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976715657:1 progress is 3/3 2025-05-29T15:34:30.451428Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:34:30.451434Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2025-05-29T15:34:30.451442Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-05-29T15:34:30.451449Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:0 2025-05-29T15:34:30.451455Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715657:0 2025-05-29T15:34:30.451474Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:34:30.451479Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:1 2025-05-29T15:34:30.451483Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976715657:1 2025-05-29T15:34:30.451504Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:34:30.451509Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976715657:2 2025-05-29T15:34:30.451513Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: 
RemoveTx for txid 281474976715657:2 2025-05-29T15:34:30.451518Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestWaitNotification: OK eventTxId 1003 2025-05-29T15:34:30.794565Z node 187 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:34:30.794695Z node 187 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 175us result status StatusSuccess 2025-05-29T15:34:30.794879Z node 187 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 ScanProgress { ShardsTotal: 1 ShardsCompleted: 1 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [FAIL] >> test_dml.py::TestDML::test_dml[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [FAIL] >> test_dml.py::TestDML::test_dml[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown >> test_dml.py::TestDML::test_dml[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can >> 
test_dml.py::TestDML::test_dml[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [FAIL]
>> test_dml.py::TestDML::test_dml[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC]
|78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test
|78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test
|78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
>> test_dml.py::TestDML::test_dml[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [FAIL]
>> test_dml.py::TestDML::test_dml[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC]
>> test_dml.py::TestDML::test_dml[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [FAIL]
>> test_dml.py::TestDML::test_dml[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [FAIL]
|78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
>> test_dml.py::TestDML::test_dml[table_all_types-pk_types12-all_types12-index12---] [FAIL]
|78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
>> test_dml.py::TestDML::test_dml[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [FAIL]
>> test_dml.py::TestDML::test_dml[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [FAIL]
>> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown
|78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test
>> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD]
>> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] [GOOD]
>> test_dml.py::TestDML::test_dml[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [FAIL]
>> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true]
|78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test
>> test_dml.py::TestDML::test_dml[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--]
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
>> test_dml.py::TestDML::test_dml[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [FAIL]
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
>> test_dml.py::TestDML::test_dml[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [FAIL]
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test
>> test_dml.py::TestDML::test_dml[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [FAIL]
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
>> test_dml.py::TestDML::test_dml[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [FAIL]
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test
>> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes
>> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--]
>> test_dml.py::TestDML::test_dml[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [FAIL]
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test
>> test_simple.py::TestSimple::test_multi[tablestores] [GOOD]
>> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] [FAIL]
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
>> test_dml.py::TestDML::test_dml[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [FAIL]
>> test_ttl.py::TestTTLOnIndexedTable::test_case
>> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false]
>> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC]
>> test_ttl.py::TestTTLDefaultEnv::test_case
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
>> test_dml.py::TestDML::test_dml[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [FAIL]
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
>> test_dml.py::TestDML::test_dml[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [FAIL]
>> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [FAIL]
>> test_ttl.py::TestTTLAlterSettings::test_case
>> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [FAIL]
>> test_ttl.py::TestTTLOnIndexedTable::test_case [FAIL]
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
>> test_dml.py::TestDML::test_dml[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [FAIL]
>> test_ttl.py::TestTTLDefaultEnv::test_case [FAIL]
>> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test
>> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [FAIL]
|78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test
>> test_ttl.py::TestTTLAlterSettings::test_case [GOOD]
>> Backup::ProposeBackup
|78.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest
|78.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test
>> test_dml.py::TestDML::test_dml[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [FAIL]
>> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC]
>> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL]
Test command err: contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead.
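The DeprecationWarning quoted above (and repeated below together with its source line) comes from tornado's coroutine runner re-raising a caught exception into a generator via the legacy three-argument throw() signature. A minimal sketch of the deprecated call and its single-argument replacement, using hypothetical helper names rather than tornado's actual code:

    import sys

    def rethrow_legacy(gen):
        # Deprecated: the three-argument form gen.throw(type, value, traceback),
        # which is what tornado's `self.gen.throw(*exc_info)` expands to.
        return gen.throw(*sys.exc_info())

    def rethrow_current(gen):
        # Preferred single-argument form: pass the exception instance itself;
        # its type and attached traceback are derived from it.
        _, exc, _ = sys.exc_info()
        return gen.throw(exc)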
contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead.
yielded = self.gen.throw(*exc_info)
|78.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test
>> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [FAIL]
>> Backup::ProposeBackup [GOOD]
>> EvWrite::AbortInTransaction
>> TCdcStreamWithRebootsTests::SplitTableResolvedTimestamps[TabletReboots] [GOOD]
|78.4%| [TA] $(B)/ydb/tests/datashard/dml/test-results/py3test/{meta.json ... results_accumulator.log}
|78.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest
|78.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest
>> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [FAIL]
|78.4%| [TA] {RESULT} $(B)/ydb/tests/datashard/dml/test-results/py3test/{meta.json ... results_accumulator.log}
>> EvWrite::AbortInTransaction [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::SplitTableResolvedTimestamps[TabletReboots] [GOOD]
Test command err: =========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142]
2025-05-29T15:32:08.683858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-29T15:32:08.683874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-29T15:32:08.683878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-05-29T15:32:08.683881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration
2025-05-29T15:32:08.683892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-29T15:32:08.683896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-29T15:32:08.683902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s,
InflightLimit# 10 2025-05-29T15:32:08.683911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:08.683975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:08.684028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:08.693257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:08.693274Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:08.693332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:08.695406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:08.695426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:08.695442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:08.697490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:08.697547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:08.697629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.697815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:08.698454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:08.698480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:08.698687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:08.698697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:08.698722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:08.698728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:08.698734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:08.698768Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:08.699968Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:08.712832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:08.712881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.712917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:08.712955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:08.712963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.713704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.713724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:08.713763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.713772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:08.713776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:08.713779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:08.714179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.714194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:08.714200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:08.714526Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.714534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:08.714537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:08.714546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:08.715103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:08.715462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:08.715485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:08.715622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:08.715639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:08.715644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:08.715681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
onPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:34:54.258467Z node 159 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:34:54.258525Z node 159 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 79us result status StatusSuccess 2025-05-29T15:34:54.258753Z node 159 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:34:54.258845Z node 159 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:34:54.258873Z node 159 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 31us result status StatusSuccess 2025-05-29T15:34:54.258953Z node 159 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 
2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 2 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::AbortInTransaction [GOOD] Test command err: 2025-05-29T15:34:52.526452Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-29T15:34:52.530664Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-29T15:34:52.530796Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-05-29T15:34:52.531695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:34:52.531789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:34:52.531840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:34:52.531873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:34:52.531898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:34:52.531926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:34:52.531945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:34:52.531966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:34:52.531985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:34:52.532004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:34:52.532026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:34:52.532051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;self_id=[1:138:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:34:52.539477Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-05-29T15:34:52.539535Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-05-29T15:34:52.539548Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-05-29T15:34:52.539583Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:34:52.539634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:34:52.539649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:34:52.539656Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-05-29T15:34:52.539666Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-05-29T15:34:52.539677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:34:52.539685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:34:52.539690Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-05-29T15:34:52.539709Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-05-29T15:34:52.539718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:34:52.539725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:34:52.539730Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-05-29T15:34:52.539741Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-05-29T15:34:52.539748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:34:52.539756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:34:52.539761Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-05-29T15:34:52.539774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:34:52.539782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:34:52.539787Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-05-29T15:34:52.539796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:34:52.556872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:34:52.556910Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-05-29T15:34:52.556975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:34:52.556985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:34:52.556991Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-05-29T15:34:52.557026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:34:52.557034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:34:52.557039Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-05-29T15:34:52.557053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:34:52.557061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-05-29T15:34:52.557066Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-05-29T15:34:52.557075Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-05-29T15:34:52.557085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-05-29T15:34:52.557093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-05-29T15:34:52.557098Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-05-29T15:34:52.557189Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=13; 2025-05-29T15:34:52.557200Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-05-29T15:34:52.557211Z node 1 :TX_COLUMNSHARD INFO: log.cpp:784: tablet_id=9437184;process=TTxUpdateSchema::Execute ... 
d=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:75;memory_size=1596;data_size=1572;sum=2256;count=8;size_of_meta=144; 2025-05-29T15:34:55.416629Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[2:108:2139];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=1668;data_size=1644;sum=2544;count=4;size_of_portion=216; 2025-05-29T15:34:55.417162Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 44 2025-05-29T15:34:55.417261Z node 2 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:784: tablet_id=9437184;self_id=[2:108:2139];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:148;event=add_by_insert_id;id=1;operation_id=1; 2025-05-29T15:34:55.428869Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 44 2025-05-29T15:34:55.429143Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=9437184;self_id=[2:108:2139];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=222;problem=finished; 2025-05-29T15:34:55.429168Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:784: tablet_id=9437184;self_id=[2:108:2139];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=222;problem=finished; 2025-05-29T15:34:55.429225Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1748532895636 at tablet 9437184, mediator 0 2025-05-29T15:34:55.429236Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[5] execute at tablet 9437184 2025-05-29T15:34:55.429245Z node 2 :TX_COLUMNSHARD ERROR: ctor_logger.h:56: TxPlanStep[5] Ignore old txIds [112] for step 1748532895636 last planned step 1748532895636 at tablet 9437184 2025-05-29T15:34:55.429256Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[5] complete at tablet 9437184 2025-05-29T15:34:55.429351Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1748532895636:max} readable: {1748532895636:max} at tablet 9437184 2025-05-29T15:34:55.429376Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-05-29T15:34:55.429966Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[2:108:2139];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1748532895636:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-05-29T15:34:55.429992Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[2:108:2139];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1748532895636:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-05-29T15:34:55.430244Z node 2 
:TX_COLUMNSHARD DEBUG: log.cpp:784: tablet_id=9437184;self_id=[2:108:2139];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1748532895636:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[{"from":6}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"0","p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"1,2","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"2","p":{"address":{"name":"field","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,2","t":"Projection"},"w":18,"id":0}}}; 2025-05-29T15:34:55.430275Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: tablet_id=9437184;self_id=[2:108:2139];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1748532895636:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:141;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-05-29T15:34:55.430440Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: tablet_id=9437184;self_id=[2:108:2139];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1748532895636:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:169;event=TTxScan started;actor_id=[2:180:2193];trace_detailed=; 2025-05-29T15:34:55.430581Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: fline=context.cpp:84;ff_first=(column_ids=1,2;column_names=field,key;);; 2025-05-29T15:34:55.430625Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; 2025-05-29T15:34:55.430695Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:180:2193];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:105;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-05-29T15:34:55.430713Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:180:2193];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-05-29T15:34:55.430725Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:180:2193];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:193;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-05-29T15:34:55.514978Z node 2 
:TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:410: Scan [2:180:2193] finished for tablet 9437184 2025-05-29T15:34:55.515131Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[2:180:2193];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[2:179:2192];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.084}],"full":{"a":1748532895430418,"name":"_full_task","f":1748532895430418,"d_finished":0,"c":0,"l":1748532895515038,"d":84620},"events":[{"name":"bootstrap","f":1748532895430483,"d_finished":174,"c":1,"l":1748532895430657,"d":174},{"a":1748532895430687,"name":"ack","f":1748532895430687,"d_finished":0,"c":0,"l":1748532895515038,"d":84351},{"a":1748532895430683,"name":"processing","f":1748532895430683,"d_finished":0,"c":0,"l":1748532895515038,"d":84355},{"name":"ProduceResults","f":1748532895430652,"d_finished":84248,"c":2,"l":1748532895514945,"d":84248},{"a":1748532895514954,"name":"Finish","f":1748532895514954,"d_finished":0,"c":0,"l":1748532895515038,"d":84}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-05-29T15:34:55.515154Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:180:2193];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[2:179:2192];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-05-29T15:34:55.515197Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[2:180:2193];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[2:179:2192];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.084}],"full":{"a":1748532895430418,"name":"_full_task","f":1748532895430418,"d_finished":0,"c":0,"l":1748532895515163,"d":84745},"events":[{"name":"bootstrap","f":1748532895430483,"d_finished":174,"c":1,"l":1748532895430657,"d":174},{"a":1748532895430687,"name":"ack","f":1748532895430687,"d_finished":0,"c":0,"l":1748532895515163,"d":84476},{"a":1748532895430683,"name":"processing","f":1748532895430683,"d_finished":0,"c":0,"l":1748532895515163,"d":84480},{"name":"ProduceResults","f":1748532895430652,"d_finished":84248,"c":2,"l":1748532895514945,"d":84248},{"a":1748532895514954,"name":"Finish","f":1748532895514954,"d_finished":0,"c":0,"l":1748532895515163,"d":209}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-05-29T15:34:55.515221Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: 
SelfId=[2:180:2193];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-05-29T15:34:55.430267Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-05-29T15:34:55.515232Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:784: SelfId=[2:180:2193];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:190;event=scan_aborted;reason=unexpected on destructor; 2025-05-29T15:34:55.515247Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:784: SelfId=[2:180:2193];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; |78.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] |78.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [FAIL] |78.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_all_types-pk_types12-all_types12-index12---] |78.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> 
test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] |78.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [FAIL] |78.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD] |78.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Date-pk_types18-all_types18-index18-Date--] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_all_types-pk_types12-all_types12-index12---] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [FAIL] |78.4%| [TA] $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |78.4%| [TA] {RESULT} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [FAIL] |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [FAIL] |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [FAIL] |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Date-pk_types18-all_types18-index18-Date--] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [FAIL] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [FAIL] >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [FAIL] >> test_public_api.py::TestExplain::test_explain_data_query |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [FAIL] >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session |78.5%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD] |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [FAIL] |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_all_types-pk_types12-all_types12-index12---] [FAIL] |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [FAIL] |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [FAIL] >> test_public_api.py::TestExplain::test_explain_data_query [GOOD] |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [FAIL] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [FAIL] >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 [FAIL] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a [FAIL] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [FAIL] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b [FAIL] >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c [FAIL] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] |78.5%| 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [FAIL] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example [FAIL] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Date-pk_types18-all_types18-index18-Date--] [FAIL] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv [FAIL] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp [FAIL] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates [FAIL] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [FAIL] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 [FAIL] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [FAIL] >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 [FAIL] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success [FAIL] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key >> 
test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success [FAIL] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 [FAIL] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [FAIL] >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_6_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 6] |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [FAIL] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 >> TCdcStreamWithRebootsTests::RacySplitTableAndCreateStream[TabletReboots] [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case [FAIL] >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single [FAIL] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps >> alter_compression.py::TestAllCompression::test_all_supported_compression[lz4_compression-COMPRESSION = "lz4"] >> TSchemeShardSplitBySizeTest::AutoMergeInOne [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_compression-COMPRESSION = "zstd"] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps [FAIL] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case [FAIL] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate [FAIL] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success [FAIL] >> test_insert.py::TestInsertOperations::test_insert_revert_basis >> 
test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item [FAIL] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 >> test_insert.py::TestInsertOperations::test_insert_revert_basis [FAIL] >> test_insert.py::TestInsertOperations::test_query_pairs >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 [FAIL] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges [FAIL] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_12_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 12] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::AutoMergeInOne [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:29:43.152216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:29:43.152243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:43.152249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:29:43.152255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:29:43.152270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:29:43.152275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:29:43.152284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:29:43.152297Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:29:43.152411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:29:43.152485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:29:43.167389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:29:43.167418Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:29:43.167544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:29:43.171505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:29:43.171545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:29:43.171605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:29:43.174919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:29:43.175026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:29:43.175172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:43.175343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:29:43.177447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:43.177530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:29:43.177922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:29:43.177944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:29:43.177992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:29:43.178005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:29:43.178013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:29:43.178043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at 
schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:29:43.180626Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:29:43.201961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:29:43.202063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:43.202144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:29:43.202210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:29:43.202223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:43.203143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:43.203176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:29:43.203230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:43.203243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:29:43.203248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:29:43.203254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:29:43.203677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:43.203690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:29:43.203696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:29:43.204048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:43.204060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:29:43.204066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:43.204085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:29:43.204693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:29:43.205058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:29:43.205105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:29:43.205307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:29:43.205332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:29:43.205340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:29:43.205403Z node 1 :FLAT_TX_SCHEMESHARD INFO: sch ... 
76710657 2025-05-29T15:35:17.199033Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976710657:0, at schemeshard: 72057594046678944, message: OperationCookie: 281474976710657 TabletId: 72075186233409546 2025-05-29T15:35:17.199054Z node 135 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 281474976710657:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-29T15:35:17.199150Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:619: TTxOperationReply execute, operationId: 281474976710657:0, at schemeshard: 72057594046678944, message: OperationCookie: 281474976710657 TabletId: 72075186233409547 2025-05-29T15:35:17.199156Z node 135 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:386: TSplitMerge TNotifySrc, operationId: 281474976710657:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-29T15:35:17.199175Z node 135 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710657:0 progress is 1/1 2025-05-29T15:35:17.199181Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-05-29T15:35:17.199186Z node 135 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710657:0 progress is 1/1 2025-05-29T15:35:17.199190Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-05-29T15:35:17.199195Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: true 2025-05-29T15:35:17.199202Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-05-29T15:35:17.199208Z node 135 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710657:0 2025-05-29T15:35:17.199219Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710657:0 2025-05-29T15:35:17.199257Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-05-29T15:35:17.201168Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976710657:0, at schemeshard: 72057594046678944 2025-05-29T15:35:17.201251Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:647: TTxOperationReply complete, operationId: 281474976710657:0, at schemeshard: 72057594046678944 2025-05-29T15:35:17.201259Z node 135 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:260: Unable to activate 281474976710657:0 2025-05-29T15:35:17.201415Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 347 RawX2: 579820587288 } TabletId: 72075186233409547 State: 4 2025-05-29T15:35:17.201433Z node 135 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state 
changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:35:17.201505Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5549: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 346 RawX2: 579820587287 } TabletId: 72075186233409546 State: 4 2025-05-29T15:35:17.201513Z node 135 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-05-29T15:35:17.202618Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-05-29T15:35:17.210604Z node 135 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-05-29T15:35:17.210729Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-29T15:35:17.210849Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-05-29T15:35:17.211627Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:46: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 Forgetting tablet 72075186233409547 2025-05-29T15:35:17.211771Z node 135 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-05-29T15:35:17.212496Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-05-29T15:35:17.212570Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-29T15:35:17.217632Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:2 2025-05-29T15:35:17.217663Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-29T15:35:17.217701Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:174: Deleted shardIdx 72057594046678944:1 2025-05-29T15:35:17.217709Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:180: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 Deleted tabletId 72075186233409546 2025-05-29T15:35:17.217861Z node 135 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:35:17.217937Z node 135 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 86us result 
status StatusSuccess 2025-05-29T15:35:17.218152Z node 135 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 
0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 4 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_insert.py::TestInsertOperations::test_query_pairs [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::RacySplitTableAndCreateStream[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:10.484357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:10.484374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:10.484378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:10.484381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:10.484389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:10.484392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:10.484397Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:10.484406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:10.484467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:10.484536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:10.493554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:10.493568Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:10.493621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:10.495355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:10.495373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:10.495389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:10.497308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:10.497356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:10.497435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:10.497577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:32:10.498186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:10.498220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:10.498439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:10.498451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:10.498473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:10.498481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:10.498487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:10.498506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:10.499645Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:10.519779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:10.519838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.519880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:10.519919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:10.519930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.520469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:10.520491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:10.520531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.520540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:10.520545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:10.520551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:10.520930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.520942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:10.520947Z node 
1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:10.521274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.521285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:10.521290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:10.521296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:10.521926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:10.522288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:10.522319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:10.522496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:10.522519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:10.522526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:10.522574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 
ribers: 1 2025-05-29T15:35:16.490642Z node 230 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 4 2025-05-29T15:35:16.490646Z node 230 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:989: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-05-29T15:35:16.490926Z node 230 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:35:16.490949Z node 230 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:35:16.490955Z node 230 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:35:16.490961Z node 230 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-05-29T15:35:16.490967Z node 230 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-29T15:35:16.491258Z node 230 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:5834: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:35:16.491276Z node 230 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-29T15:35:16.491281Z node 230 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-05-29T15:35:16.491286Z node 230 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-05-29T15:35:16.491291Z node 230 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-05-29T15:35:16.491306Z node 230 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 1 2025-05-29T15:35:16.491312Z node 230 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [230:412:2384] 2025-05-29T15:35:16.493875Z node 230 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:35:16.499253Z node 230 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 1004 2025-05-29T15:35:16.499422Z node 230 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-29T15:35:16.499435Z node 230 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [230:418:2390] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 2025-05-29T15:35:16.515940Z node 230 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:35:16.516138Z node 230 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 204us result status StatusSuccess 2025-05-29T15:35:16.516543Z node 230 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\002\000\000\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:35:16.516687Z node 230 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:35:16.516731Z node 230 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 46us result status StatusSuccess 2025-05-29T15:35:16.516825Z node 230 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1004 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409550 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409549 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409550 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_4_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 4] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [FAIL] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] [FAIL] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range [FAIL] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns |78.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [FAIL] >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_8_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 8] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns [FAIL] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot [FAIL] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_18_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 18] >> alter_compression.py::TestAllCompression::test_all_supported_compression[lz4_compression-COMPRESSION = "lz4"] [FAIL] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_10_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 10] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_compression-COMPRESSION = "zstd"] [FAIL] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_10_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 10] [FAIL] >> alter_compression.py::TestAlterCompression::test_availability_data >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> 
alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_11_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 11]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_11_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 11] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_2_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 2]
|78.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_split_merge/test-results/unittest/{meta.json ... results_accumulator.log}
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_20_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 20]
|78.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_insert.py::TestInsertOperations::test_query_pairs [FAIL]
|78.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_11_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 11] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_12_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 12] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_4_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 4] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_13_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 13]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_5_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 5]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_13_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 13] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_6_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 6] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_7_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 7]
|78.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/test-results/unittest/{meta.json ... results_accumulator.log}
|78.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_5_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 5] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_7_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 7] [FAIL]
|78.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [FAIL]
|78.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_8_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 8] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_9_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 9]
|78.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_5_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 5] [FAIL]
|78.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_7_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 7] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_9_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 9] [FAIL]
|78.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_9_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 9] [FAIL]
>> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario [GOOD]
|78.6%| [TA] $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... results_accumulator.log}
>> alter_compression.py::TestAlterCompression::test_availability_data [FAIL]
|78.6%| [TA] {RESULT} $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_18_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 18] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_2_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 2] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_3_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 3]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_20_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 20] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_21_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 21]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_15_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 15]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_3_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 3] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_21_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 21] [FAIL]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC]
|78.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] [FAIL]
|78.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD]
|78.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_3_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 3] [FAIL]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC]
|78.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_21_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 21] [FAIL]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [FAIL]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC]
|78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [FAIL]
>> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table]
|78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_15_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 15] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_16_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 16]
|78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_16_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 16] [FAIL]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_17_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 17]
>> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_17_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 17] [FAIL]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [FAIL]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [FAIL]
>> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [FAIL]
|78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_17_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 17] [FAIL]
|78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_availability_data [FAIL]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [FAIL]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC]
>> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] [FAIL]
>> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [FAIL]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC]
|78.7%| [TA] $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] [FAIL]
|78.7%| [TA] {RESULT} $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success
>> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline [GOOD]
>> test_public_api.py::TestCRUDOperations::test_scheme_client_ops
|78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [FAIL]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [FAIL]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC]
|78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [FAIL]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [FAIL]
>> test_public_api.py::TestCRUDOperations::test_scheme_client_ops [FAIL]
>> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle
>> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle [GOOD]
>> test_public_api.py::TestCRUDOperations::test_none_values [GOOD]
>> test_public_api.py::TestCRUDOperations::test_parse_list_type
>> test_public_api.py::TestCRUDOperations::test_parse_list_type [GOOD]
>> test_public_api.py::TestCRUDOperations::test_parse_tuple [GOOD]
>> test_public_api.py::TestCRUDOperations::test_dict_type [GOOD]
>> test_public_api.py::TestCRUDOperations::test_struct_type [GOOD]
>> test_public_api.py::TestCRUDOperations::test_data_types
>> test_public_api.py::TestCRUDOperations::test_data_types [GOOD]
>> test_public_api.py::TestCRUDOperations::test_struct_type_parameter [GOOD]
|78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [FAIL]
>> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values
|78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD]
>> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values [FAIL]
>> test_public_api.py::TestCRUDOperations::test_bulk_upsert
>> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success [GOOD]
|78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [FAIL]
>> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed1 [GOOD]
>> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 [GOOD] >>
test_public_api.py::TestCRUDOperations::test_bulk_upsert [GOOD] >> test_public_api.py::TestCRUDOperations::test_all_enums_are_presented_as_exceptions [GOOD] >> test_public_api.py::TestCRUDOperations::test_type_builders_str_methods [GOOD] >> test_public_api.py::TestCRUDOperations::test_create_and_delete_session_then_use_it_again [GOOD] >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error [FAIL] >> test_public_api.py::TestCRUDOperations::test_tcl >> test_public_api.py::TestCRUDOperations::test_tcl [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_2 >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [FAIL] >> test_public_api.py::TestCRUDOperations::test_tcl_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [FAIL] >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [FAIL] >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [FAIL] |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [FAIL] >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [FAIL] |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] 
sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:30:38.715610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:30:38.715639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:38.715646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:30:38.715652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:30:38.715668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:30:38.715673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:30:38.715683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:30:38.715698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:30:38.715803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:30:38.715890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:30:38.733092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:30:38.733113Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:30:38.733223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:30:38.736598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:30:38.736639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:30:38.736677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:30:38.739792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:30:38.739906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:30:38.740047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:38.740226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:30:38.740903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:38.740951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:30:38.741231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:30:38.741242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:30:38.741282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:30:38.741290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:30:38.741296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:30:38.741320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:30:38.742968Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:30:38.766860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:30:38.766958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.767053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:30:38.767107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:30:38.767118Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.769653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:38.769700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:30:38.769769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.769783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:30:38.769790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:30:38.769797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:30:38.770540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.770557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:30:38.770563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:30:38.770986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.771000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:30:38.771007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:38.771016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:30:38.771949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:30:38.772457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:30:38.772508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: 
advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:30:38.772747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:30:38.772779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:30:38.772787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:30:38.772876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... ackground_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:35:39.862786Z node 202 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:35:39.862857Z node 202 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 109us result status StatusSuccess 2025-05-29T15:35:39.863027Z node 202 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 
2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at 
schemeshard: 72057594046678944 2025-05-29T15:35:39.873446Z node 202 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1104:2882] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-05-29T15:35:39.873495Z node 202 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409550:2][202:1064:2882] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-05-29T15:35:39.873532Z node 202 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1104:2882] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1748532939859877 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1748532939859877 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1748532939859877 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-05-29T15:35:39.876153Z node 202 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1104:2882] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-05-29T15:35:39.876193Z node 202 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409550:2][202:1064:2882] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts [GOOD] >> test_public_api.py::TestCRUDOperations::test_presented_in_cache [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [FAIL] |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_public_api.py::TestCRUDOperations::test_decimal_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> 
test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [FAIL] >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children [GOOD] >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modifications_1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modification_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success [GOOD] >> test_public_api.py::TestCRUDOperations::test_modify_permissions_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_directory_that_doesnt_exists [GOOD] >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions [GOOD] >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] >> test_public_api.py::TestCRUDOperations::test_query_set1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_queries_set2 |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [FAIL] >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> test_public_api.py::TestCRUDOperations::test_queries_set2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [FAIL] >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [FAIL] >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [FAIL] |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [FAIL] >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] >> 
test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [FAIL] |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [FAIL] |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [FAIL] |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [FAIL] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure [GOOD] >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [FAIL] >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [FAIL] >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in 
Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
>> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] [FAIL]
>> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::DistributedWriteLostPlanThenSplit
2025-05-29 15:35:36,359 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-05-29 15:35:36,526 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination:
    pid    rss    ref    pdirt
3797710  46.1M  40.9M  23.3M  test_tool run_ut @/home/runner/.ya/build/build_root/ciyv/0017fe/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.a
3798386   183M   171M  86.7M  └─ ydb-core-tx-datashard-ut_volatile --trace-path-append /home/runner/.ya/build/build_root/ciyv/0017fe/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_
Test command err:
2025-05-29T15:25:38.658195Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:102:2148], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:25:38.658237Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-29T15:25:38.658255Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0017fe/r3tmp/tmpOi1gh2/pdisk_1.dat 2025-05-29T15:25:39.031365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:25:39.048921Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:25:39.069431Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:32:2079] 1748532337442656 != 1748532337442660 2025-05-29T15:25:39.119010Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:213: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-29T15:25:39.119345Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-29T15:25:39.119427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:25:39.119451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:25:39.130780Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:25:39.324099Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:59:2106] Handle TEvProposeTransaction 2025-05-29T15:25:39.324127Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-05-29T15:25:39.324163Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-05-29T15:25:39.532412Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-05-29T15:25:39.532469Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:25:39.532673Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:25:39.532688Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:25:39.532752Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:640:2548] txid# 
281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:25:39.532791Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:25:39.532807Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-29T15:25:39.533263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:25:39.533415Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-05-29T15:25:39.533556Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1374: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-05-29T15:25:39.533567Z node 1 :TX_PROXY DEBUG: schemereq.cpp:549: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-05-29T15:25:39.549994Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-05-29T15:25:39.550278Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3097: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-05-29T15:25:39.550363Z node 1 :TX_DATASHARD INFO: datashard.cpp:373: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-05-29T15:25:39.550434Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:630: TxInitSchema.Execute 2025-05-29T15:25:39.563080Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3110: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-29T15:25:39.592859Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:696: TxInitSchema.Complete 2025-05-29T15:25:39.592907Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:48: TDataShard::TTxInit::Execute 2025-05-29T15:25:39.593076Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1315: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-05-29T15:25:39.593085Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1371: LoadLockChangeRecords at tablet: 72075186224037888 2025-05-29T15:25:39.593092Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1420: LoadChangeRecordCommits at tablet: 72075186224037888 2025-05-29T15:25:39.593152Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:92: TDataShard::TTxInit::Complete 2025-05-29T15:25:39.593174Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:100: TDataShard::TTxInitRestored::Execute 2025-05-29T15:25:39.593187Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:111: DataShard 72075186224037888 persisting started state actor id [1:681:2569] in generation 1 2025-05-29T15:25:39.603916Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:120: TDataShard::TTxInitRestored::Complete 2025-05-29T15:25:39.612707Z node 1 
:TX_DATASHARD INFO: datashard.cpp:417: Switched to work state WaitScheme tabletId 72075186224037888 2025-05-29T15:25:39.612794Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:457: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-05-29T15:25:39.612822Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1250: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2579] 2025-05-29T15:25:39.612827Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1255: Trying to activate change sender: at tablet: 72075186224037888 2025-05-29T15:25:39.612832Z node 1 :TX_DATASHARD INFO: datashard.cpp:1272: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-05-29T15:25:39.612838Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-29T15:25:39.612905Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-29T15:25:39.612912Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3154: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-29T15:25:39.613010Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-05-29T15:25:39.613034Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-05-29T15:25:39.613049Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-05-29T15:25:39.613056Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:25:39.613064Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:25:39.613070Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:25:39.613099Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:25:39.613105Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-05-29T15:25:39.613110Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-05-29T15:25:39.613207Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269877761, Sender [1:672:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:25:39.613213Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3165: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:25:39.613220Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3710: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:672:2573], sessionId# [0:0:0] 2025-05-29T15:25:39.613233Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 269549568, Sender [1:411:2405], Recipient [1:672:2573] 2025-05-29T15:25:39.613238Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3135: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-05-29T15:25:39.613257Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-05-29T15:25:39.613312Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-05-29T15:25:39.613322Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:132: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-05-29T15:25:39.613338Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:220: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-05-29T15:25:39.613352Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1862: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-05-29T15:25:39.613357Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1910: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-05-29T15:25:39.613363Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1916: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-05-29T15:25:39.613368Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1827: Trying to execute [0:281474976715657] at ... 29: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [17:768:2635]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-05-29T15:35:30.440626Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3155: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-05-29T15:35:30.440660Z node 17 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037889 outdated step 1697000 last cleanup 0 2025-05-29T15:35:30.440682Z node 17 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:35:30.440691Z node 17 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037889 2025-05-29T15:35:30.440697Z node 17 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-05-29T15:35:30.440702Z node 17 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037889 2025-05-29T15:35:30.440743Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [17:768:2635]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:35:31.003665Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [17:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:35:31.003771Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 2025-05-29T15:35:31.179051Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [17:768:2635]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:35:31.179158Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037889, FollowerId 0, tableId 3 2025-05-29T15:35:31.759549Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [17:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:35:31.895432Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender 
[0:0:0], Recipient [17:768:2635]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:35:32.461965Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [17:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-05-29T15:35:32.462505Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3155: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-05-29T15:35:32.462549Z node 17 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037888 outdated step 1711000 last cleanup 0 2025-05-29T15:35:32.462576Z node 17 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:35:32.462585Z node 17 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:35:32.462610Z node 17 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:35:32.462616Z node 17 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:35:32.463605Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [17:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:35:32.463693Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 2025-05-29T15:35:32.658492Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [17:768:2635]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-05-29T15:35:32.658526Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3155: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-05-29T15:35:32.658566Z node 17 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037889 outdated step 1712000 last cleanup 0 2025-05-29T15:35:32.658586Z node 17 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:35:32.658594Z node 17 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037889 2025-05-29T15:35:32.658600Z node 17 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-05-29T15:35:32.658605Z node 17 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037889 2025-05-29T15:35:32.658661Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [17:768:2635]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:35:32.658699Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037889, FollowerId 0, tableId 3 2025-05-29T15:35:33.198836Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [17:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:35:33.361243Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [17:768:2635]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 
2025-05-29T15:35:33.883745Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [17:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:35:33.883857Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 2025-05-29T15:35:34.043346Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [17:768:2635]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:35:34.043464Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037889, FollowerId 0, tableId 3 2025-05-29T15:35:34.670801Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [17:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-05-29T15:35:34.670837Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3155: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-05-29T15:35:34.670870Z node 17 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037888 outdated step 1726000 last cleanup 0 2025-05-29T15:35:34.670892Z node 17 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:35:34.670901Z node 17 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-05-29T15:35:34.670908Z node 17 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-05-29T15:35:34.670912Z node 17 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-05-29T15:35:34.670954Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [17:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:35:34.816711Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [17:768:2635]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-05-29T15:35:34.816757Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3155: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-05-29T15:35:34.816801Z node 17 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037889 outdated step 1727000 last cleanup 0 2025-05-29T15:35:34.816822Z node 17 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-29T15:35:34.816830Z node 17 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037889 2025-05-29T15:35:34.816835Z node 17 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-05-29T15:35:34.816840Z node 17 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037889 2025-05-29T15:35:34.816878Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [17:768:2635]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-05-29T15:35:35.364437Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received 
event# 2146435079, Sender [0:0:0], Recipient [17:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup
2025-05-29T15:35:35.364545Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2
2025-05-29T15:35:35.530850Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [17:768:2635]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup
2025-05-29T15:35:35.530968Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3437: TEvPeriodicTableStats from datashard 72075186224037889, FollowerId 0, tableId 3
2025-05-29T15:35:36.104867Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [17:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup
2025-05-29T15:35:36.242510Z node 17 :TX_DATASHARD TRACE: datashard_impl.h:3129: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [17:768:2635]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/0017fe/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/0017fe/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {})
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [FAIL]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [FAIL]
>> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [FAIL]
>> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [FAIL]
|78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test
Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [FAIL] |78.8%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] |78.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [FAIL] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] |78.9%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [FAIL] |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [FAIL] |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [FAIL] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] [GOOD] |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be 
removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [FAIL] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [FAIL] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [FAIL] >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(a, b) from t1-Aggregation function Min requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select 
min(*) from t1-.*is not allowed here] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] [GOOD] |78.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... results_accumulator.log} >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types7-all_types7-index7---] |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/py3test >> test_secondary_index.py::TestSecondaryIndex::test_secondary_index_cover[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [FAIL] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [FAIL] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types13-all_types13-index13-Date--] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [FAIL] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [FAIL] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [FAIL] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed 
in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [FAIL] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [FAIL] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [FAIL] >> 
test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002248/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002248/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 519843 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002246/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002246/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 519865 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00223f/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00223f/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 519906 is still running ResourceWarning: Enable 
tracemalloc to get the object allocation traceback |79.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead 
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types7-all_types7-index7---] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00223b/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00223b/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 519978 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |79.0%| [TA] $(B)/ydb/tests/datashard/secondary_index/test-results/py3test/{meta.json ... results_accumulator.log} >> test_public_api.py::TestSessionNotFound::test_session_not_found |79.0%| [TA] {RESULT} $(B)/ydb/tests/datashard/secondary_index/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002244/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002244/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 519903 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [FAIL] |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types13-all_types13-index13-Date--] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002238/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002238/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 520048 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00223d/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00223d/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: 
Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 519923 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_disk.py::TestSafeDiskBreak::test_erase_method ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> zip_bomb.py::TestZipBomb::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/py3test |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/py3test >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [FAIL] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002231/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.out.log' mode='w' 
encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002231/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 521158 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> TCdcStreamWithRebootsTests::MergeTableResolvedTimestamps[TabletReboots] [GOOD] >> test_log_scenario.py::TestLogScenario::test_log_uniform ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002236/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002236/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 520484 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/py3test >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/py3test >> test_log_scenario.py::TestLogScenario::test_log_deviation[1051200] >> zip_bomb.py::TestZipBomb::test [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-create_table.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::MergeTableResolvedTimestamps[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:127:2058] recipient: [1:109:2140] Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:132:2058] recipient: [1:110:2141] Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:135:2058] recipient: [1:111:2142] 2025-05-29T15:32:15.448662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:32:15.448679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:15.448683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:32:15.448686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:32:15.448695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:32:15.448698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:32:15.448704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:32:15.448713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:32:15.448805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:32:15.448861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:32:15.458331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7651: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-05-29T15:32:15.458346Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:32:15.458421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:130:2154] sender: [1:175:2058] recipient: [1:15:2062] 2025-05-29T15:32:15.460282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:32:15.460307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:32:15.460323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:32:15.462274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:32:15.462326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:32:15.462404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.462547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-05-29T15:32:15.463150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:15.463181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:32:15.463342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:32:15.463351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:32:15.463371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:32:15.463376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:32:15.463380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:32:15.463391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2212] Leader for TabletID 72057594037968897 is [1:218:2216] sender: [1:219:2058] recipient: [1:212:2212] 2025-05-29T15:32:15.464341Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-29T15:32:15.476998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:32:15.477051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.477093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:32:15.477124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:32:15.477131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.477660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.477681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , 
status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:32:15.477722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.477731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:32:15.477737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:32:15.477742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:32:15.478027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.478034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:32:15.478037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:32:15.478269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.478276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:32:15.478279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:15.478284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:32:15.478824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:32:15.479131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:32:15.479156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2155] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:32:15.479281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:32:15.479296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 
TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:32:15.479301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:32:15.479339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Ch ... 2" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 
HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:36:16.388903Z node 219 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:36:16.389018Z node 219 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 154us result status StatusSuccess 2025-05-29T15:36:16.389240Z node 219 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { 
GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:36:16.389345Z node 219 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-05-29T15:36:16.389377Z node 219 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 34us result status StatusSuccess 2025-05-29T15:36:16.389473Z node 219 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 2 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 2 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "\001\000\004\000\000\000\377\377\377\177" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "\001\000\004\000\000\000\377\377\377\177" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 2 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_log_scenario.py::TestLogScenario::test_log_deviation[180] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-5.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types7-all_types7-index7---] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00222f/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00222f/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 521867 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [FAIL] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper 
name='/home/runner/.ya/build/build_root/ciyv/002234/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002234/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 521035 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_postgres.py::TestPGSQL::test_sql_suite[results-create_table.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00222b/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00222b/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 522319 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/suite_tests/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types13-all_types13-index13-Date--] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002229/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002229/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 522831 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] Test command err: Was written: 0.0 MiB, Speed: 0.0 MiB/s Step 1. only write Write: 10% 2466 30% 2466 50% 2466 90% 2466 99% 2466 ms Write: 10% 1908 30% 1908 50% 1908 90% 1908 99% 1908 ms Write: 10% 1995 30% 1995 50% 1995 90% 1995 99% 1995 ms Write: 10% 4439 30% 4439 50% 4439 90% 4439 99% 4439 ms Write: 10% 6643 30% 6643 50% 6643 90% 6643 99% 6643 ms Write: 10% 9060 30% 9060 50% 9060 90% 9060 99% 9060 ms Write: 10% 9350 30% 9350 50% 9350 90% 9350 99% 9350 ms Write: 10% 11209 30% 11209 50% 11209 90% 11209 99% 11209 ms Write: 10% 12901 30% 12901 50% 12901 90% 12901 99% 12901 ms Write: 10% 11297 30% 11297 50% 11297 90% 11297 99% 11297 ms Write: 10% 12620 30% 12620 50% 12620 90% 12620 99% 12620 ms Write: 10% 11325 30% 11325 50% 11325 90% 11325 99% 11325 ms Write: 10% 9736 30% 9736 50% 9736 90% 9736 99% 9736 ms Write: 10% 9625 30% 9625 50% 9625 90% 9625 99% 9625 ms Write: 10% 10624 30% 10624 50% 10624 90% 10624 99% 10624 ms Write: 10% 11210 30% 11210 50% 11210 90% 11210 99% 11210 ms Write: 10% 8116 30% 8116 50% 8116 90% 8116 99% 8116 ms Write: 10% 11293 30% 11293 50% 11293 90% 11293 99% 11293 ms Write: 10% 10788 30% 10788 50% 10788 90% 10788 99% 10788 ms Write: 10% 10734 30% 10734 50% 10734 90% 10734 99% 10734 ms Write: 10% 9858 30% 9858 50% 9858 90% 9858 99% 9858 ms Write: 10% 11228 30% 11228 50% 11228 90% 11228 99% 11228 ms Write: 10% 10502 30% 10502 50% 10502 90% 10502 99% 10502 ms Write: 10% 5769 30% 5769 50% 5769 90% 5769 99% 5769 ms Write: 10% 9028 30% 9028 50% 9028 90% 9028 99% 9028 ms Write: 10% 8831 30% 8831 50% 8831 90% 8831 99% 8831 ms Write: 10% 8924 30% 8924 50% 8924 90% 8924 99% 8924 ms Write: 10% 8671 30% 8671 50% 8671 90% 8671 99% 8671 ms Write: 10% 7717 30% 7717 50% 7717 90% 7717 99% 7717 ms Write: 10% 7684 30% 7684 50% 7684 90% 7684 99% 7684 ms Write: 10% 8103 30% 8103 50% 8103 90% 8103 99% 8103 ms Write: 10% 8471 30% 8471 50% 8471 90% 8471 99% 8471 ms Write: 10% 8217 30% 8217 50% 8217 90% 8217 99% 8217 ms Write: 10% 8450 30% 8450 50% 8450 90% 8450 99% 8450 ms Write: 10% 2987 30% 2987 50% 2987 90% 2987 99% 2987 ms Write: 10% 7797 30% 7797 50% 7797 90% 7797 99% 7797 ms Write: 10% 7270 30% 7270 50% 7270 90% 7270 99% 7270 ms Write: 10% 7177 30% 7177 
50% 7177 90% 7177 99% 7177 ms
[latency dump condensed: each Write line reports a single writer, so its 10/30/50/90/99% columns all carry the same one value; per-step ranges below, aggregate lines kept verbatim]
Step 1 (tail shown): 27 writers, per-writer write latency 1655-9163 ms
Step 2. read write: 64 writers, per-writer write latency 2168-12968 ms
Read: 10% 5253 30% 8487 50% 11721 90% 18188 99% 19644 ms
Step 3. write modify: 64 writers, per-writer write latency 3591-14824 ms
Was written: 18.75 MiB, Speed: 0.3125 MiB/s
Update: 10% 669 30% 669 50% 669 90% 669 99% 669 ms
Step 4. read modify write: 64 writers, per-writer write latency 3272-17680 ms
Read: 10% 7245 30% 10543 50% 13841 90% 20436 99% 21920 ms
Update: 10% 1449 30% 1449 50% 1449 90% 1449 99% 1449 ms
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-15.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-1.test] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/py3test |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join0.test] >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join3.test] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [FAIL]
Test command err:
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00221d/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00221d/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 526117 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
|79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-5.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-6.test] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |79.2%| [TA] $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log}
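The three ResourceWarnings in the s3 failure above share one cause: the recipe opens moto_server.out.log/err.log, hands them to subprocess.Popen(), and never closes either the file objects or the child process before interpreter shutdown. A minimal sketch of one way to avoid all three warnings; the function names are illustrative, not the actual library/recipes API:

    import subprocess

    def start_moto_server(cmd, out_path, err_path):
        # Keep references to the log files: Popen inherits their file
        # descriptors but does not own (or close) the Python objects.
        out_log = open(out_path, "w", encoding="utf-8")
        err_log = open(err_path, "w", encoding="utf-8")
        process = subprocess.Popen(cmd, stdout=out_log, stderr=err_log)
        return process, out_log, err_log

    def stop_moto_server(process, out_log, err_log):
        process.terminate()
        process.wait()   # silences "subprocess ... is still running"
        out_log.close()  # silences the unclosed TextIOWrapper warnings
        err_log.close()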
|79.2%| [TA] {RESULT} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-6.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-7.test] >> test_stream_query.py::TestStreamQuery::test_sql_suite[plan-window.test] >> test_postgres.py::TestPGSQL::test_sql_suite[results-abstime.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-1.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-15.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-2.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-2.test] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join3.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join4.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-7.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-8.test] >> test_select.py::TestDML::test_select[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/py3test >> zip_bomb.py::TestZipBomb::test [FAIL]
Test command err: Pid 527587
contrib/python/pytest/py3/_pytest/threadexception.py:77: PytestUnhandledThreadExceptionWarning: Exception in thread Thread-1 (watch_rss)
Traceback (most recent call last):
  File "contrib/tools/python3/Lib/threading.py", line 1075, in _bootstrap_inner
    self.run()
  File "contrib/tools/python3/Lib/threading.py", line 1012, in run
    self._target(*self._args, **self._kwargs)
  File "ydb/tests/olap/zip_bomb.py", line 89, in watch_rss
    if rss_kb > maxrss:
       ^^^^^^^^^^^^^^^
TypeError: '>' not supported between instances of 'NoneType' and 'int'
>> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join4.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select.test] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-2.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-3.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-8.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-9.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-2.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-3.test] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-create_table.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/coalesce-and-join.test] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-3.test] [FAIL] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test
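The TypeError in the zip_bomb failure above means watch_rss compared rss_kb against maxrss while the RSS reading came back as None (typically because the watched process had already exited). A hedged sketch of the missing guard; get_rss_kb here is a hypothetical stand-in for however zip_bomb.py actually samples /proc:

    import re

    def get_rss_kb(pid):
        # Hypothetical reader: returns VmRSS in KiB, or None when the
        # process is gone or the field is absent.
        try:
            with open(f"/proc/{pid}/status") as status:
                match = re.search(r"VmRSS:\s+(\d+)\s+kB", status.read())
        except FileNotFoundError:
            return None
        return int(match.group(1)) if match else None

    def watch_rss(pid, maxrss):
        rss_kb = get_rss_kb(pid)
        # Comparing None > int raises TypeError, exactly as in the log;
        # check for None before comparing.
        if rss_kb is not None and rss_kb > maxrss:
            raise RuntimeError(f"RSS {rss_kb} KiB exceeds limit of {maxrss} KiB")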
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-4.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-9.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-3.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-4.test] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-abstime.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[results-boolean.test] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] >> test_select.py::TestDML::test_select[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test_postgres.py::TestPGSQL::test_sql_suite[results-create_table.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/coalesce-and-join.test] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/coalesce-and-join.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join-group-by-with-null.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-4.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-5.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-4.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-5.test] >> test_postgres.py::TestPGSQL::test_sql_suite[results-boolean.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-case.test] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_stream_query.py::TestStreamQuery::test_sql_suite[plan-window.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join0.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join1.test] >> test_select.py::TestDML::test_select[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-5.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-5.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join-group-by-with-null.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/coalesce-and-join.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join-group-by-with-null.test] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_select.py::TestDML::test_select[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [FAIL] >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-10.test] >> test_postgres.py::TestPGSQL::test_sql_suite[results-case.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-14.test]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [FAIL]
Test command err:
contrib/python/pytest/py3/_pytest/threadexception.py:77: PytestUnhandledThreadExceptionWarning: Exception in thread Thread-217 (_test_suffix)
Traceback (most recent call last):
  File "contrib/tools/python3/Lib/threading.py", line 1075, in _bootstrap_inner
    self.run()
  File "contrib/tools/python3/Lib/threading.py", line 1012, in run
    self._target(*self._args, **self._kwargs)
  File "/home/runner/.ya/build/build_root/ciyv/00268d/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 110, in _test_suffix
    ctx.executable(self, ctx)
  File "ydb/tests/olap/scenario/test_insert.py", line 103, in scenario_read_data_during_bulk_upsert
    thread2.join_all()
  File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all
    thread.join(timeout=timeout)
  File "ydb/tests/olap/common/thread_helper.py", line 18, in join
    raise self.exc
  File "ydb/tests/olap/common/thread_helper.py", line 11, in run
    self.ret = self._target(*self._args, **self._kwargs)
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "ydb/tests/olap/scenario/test_insert.py", line 59, in _loop_insert
    raise Exception('Insert failed table {}'.format(table))
Exception: Insert failed table 0
|79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join-group-by-with-null.test] [FAIL] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select.test] [FAIL] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-9.test] [FAIL] >> test_select.py::TestDML::test_select[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-1.test] >> test_public_api.py::TestSessionNotFound::test_session_not_found [GOOD] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] >> test_postgres.py::TestPGSQL::test_sql_suite[results-select_distinct.test] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [FAIL] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] >> test_select.py::TestDML::test_select[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-10.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-11.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-14.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-15.test] >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_stream_query.py::TestStreamQuery::test_sql_suite[plan-window.test] [FAIL] >> test_select.py::TestDML::test_select[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-15.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-11.test] [FAIL]
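The thread_helper.py frames in the traceback above show the pattern this suite relies on: a Thread subclass that stores the worker's result and exception, then re-raises the exception when the caller joins, so a failure inside _loop_insert surfaces in the main test thread. A minimal sketch of the idea (not the YDB helper itself):

    import threading

    class PropagatingThread(threading.Thread):
        # Sketch: re-raise a worker-thread exception in join().
        def run(self):
            self.exc = None
            self.ret = None
            try:
                # threading.Thread stores the callable in self._target.
                self.ret = self._target(*self._args, **self._kwargs)
            except BaseException as e:
                self.exc = e  # captured here, surfaces in join() below

        def join(self, timeout=None):
            super().join(timeout=timeout)
            if self.exc is not None:
                raise self.exc  # the 'raise self.exc' frame in the log
            return self.ret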
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-12.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-2.test] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-5.test] [FAIL] >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-6.test] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD]
Test command err:
contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
|79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-case.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-12.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-13.test] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-5.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-2.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-3.test] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join-group-by-with-null.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-1.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-2.test] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join0.test] >> test_select.py::TestDML::test_select[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-2.test] [FAIL] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join-group-by-with-null.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-13.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-14.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-3.test] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join3.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-3.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-4.test] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join1.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join2.test] >> test_select.py::TestDML::test_select[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-14.test] [FAIL] >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [FAIL] >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-3.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-4.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-4.test] [FAIL] >> test_stream_query.py::TestStreamQuery::test_sql_suite[results-window.test] >> test_select.py::TestDML::test_select[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [FAIL] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select_distinct.test] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> test_select.py::TestDML::test_select[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-6.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-7.test] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD]
Test command err:
contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-4.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-5.test] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-abstime.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-1.test] >> test_select.py::TestDML::test_select[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-7.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-8.test] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-1.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-5.test] [FAIL] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join3.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[results-select_distinct.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join4.test] >> test_postgres.py::TestPGSQL::test_sql_suite[results-strings.test] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-4.test] [FAIL] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> test_select.py::TestDML::test_select[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-8.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-9.test] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join4.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[results-select.test] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD]
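Both nemesis runs above emit the same Flask/Werkzeug warnings, and the first one names its own fix: pkgutil.find_loader() goes away in Python 3.14 and importlib.util.find_spec() is the documented replacement. A small sketch of the migration; the helper name is illustrative:

    import importlib.util

    def module_available(name):
        # Old (deprecated): pkgutil.find_loader(name) is not None
        # Replacement suggested by the warning:
        try:
            return importlib.util.find_spec(name) is not None
        except (ImportError, ValueError):
            # raised for unimportable parent packages or malformed names
            return False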
test_stream_query.py::TestStreamQuery::test_sql_suite[results-window.test] [FAIL] |79.5%| [TA] $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_select.py::TestDML::test_select[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [FAIL] |79.5%| [TA] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-9.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-insert.test] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-abstime.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-boolean.test] >> test_cp_ic.py::TestCpIc::test_discovery >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-insert.test] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-boolean.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-case.test] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [FAIL] >> test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-1.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-10.test] >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx [GOOD] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-14.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[results-select.test] [FAIL] >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit [GOOD] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-5.test] [FAIL] >> test_public_api.py::TestSessionNotFoundOperations::test_invalid_keep_alive_example >> test_public_api.py::TestSessionNotFoundOperations::test_invalid_keep_alive_example [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join2.test] [FAIL] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [FAIL] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds [GOOD] >> 
test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types >> test_select.py::TestDML::test_select[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [FAIL] >> test_select.py::TestDML::test_select[table_ttl_Date-pk_types18-all_types18-index18-Date--] >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_select.py::TestDML::test_select[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-10.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-11.test] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [FAIL] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-insert.test] [FAIL] >> test_select.py::TestDML::test_select[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_simple_table_profile_settings [GOOD] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] >> test_cp_ic.py::TestCpIc::test_discovery [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-1.test] [FAIL] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-2.test] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-case.test] [FAIL] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [FAIL] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join0.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join1.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-11.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-12.test] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> 
test_select.py::TestDML::test_select[table_all_types-pk_types12-all_types12-index12---] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-2.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-3.test] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select_distinct.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-12.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-13.test] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-strings.test] >> test_select.py::TestDML::test_as_table >> test_select.py::TestDML::test_select[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_stream_query.py::TestStreamQuery::test_sql_suite[results-window.test] [FAIL] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/py3test >> test_cp_ic.py::TestCpIc::test_discovery [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-3.test] [FAIL] |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-4.test] >> test_select.py::TestDML::test_select[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-insert.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-1.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-13.test] [FAIL] |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-4.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-strings.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-5.test] >> test_select.py::TestDML::test_select[table_ttl_Date-pk_types18-all_types18-index18-Date--] [FAIL] |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-window.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-1.test] [FAIL] >> test_select.py::TestDML::test_select[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-2.test] |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-select.test] [FAIL] >> test_select.py::TestDML::test_as_table [GOOD] |79.6%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [FAIL] >> test_select.py::TestDML::test_select[table_all_types-pk_types12-all_types12-index12---] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-5.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-1.test] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] >> test_select.py::TestDML::test_select[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] >> test_select.py::TestDML::test_select[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-2.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-3.test] >> test_select.py::TestDML::test_select[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-insert.test] [FAIL] |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-case.test] [FAIL] |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-1.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-3.test] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-4.test] |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [FAIL] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [FAIL] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [FAIL] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-4.test] [FAIL] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-5.test] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join1.test] [FAIL] >> test_public_api.py::TestBadSession::test_simple >> 
test_select.py::TestDML::test_select[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join2.test] >> test_select.py::TestDML::test_select[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-5.test] [FAIL] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-13.test] [FAIL] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.7%| [TA] $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_select.py::TestDML::test_select[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [FAIL] >> test_select.py::TestDML::test_select[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [FAIL] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_ttl_Date-pk_types18-all_types18-index18-Date--] [FAIL] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test |79.8%| [TA] {RESULT} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} >> test_select.py::TestDML::test_select[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [FAIL] |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_as_table [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
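The ydb/types.py DeprecationWarning above also carries its own migration hint: replace the naive datetime.datetime.utcfromtimestamp() with a timezone-aware conversion. A minimal sketch (timezone.utc is used here instead of the datetime.UTC alias from the warning text so the sketch also runs on Python < 3.11):

    from datetime import datetime, timezone

    def from_unix(timestamp):
        # Deprecated: datetime.utcfromtimestamp(timestamp) -> naive datetime
        # Replacement per the warning: an aware datetime pinned to UTC.
        return datetime.fromtimestamp(timestamp, tz=timezone.utc)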
|79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_all_types-pk_types12-all_types12-index12---] [FAIL] |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-window.test] [FAIL] |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [FAIL] |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [FAIL] >> KqpBatchUpdate::ColumnTable |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-1.test] [FAIL] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [FAIL] |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_select.py::TestDML::test_select[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [FAIL] |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> docker_wrapper_test.py::test_pg_generated[Test64BitErrorChecking] |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [FAIL] |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/py3test |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-window.test] [FAIL] |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/blobstorage/py3test |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-5.test] [FAIL] |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [FAIL] |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [FAIL] |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join2.test] [FAIL] |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [FAIL] |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join2.test] [FAIL] |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ColumnTable Test command err: Trying to start YDB, gRPC: 25460, MsgBus: 11999 2025-05-29T15:37:19.404778Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509892221740539861:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:37:19.404898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/00276f/r3tmp/tmpRVGQN7/pdisk_1.dat 2025-05-29T15:37:19.568757Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:37:19.590694Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:37:19.590759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:37:19.591997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25460, node 1 2025-05-29T15:37:19.608420Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:37:19.608433Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:37:19.608435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:37:19.608490Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11999 TClient is connected to server localhost:11999 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:37:19.742077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:37:19.750105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-29T15:37:19.763282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:37:19.815476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-29T15:37:19.888609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-29T15:37:19.918087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:37:20.007048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509892221740541322:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:37:20.007091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:37:20.054001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:37:20.069937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:37:20.084365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:37:20.105193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:37:20.116601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:37:20.130530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:37:20.151281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:37:20.175540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509892226035509269:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:37:20.175579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:37:20.175748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509892226035509274:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:37:20.176798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:37:20.180475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715669, at schemeshard: 72057594046644480 2025-05-29T15:37:20.180525Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509892226035509276:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:37:20.241010Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509892226035509327:3396] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:37:20.372000Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509892226035509343:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:37:20.374203Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjVhN2NjMy1jYTk0MTJjLTZhZjYzNWFhLTZlNzIwMGEx, ActorId: [1:7509892221740541304:2401], ActorState: ExecuteState, TraceId: 01jweb1n1eadwe10t7kchjvk1j, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: VERIFY failed (2025-05-29T15:37:20.375630Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x13A9DC85 1. /-S/util/system/yassert.cpp:55: Panic @ 0x13A94C86 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x13C36A16 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x260D85F2 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x260D7EF2 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x260F9ACC 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x260F9ACC 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x260F9ACC 8. /-S/util/thread/pool.h:71: Process @ 0x260F9ACC 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x13AA5609 10. /-S/util/thread/factory.h:15: Execute @ 0x13AA3FF9 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x13AA3FF9 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x13A9F46C 13. ??:0: ?? @ 0x7F533EEADAC2 14. ??:0: ?? @ 0x7F533EF3F84F >> test_auditlog.py::test_dynconfig |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [FAIL] |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_create_and_remove_tenant >> test_auditlog.py::test_single_dml_query_logged[update] |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] >> test_auditlog.py::test_single_dml_query_logged[upsert] |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.2%| [TA] $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... results_accumulator.log} |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.2%| [TA] {RESULT} $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... 
results_accumulator.log} |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_auditlog.py::test_dynconfig [FAIL] |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_public_api.py::TestBadSession::test_simple [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [FAIL] |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestDriverCanRecover::test_driver_recovery |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] >> test_auditlog.py::test_create_and_remove_tenant [GOOD] |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [FAIL] |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_auditlog.py::test_single_dml_query_logged[select] >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [FAIL] |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [FAIL] |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.4%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [FAIL] >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [FAIL] |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [FAIL] |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [FAIL] >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_create_and_remove_tenant [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ciyv/0020d9/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk10/testing_out_stuff/test_auditlog.py.test_create_and_remove_tenant/audit.txt 2025-05-29T15:37:32.509439Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"BEGIN INIT DATABASE CONFIG","remote_address":"::1","database":"/Root/users/database"} 2025-05-29T15:37:32.516415Z: {"paths":"[/Root/users/database]","tx_id":"281474976715660","database":"/Root","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DATABASE","component":"schemeshard"} 2025-05-29T15:37:32.541109Z: {"paths":"[/Root/users/database]","tx_id":"281474976715661","database":"/Root","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"ALTER DATABASE","component":"schemeshard"} 2025-05-29T15:37:34.572787Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"END INIT DATABASE CONFIG","remote_address":"::1","database":"/Root/users/database"} 2025-05-29T15:37:34.625311Z: {"paths":"[.metadata/workload_manager/pools/default]","tx_id":"281474976720657","new_owner":"metadata@system","acl_add":"[+(SR|DS):all-users@well-known, 
+(SR|DS):root@builtin]","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE RESOURCE POOL","component":"schemeshard"} 2025-05-29T15:37:34.718730Z: {"reason":"Check failed: path: '/Root/users/database/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92","paths":"[default]","tx_id":"281474976720658","new_owner":"metadata@system","acl_add":"[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin]","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAlreadyExists","operation":"CREATE RESOURCE POOL","component":"schemeshard"} 2025-05-29T15:37:36.111712Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"BEGIN REMOVE DATABASE","remote_address":"::1","database":"/Root/users/database"} 2025-05-29T15:37:36.116684Z: {"paths":"[/Root/users/database]","tx_id":"281474976715662","database":"/Root","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DATABASE","component":"schemeshard"} 2025-05-29T15:37:36.148027Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"END REMOVE DATABASE","remote_address":"::1","database":"/Root/users/database"} >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_public_api.py::TestDriverCanRecover::test_driver_recovery [GOOD] |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.5%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_auditlog.py::test_single_dml_query_logged[select] [FAIL] |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [FAIL] |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> docker_wrapper_test.py::test_pg_generated[Test64BitErrorChecking] [SKIPPED] |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [FAIL] |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [FAIL] |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join2.test] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestAppendEncodedText] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestAppendEscapedText] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestAppendEscapedTextExistingBuffer] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestArrayScanBackend] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestArrayScanner] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestArrayValueBackend] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestArrayValuer] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBadConn] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBinaryByteSliceToInt] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestBinaryByteSlicetoUUID] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestBindError] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanBytes] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteSliceToText] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaOutputFormatEncoding] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaOutputFormats] |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.5%| 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> docker_wrapper_test.py::test_pg_generated[TestByteaOutputFormats] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestCloseBadConn] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCommit] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCommitInFailedTransaction] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCommitInFailedTransactionWithCancelContext] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnClose] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnExecDeadlock] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnListen] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestConnPing] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestConnPrepareContext] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnPrepareContext/context.Background] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnPrepareContext/context.WithTimeout] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnPrepareContext/context.WithTimeout_exceeded] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnUnlisten] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestConnUnlistenAll] [SKIPPED] |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestConnectorWithNoticeHandler_Simple] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestConnectorWithNotificationHandler_Simple] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestContextCancelBegin] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestContextCancelExec] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestContextCancelQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyFromError] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInBinaryError] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInMultipleValues] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInRaiseStmtTrigger] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInSchemaStmt] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCopyInStmt] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCopyInStmtAffectedRows] >> docker_wrapper_test.py::test_pg_generated[TestCopyInStmtAffectedRows] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInTypes] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInWrongType] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyOutsideOfTxnError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCopyRespLoopConnectionError] >> docker_wrapper_test.py::test_pg_generated[TestCopyRespLoopConnectionError] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopySyntaxError] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestDataType] [GOOD] |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> 
test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [FAIL] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] >> docker_wrapper_test.py::test_pg_generated[TestDataTypeLength] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDataTypeName] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDataTypePrecisionScale] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDecodeBool] >> docker_wrapper_test.py::test_pg_generated[TestDecodeBool] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDecodeUUIDBackend] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDecodeUUIDBinaryError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestEmptyQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestEmptyResultSetColumns] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestEncodeAndParseTs] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestEncodeDecode] >> docker_wrapper_test.py::test_pg_generated[TestEncodeDecode] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorClass] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorDuringStartup] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorDuringStartupClosesConn] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestErrorOnExec] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorOnQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorOnQueryRowSimpleQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorSQLState] >> docker_wrapper_test.py::test_pg_generated[TestErrorSQLState] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestExec] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanUnsupported] [GOOD] |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFormatAndParseTimestamp] [GOOD] >> 
docker_wrapper_test.py::test_pg_generated[TestFormatTs] >> docker_wrapper_test.py::test_pg_generated[TestFormatTs] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFormatTsBackend] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestFullParseURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanDelimiter] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanErrors] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanErrors] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerArrayBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerArrayString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceEmpty] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayValueErrors] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayValueUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestHasCorrectRootGroupPermissions] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestIPv6LoopbackParseURL] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestInfinityTimestamp] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanUnsupported] [GOOD] |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayValue] [GOOD] |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestInvalidProtocolParseURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestIsUTF8] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestIssue1046] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestIssue1062] [SKIPPED] >> 
docker_wrapper_test.py::test_pg_generated[TestIssue186] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestIssue196] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestIssue282] >> docker_wrapper_test.py::test_pg_generated[TestIssue282] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestIssue494] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestIssue617] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestListenerClose] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestListenerConnCloseWhileQueryIsExecuting] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestListenerFailedQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestListenerListen] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestListenerPing] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestListenerReconnect] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestListenerUnlisten] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestListenerUnlistenAll] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestMinimalURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestMultipleEmptyResult] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestMultipleResult] >> docker_wrapper_test.py::test_pg_generated[TestMultipleResult] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestMultipleSimpleQuery] >> docker_wrapper_test.py::test_pg_generated[TestMultipleSimpleQuery] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestNewConnector_Connect] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestNewConnector_Driver] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestNewConnector_WorksWithOpenDB] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestNewListenerConn] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestNoData] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestNotifyExtra] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestNullAfterNonNull] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestOpenURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParameterCountMismatch] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseArray] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseArrayError] >> docker_wrapper_test.py::test_pg_generated[TestParseArrayError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseComplete] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseEnviron] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseErrorInExtendedQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestParseOpts] >> docker_wrapper_test.py::test_pg_generated[TestParseOpts] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseTs] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseTsErrors] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestPgpass] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestPing] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestQueryCancelRace] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestQueryCancelledReused] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestQueryRowBugWorkaround] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestQuickClose] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestQuoteIdentifier] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestQuoteLiteral] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestReadFloatPrecision] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestReconnect] [SKIPPED] 
>> docker_wrapper_test.py::test_pg_generated[TestReturning] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestRowsCloseBeforeDone] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestRowsColumnTypes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestRowsResultTag] >> docker_wrapper_test.py::test_pg_generated[TestRowsResultTag] [SKIPPED] |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> docker_wrapper_test.py::test_pg_generated[TestRuntimeParameters] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport/SNI_is_not_passed_when_disabled] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport/SNI_is_not_set_for_IPv4] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport/SNI_is_passed_when_asked_for] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport/SNI_is_set_by_default] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSSLClientCertificates] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSSLConnection] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSSLRequireWithRootCert] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSSLVerifyCA] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSSLVerifyFull] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestScanNilTimestamp] >> docker_wrapper_test.py::test_pg_generated[TestScanNilTimestamp] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestScanTimestamp] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSimpleParseURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSimpleQuery] >> docker_wrapper_test.py::test_pg_generated[TestSimpleQuery] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStatment] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStmtExecContext] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtExecContext/context.Background] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtExecContext/context.WithTimeout] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtExecContext/context.WithTimeout_exceeded] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext/context.Background] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext/context.WithTimeout] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext/context.WithTimeout_exceeded] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanEmpty] [GOOD] >> 
docker_wrapper_test.py::test_pg_generated[TestStringArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanNil] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayValue] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringToBytea] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringToUUID] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringWithNul] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestTextByteSliceToInt] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestTextByteSliceToInt] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTextByteSliceToUUID] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTextDecodeIntoString] [GOOD] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/11:59:59+00:00_=>_0000-01-01T11:59:59Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/11:59:59+04:00_=>_0000-01-01T11:59:59+04:00] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/11:59:59+04:01:02_=>_0000-01-01T11:59:59+04:01] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/11:59:59-04:01:02_=>_0000-01-01T11:59:59-04:01] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00+00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00-04:00_=>_0000-01-02T00:00:00-04:00] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00:00+00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00:00.0+00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00:00.000000+00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00Z_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/11:59:59_=>_0000-01-01T11:59:59Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/24:00:00.000000_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/24:00:00.0_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/24:00:00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/24:00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimestampWithOutTimezone] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimestampWithTimeZone] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestTxOptions] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestXactMultiStmt] 
[SKIPPED] |80.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/test-results/unittest/{meta.json ... results_accumulator.log} |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [FAIL] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [FAIL] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [FAIL] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params0] >> test_formats.py::TestS3Formats::test_format[v1-test.csv-csv_with_names-kikimr_settings0] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_1-pk_types9-all_types9-index9-Uint8] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_3-pk_types13-all_types13-index13-Int8] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [FAIL] >> test_s3_1.py::TestS3::test_write_result[v1-kikimr_params0-client0] >> test_yq_v2.py::TestS3::test_yqv2_enabled[v2-False-client0] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.gz-gzip] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_streaming_join.py::TestStreamingJoin::test_grace_join[v1-client0] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_0-pk_types16-all_types16-index16-Int8] >> test_bindings_0.py::TestBindings::test_binding_operations[v1-kikimr_settings0-client0] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous >> test_test_connection.py::TestConnection::test_test_s3_connection[v1-client0] >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_0_float-pk_types4-all_types4-index4-Float] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_4-pk_types6-all_types6-index6-Uint8] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [FAIL] |80.8%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_ydb_over_fq.py::TestYdbOverFq::test_list_directory_v2[v2-client0] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params0] [FAIL] >> test_inflight.py::TestS3::test_data_inflight[v1-client0-kikimr_params0] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_3_float-pk_types1-all_types1-index1-Float] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params0-false] >> test_statistics.py::TestS3::test_egress[v2-client0-json_list] >> test_formats.py::TestS3Formats::test_format[v1-test.csv-csv_with_names-kikimr_settings0] [FAIL] >> test_s3_0.py::TestS3::test_csv[v1-false-client0] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_1_float-pk_types3-all_types3-index3-Float] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_0-pk_types16-all_types16-index16-Int8] >> test_yq_v2.py::TestS3::test_yqv2_enabled[v2-False-client0] [FAIL] >> test_s3_1.py::TestS3::test_write_result[v1-kikimr_params0-client0] [FAIL] >> test_inflight.py::TestS3::test_data_inflight[v1-client0-kikimr_params0] [FAIL] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.gz-gzip] [FAIL] >> test_bindings_1.py::TestBindings::test_s3_insert[v1-kikimr_settings0-client0] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_1-pk_types9-all_types9-index9-Uint8] >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_1-pk_types9-all_types9-index9-Uint8] [FAIL] >> test_format_setting.py::TestS3::test_interval_unit[v1-client0] >> test_yq_v2.py::TestS3::test_yqv2_enabled[v2-True-client0] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params1] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.lz4-lz4] >> test_formats.py::TestS3Formats::test_format[v1-test.tsv-tsv_with_names-kikimr_settings0] >> test_s3_1.py::TestS3::test_top_level_listing_2[v1-kikimr_params0-false-client0] >> test_yq_v2.py::TestS3::test_yqv2_enabled[v2-True-client0] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_all_types_float-pk_types5-all_types5-index5-Float] >> test_formats.py::TestS3Formats::test_format[v1-test.tsv-tsv_with_names-kikimr_settings0] [FAIL] >> test_streaming_join.py::TestStreamingJoin::test_grace_join[v1-client0] [FAIL] >> test_s3_1.py::TestS3::test_top_level_listing_2[v1-kikimr_params0-false-client0] [FAIL] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.lz4-lz4] [FAIL] >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] >> test_bindings_0.py::TestBindings::test_binding_operations[v1-kikimr_settings0-client0] [FAIL] >> test_insert.py::TestS3::test_insert[v1-false-client0-json_list-dataset] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params1] [FAIL] >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v2-client0] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_directory_v2[v2-client0] [FAIL] >> test_s3_0.py::TestS3::test_csv[v1-false-client0] [FAIL] >> test_test_connection.py::TestConnection::test_test_s3_connection[v1-client0] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params0-false] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_3-pk_types13-all_types13-index13-Int8] [FAIL] >> test_yq_v2.py::TestS3::test_removed_database_path[v2-client0] >> 
test_formats.py::TestS3Formats::test_format[v1-test_each_row.json-json_each_row-kikimr_settings0] >> test_formats.py::TestS3Formats::test_format[v1-test_each_row.json-json_each_row-kikimr_settings0] [FAIL] >> test_formats.py::TestS3Formats::test_format[v1-test_list.json-json_list-kikimr_settings0] >> test_s3_0.py::TestS3::test_csv[v1-true-client0] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_directory_v1[v1-client0] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params2] >> test_bindings_0.py::TestBindings::test_binding_operations[v2-kikimr_settings0-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params0-true] >> test_yq_v2.py::TestS3::test_removed_database_path[v2-client0] [FAIL] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/py3test >> docker_wrapper_test.py::test_pg_generated[TestXactMultiStmt] [SKIPPED] >> test_s3_1.py::TestS3::test_top_level_listing_2[v1-kikimr_params0-true-client0] >> test_formats.py::TestS3Formats::test_format[v1-test_list.json-json_list-kikimr_settings0] [FAIL] >> test_test_connection.py::TestConnection::test_test_s3_connection[v2-client0] >> test_statistics.py::TestS3::test_egress[v2-client0-json_list] [FAIL] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params2] [FAIL] >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_0-pk_types16-all_types16-index16-Int8] [FAIL] >> test_bindings_1.py::TestBindings::test_s3_insert[v1-kikimr_settings0-client0] [FAIL] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v1-false-client0] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_0_float-pk_types4-all_types4-index4-Float] [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_directory_v1[v1-client0] [FAIL] >> test_s3_0.py::TestS3::test_csv[v1-true-client0] [FAIL] >> test_yq_v2.py::TestS3::test_query_parameters[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params0-true] [FAIL] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.br-brotli] >> test_formats.py::TestS3Formats::test_format[v1-test.parquet-parquet-kikimr_settings0] >> test_bindings_0.py::TestBindings::test_binding_operations[v2-kikimr_settings0-client0] [FAIL] >> test_bindings_1.py::TestBindings::test_s3_insert[v2-kikimr_settings0-client0] >> test_s3_0.py::TestS3::test_csv[v2-false-client0] >> test_yq_v2.py::TestS3::test_query_parameters[v2-client0] [FAIL] >> test_s3_1.py::TestS3::test_top_level_listing_2[v1-kikimr_params0-true-client0] [FAIL] >> test_s3_0.py::TestS3::test_csv[v2-false-client0] [FAIL] >> test_formats.py::TestS3Formats::test_format[v1-test.parquet-parquet-kikimr_settings0] [FAIL] >> test_public_api.py::TestJsonExample::test_json_unexpected_failure >> test_statistics.py::TestS3::test_egress[v2-client0-json_each_row] >> test_test_connection.py::TestConnection::test_test_s3_connection[v2-client0] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params0-false] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_without_streams[v1-client0] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v1-kikimr_settings0-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params0-false] [FAIL] >> test_s3_0.py::TestS3::test_csv[v2-true-client0] >> 
test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params0-true] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.br-brotli] [FAIL] >> test_bindings_1.py::TestBindings::test_s3_insert[v2-kikimr_settings0-client0] [FAIL] >> test_s3_1.py::TestS3::test_top_level_listing_2[v2-kikimr_params0-false-client0] >> test_s3_0.py::TestS3::test_csv[v2-true-client0] [FAIL] >> test_formats.py::TestS3Formats::test_format[v2-test.csv-csv_with_names-kikimr_settings0] >> test_statistics.py::TestS3::test_egress[v2-client0-json_each_row] [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_without_streams[v1-client0] [FAIL] >> test_test_connection.py::TestConnection::test_test_s3_connection_uri[v1-client0] >> test_s3_1.py::TestS3::test_top_level_listing_2[v2-kikimr_params0-false-client0] [FAIL] >> test_bindings_1.py::TestBindings::test_s3_format_mismatch[v1-client0] >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v2-client0] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_all_types_float-pk_types5-all_types5-index5-Float] [FAIL] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v1-kikimr_settings0-client0] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params0-true] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_0-pk_types16-all_types16-index16-Int8] [FAIL] >> test_validation.py::TestS3::test_empty[v1-client0] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_4-pk_types6-all_types6-index6-Uint8] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_1-pk_types9-all_types9-index9-Uint8] [FAIL] >> test_format_setting.py::TestS3::test_interval_unit[v1-client0] [FAIL] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.bz2-bzip2] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_1_float-pk_types3-all_types3-index3-Float] [FAIL] >> test_s3_0.py::TestS3::test_inference[v2-client0] >> test_insert.py::TestS3::test_insert[v1-false-client0-json_list-dataset] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_3_float-pk_types1-all_types1-index1-Float] [FAIL] >> test_statistics.py::TestS3::test_egress[v2-client0-csv_with_names] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_without_streams[v2-client0] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_1-pk_types9-all_types9-index9-Uint8] [FAIL] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [FAIL] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] >> test_bindings_1.py::TestBindings::test_s3_format_mismatch[v1-client0] [FAIL] >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v1-client0] >> test_test_connection.py::TestConnection::test_test_s3_connection_uri[v1-client0] [FAIL] >> test_formats.py::TestS3Formats::test_format[v2-test.csv-csv_with_names-kikimr_settings0] [FAIL] >> test_format_setting.py::TestS3::test_interval_unit[v2-client0] >> test_s3_0.py::TestS3::test_inference[v2-client0] [FAIL] >> test_s3_0.py::TestS3::test_inference_null_column[v2-client0] >> 
test_insert.py::TestS3::test_insert[v1-false-client0-json_list-dataにちは% set] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params0-false] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_without_streams[v2-client0] [FAIL] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v2-kikimr_settings0-client0] >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v1-client0] [FAIL] >> test_s3_1.py::TestS3::test_top_level_listing_2[v2-kikimr_params0-true-client0] >> test_statistics.py::TestS3::test_egress[v2-client0-csv_with_names] [FAIL] >> test_statistics.py::TestS3::test_egress[v2-client0-parquet] >> test_formats.py::TestS3Formats::test_format[v2-test.tsv-tsv_with_names-kikimr_settings0] >> test_format_setting.py::TestS3::test_interval_unit[v2-client0] [FAIL] >> test_test_connection.py::TestConnection::test_test_s3_connection_uri[v2-client0] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v2-kikimr_settings0-client0] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params0-false] [FAIL] >> test_statistics.py::TestS3::test_egress[v2-client0-parquet] [FAIL] >> test_bindings_1.py::TestBindings::test_s3_format_mismatch[v2-client0] >> test_s3_0.py::TestS3::test_inference_null_column[v2-client0] [FAIL] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.bz2-bzip2] [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query[v1-client0] >> test_formats.py::TestS3Formats::test_format[v2-test.tsv-tsv_with_names-kikimr_settings0] [FAIL] >> test_format_setting.py::TestS3::test_bad_format_setting[v1-client0] >> test_test_connection.py::TestConnection::test_test_s3_connection_uri[v2-client0] [FAIL] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.zst-zstd] >> test_format_setting.py::TestS3::test_bad_format_setting[v1-client0] [FAIL] >> test_statistics.py::TestS3::test_egress[v1-client0-json_list] >> test_bindings_1.py::TestBindings::test_s3_format_mismatch[v2-client0] [FAIL] >> test_s3_1.py::TestS3::test_top_level_listing_2[v2-kikimr_params0-true-client0] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params0-true] >> test_insert.py::TestS3::test_insert[v1-false-client0-json_list-dataにちは% set] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_streaming_join.py::TestStreamingJoin::test_grace_join[v1-client0] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00204b/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_streaming_join/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00204b/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_streaming_join/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 582564 is still running ResourceWarning: Enable tracemalloc to get the object allocation 
traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_yq_v2.py::TestS3::test_query_parameters[v2-client0] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00205b/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_yq_v2/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00205b/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_yq_v2/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 581751 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_public_api.py::TestJsonExample::test_json_unexpected_failure [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v1-false-client0] [FAIL] >> test_formats.py::TestS3Formats::test_format[v2-test_each_row.json-json_each_row-kikimr_settings0] >> test_bindings_0.py::TestBindings::test_binding_operations[v1-kikimr_settings1-client0] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.zst-zstd] [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query[v1-client0] [FAIL] >> test_s3_0.py::TestS3::test_inference_optional_types[v2-client0] >> test_statistics.py::TestS3::test_egress[v1-client0-json_list] [FAIL] >> test_bindings_1.py::TestBindings::test_pg_binding[v1-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params0-true] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params0-false] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v1-true-client0] >> test_format_setting.py::TestS3::test_bad_format_setting[v2-client0] >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v1-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params2] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002060/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002060/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> 
test_s3_0.py::TestS3::test_inference_optional_types[v2-client0] [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query[v2-client0] >> test_s3_1.py::TestS3::test_precompute[v1-false-client0] >> test_bindings_0.py::TestBindings::test_binding_operations[v1-kikimr_settings1-client0] [FAIL] >> test_formats.py::TestS3Formats::test_format[v2-test_each_row.json-json_each_row-kikimr_settings0] [FAIL] >> test_insert.py::TestS3::test_insert[v1-false-client0-json_each_row-dataset] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params0-false] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params0-true] >> test_statistics.py::TestS3::test_egress[v1-client0-json_each_row] >> test_public_api.py::TestJsonExample::test_json_success >> test_s3_0.py::TestS3::test_inference_multiple_files[v2-client0] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query[v2-client0] [FAIL] >> test_bindings_1.py::TestBindings::test_pg_binding[v1-client0] [FAIL] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.xz-xz] >> test_s3_0.py::TestS3::test_inference_multiple_files[v2-client0] [FAIL] >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v1-client0] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v1-true-client0] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params0-true] [FAIL] >> test_insert.py::TestS3::test_insert[v1-false-client0-json_each_row-dataset] [FAIL] >> test_public_api.py::TestJsonExample::test_json_success [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_stream_execute_scan_query[v1-client0] >> test_formats.py::TestS3Formats::test_format[v2-test_list.json-json_list-kikimr_settings0] >> test_s3_0.py::TestS3::test_inference_file_error[v2-client0] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.xz-xz] [FAIL] >> test_format_setting.py::TestS3::test_bad_format_setting[v2-client0] [FAIL] >> test_s3_1.py::TestS3::test_precompute[v1-false-client0] [FAIL] >> test_bindings_0.py::TestBindings::test_binding_operations[v2-kikimr_settings1-client0] >> test_statistics.py::TestS3::test_egress[v1-client0-json_each_row] [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_stream_execute_scan_query[v1-client0] [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_stream_execute_scan_query[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params0-false] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v2-false-client0] >> test_s3_0.py::TestS3::test_inference_file_error[v2-client0] [FAIL] >> test_bindings_1.py::TestBindings::test_pg_binding[v2-client0] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.gz-gzip] >> test_s3_1.py::TestS3::test_precompute[v1-true-client0] >> test_statistics.py::TestS3::test_egress[v1-client0-csv_with_names] >> test_ydb_over_fq.py::TestYdbOverFq::test_stream_execute_scan_query[v2-client0] [FAIL] >> test_insert.py::TestS3::test_insert[v1-false-client0-json_each_row-dataにちは% set] >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params0-false] [FAIL] >> test_formats.py::TestS3Formats::test_format[v2-test_list.json-json_list-kikimr_settings0] [FAIL] >> test_bindings_0.py::TestBindings::test_binding_operations[v2-kikimr_settings1-client0] [FAIL] >> test_insert.py::TestS3::test_insert[v1-false-client0-json_each_row-dataにちは% set] [FAIL] >> 
test_compressions.py::TestS3Compressions::test_compression[v2-test.json.gz-gzip] [FAIL] >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v2-client0] [FAIL] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.csv-csv_with_names] >> test_s3_1.py::TestS3::test_precompute[v1-true-client0] [FAIL] >> test_statistics.py::TestS3::test_egress[v1-client0-csv_with_names] [FAIL] >> test_statistics.py::TestS3::test_egress[v1-client0-parquet] >> test_bindings_1.py::TestBindings::test_pg_binding[v2-client0] [FAIL] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v1-yql_syntax-client0] >> test_formats.py::TestS3Formats::test_format[v2-test.parquet-parquet-kikimr_settings0] >> test_s3_0.py::TestS3::test_inference_parameters[v2-client0] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v1-kikimr_settings1-client0] >> test_insert.py::TestS3::test_insert[v1-false-client0-csv_with_names-dataset] >> test_formats.py::TestS3Formats::test_format[v2-test.parquet-parquet-kikimr_settings0] [FAIL] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.csv-csv_with_names] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_results[v1-client0] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.csv-csv_with_names] [FAIL] >> test_s3_0.py::TestS3::test_inference_parameters[v2-client0] [FAIL] >> test_statistics.py::TestS3::test_egress[v1-client0-parquet] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-json_list] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params0-true] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.csv-csv_with_names] [FAIL] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.lz4-lz4] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v1-kikimr_settings1-client0] [FAIL] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.tsv-tsv_with_names] >> test_s3_1.py::TestS3::test_precompute[v2-false-client0] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-json_list] [FAIL] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.tsv-tsv_with_names] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v1-yql_syntax-client0] [FAIL] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.tsv-tsv_with_names] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-json_each_row] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.lz4-lz4] [FAIL] >> test_s3_0.py::TestS3::test_inference_timestamp[v2-client0] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_results[v1-client0] [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_results[v2-client0] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v2-kikimr_settings1-client0] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v1-pg_syntax-client0] >> test_s3_1.py::TestS3::test_precompute[v2-false-client0] [FAIL] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.br-brotli] >> test_formats.py::TestS3Formats::test_format_inference[v2-test_each_row.json-json_each_row] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.br-brotli] [FAIL] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> 
test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_0_float-pk_types4-all_types4-index4-Float] [FAIL] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v2-kikimr_settings1-client0] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params0-true] [FAIL] >> test_s3_1.py::TestS3::test_precompute[v2-true-client0] >> test_s3_1.py::TestS3::test_precompute[v2-true-client0] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params0-false] >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v1-client0] >> test_formats.py::TestS3Formats::test_format_inference[v2-test_each_row.json-json_each_row] [FAIL] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.bz2-bzip2] >> test_formats.py::TestS3Formats::test_format_inference[v2-test_list.json-json_list] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.bz2-bzip2] [FAIL] >> test_formats.py::TestS3Formats::test_format_inference[v2-test_list.json-json_list] [FAIL] >> test_s3_1.py::TestS3::test_failed_precompute[v1-false-client0] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.zst-zstd] >> test_s3_1.py::TestS3::test_failed_precompute[v1-false-client0] [FAIL] >> test_s3_1.py::TestS3::test_failed_precompute[v1-true-client0] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.zst-zstd] [FAIL] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.parquet-parquet] >> test_s3_1.py::TestS3::test_failed_precompute[v1-true-client0] [FAIL] >> test_s3_1.py::TestS3::test_failed_precompute[v2-false-client0] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.xz-xz] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.xz-xz] [FAIL] >> test_s3_1.py::TestS3::test_failed_precompute[v2-false-client0] [FAIL] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.parquet-parquet] [FAIL] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.gz-gzip] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.gz-gzip] [FAIL] >> test_formats.py::TestS3Formats::test_btc[v1] >> test_s3_1.py::TestS3::test_failed_precompute[v2-true-client0] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.lz4-lz4] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.lz4-lz4] [FAIL] >> test_formats.py::TestS3Formats::test_btc[v1] [FAIL] >> test_formats.py::TestS3Formats::test_btc[v2] >> test_s3_1.py::TestS3::test_failed_precompute[v2-true-client0] [FAIL] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.br-brotli] >> test_s3_1.py::TestS3::test_missed[v1-false-client0] >> test_s3_0.py::TestS3::test_inference_timestamp[v2-client0] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.tsv-tsv_with_names] [FAIL] Test command err: 2025-05-29T15:31:41.935058Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-29T15:31:41.935098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-29T15:31:41.935114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0014c3/r3tmp/tmpb4O8ah/pdisk_1.dat 2025-05-29T15:31:42.021936Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13659, node 1 2025-05-29T15:31:42.125858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:31:42.125882Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:31:42.125888Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:31:42.125958Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:31:42.126688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-29T15:31:42.203604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:42.203650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:42.215691Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8262 2025-05-29T15:31:42.544555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-29T15:31:43.298888Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-05-29T15:31:43.305997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:43.306035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:43.360060Z node 1 :HIVE WARN: hive_impl.cpp:771: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-29T15:31:43.360650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:43.520231Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:43.520394Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:43.520529Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-05-29T15:31:43.520555Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:43.520567Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:43.520601Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:43.520614Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:43.520627Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:43.520657Z node 2 :HIVE WARN: tx__create_tablet.cpp:342: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-05-29T15:31:43.672854Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:31:43.672888Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:31:43.684307Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:31:43.712924Z node 2 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:31:43.718294Z node 2 :STATISTICS INFO: aggregator_impl.cpp:45: [72075186224037894] OnActivateExecutor 2025-05-29T15:31:43.718318Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-05-29T15:31:43.723719Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-05-29T15:31:43.723900Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-05-29T15:31:43.723916Z node 2 :STATISTICS DEBUG: tx_init.cpp:118: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-05-29T15:31:43.723920Z node 2 :STATISTICS DEBUG: tx_init.cpp:143: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-05-29T15:31:43.723923Z node 2 :STATISTICS DEBUG: tx_init.cpp:182: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-05-29T15:31:43.723928Z node 2 :STATISTICS DEBUG: tx_init.cpp:216: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-05-29T15:31:43.723932Z node 2 :STATISTICS DEBUG: tx_init.cpp:264: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-05-29T15:31:43.723936Z node 2 :STATISTICS DEBUG: tx_init.cpp:271: [72075186224037894] TTxInit::Complete 2025-05-29T15:31:43.724205Z node 2 :STATISTICS INFO: aggregator_impl.cpp:62: [72075186224037894] Subscribed for config changes 2025-05-29T15:31:43.738947Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:43.738972Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7894: ConnectToSA(), pipe client id: [2:1863:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-05-29T15:31:43.739867Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1874:2605] 2025-05-29T15:31:43.740967Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:86: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2622] 
2025-05-29T15:31:43.741204Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:213: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1905:2622], schemeshard id = 72075186224037897 2025-05-29T15:31:43.741786Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-05-29T15:31:43.745359Z node 2 :STATISTICS DEBUG: table_creator.cpp:147: Table _statistics updater. Describe result: PathErrorUnknown 2025-05-29T15:31:43.745372Z node 2 :STATISTICS NOTICE: table_creator.cpp:167: Table _statistics updater. Creating table 2025-05-29T15:31:43.745382Z node 2 :STATISTICS DEBUG: table_creator.cpp:100: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-05-29T15:31:43.747862Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-05-29T15:31:43.749227Z node 2 :STATISTICS DEBUG: table_creator.cpp:190: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-05-29T15:31:43.749255Z node 2 :STATISTICS DEBUG: table_creator.cpp:261: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-05-29T15:31:43.838936Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-05-29T15:31:43.922399Z node 2 :STATISTICS DEBUG: table_creator.cpp:290: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-05-29T15:31:43.977348Z node 2 :STATISTICS DEBUG: table_creator.cpp:362: Table _statistics updater. Column diff is empty, finishing 2025-05-29T15:31:44.464060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2212:3057], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:44.464093Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:31:44.467261Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-05-29T15:31:44.496874Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2323:2855];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:31:44.496930Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2323:2855];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:31:44.496960Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2323:2855];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:31:44.496975Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2323:2855];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:31:44.496990Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2323:2855];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:31:44.497005Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2323:2855];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:31:44.497022Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2323:2855];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:31:44.497040Z node 2 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037899;self_id=[2:2323:2855];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_reg ... 08483Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:37:30.473295Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:37:30.473334Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:37:31.472017Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:285: [72075186224037894] EvPropagateTimeout 2025-05-29T15:37:32.912051Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:37:32.912101Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-05-29T15:37:33.979057Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:330: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-05-29T15:37:33.979148Z node 2 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 2 2025-05-29T15:37:35.451415Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:37:35.451453Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:37:37.719317Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:285: [72075186224037894] EvPropagateTimeout 2025-05-29T15:37:37.913632Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:37:37.913673Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:37:39.048326Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-05-29T15:37:39.048368Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:7876: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-05-29T15:37:39.048374Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:7907: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-05-29T15:37:39.048379Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:7834: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-05-29T15:37:40.574818Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:330: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-05-29T15:37:40.574896Z node 2 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 2 2025-05-29T15:37:40.859201Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:37:40.859237Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:37:43.337240Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:37:43.337277Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:37:44.360780Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:285: [72075186224037894] EvPropagateTimeout 2025-05-29T15:37:45.906097Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:37:45.906134Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:37:46.945413Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:330: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-05-29T15:37:46.945524Z node 2 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 2 2025-05-29T15:37:48.289602Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:37:48.289641Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-05-29T15:37:49.350166Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:285: [72075186224037894] EvPropagateTimeout 2025-05-29T15:37:49.456634Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:37:49.456704Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:37:50.972457Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:330: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-05-29T15:37:50.972575Z node 2 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 2 2025-05-29T15:37:51.129687Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:37:51.129727Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:37:52.443149Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:37:52.443192Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:37:53.058287Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:285: [72075186224037894] EvPropagateTimeout 2025-05-29T15:37:54.031180Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:37:54.031229Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:37:54.713950Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:330: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-05-29T15:37:54.714055Z node 2 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 2 2025-05-29T15:37:55.547522Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:37:55.547560Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:37:56.770155Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:285: [72075186224037894] EvPropagateTimeout 2025-05-29T15:37:56.883112Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:37:56.883151Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:37:58.228612Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:330: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-05-29T15:37:58.228718Z node 2 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 2 2025-05-29T15:37:58.366130Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:37:58.366169Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:37:59.680113Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:37:59.680152Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-05-29T15:38:00.271085Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:285: [72075186224037894] EvPropagateTimeout 2025-05-29T15:38:01.112382Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:38:01.112420Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:38:01.707237Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:330: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-05-29T15:38:01.707359Z node 2 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 2 2025-05-29T15:38:02.461686Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:38:02.461724Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:38:03.777610Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:285: [72075186224037894] EvPropagateTimeout 2025-05-29T15:38:03.896071Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:38:03.896108Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:38:04.570396Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:7864: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-05-29T15:38:04.570443Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:7876: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-05-29T15:38:04.570449Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:7907: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-05-29T15:38:04.570453Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:7834: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-05-29T15:38:05.520632Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:330: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-05-29T15:38:05.520714Z node 2 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 2 2025-05-29T15:38:05.671432Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:38:05.671484Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:38:06.314162Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7991: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-05-29T15:38:06.314196Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:7834: Schedule next SendBaseStatsToSA in 208.000000s, at schemeshard: 72075186224037897 2025-05-29T15:38:06.314273Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:21: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 ... waiting for TEvSchemeShardStats 2 (done) ... waiting for TEvPropagateStatistics 2025-05-29T15:38:06.326596Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:132: [72075186224037894] TTxSchemeShardStats::Complete 2025-05-29T15:38:07.068155Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:38:07.068193Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-05-29T15:38:07.656576Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:285: [72075186224037894] EvPropagateTimeout 2025-05-29T15:38:08.518116Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:660: [72075186224037894] ScheduleNextTraversal 2025-05-29T15:38:08.518154Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:668: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-05-29T15:38:09.091816Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:330: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-05-29T15:38:09.091945Z node 2 :STATISTICS DEBUG: service_impl.cpp:937: EvPropagateStatistics, node id = 2 ... waiting for TEvPropagateStatistics (done) 2025-05-29T15:38:09.092055Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 18 ], ReplyToActorId[ [2:16193:10086]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-05-29T15:38:09.093025Z node 2 :STATISTICS DEBUG: service_impl.cpp:787: [TStatService::TEvNavigateKeySetResult] RequestId[ 18 ] 2025-05-29T15:38:09.093043Z node 2 :STATISTICS DEBUG: service_impl.cpp:1260: ReplySuccess(), request id = 18, ReplyToActorId = [2:16193:10086], StatRequests.size() = 1 >> test_s3_1.py::TestS3::test_missed[v1-false-client0] [FAIL] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.json-json_each_row] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v2-false-client0] [FAIL] >> test_s3_0.py::TestS3::test_inference_projection[v2-client0] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v1-pg_syntax-client0] [FAIL] >> test_insert.py::TestS3::test_insert[v1-false-client0-csv_with_names-dataset] [FAIL] >> test_insert.py::TestS3::test_insert[v1-false-client0-csv_with_names-dataにちは% set] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v2-true-client0] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v2-yql_syntax-client0] >> test_s3_1.py::TestS3::test_missed[v1-true-client0] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_0-pk_types16-all_types16-index16-Int8] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-json_each_row] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-csv_with_names] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_1_float-pk_types3-all_types3-index3-Float] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params0-false] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params0-true] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v2-client0] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002063/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_test_connection/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper 
name='/home/runner/.ya/build/build_root/ciyv/002063/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_test_connection/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 583130 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_formats.py::TestS3Formats::test_btc[v2] [FAIL] >> test_s3_0.py::TestS3::test_inference_projection[v2-client0] [FAIL] >> test_formats.py::TestS3Formats::test_invalid_format[v1-client0] >> test_s3_0.py::TestS3::test_inference_null_column_name[v2-client0] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_results[v2-client0] [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_error[v1-client0] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v2-yql_syntax-client0] [FAIL] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v2-pg_syntax-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v1-client0] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002040/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_public_metrics/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002040/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_public_metrics/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 586759 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v2-true-client0] [FAIL] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.json-json_each_row] [FAIL] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.parquet-parquet] >> test_validation.py::TestS3::test_empty[v1-client0] [FAIL] >> test_validation.py::TestS3::test_empty[v2-client0] >> test_insert.py::TestS3::test_insert[v1-false-client0-csv_with_names-dataにちは% set] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-csv_with_names] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection[v1-false-client0] >> test_insert.py::TestS3::test_insert[v1-false-client0-parquet-dataset] >> test_s3_1.py::TestS3::test_missed[v1-true-client0] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-parquet] >> test_s3_1.py::TestS3::test_missed[v2-false-client0] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.br-brotli] [FAIL] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.bz2-bzip2] >> 
test_bindings_1.py::TestBindings::test_count_for_pg_binding[v2-pg_syntax-client0] [FAIL] >> test_bindings_1.py::TestBindings::test_ast_in_failed_query_compilation[v1-client0] >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v1-client0] [FAIL] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_3-pk_types13-all_types13-index13-Int8] [FAIL] >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v2-client0] >> test_s3_0.py::TestS3::test_inference_null_column_name[v2-client0] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params0-true] [FAIL] >> test_s3_0.py::TestS3::test_inference_unsupported_types[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params1-false] >> test_formats.py::TestS3Formats::test_invalid_format[v1-client0] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_all_types-pk_types17-all_types17-index17-Int8] >> test_validation.py::TestS3::test_empty[v2-client0] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[results-strings.test] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_format[v2-client0] >> test_validation.py::TestS3::test_nested_issues[v1-client0] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-parquet] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-json_list] >> test_explicit_partitioning_0.py::TestS3::test_projection[v1-false-client0] [FAIL] |80.8%| [TA] $(B)/ydb/tests/postgres_integrations/go-libpq/test-results/py3test/{meta.json ... results_accumulator.log} |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_1-pk_types9-all_types9-index9-Uint8] [FAIL] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.bz2-bzip2] [FAIL] >> test_s3_1.py::TestS3::test_missed[v2-false-client0] [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_error[v1-client0] [FAIL] >> test_insert.py::TestS3::test_insert[v1-false-client0-parquet-dataset] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection[v1-true-client0] >> test_postgres.py::TestPGSQL::test_sql_suite[results-window.test] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.zst-zstd] >> test_insert.py::TestS3::test_insert[v1-false-client0-parquet-dataにちは% set] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_error[v2-client0] >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v2-client0] [FAIL] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.parquet-parquet] [FAIL] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.csv-csv_with_names] >> test_s3_1.py::TestS3::test_missed[v2-true-client0] >> test_bindings_1.py::TestBindings::test_ast_in_failed_query_compilation[v1-client0] [FAIL] >> test_bindings_1.py::TestBindings::test_ast_in_failed_query_compilation[v2-client0] >> test_s3_0.py::TestS3::test_inference_unsupported_types[v2-client0] [FAIL] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_0-pk_types16-all_types16-index16-Int8] [FAIL] >> test_s3_0.py::TestS3::test_json_list_formats[v2-client0] >> 
test_formats.py::TestS3Formats::test_invalid_format[v2-client0] [FAIL] >> test_formats.py::TestS3Formats::test_invalid_input_compression[v1-client0] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.zst-zstd] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection[v1-true-client0] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-json_list] [FAIL] >> test_bindings_1.py::TestBindings::test_ast_in_failed_query_compilation[v2-client0] [FAIL] >> test_insert.py::TestS3::test_insert[v1-false-client0-parquet-dataにちは% set] [FAIL] >> test_validation.py::TestS3::test_nested_issues[v1-client0] [FAIL] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.xz-xz] >> test_explicit_partitioning_0.py::TestS3::test_projection[v2-false-client0] >> test_bindings_1.py::TestBindings::test_raw_empty_schema_binding[v1-client0] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_all_types-pk_types11-all_types11-index11-Uint8] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-json_each_row] >> test_validation.py::TestS3::test_nested_issues[v2-client0] >> test_s3_1.py::TestS3::test_missed[v2-true-client0] [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_error[v2-client0] [FAIL] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.csv-csv_with_names] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params1-false] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params1-true] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_insert.py::TestS3::test_insert[v1-true-client0-json_list-dataset] >> test_s3_0.py::TestS3::test_json_list_formats[v2-client0] [FAIL] >> test_formats.py::TestS3Formats::test_invalid_input_compression[v1-client0] [FAIL] >> test_s3_1.py::TestS3::test_simple_hits_47[v1-false-client0] >> test_ydb_over_fq.py::TestYdbOverFq::test_explain_data_query[v1-client0] >> test_formats.py::TestS3Formats::test_invalid_input_compression[v2-client0] >> test_insert.py::TestS3::test_insert[v1-true-client0-json_list-dataset] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection[v2-false-client0] [FAIL] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup [GOOD] >> test_bindings_1.py::TestBindings::test_raw_empty_schema_binding[v1-client0] [FAIL] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.tsv-tsv_with_names] >> test_bindings_1.py::TestBindings::test_raw_empty_schema_binding[v2-client0] >> test_s3_0.py::TestS3::test_csv_with_hopping[v1-client0] >> test_insert.py::TestS3::test_insert[v1-true-client0-json_list-dataにちは% set] >> test_explicit_partitioning_0.py::TestS3::test_projection[v2-true-client0] >> test_ydb_over_fq.py::TestYdbOverFq::test_explain_data_query[v1-client0] [FAIL] >> test_validation.py::TestS3::test_nested_issues[v2-client0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] >> test_validation.py::TestS3::test_nested_type[v1-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params1-true] [FAIL] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.xz-xz] [FAIL] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.gz-gzip] >> 
test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_2-pk_types14-all_types14-index14-Int8] >> test_ydb_over_fq.py::TestYdbOverFq::test_explain_data_query[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params1-false] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-json_each_row] [FAIL] >> test_s3_1.py::TestS3::test_simple_hits_47[v1-false-client0] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_0-pk_types10-all_types10-index10-Uint8] >> test_s3_0.py::TestS3::test_csv_with_hopping[v1-client0] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-csv_with_names] >> test_s3_1.py::TestS3::test_simple_hits_47[v1-true-client0] >> test_s3_0.py::TestS3::test_csv_with_hopping[v2-client0] >> test_formats.py::TestS3Formats::test_invalid_input_compression[v2-client0] [FAIL] >> test_formats.py::TestS3Formats::test_invalid_output_compression[v1-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] >> test_bindings_1.py::TestBindings::test_raw_empty_schema_binding[v2-client0] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection[v2-true-client0] [FAIL] >> test_insert.py::TestS3::test_insert[v1-true-client0-json_list-dataにちは% set] [FAIL] >> test_validation.py::TestS3::test_nested_type[v1-client0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.tsv-tsv_with_names] [FAIL] >> test_public_api.py::TestRecursiveCreation::test_mkdir >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params1-false] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-csv_with_names] [FAIL] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.gz-gzip] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_2_float-pk_types2-all_types2-index2-Float] >> test_ydb_over_fq.py::TestYdbOverFq::test_explain_data_query[v2-client0] [FAIL] >> test_s3_1.py::TestS3::test_simple_hits_47[v1-true-client0] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_all_types-pk_types11-all_types11-index11-Uint8] >> test_formats.py::TestS3Formats::test_invalid_output_compression[v1-client0] [FAIL] >> test_s3_0.py::TestS3::test_csv_with_hopping[v2-client0] [FAIL] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] >> test_bindings_1.py::TestBindings::test_binding_with_backslash_in_location[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client0-year Int32-False] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_2-pk_types8-all_types8-index8-Uint8] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_all_types-pk_types17-all_types17-index17-Int8] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_all_types-pk_types11-all_types11-index11-Uint8] [FAIL] >> test_early_finish.py::TestEarlyFinish::test_early_finish[v1-client0] >> test_postgres.py::TestPGSQL::test_sql_suite[results-window.test] [FAIL] >> 
test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params1-true] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_0-pk_types10-all_types10-index10-Uint8] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [FAIL] >> test_public_api.py::TestRecursiveCreation::test_mkdir [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.json-json_each_row] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-parquet] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.lz4-lz4] >> test_ydb_over_fq.py::TestYdbOverFq::test_describe_table[v1-client0] >> test_s3_1.py::TestS3::test_simple_hits_47[v2-false-client0] >> test_s3_0.py::TestS3::test_raw[v1-false-client0] >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v1-false-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params1-true] [FAIL] >> test_bindings_1.py::TestBindings::test_binding_with_backslash_in_location[v1-client0] [FAIL] >> test_insert.py::TestS3::test_insert[v1-true-client0-json_each_row-dataset] >> test_formats.py::TestS3Formats::test_invalid_output_compression[v2-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_validation.py::TestS3::test_nested_type[v2-client0] >> test_s3_1.py::TestS3::test_simple_hits_47[v2-false-client0] [FAIL] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.lz4-lz4] [FAIL] >> test_public_api.py::TestRecursiveCreation::test_create_table >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.json-json_each_row] [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_describe_table[v1-client0] [FAIL] >> test_s3_0.py::TestS3::test_raw[v1-false-client0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v1-false-client0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [FAIL] >> test_bindings_1.py::TestBindings::test_binding_with_backslash_in_location[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params1-false] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-parquet] [FAIL] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.parquet-parquet] >> test_public_api.py::TestRecursiveCreation::test_create_table [GOOD] >> test_validation.py::TestS3::test_nested_type[v2-client0] [FAIL] >> test_insert.py::TestS3::test_insert[v1-true-client0-json_each_row-dataset] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v1-true-client0] 
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] >> test_ydb_over_fq.py::TestYdbOverFq::test_describe_table[v2-client0] >> test_formats.py::TestS3Formats::test_invalid_output_compression[v2-client0] [FAIL] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.parquet-parquet] [FAIL] >> test_s3_0.py::TestS3::test_raw[v1-true-client0] >> test_insert.py::TestS3::test_insert[v1-true-client0-json_each_row-dataにちは% set] >> test_s3_1.py::TestS3::test_simple_hits_47[v2-true-client0] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.br-brotli] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_4-pk_types6-all_types6-index6-Uint8] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [FAIL] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_all_types_float-pk_types5-all_types5-index5-Float] [FAIL] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_3_float-pk_types1-all_types1-index1-Float] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client0-year Int32-False] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_all_types-pk_types11-all_types11-index11-Uint8] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_2-pk_types14-all_types14-index14-Int8] [FAIL] >> test_push_down.py::TestS3PushDown::test_simple_case[v2-client0] >> test_log_scenario.py::TestLogScenario::test_log_uniform [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params1-false] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params1-true] >> test_early_finish.py::TestEarlyFinish::test_early_finish[v1-client0] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_2_float-pk_types2-all_types2-index2-Float] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] [FAIL] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.csv-csv_with_names] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] >> test_insert.py::TestS3::test_insert[v1-true-client0-json_each_row-dataにちは% set] [FAIL] >> 
test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-json_list] >> test_bindings_1.py::TestBindings::test_binding_with_backslash_in_location[v2-client0] [FAIL] >> test_s3_1.py::TestS3::test_simple_hits_47[v2-true-client0] [FAIL] >> test_bindings_1.py::TestBindings::test_decimal_binding[v1-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params1-true] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v1-true-client0] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v2-false-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client1-year Int32 NOT NULL-False] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.br-brotli] [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_describe_table[v2-client0] [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v1-client0] >> test_s3_0.py::TestS3::test_raw[v1-true-client0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_formats.py::TestS3Formats::test_custom_csv_delimiter_format[v1-client0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test_bindings_1.py::TestBindings::test_decimal_binding[v1-client0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [FAIL] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.csv-csv_with_names] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] [FAIL] >> test_bindings_1.py::TestBindings::test_decimal_binding[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v2-false-client0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client1-year Int32 NOT NULL-False] [FAIL] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.bz2-bzip2] >> test_insert.py::TestS3::test_insert[v1-true-client0-csv_with_names-dataset] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-json_list] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-exact_file-True-client0] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.tsv-tsv_with_names] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params1-false] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v2-true-client0] >> test_formats.py::TestS3Formats::test_custom_csv_delimiter_format[v1-client0] [FAIL] >> test_s3_0.py::TestS3::test_raw[v2-false-client0] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.bz2-bzip2] [FAIL] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.tsv-tsv_with_names] 
[FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v1-client0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [FAIL] >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v2-client0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v2-true-client0] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_validation[v1-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client2-year Uint32-False] >> test_bindings_1.py::TestBindings::test_decimal_binding[v2-client0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.json-json_each_row] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-json_each_row] >> test_s3_0.py::TestS3::test_raw[v2-false-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-exact_file-True-client0] [FAIL] >> test_insert.py::TestS3::test_insert[v1-true-client0-csv_with_names-dataset] [FAIL] >> test_formats.py::TestS3Formats::test_custom_csv_delimiter_format[v2-client0] >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v2-client0] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params1-false] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_insert.py::TestS3::test_insert[v1-true-client0-csv_with_names-dataにちは% set] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_formats.py::TestS3Formats::test_custom_csv_delimiter_format[v2-client0] [FAIL] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.json-json_each_row] [FAIL] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.zst-zstd] >> test_s3_0.py::TestS3::test_raw[v2-true-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-exact_file-False-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client2-year Uint32-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_validation[v1-client0] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_validation[v2-client0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-json_each_row] [FAIL] 
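[Editor's note: the failed chunks that follow drown their verdicts in ResourceWarning noise (unclosed log files, un-reaped subprocesses). A common way to stop such leaks from accumulating is to surface them as test failures. Warnings raised inside `__del__` never propagate as exceptions, so `-W error::ResourceWarning` alone does not catch destructor-time leaks; a hedged pytest-style sketch that records them explicitly instead:

    import gc
    import warnings

    import pytest

    @pytest.fixture(autouse=True)
    def fail_on_resource_warnings():
        # Record ResourceWarnings emitted during the test, including the
        # destructor-time ones triggered by the forced gc.collect() below.
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always", ResourceWarning)
            yield
            gc.collect()  # run finalizers while we are still recording
        leaks = [w for w in caught if issubclass(w.category, ResourceWarning)]
        assert not leaks, f"leaked resources: {[str(w.message) for w in leaks]}"

This is a sketch of the general mechanism, not something this run is configured to do.]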
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [FAIL] >> test_formats.py::TestS3Formats::test_no_not_nullable_column[v1-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [FAIL] >> test_insert.py::TestS3::test_insert[v1-true-client0-csv_with_names-dataにちは% set] [FAIL] >> test_s3_0.py::TestS3::test_raw[v2-true-client0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-exact_file-False-client0] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params1-true] >> test_explicit_partitioning_0.py::TestS3::test_validation[v2-client0] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client3-year Uint32 NOT NULL-True] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [FAIL] >> test_formats.py::TestS3Formats::test_no_not_nullable_column[v1-client0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.zst-zstd] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-csv_with_names] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.parquet-parquet] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-directory_scan-True-client0] >> test_insert.py::TestS3::test_insert[v1-true-client0-parquet-dataset] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [FAIL] >> test_formats.py::TestS3Formats::test_no_not_nullable_column[v2-client0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params1-true] [FAIL] >> test_s3_0.py::TestS3::test_limit[v1-false-kikimr_params0-client0] >> test_insert.py::TestS3::test_insert[v1-true-client0-parquet-dataset] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v1-false-client0] >> test_formats.py::TestS3Formats::test_no_not_nullable_column[v2-client0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-csv_with_names] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-directory_scan-True-client0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [FAIL] >> test_formats.py::TestS3Formats::test_no_nullable_column[v1-client0]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v2-client0] [FAIL]
>> test_s3_0.py::TestS3::test_limit[v1-false-kikimr_params0-client0] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params1-false]
Test command err:
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002057/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002057/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
|80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_all_types-pk_types11-all_types11-index11-Uint8] [FAIL]
|80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_all_types-pk_types17-all_types17-index17-Int8] [FAIL]
|80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-window.test] [FAIL]
|80.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_validation.py::TestS3::test_nested_type[v2-client0] [FAIL]
Test command err:
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002025/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_validation/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002025/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_validation/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 588971 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.parquet-parquet] [FAIL] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.xz-xz] >> test_insert.py::TestS3::test_insert[v1-true-client0-parquet-dataにちは% set] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-parquet] >> test_s3_0.py::TestS3::test_limit[v1-true-kikimr_params0-client0] >> test_formats.py::TestS3Formats::test_no_nullable_column[v1-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-directory_scan-False-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v1-false-client0] [FAIL] >> test_s3_0.py::TestS3::test_limit[v1-true-kikimr_params0-client0] [FAIL] >> test_s3_0.py::TestS3::test_limit[v2-false-kikimr_params0-client0] >> test_insert.py::TestS3::test_insert[v1-true-client0-parquet-dataにちは% set] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-parquet] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params1-false] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-directory_scan-False-client0] [FAIL] >> test_insert.py::TestS3::test_insert[v2-false-client0-json_list-dataset] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.xz-xz] [FAIL] >> test_formats.py::TestS3Formats::test_no_nullable_column[v2-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [FAIL] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.csv-csv_with_names] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v1-true-client0] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-json_list] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-exact_file-True-client0] >> test_insert.py::TestS3::test_insert[v2-false-client0-json_list-dataset] [FAIL] >> test_s3_0.py::TestS3::test_limit[v2-false-kikimr_params0-client0] [FAIL] >> test_s3_0.py::TestS3::test_limit[v2-true-kikimr_params0-client0] >> test_s3_0.py::TestS3::test_limit[v2-true-kikimr_params0-client0] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-json_list] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params1-true] >> test_formats.py::TestS3Formats::test_no_nullable_column[v2-client0] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v1-true-client0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-json_each_row] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.csv-csv_with_names] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-exact_file-True-client0] [FAIL] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.gz-gzip] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v2-false-client0] >> test_insert.py::TestS3::test_insert[v2-false-client0-json_list-dataにちは% set] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params1-true] [FAIL] >> test_s3_0.py::TestS3::test_bad_format[v1-false-client0] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-json_each_row] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-csv_with_names] >> test_formats.py::TestS3Formats::test_invalid_column_type_in_csv[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v2-false-client0] [FAIL] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.gz-gzip] [FAIL] >> test_insert.py::TestS3::test_insert[v2-false-client0-json_list-dataにちは% set] [FAIL] >> test_insert.py::TestS3::test_insert[v2-false-client0-json_each_row-dataset] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-csv_with_names] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-exact_file-False-client0] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.tsv-tsv_with_names] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.lz4-lz4] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params1-false] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v2-true-client0] >> test_insert.py::TestS3::test_insert[v2-false-client0-json_each_row-dataset] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-parquet] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.tsv-tsv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.json-json_each_row] >> test_formats.py::TestS3Formats::test_invalid_column_type_in_csv[v1-client0] [FAIL] >> test_s3_0.py::TestS3::test_bad_format[v1-false-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-exact_file-False-client0] [FAIL] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.lz4-lz4] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v2-true-client0] [FAIL] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-parquet] [FAIL] >> test_s3_0.py::TestS3::test_bad_format[v1-true-client0] >> test_insert.py::TestS3::test_insert[v2-false-client0-json_each_row-dataにちは% set] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.br-brotli] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params1-false] [FAIL]
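[Editor's note: both S3 chunks above trace the leaked handles to the same spot: library/recipes/common/__init__.py:29 opens moto_server.out.log and moto_server.err.log, hands them to subprocess.Popen, and the parent's copies are never closed. A minimal sketch of the pattern and of one fix; the helper names and signature below are illustrative, not the recipe's real API:

    import subprocess

    def start_daemon_leaky(cmd, out_path, err_path):
        # The shape the warnings point at: the two TextIOWrappers opened here
        # are never closed by the parent, so interpreter shutdown reports
        # "ResourceWarning: unclosed file <_io.TextIOWrapper ...>".
        out = open(out_path, "w", encoding="utf-8")
        err = open(err_path, "w", encoding="utf-8")
        return subprocess.Popen(cmd, stdout=out, stderr=err)

    def start_daemon(cmd, out_path, err_path):
        # Fix: Popen duplicates the descriptors into the child, so the parent
        # can close its copies right away; the child keeps writing both logs.
        with open(out_path, "w", encoding="utf-8") as out, \
             open(err_path, "w", encoding="utf-8") as err:
            return subprocess.Popen(cmd, stdout=out, stderr=err)

The returned process still has to be reaped; the "subprocess ... is still running" entries further down are the other half of the same cleanup problem.]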
>> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-directory_scan-True-client0] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.json-json_each_row] [FAIL] >> test_formats.py::TestS3Formats::test_invalid_column_type_in_csv[v2-client0] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-json_list] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v1-false-client0] >> test_s3_0.py::TestS3::test_bad_format[v1-true-client0] [FAIL] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.parquet-parquet] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.br-brotli] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-directory_scan-True-client0] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params1-true] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v1-false-client0] [FAIL] >> test_s3_0.py::TestS3::test_bad_format[v2-false-client0] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.parquet-parquet] [FAIL] >> test_insert.py::TestS3::test_insert[v2-false-client0-json_each_row-dataにちは% set] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params1-true] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v1-true-client0] >> test_formats.py::TestS3Formats::test_invalid_column_type_in_csv[v2-client0] [FAIL] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-json_list] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-directory_scan-False-client0] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.bz2-bzip2] >> test_insert.py::TestS3::test_insert[v2-false-client0-csv_with_names-dataset] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.csv-csv_with_names] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-json_each_row] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-directory_scan-False-client0] [FAIL] >> test_formats.py::TestS3Formats::test_invalid_column_in_parquet[v1-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params2-false] >> test_insert.py::TestS3::test_insert[v2-false-client0-csv_with_names-dataset] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.csv-csv_with_names] [FAIL] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-json_each_row] [FAIL] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.bz2-bzip2] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params2-false] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.tsv-tsv_with_names] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-exact_file-True-client0] >> test_formats.py::TestS3Formats::test_invalid_column_in_parquet[v1-client0] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params2-true] >> test_insert.py::TestS3::test_insert[v2-false-client0-csv_with_names-dataにちは% set] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.tsv-tsv_with_names] [FAIL] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.zst-zstd] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params2-true] [FAIL] >> 
test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.json-json_each_row] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params2-false] >> test_formats.py::TestS3Formats::test_invalid_column_in_parquet[v2-client0] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.zst-zstd] [FAIL] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.xz-xz] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params2-false] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params2-true]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_bindings_1.py::TestBindings::test_decimal_binding[v2-client0] [FAIL]
Test command err:
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00203b/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00203b/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002013/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk31/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_send_message.tables_format_v0-std/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002013/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk31/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_send_message.tables_format_v0-std/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002013/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk31/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_send_message.tables_format_v0-std/cluster/node_1/logfile_4zk8x8yl.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 592956 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
|80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_0-pk_types10-all_types10-index10-Uint8] [FAIL]
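[Editor's note: every warning above carries the same hint because tracemalloc is off by default (it costs memory and time to track allocations). Turning it on makes CPython append the allocation traceback to each ResourceWarning, i.e. the exact open()/Popen call site. The standard CPython switches, nothing YDB-specific:

    # Either switch enables the traceback the log keeps asking for:
    #
    #   python -X tracemalloc=10 -m pytest ...   # command-line option
    #   PYTHONTRACEMALLOC=10 <test command>      # environment variable
    #
    # or programmatically, as early as possible (e.g. in conftest.py):
    import tracemalloc

    tracemalloc.start(10)  # keep up to 10 frames per allocation

    # Each ResourceWarning is then followed by
    # "Object allocated at (most recent call last): ..." naming the open() site.

]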
>> test_s3_0.py::TestS3::test_bad_format[v2-false-client0] [FAIL] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-csv_with_names] >> test_s3_0.py::TestS3::test_bad_format[v2-true-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client3-year Uint32 NOT NULL-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client4-year Int64-False]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00201d/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_receive_attempt_reloads_same_messages.tables_format_v1-after_crutch_batch/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00201d/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_receive_attempt_reloads_same_messages.tables_format_v1-after_crutch_batch/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00201d/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_receive_attempt_reloads_same_messages.tables_format_v1-after_crutch_batch/cluster/node_1/logfile_hct1za7r.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00201d/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_receive_attempt_reloads_same_messages.tables_format_v1-after_crutch_batch/cluster/slot_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00201d/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_receive_attempt_reloads_same_messages.tables_format_v1-after_crutch_batch/cluster/slot_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00201d/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_receive_attempt_reloads_same_messages.tables_format_v1-after_crutch_batch/cluster/slot_1/logfile_da6f2h66.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 591116 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 594364 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002020/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_get_queue_attributes_only_attributes_table.tables_format_v1-std/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002020/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_get_queue_attributes_only_attributes_table.tables_format_v1-std/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002020/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_get_queue_attributes_only_attributes_table.tables_format_v1-std/cluster/node_1/logfile_8xtz335p.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002020/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_get_queue_attributes_only_attributes_table.tables_format_v1-std/cluster/slot_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002020/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_get_queue_attributes_only_attributes_table.tables_format_v1-std/cluster/slot_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002020/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_get_queue_attributes_only_attributes_table.tables_format_v1-std/cluster/slot_1/logfile_o0jboh3b.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 590882 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 594324 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v2-client0] [FAIL]
Test command err:
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002035/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_ydb_over_fq/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002035/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_ydb_over_fq/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 584318 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002015/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_does_not_change_visibility_for_deleted_message.tables_format_v1-std/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002015/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_does_not_change_visibility_for_deleted_message.tables_format_v1-std/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002015/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_does_not_change_visibility_for_deleted_message.tables_format_v1-std/cluster/node_1/logfile_i_uilvjz.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 591395 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00201a/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk52/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_and_read_message.tables_format_v1-fifo/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00201a/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk52/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_and_read_message.tables_format_v1-fifo/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00201a/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk52/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_and_read_message.tables_format_v1-fifo/cluster/node_1/logfile_3f754b57.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00201a/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk52/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_and_read_message.tables_format_v1-fifo/cluster/slot_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00201a/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk52/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_and_read_message.tables_format_v1-fifo/cluster/slot_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00201a/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk52/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_and_read_message.tables_format_v1-fifo/cluster/slot_1/logfile_bb1wsmg9.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 591396 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 594685 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_explicit_partitioning_0.py::TestS3::test_projection_date[v1-true-client0] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v2-false-client0] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-exact_file-True-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-exact_file-False-client0] >> test_formats.py::TestS3Formats::test_invalid_column_in_parquet[v2-client0] [FAIL] >> test_formats.py::TestS3Formats::test_simple_pg_types[v1-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params2-true] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-false] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.xz-xz] [FAIL] >> test_compressions.py::TestS3Compressions::test_invalid_compression[v1-client0] >> test_s3_0.py::TestS3::test_bad_format[v2-true-client0] [FAIL] >> test_insert.py::TestS3::test_insert[v2-false-client0-csv_with_names-dataにちは% set] [FAIL] >> test_s3_0.py::TestS3::test_bad_request_on_invalid_parquet[v2-client0]
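[Editor's note: the contrib/tools/python3/Lib/subprocess.py:1129 entries in the chunks above come from Popen.__del__: a child process (a cluster node or a moto server) was never reaped before its Popen object was garbage-collected. The usual cure is an explicit terminate/wait in teardown; a hedged pytest-style sketch, with the daemon command as a stand-in for whatever the harness actually launches:

    import subprocess

    import pytest

    @pytest.fixture(scope="session")
    def daemon_process():
        # Stand-in for the harness's moto_server / cluster-node child.
        proc = subprocess.Popen(["moto_server", "--port", "5000"])
        try:
            yield proc
        finally:
            proc.terminate()
            try:
                proc.wait(timeout=10)  # reaping silences "is still running"
            except subprocess.TimeoutExpired:
                proc.kill()
                proc.wait()

]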
>> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.json-json_each_row] [FAIL] >> test_insert.py::TestS3::test_insert[v2-false-client0-parquet-dataset] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-csv_with_names] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client4-year Int64-False] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.parquet-parquet] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-parquet] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client5-year Int64 NOT NULL-False] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-exact_file-False-client0] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v2-false-client0] [FAIL] >> test_formats.py::TestS3Formats::test_simple_pg_types[v1-client0] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v2-true-client0] >> test_formats.py::TestS3Formats::test_simple_pg_types[v2-client0] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-directory_scan-True-client0]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002017/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk48/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_invalid_queue_url.tables_format_v0/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002017/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk48/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_invalid_queue_url.tables_format_v0/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002017/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk48/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_invalid_queue_url.tables_format_v0/cluster/node_1/logfile_7rnj7coi.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002017/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk48/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_invalid_queue_url.tables_format_v0/cluster/slot_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002017/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk48/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_invalid_queue_url.tables_format_v0/cluster/slot_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002017/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk48/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_invalid_queue_url.tables_format_v0/cluster/slot_1/logfile_n_33vytw.log'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 592346 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 595618 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
|81.0%| [TA] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_2-pk_types8-all_types8-index8-Uint8] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-false] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-true] >> test_s3_0.py::TestS3::test_bad_request_on_invalid_parquet[v2-client0] [FAIL] >> test_s3_0.py::TestS3::test_bad_request_on_compression[v2-client0] >> test_compressions.py::TestS3Compressions::test_invalid_compression[v1-client0] [FAIL] >> test_compressions.py::TestS3Compressions::test_invalid_compression[v2-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.parquet-parquet] [FAIL] >> test_insert.py::TestS3::test_insert[v2-false-client0-parquet-dataset] [FAIL] >> test_insert.py::TestS3::test_insert[v2-false-client0-parquet-dataにちは% set] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-parquet] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client5-year Int64 NOT NULL-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client6-year Uint64-False] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.csv-csv_with_names] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-json_list] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v2-true-client0] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_validate_columns[v1-client0] >> test_s3_0.py::TestS3::test_bad_request_on_compression[v2-client0] [FAIL] >> test_s3_0.py::TestS3::test_checkpoints_on_join_s3_with_yds[v1-mvp_external_ydb_endpoint0-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-true] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params2-false] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-directory_scan-True-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-directory_scan-False-client0] >> test_compressions.py::TestS3Compressions::test_invalid_compression[v2-client0] [FAIL] >> test_compressions.py::TestS3Compressions::test_invalid_compression_inference[v2-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.csv-csv_with_names] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client6-year Uint64-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client7-year Uint64 NOT NULL-False] >>
test_formats.py::TestS3Formats::test_simple_pg_types[v2-client0] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.tsv-tsv_with_names] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] >> test_push_down.py::TestS3PushDown::test_simple_case[v2-client0] [FAIL] >> test_push_down.py::TestS3PushDown::test_simple_case[v1-client0] >> test_insert.py::TestS3::test_insert[v2-false-client0-parquet-dataにちは% set] [FAIL] >> test_formats.py::TestS3Formats::test_precompute[v1-client0] >> test_insert.py::TestS3::test_insert[v2-true-client0-json_list-dataset] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-json_list] [FAIL] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-json_each_row] >> test_explicit_partitioning_0.py::TestS3::test_projection_validate_columns[v1-client0] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_validate_columns[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params2-false] [FAIL] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params2-true] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_all_types_float-pk_types5-all_types5-index5-Float] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-directory_scan-False-client0] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client7-year Uint64 NOT NULL-False] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-exact_file-True-client0] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_3-pk_types7-all_types7-index7-Uint8] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client8-year String NOT NULL-True] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_1-pk_types15-all_types15-index15-Int8] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_3-pk_types7-all_types7-index7-Uint8] >> test_compressions.py::TestS3Compressions::test_invalid_compression_inference[v2-client0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test_push_down.py::TestS3PushDown::test_simple_case[v1-client0] [FAIL] >> test_insert.py::TestS3::test_insert[v2-true-client0-json_list-dataset] [FAIL] >> test_insert.py::TestS3::test_insert[v2-true-client0-json_list-dataにちは% set] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params2-true] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_validate_columns[v2-client0] [FAIL] >> test_formats.py::TestS3Formats::test_precompute[v1-client0] [FAIL] >> test_formats.py::TestS3Formats::test_precompute[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params2-false] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.tsv-tsv_with_names] [FAIL] >> 
test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.json-json_each_row] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_4-pk_types12-all_types12-index12-Int8] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v1-false-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_4-pk_types6-all_types6-index6-Uint8] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client8-year String NOT NULL-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client9-year String-False] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-json_each_row] [FAIL] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-csv_with_names] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.json-json_each_row] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.parquet-parquet] >> test_s3_0.py::TestS3::test_checkpoints_on_join_s3_with_yds[v1-mvp_external_ydb_endpoint0-client0] [FAIL] >> test_s3_0.py::TestS3::test_double_optional_types_validation[v2-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params2-false] [FAIL] >> test_formats.py::TestS3Formats::test_precompute[v2-client0] [FAIL] >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v1-client0]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_early_finish.py::TestEarlyFinish::test_early_finish[v1-client0] [FAIL]
Test command err:
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002022/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_early_finish/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002022/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_early_finish/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 595562 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v1-false-client0] [FAIL] >> test_insert.py::TestS3::test_insert[v2-true-client0-json_list-dataにちは% set] [FAIL]
|81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_all_types-pk_types11-all_types11-index11-Uint8] [FAIL]
>> test_insert.py::TestS3::test_insert[v2-true-client0-json_each_row-dataset] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params2-true] >>
test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v1-true-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client9-year String-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client10-year Utf8-False] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-csv_with_names] [FAIL] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-parquet] >> test_public_api.py::TestAttributes::test_create_table >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-exact_file-True-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-exact_file-False-client0] >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v1-client0] [FAIL] >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params2-true] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.parquet-parquet] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.csv-csv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client10-year Utf8-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client11-year Utf8 NOT NULL-True] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-false] >> test_insert.py::TestS3::test_insert[v2-true-client0-json_each_row-dataset] [FAIL] >> test_insert.py::TestS3::test_insert[v2-true-client0-json_each_row-dataにちは% set] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v1-true-client0] [FAIL] |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_2-pk_types14-all_types14-index14-Int8] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v2-false-client0] |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_2_float-pk_types2-all_types2-index2-Float] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-exact_file-False-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-directory_scan-True-client0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-false] [FAIL] >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v2-client0] [FAIL] >> test_formats.py::TestS3Formats::test_with_infer_and_unsupported_option[v2-client0] |81.0%| [TA] $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-true] >> test_s3_0.py::TestS3::test_double_optional_types_validation[v2-client0] [FAIL] >> test_s3_0.py::TestS3::test_json_list_validation[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client11-year Utf8 NOT NULL-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client12-year Date-False] |81.0%| [TA] {RESULT} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_insert.py::TestS3::test_insert[v2-true-client0-json_each_row-dataにちは% set] [FAIL] >> test_insert.py::TestS3::test_insert[v2-true-client0-csv_with_names-dataset] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v2-false-client0] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v2-true-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.csv-csv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.tsv-tsv_with_names] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-true] [FAIL] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-directory_scan-True-client0] [FAIL] >> test_s3_0.py::TestS3::test_json_list_validation[v1-client0] [FAIL] >> test_s3_0.py::TestS3::test_json_list_validation[v2-client0] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-directory_scan-False-client0] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-parquet] [FAIL] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-json_list] >> test_formats.py::TestS3Formats::test_with_infer_and_unsupported_option[v2-client0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] >> test_insert.py::TestS3::test_insert[v2-true-client0-csv_with_names-dataset] [FAIL] >> test_insert.py::TestS3::test_insert[v2-true-client0-csv_with_names-dataにちは% set] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] >> test_s3_0.py::TestS3::test_json_list_validation[v2-client0] [FAIL] >> test_s3_0.py::TestS3::test_schema_validation[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v2-true-client0] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client0-year Int32 NOT NULL-True] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client12-year Date-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client13-year Date NOT NULL-True] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-json_list] [FAIL] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-json_each_row] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_4_float-pk_types0-all_types0-index0-Float] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.tsv-tsv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.json-json_each_row] >> 
test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-directory_scan-False-client0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_push_down.py::TestS3PushDown::test_simple_case[v1-client0] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00201c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_push_down/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00201c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_push_down/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 596428 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-hive-False-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> test_s3_0.py::TestS3::test_schema_validation[v1-client0] [FAIL] >> test_s3_0.py::TestS3::test_schema_validation[v2-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_compressions.py::TestS3Compressions::test_invalid_compression_inference[v2-client0] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002053/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_compressions/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002053/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_compressions/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 582171 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] [FAIL] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_insert.py::TestS3::test_insert[v2-true-client0-csv_with_names-dataにちは% set] [FAIL] >> test_insert.py::TestS3::test_insert[v2-true-client0-parquet-dataset] |81.0%| [TA] $(B)/ydb/tests/functional/suite_tests/test-results/py3test/{meta.json ... results_accumulator.log} |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_2-pk_types8-all_types8-index8-Uint8] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client0-year Int32 NOT NULL-True] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client1-year Uint32 NOT NULL-True] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.json-json_each_row] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client13-year Date NOT NULL-True] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.parquet-parquet] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client14-year Datetime-False] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-json_each_row] [FAIL] >> test_s3_0.py::TestS3::test_schema_validation[v2-client0] [FAIL] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-csv_with_names] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [FAIL] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-hive-False-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-hive-True-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client14-year Datetime-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client15-year Datetime NOT NULL-True] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] |81.0%| [TA] {RESULT} 
$(B)/ydb/tests/functional/suite_tests/test-results/py3test/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [FAIL] >> test_insert.py::TestS3::test_insert[v2-true-client0-parquet-dataset] [FAIL] >> test_insert.py::TestS3::test_insert[v2-true-client0-parquet-dataにちは% set] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_all_types_float-pk_types5-all_types5-index5-Float] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.parquet-parquet] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] [FAIL] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-csv_with_names] [FAIL] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-parquet] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.csv-csv_with_names] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-hive-True-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-projection-False-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002011/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_empty_queue_url.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002011/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_empty_queue_url.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002011/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_empty_queue_url.tables_format_v0/cluster/node_1/logfile_yb9p5_ut.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 597226 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client1-year Uint32 NOT NULL-True] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client2-year Uint64 NOT NULL-True] >> test_public_api.py::TestAttributes::test_create_table [GOOD] >> test_public_api.py::TestAttributes::test_copy_table >> test_public_metrics.py::TestPublicMetrics::test_select_limit[v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002006/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_visibility_timeout_works.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002006/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_visibility_timeout_works.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002006/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_visibility_timeout_works.tables_format_v1/cluster/node_1/logfile_tosyjjlw.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 597828 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] [FAIL] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_missing_fields >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v1] >> test_insert.py::TestS3::test_insert[v2-true-client0-parquet-dataにちは% set] [FAIL] >> test_insert.py::TestS3::test_big_json_list_insert[v1-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_public_api.py::TestAttributes::test_copy_table [GOOD] >> test_public_api.py::TestAttributes::test_create_indexed_table >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client15-year Datetime NOT NULL-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client0-year Int32-False] >> test_public_api.py::TestAttributes::test_create_indexed_table [GOOD] >> test_public_api.py::TestAttributes::test_alter_table >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-projection-False-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-projection-True-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.csv-csv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.tsv-tsv_with_names] >> test_public_api.py::TestAttributes::test_alter_table [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00200b/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_invalid_queue_url.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00200b/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_invalid_queue_url.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00200b/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_invalid_queue_url.tables_format_v0/cluster/node_1/logfile_5__1lwiw.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 597718 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-parquet] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] >> test_public_api.py::TestAttributes::test_limits[attributes1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case2[v1] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-json_list] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_all_types-pk_types17-all_types17-index17-Int8] >> test_public_api.py::TestAttributes::test_limits[attributes2] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes3] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] >> test_public_api.py::TestAttributes::test_limits[attributes3] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes4] >> test_public_api.py::TestAttributes::test_limits[attributes4] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client0-year Int32-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client1-year Int32 NOT NULL-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-true] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002038/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002038/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client2-year Uint64 NOT NULL-True] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client3-year Date NOT NULL-False] >> test_insert.py::TestS3::test_big_json_list_insert[v1-client0] [FAIL] >> test_insert.py::TestS3::test_big_json_list_insert[v2-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] [FAIL] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-projection-True-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-hive-False-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_formats.py::TestS3Formats::test_with_infer_and_unsupported_option[v2-client0] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002066/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_formats/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002066/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_formats/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.tsv-tsv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.json-json_each_row] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-json_list] [FAIL] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-json_each_row] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client1-year Int32 NOT NULL-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client2-year Uint32-False] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [FAIL] >> 
test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_1-pk_types15-all_types15-index15-Int8] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] >> test_public_metrics.py::TestPublicMetrics::test_select_limit[v1] [FAIL] >> test_public_metrics.py::TestPublicMetrics::test_select_unlimited[v1-STATS_MODE_FULL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002010/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk51/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_read_dont_stall.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002010/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk51/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_read_dont_stall.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002010/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk51/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_read_dont_stall.tables_format_v1/cluster/node_1/logfile_6k4eal41.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002010/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk51/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_read_dont_stall.tables_format_v1/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002010/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk51/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_read_dont_stall.tables_format_v1/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002010/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk51/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_read_dont_stall.tables_format_v1/cluster/slot_1/logfile_7ad4u3ru.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 596987 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback 
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 600910 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |81.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] >> test_insert.py::TestS3::test_big_json_list_insert[v2-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-hive-False-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-hive-True-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v1] [FAIL] >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v2] >> test_insert.py::TestS3::test_insert_csv_delimiter[v1-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [FAIL] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_missing_fields [FAIL] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_use_unsupported_predicate >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_4-pk_types12-all_types12-index12-Int8] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [FAIL] |81.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/py3test >> test_log_scenario.py::TestLogScenario::test_log_uniform [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client3-year Date NOT NULL-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client4-year String NOT NULL-True] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.json-json_each_row] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.parquet-parquet] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client2-year Uint32-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client3-year Uint32 NOT NULL-True] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_3-pk_types7-all_types7-index7-Uint8] [FAIL] |81.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_all_types_float-pk_types5-all_types5-index5-Float] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_3-pk_types7-all_types7-index7-Uint8] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002008/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk49/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_multi_read_dont_stall.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the 
object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002008/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk49/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_multi_read_dont_stall.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002008/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk49/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_multi_read_dont_stall.tables_format_v1/cluster/node_1/logfile_o8sv5ysy.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002008/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk49/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_multi_read_dont_stall.tables_format_v1/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002008/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk49/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_multi_read_dont_stall.tables_format_v1/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002008/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk49/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_multi_read_dont_stall.tables_format_v1/cluster/slot_1/logfile_40s2cbgw.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 597384 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 601683 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002000/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_change_visibility_batch_works.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002000/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_change_visibility_batch_works.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation 
traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002000/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_change_visibility_batch_works.tables_format_v1-fifo/cluster/node_1/logfile_i6zcmrmk.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002000/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_change_visibility_batch_works.tables_format_v1-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002000/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_change_visibility_batch_works.tables_format_v1-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002000/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_change_visibility_batch_works.tables_format_v1-fifo/cluster/slot_1/logfile_xwl_lb7p.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 598772 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 602169 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-json_each_row] [FAIL] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-csv_with_names] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_insert.py::TestS3::test_insert_csv_delimiter[v1-client0] [FAIL] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case2[v1] [FAIL] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case3[v1] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-hive-True-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-projection-False-client0] >> test_insert.py::TestS3::test_insert_csv_delimiter[v2-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002003/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk54/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_message_batch.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/002003/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk54/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_message_batch.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002003/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk54/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_message_batch.tables_format_v1-fifo/cluster/node_1/logfile_5w6g8rs0.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002003/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk54/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_message_batch.tables_format_v1-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/002003/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk54/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_message_batch.tables_format_v1-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/002003/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk54/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_message_batch.tables_format_v1-fifo/cluster/slot_1/logfile_hcxx773l.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 597949 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 602231 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_s3_0.py::TestS3::test_schema_validation[v2-client0] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002032/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002032/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed 
ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client3-year Uint32 NOT NULL-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client4-year Int64-False] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_0_float-pk_types4-all_types4-index4-Float] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00200e/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk53/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_message.tables_format_v0-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00200e/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk53/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_message.tables_format_v0-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00200e/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk53/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_message.tables_format_v0-std/cluster/node_1/logfile_ahpln6g4.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00200e/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk53/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_message.tables_format_v0-std/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/00200e/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk53/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_message.tables_format_v0-std/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/00200e/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk53/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_send_message.tables_format_v0-std/cluster/slot_1/logfile_2ta_ryen.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 596908 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 602178 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.parquet-parquet] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.csv-csv_with_names] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client4-year String NOT NULL-True] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client5-year String-False] >> test_public_metrics.py::TestPublicMetrics::test_select_unlimited[v1-STATS_MODE_FULL] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v2] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client4-year Int64-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client5-year Int64 NOT NULL-False] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [FAIL] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-csv_with_names] [FAIL] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-parquet] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-projection-False-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-projection-True-client0] >> test_insert.py::TestS3::test_insert_csv_delimiter[v2-client0] [FAIL] >> test_insert.py::TestS3::test_append[v1-client0] >> test_select_1.py::TestSelect1::test_select_z_x_y[v1] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_use_unsupported_predicate [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.csv-csv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.tsv-tsv_with_names] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case3[v1] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_3_float-pk_types1-all_types1-index1-Float] >> test_insert.py::TestS3::test_append[v1-client0] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fff/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_get_queue_attributes_only_attributes_table.tables_format_v1-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001fff/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_get_queue_attributes_only_attributes_table.tables_format_v1-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fff/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_get_queue_attributes_only_attributes_table.tables_format_v1-std/cluster/node_1/logfile_2a7uxmz8.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 602908 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-projection-True-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-hive-False-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client5-year Int64 NOT NULL-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client6-year Uint64-False] >> test_insert.py::TestS3::test_append[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client5-year String-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client6-year Utf8 NOT NULL-True] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-parquet] [FAIL] >> test_statistics.py::TestS3::test_precompute[v2-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_4-pk_types6-all_types6-index6-Uint8] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.tsv-tsv_with_names] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_all_types-pk_types17-all_types17-index17-Int8] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.json-json_each_row] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_2-pk_types8-all_types8-index8-Uint8] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client6-year Uint64-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client7-year Uint64 NOT NULL-False] 
>> test_insert.py::TestS3::test_append[v2-client0] [FAIL] >> test_insert.py::TestS3::test_part_split[v1-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-hive-False-client0] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client6-year Utf8 NOT NULL-True] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client7-year Utf8-False] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-hive-True-client0] >> test_statistics.py::TestS3::test_precompute[v2-client0] [FAIL] >> test_statistics.py::TestS3::test_precompute[v1-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_select_1.py::TestSelect1::test_select_z_x_y[v1] [FAIL] |81.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_3-pk_types7-all_types7-index7-Uint8] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_public_metrics.py::TestPublicMetrics::test_select_unlimited[v1-STATS_MODE_FULL] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_select_1.py::TestSelect1::test_select_z_x_y[v2] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client7-year Uint64 NOT NULL-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client8-year String NOT NULL-True] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.json-json_each_row] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.parquet-parquet] |81.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_3-pk_types7-all_types7-index7-Uint8] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] >> test_insert.py::TestS3::test_part_split[v1-client0] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v2] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> 
test_insert.py::TestS3::test_part_split[v2-client0] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_4_float-pk_types0-all_types0-index0-Float] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-hive-True-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-projection-False-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_use_unsupported_predicate [FAIL] Test command err: contrib/python/PyYAML/py3/yaml/scanner.py:286: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client7-year Utf8-False] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client8-year Int32-False] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_statistics.py::TestS3::test_precompute[v1-client0] [FAIL] >> test_statistics.py::TestS3::test_sum[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client8-year String NOT NULL-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client9-year String-False] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_1-pk_types15-all_types15-index15-Int8] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_4_float-pk_types0-all_types0-index0-Float] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.parquet-parquet] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.csv-csv_with_names] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] >> test_select_1.py::TestSelect1::test_select_z_x_y[v2] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001ffc/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk45/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_does_not_change_visibility_for_deleted_message.tables_format_v1-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ffc/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk45/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_does_not_change_visibility_for_deleted_message.tables_format_v1-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ffc/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk45/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_does_not_change_visibility_for_deleted_message.tables_format_v1-std/cluster/node_1/logfile_36cotaol.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ffc/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk45/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_does_not_change_visibility_for_deleted_message.tables_format_v1-std/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ffc/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk45/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_does_not_change_visibility_for_deleted_message.tables_format_v1-std/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ffc/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk45/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_does_not_change_visibility_for_deleted_message.tables_format_v1-std/cluster/slot_1/logfile_pimt7s03.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 603703 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 605109 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-projection-False-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-projection-True-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client9-year String-False] [FAIL] >> 
test_insert.py::TestS3::test_part_split[v2-client0] [FAIL] >> test_insert.py::TestS3::test_part_merge[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client10-year Utf8-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case3[v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client8-year Int32-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client9-year Uint32-False] >> test_statistics.py::TestS3::test_sum[v2-client0] [FAIL] >> test_statistics.py::TestS3::test_sum[v1-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.csv-csv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.tsv-tsv_with_names] |81.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_4-pk_types6-all_types6-index6-Uint8] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ff9/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_create_q_twice.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ff9/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_create_q_twice.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ff9/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_create_q_twice.tables_format_v0-fifo/cluster/node_1/logfile_09w9cstb.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 604603 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-projection-True-client0] [FAIL] >> 
test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-hive-False-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client10-year Utf8-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client11-year Utf8 NOT NULL-True] >> test_invalid_consumer.py::TestConsumer::test_invalid[v1] |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_1-pk_types15-all_types15-index15-Int8] [FAIL] |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_4-pk_types12-all_types12-index12-Int8] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_0_float-pk_types4-all_types4-index4-Float] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client9-year Uint32-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client10-year Int64 NOT NULL-True] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [FAIL] >> test_insert.py::TestS3::test_part_merge[v1-client0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] >> test_insert.py::TestS3::test_part_merge[v2-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.tsv-tsv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.json-json_each_row] >> test_statistics.py::TestS3::test_sum[v1-client0] [FAIL] >> test_statistics.py::TestS3::test_aborted_by_user[v2-client0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_1_float-pk_types3-all_types3-index3-Float] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client11-year Utf8 NOT NULL-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client12-year Date-False] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [FAIL] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-hive-False-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-hive-True-client0] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_2_float-pk_types2-all_types2-index2-Float] >> test_row_dispatcher.py::TestPqRowDispatcher::test_sensors >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client10-year Int64 NOT NULL-True] [FAIL] >> 
test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client11-year Int64-False] >> test_insert.py::TestS3::test_part_merge[v2-client0] [FAIL] >> test_insert.py::TestS3::test_part_binding[v1-client0-json_list] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] >> test_statistics.py::TestS3::test_aborted_by_user[v2-client0] [FAIL] >> test_invalid_consumer.py::TestConsumer::test_invalid[v1] [FAIL] >> test_statistics.py::TestS3::test_aborted_by_user[v1-client0] >> test_kill_pq_bill.py::TestKillPqBill::test_do_not_bill_pq[v1-mvp_external_ydb_endpoint0] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.json-json_each_row] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.parquet-parquet] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ff6/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_read_dont_stall.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ff6/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_read_dont_stall.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ff6/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_read_dont_stall.tables_format_v1/cluster/node_1/logfile_ni3ls5no.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 605825 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client12-year Date-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client13-year Date NOT NULL-True] |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_4_float-pk_types0-all_types0-index0-Float] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_select_z_x_y[v2] [FAIL] Test command err: sys:1: ResourceWarning: unclosed 
ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_insert.py::TestS3::test_part_binding[v1-client0-json_list] [FAIL] >> test_insert.py::TestS3::test_part_binding[v1-client0-json_each_row] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-hive-True-client0] [FAIL] >> test_public_api.py::TestDocApiTables::test_create_table >> test_statistics.py::TestS3::test_aborted_by_user[v1-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-projection-False-client0] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_3-pk_types13-all_types13-index13-Int8] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.parquet-parquet] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.csv-csv_with_names] >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client11-year Int64-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client12-year Uint64-False] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [FAIL] >> test_kill_pq_bill.py::TestKillPqBill::test_do_not_bill_pq[v1-mvp_external_ydb_endpoint0] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client13-year Date NOT NULL-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client14-year Datetime-False] >> test_insert.py::TestS3::test_part_binding[v1-client0-json_each_row] [FAIL] >> test_insert.py::TestS3::test_part_binding[v1-client0-csv_with_names] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_0-pk_types10-all_types10-index10-Uint8] |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_0_float-pk_types4-all_types4-index4-Float] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ff3/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_validates_group_id.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001ff3/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_validates_group_id.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ff3/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_validates_group_id.tables_format_v0/cluster/node_1/logfile_tqjhr9f3.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 606677 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.csv-csv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.tsv-tsv_with_names] |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_all_types-pk_types17-all_types17-index17-Int8] [FAIL] >> test_row_dispatcher.py::TestPqRowDispatcher::test_sensors [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client12-year Uint64-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client13-year Date-False] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-projection-False-client0] [FAIL] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-projection-True-client0] >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_not_null >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client14-year Datetime-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client15-year Datetime NOT NULL-True] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-finished] >> test_insert.py::TestS3::test_part_binding[v1-client0-csv_with_names] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] >> test_insert.py::TestS3::test_part_binding[v2-client0-json_list] >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types_without_predicate >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] >> 
test_vector_index.py::TestVectorIndex::test_vector_index[table_index_2-pk_types8-all_types8-index8-Uint8] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] [FAIL] |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.tsv-tsv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.json-json_each_row] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client13-year Date-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client0-year Int32 NOT NULL-True] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-projection-True-client0] [FAIL] >> test_s3_1.py::TestS3::test_huge_source[v1-false-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client15-year Datetime NOT NULL-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client0-year Int32-False] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_4-pk_types12-all_types12-index12-Int8] >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start_with_filter >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_3_float-pk_types1-all_types1-index1-Float] [FAIL] >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_not_null [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-finished] [FAIL] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-aborted] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] >> test_insert.py::TestS3::test_part_binding[v2-client0-json_list] [FAIL] >> test_insert.py::TestS3::test_part_binding[v2-client0-json_each_row] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client0-year Int32 NOT NULL-True] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_4_float-pk_types0-all_types0-index0-Float] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client0-year Int32-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client1-year Int32 NOT NULL-False] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_kill_pq_bill.py::TestKillPqBill::test_do_not_bill_pq[v1-mvp_external_ydb_endpoint0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client1-year Uint32 NOT NULL-True] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.json-json_each_row] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.parquet-parquet] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_1-pk_types15-all_types15-index15-Int8] [FAIL] >> test_s3_1.py::TestS3::test_huge_source[v1-false-client0] [FAIL] >> test_s3_1.py::TestS3::test_huge_source[v1-true-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [FAIL] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types_without_predicate [FAIL] >> test_row_dispatcher.py::TestPqRowDispatcher::test_read_raw_format_with_row_dispatcher >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start_with_filter [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_insert.py::TestS3::test_part_binding[v2-client0-json_each_row] [FAIL] >> test_insert.py::TestS3::test_part_binding[v2-client0-csv_with_names] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client1-year Int32 NOT NULL-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client1-year Uint32 NOT NULL-True] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client2-year Uint64 NOT NULL-True] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client2-year Uint32-False] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_1_float-pk_types3-all_types3-index3-Float] [FAIL] >> test_s3_1.py::TestS3::test_huge_source[v1-true-client0] [FAIL] >> test_s3_1.py::TestS3::test_huge_source[v2-false-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_statistics.py::TestS3::test_aborted_by_user[v1-client0] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002044/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_statistics/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/002044/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_statistics/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 584623 is still running ResourceWarning: Enable 
tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.parquet-parquet] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.csv-csv_with_names] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [FAIL] >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_time_order_recoverer[v1-kikimr0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [FAIL] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-aborted] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fec/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk40/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_create_q_twice.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fec/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk40/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_create_q_twice.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fec/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk40/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_create_q_twice.tables_format_v0-fifo/cluster/node_1/logfile_ked7pgfx.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fec/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk40/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_create_q_twice.tables_format_v0-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fec/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk40/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_create_q_twice.tables_format_v0-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fec/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk40/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_create_q_twice.tables_format_v0-fifo/cluster/slot_1/logfile_wr0x7o52.log'> ResourceWarning: Enable 
tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 607968 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 610245 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ff1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk44/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_delete_message_works.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ff1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk44/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_delete_message_works.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ff1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk44/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_delete_message_works.tables_format_v0/cluster/node_1/logfile_jbnjy_nr.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ff1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk44/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_delete_message_works.tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ff1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk44/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_delete_message_works.tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ff1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk44/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_delete_message_works.tables_format_v0/cluster/slot_1/logfile_y3dmit9_.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 608047 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 610636 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> 
test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] >> test_public_api.py::TestDocApiTables::test_create_table [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] >> test_insert.py::TestS3::test_part_binding[v2-client0-csv_with_names] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_not_null [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fee/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk42/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_delay_one_message.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fee/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk42/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_delay_one_message.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fee/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk42/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_delay_one_message.tables_format_v0-fifo/cluster/node_1/logfile_7fbkbo3y.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fee/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk42/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_delay_one_message.tables_format_v0-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fee/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk42/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_delay_one_message.tables_format_v0-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/001fee/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk42/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_delay_one_message.tables_format_v0-fifo/cluster/slot_1/logfile_koycjuvo.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 608048 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 611334 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_insert.py::TestS3::test_error[v1-client0-json_each_row] >> test_s3_1.py::TestS3::test_huge_source[v2-false-client0] [FAIL] >> test_s3_1.py::TestS3::test_huge_source[v2-true-client0] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] >> test_yds_bindings.py::TestBindings::test_yds_insert[v1] [SKIPPED] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case1[v1] >> test_row_dispatcher.py::TestPqRowDispatcher::test_read_raw_format_with_row_dispatcher [FAIL] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_2-pk_types14-all_types14-index14-Int8] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.csv-csv_with_names] [FAIL] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.tsv-tsv_with_names] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fe6/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_deduplication.tables_format_v1-by_deduplication_id/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fe6/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_deduplication.tables_format_v1-by_deduplication_id/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fe6/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_deduplication.tables_format_v1-by_deduplication_id/cluster/node_1/logfile__6d8hl6x.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 610013 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |81.2%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_3_float-pk_types1-all_types1-index1-Float] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fe9/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_receive_attempt_reloads_same_messages.tables_format_v1-after_crutch_batch/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fe9/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_receive_attempt_reloads_same_messages.tables_format_v1-after_crutch_batch/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fe9/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_receive_attempt_reloads_same_messages.tables_format_v1-after_crutch_batch/cluster/node_1/logfile_g2y7uqzf.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 609375 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client2-year Uint32-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client3-year Uint32 NOT NULL-True] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test_s3_1.py::TestS3::test_huge_source[v2-true-client0] [FAIL] >> test_s3_1.py::TestS3::test_top_level_listing[v1-false-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client2-year Uint64 NOT NULL-True] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client3-year Date NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client3-year Uint32 NOT NULL-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client4-year Int64-False] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> 
KqpScheme::AlterColumnTableTiering 2025-05-29 15:39:01,540 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-29 15:39:01,671 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 4119615 46.2M 44.4M 23.3M test_tool run_ut @/home/runner/.ya/build/build_root/ciyv/0011ce/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk17/testing_out_stuff/test_tool.args 4141962 392M 389M 154M └─ ydb-core-kqp-ut-scheme --trace-path-append /home/runner/.ya/build/build_root/ciyv/0011ce/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk17/ytest.re Test command err: Trying to start YDB, gRPC: 30919, MsgBus: 9472 2025-05-29T15:29:02.303339Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890085472056189:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:02.303362Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0011ce/r3tmp/tmpVXF7nE/pdisk_1.dat 2025-05-29T15:29:02.360438Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30919, node 1 2025-05-29T15:29:02.381508Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:02.381525Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:02.381527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:02.381582Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9472 2025-05-29T15:29:02.405519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:02.405549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:02.406630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
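[editor's note] The "Wrapper has overrun 600 secs timeout. Process tree before termination" dump at the start of this unittest log is the test wrapper enumerating the test's process tree (pid, rss, command line) and then terminating it once the 600-second budget is exhausted. A minimal sketch of that pattern, assuming the third-party psutil package; the actual test_tool implementation is not shown in this log:

```python
# Minimal sketch of a watchdog that dumps and terminates a process tree after
# a timeout, in the spirit of the wrapper message above. Assumes psutil.
import subprocess
import psutil

TIMEOUT_SECS = 600                             # the wrapper's budget, per the log

proc = subprocess.Popen(["sleep", "1000"])     # stand-in for the test binary
try:
    proc.wait(timeout=TIMEOUT_SECS)
except subprocess.TimeoutExpired:
    root = psutil.Process(proc.pid)
    tree = [root] + root.children(recursive=True)
    for p in tree:                             # "Process tree before termination"
        print(p.pid, p.memory_info().rss // (1024 * 1024), "M", " ".join(p.cmdline()))
    for p in reversed(tree):                   # terminate children before the root
        p.terminate()
    psutil.wait_procs(tree, timeout=10)        # reap whatever exits in time
```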
2025-05-29T15:29:02.443711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.449958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.483077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.514433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.528122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:02.703736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890085472057790:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.703768Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.773526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.781699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.793500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.800320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.814690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.828987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.843003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.863352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890085472058445:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.863387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.863405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890085472058450:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.864300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:3, at schemeshard: 72057594046644480 2025-05-29T15:29:02.869502Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890085472058452:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2025-05-29T15:29:02.932683Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890085472058503:3398] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } VERIFY failed (2025-05-29T15:29:03.031322Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:375, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 library/cpp/testing/unittest/registar.cpp:36 RaiseError(): requirement UnittestThread failed 2025-05-29T15:29:03.029493Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890085472058519:2474], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:03.030260Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MTQ4MmI4NzAtMWE5YzVkNjctOWZiYzZlYTMtNjcyODllYQ==, ActorId: [1:7509890085472057772:2401], ActorState: ExecuteState, TraceId: 01jweajfcee0phar0hmmjkqzgq, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1633E055 1. /-S/util/system/yassert.cpp:55: Panic @ 0x16335056 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:36: RaiseError @ 0x164D75B6 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:375: AssertSuccessResult @ 0x15DE0A22 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:365: CreateSampleTables @ 0x289A0492 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:581: operator() @ 0x289C1A1C 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x289C1A1C 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x289C1A1C 8. /-S/util/thread/pool.h:71: Process @ 0x289C1A1C 9. /-S/util/thread/pool.cpp:418: DoExecute @ 0x163457A9 10. /-S/util/thread/factory.h:15: Execute @ 0x16344199 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x16344199 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1633F60C 13. ??:0: ?? @ 0x7FA643346AC2 14. ??:0: ?? @ 0x7FA6433D884F Trying to start YDB, gRPC: 62750, MsgBus: 3131 2025-05-29T15:29:07.008814Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890106022979455:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:07.008837Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0011ce/r3tmp/tmp3s14NA/pdisk_1.dat 2025-05-29T15:29:07.074603Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62750, node 1 2025-05-29T15:29:07.093046Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:07.093062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:07.093065Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 202 ... cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:38:57.342208Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509892641655218222:6944], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:38:57.343948Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTVkNzNkOWEtNTY5MzEyYTEtYTY0OGVjZDAtYThlMTRmNGI=, ActorId: [1:7509892641655218192:6942], ActorState: ExecuteState, TraceId: 01jweb4kxdcrsdpfexhtd1v3nn, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb4kx8fh9hew73en0m61yq 2025-05-29T15:38:57.344635Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZTVkNzNkOWEtNTY5MzEyYTEtYTY0OGVjZDAtYThlMTRmNGI=" tx_control { tx_id: "01jweb4kx8fh9hew73en0m61yq" commit_tx: true } query { yql_text: "DECLARE $objects AS List<Struct<componentId:Utf8,modificationId:Utf8,instant:Uint32>>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533137 } } } } } ; 2025-05-29T15:38:57.344740Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:38:58.486818Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509892645950185600:7118], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:38:58.487949Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTVhMzA1MmMtNjZjYWIzNjgtY2Y5NDRmYmYtYjc4YzJmNzQ=, ActorId: [1:7509892645950185568:7052], ActorState: ExecuteState, TraceId: 01jweb4n167pdt7kdg1vwhdz7w, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb4n11bbs8b27nhagh4bbk 2025-05-29T15:38:58.491531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=YTVhMzA1MmMtNjZjYWIzNjgtY2Y5NDRmYmYtYjc4YzJmNzQ=" tx_control { tx_id: "01jweb4n11bbs8b27nhagh4bbk" commit_tx: true } query { yql_text: "DECLARE $objects AS List<Struct<componentId:Utf8,modificationId:Utf8,instant:Uint32>>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533138 } } } } } ; 2025-05-29T15:38:58.491637Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:38:59.652200Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509892650245152976:7280], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:38:59.653081Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YjRlMTMyOTEtNTU3YjIzNWUtMmE5MWRhMy03MDYzZjdiYQ==, ActorId: [1:7509892650245152946:7198], ActorState: ExecuteState, TraceId: 01jweb4p5e01w6rd63njpz1aa0, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb4p581gzr0v93bkgkgdm5 2025-05-29T15:38:59.655685Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=YjRlMTMyOTEtNTU3YjIzNWUtMmE5MWRhMy03MDYzZjdiYQ==" tx_control { tx_id: "01jweb4p581gzr0v93bkgkgdm5" commit_tx: true } query { yql_text: "DECLARE $objects AS List<Struct<componentId:Utf8,modificationId:Utf8,instant:Uint32>>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533139 } } } } } ; 2025-05-29T15:38:59.655754Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:39:00.777152Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509892654540120381:7472], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:39:00.778043Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MWQ2MjkwMTUtOWUzMDVhMjYtZTY2MDI3ZTQtZmUyYmE3Y2Q=, ActorId: [1:7509892654540120322:7377], ActorState: ExecuteState, TraceId: 01jweb4q8t3cbg2r6mgesf17jp, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb4q8pd5ctvqxvrzr6xcc5 2025-05-29T15:39:00.779083Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=MWQ2MjkwMTUtOWUzMDVhMjYtZTY2MDI3ZTQtZmUyYmE3Y2Q=" tx_control { tx_id: "01jweb4q8pd5ctvqxvrzr6xcc5" commit_tx: true } query { yql_text: "DECLARE $objects AS List<Struct<componentId:Utf8,modificationId:Utf8,instant:Uint32>>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533140 } } } } } ; 2025-05-29T15:39:00.779194Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/0011ce/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk17/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/0011ce/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk17/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_recovery.py::TestRecovery::test_delete >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.tsv-tsv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.json-json_each_row] >> test_insert.py::TestS3::test_error[v1-client0-json_each_row] [FAIL] >> test_insert.py::TestS3::test_error[v1-client0-csv_with_names] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::InvalidColumnInTieringRule 2025-05-29 15:38:59,866 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-29 15:39:00,089 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 4115845 46.0M 44.3M 23.2M test_tool run_ut @/home/runner/.ya/build/build_root/ciyv/001204/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk13/testing_out_stuff/test_tool.args 4117252 393M 392M 155M └─ ydb-core-kqp-ut-scheme --trace-path-append /home/runner/.ya/build/build_root/ciyv/001204/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk13/ytest.re Test command err: Trying to start YDB, gRPC: 16350, MsgBus: 32654 2025-05-29T15:29:00.673227Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890079444883020:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:00.673255Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001204/r3tmp/tmpcjU38C/pdisk_1.dat 2025-05-29T15:29:00.750624Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16350, node 1 2025-05-29T15:29:00.774833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:00.774867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:00.775922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:00.782957Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:00.782974Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:00.782976Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:00.783023Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:32654 TClient is connected to server localhost:32654 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:00.848686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:29:00.851976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (created_at, id_second)) PARTITION BY HASH(created_at) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1, TTL = Interval("PT1H") ON created_at); 2025-05-29T15:29:01.055537Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890083739850948:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:01.055572Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:01.108727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:01.120420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890083739851025:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:29:01.120504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890083739851025:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:01.120556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890083739851025:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:01.120578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890083739851025:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:01.120605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890083739851025:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:29:01.120633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890083739851025:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:29:01.120658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890083739851025:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:29:01.120679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890083739851025:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:29:01.120700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890083739851025:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:29:01.120727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890083739851025:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:29:01.120748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890083739851025:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:29:01.120768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509890083739851025:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:29:01.126675Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:29:01.126696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:29:01.126711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:29:01.126717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:29:01.126754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:29:01.126760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:29:01.126773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:29:01.126779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:29:01.126791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:29:01.126796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:29:01.126803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:29:01.126809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:29:01.126832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:29:01.126840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:29:01.126860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:29:01.126866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:29:01.126879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-05-29T15:29:01.126885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxU ... 81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:38:56.070979Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509892639245447745:5780], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:38:56.071978Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZjkwOTQ1NmMtMTdkZWZmODAtODczNTc5OGUtZTlmNjVmNzM=, ActorId: [1:7509892634950480420:5865], ActorState: ExecuteState, TraceId: 01jweb4jnke5535fprbfyzxdfb, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb4jnf1fb2smd80g9s1yvn 2025-05-29T15:38:56.072698Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZjkwOTQ1NmMtMTdkZWZmODAtODczNTc5OGUtZTlmNjVmNzM=" tx_control { tx_id: "01jweb4jnf1fb2smd80g9s1yvn" commit_tx: true } query { yql_text: "DECLARE $objects AS List<Struct<componentId:Utf8,modificationId:Utf8,instant:Uint32>>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533135 } } } } } ; 2025-05-29T15:38:56.072819Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:38:57.211544Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509892643540415125:5603], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:38:57.211738Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZWRmNzllZjQtNGEwM2ZkZjgtNWNkY2Y2NDYtY2IxNWY0MDM=, ActorId: [1:7509892643540415095:5709], ActorState: ExecuteState, TraceId: 01jweb4krn1gxnyr3m1vf65xgd, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb4krd3nhybwa1yfkq644s 2025-05-29T15:38:57.212970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZWRmNzllZjQtNGEwM2ZkZjgtNWNkY2Y2NDYtY2IxNWY0MDM=" tx_control { tx_id: "01jweb4krd3nhybwa1yfkq644s" commit_tx: true } query { yql_text: "DECLARE $objects AS List<Struct<componentId:Utf8,modificationId:Utf8,instant:Uint32>>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533137 } } } } } ; 2025-05-29T15:38:57.213033Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:38:58.369929Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509892647835382506:10283], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:38:58.371068Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=OTlhMzg5NGYtM2I0N2Q3YWUtOTdjNmY2YmItYzU4YjczYzc=, ActorId: [1:7509892647835382475:10263], ActorState: ExecuteState, TraceId: 01jweb4mxd3cjgx9vz90bsvqg6, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb4mx1b9k7v9vqtswk7qxn 2025-05-29T15:38:58.371833Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=OTlhMzg5NGYtM2I0N2Q3YWUtOTdjNmY2YmItYzU4YjczYzc=" tx_control { tx_id: "01jweb4mx1b9k7v9vqtswk7qxn" commit_tx: true } query { yql_text: "DECLARE $objects AS List<Struct<componentId:Utf8,modificationId:Utf8,instant:Uint32>>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533138 } } } } } ; 2025-05-29T15:38:58.372044Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:38:59.647151Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509892652130349886:10435], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:38:59.647400Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=YTQ0MTJhMjYtYjgzMGZlYmItYWE1MGE0ZmUtZmFmOGM1NTQ=, ActorId: [1:7509892652130349856:10406], ActorState: ExecuteState, TraceId: 01jweb4p2238c6jmt30pjthywd, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb4p0y7rhwptdk0477ft1h 2025-05-29T15:38:59.652227Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=YTQ0MTJhMjYtYjgzMGZlYmItYWE1MGE0ZmUtZmFmOGM1NTQ=" tx_control { tx_id: "01jweb4p0y7rhwptdk0477ft1h" commit_tx: true } query { yql_text: "DECLARE $objects AS List<Struct<componentId:Utf8,modificationId:Utf8,instant:Uint32>>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533139 } } } } } ; 2025-05-29T15:38:59.652334Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/001204/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk13/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/001204/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk13/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [FAIL] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case1[v1] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_s3_1.py::TestS3::test_top_level_listing[v1-false-client0] [FAIL] >> test_s3_1.py::TestS3::test_top_level_listing[v1-true-client0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_time_order_recoverer[v1-kikimr0] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client4-year Int64-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client5-year Int64 NOT NULL-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-aborted] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_1_float-pk_types3-all_types3-index3-Float] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_2_float-pk_types2-all_types2-index2-Float] [FAIL] >> test_recovery_mz.py::TestRecovery::test_recovery[v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> 
test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client3-year Date NOT NULL-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client4-year String NOT NULL-True] >> test_insert.py::TestS3::test_error[v1-client0-csv_with_names] [FAIL] >> test_insert.py::TestS3::test_error[v1-client0-tsv_with_names] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_3-pk_types13-all_types13-index13-Int8] [FAIL] |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_2-pk_types8-all_types8-index8-Uint8] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_read_raw_format_with_row_dispatcher [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start_with_filter [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client5-year Int64 NOT NULL-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client6-year Uint64-False] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.json-json_each_row] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.parquet-parquet] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_0-pk_types10-all_types10-index10-Uint8] [FAIL] |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_recovery.py::TestRecovery::test_delete [FAIL] >> test_recovery.py::TestRecovery::test_ic_disconnection [SKIPPED] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::AddColumnWithTtl 2025-05-29 15:39:00,869 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-29 15:39:01,068 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 4117789 46.0M 45.1M 23.2M test_tool run_ut @/home/runner/.ya/build/build_root/ciyv/0011f2/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk8/testing_out_stuff/test_tool.args 4119406 395M 379M 157M └─ ydb-core-kqp-ut-scheme --trace-path-append /home/runner/.ya/build/build_root/ciyv/0011f2/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk8/ytest.rep Test command err: Trying to start YDB, gRPC: 62570, MsgBus: 10734 2025-05-29T15:29:01.691155Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890081406514107:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:01.727891Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/0011f2/r3tmp/tmpc2RC3h/pdisk_1.dat 2025-05-29T15:29:01.761801Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1228: Notification cookie mismatch for subscription [1:7509890081406513942:2079] 1748532541687652 != 1748532541687655 2025-05-29T15:29:01.763866Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62570, node 1 2025-05-29T15:29:01.794983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:01.794999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:01.795001Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:01.795043Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10734 2025-05-29T15:29:01.833468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:01.833498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:01.834653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:29:01.859760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:01.866380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (id, id_second)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-05-29T15:29:02.140871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890085701481902:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.140923Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:02.179632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:02.190555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890085701481978:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-29T15:29:02.190622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890085701481978:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-29T15:29:02.190656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890085701481978:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-29T15:29:02.190676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890085701481978:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-29T15:29:02.190693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890085701481978:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-29T15:29:02.190716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890085701481978:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-29T15:29:02.190732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890085701481978:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-29T15:29:02.190786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890085701481978:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-29T15:29:02.190806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890085701481978:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-29T15:29:02.190824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890085701481978:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-29T15:29:02.190845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;self_id=[1:7509890085701481978:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-05-29T15:29:02.190861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;self_id=[1:7509890085701481978:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-05-29T15:29:02.191334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-05-29T15:29:02.191347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-05-29T15:29:02.191358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-05-29T15:29:02.191362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-05-29T15:29:02.191374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-05-29T15:29:02.191380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-05-29T15:29:02.191392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-05-29T15:29:02.191396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-05-29T15:29:02.191409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-05-29T15:29:02.191412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-05-29T15:29:02.191420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-05-29T15:29:02.191429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-05-29T15:29:02.191450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-05-29T15:29:02.191462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-05-29T15:29:02.191483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-05-29T15:29:02.191490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-05-29T15:29:02.191505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:784: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;descrip ... t.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:38:56.538697Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509892636912105532:6131], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:38:56.539675Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ZTE0MGM3OTQtZmUwNjRhZjctYzc3NmZjNGUtYmZiOGE1ZTc=, ActorId: [1:7509892636912105473:6026], ActorState: ExecuteState, TraceId: 01jweb4k43djqw9y2tbxf5fn5g, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb4k3hcrqrnywqcae53hjq 2025-05-29T15:38:56.541093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZTE0MGM3OTQtZmUwNjRhZjctYzc3NmZjNGUtYmZiOGE1ZTc=" tx_control { tx_id: "01jweb4k3hcrqrnywqcae53hjq" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533136 } } } } } ; 2025-05-29T15:38:56.541240Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:38:57.654066Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509892641207072912:6337], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:38:57.654752Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=MjhhMzYzMTYtNzRhYTlhZDktM2E4Y2YxNmMtY2FlMDZjYQ==, ActorId: [1:7509892641207072882:6286], ActorState: ExecuteState, TraceId: 01jweb4m77bt1e2161sjsfew87, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb4m72d6z8j9n57e8h6qj0 2025-05-29T15:38:57.656590Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=MjhhMzYzMTYtNzRhYTlhZDktM2E4Y2YxNmMtY2FlMDZjYQ==" tx_control { tx_id: "01jweb4m72d6z8j9n57e8h6qj0" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533137 } } } } } ; 2025-05-29T15:38:57.656740Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:38:58.782998Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509892645502040292:6482], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:38:58.784031Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=ODhkZDlhNDMtNDg2ZThkNTgtYWE1NmY1YzMtNzMwY2VkMjM=, ActorId: [1:7509892645502040262:6425], ActorState: ExecuteState, TraceId: 01jweb4nae4cm6bkxqw9evq7m5, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb4na09xaefvnn3w89e0a7 2025-05-29T15:38:58.785301Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ODhkZDlhNDMtNDg2ZThkNTgtYWE1NmY1YzMtNzMwY2VkMjM=" tx_control { tx_id: "01jweb4na09xaefvnn3w89e0a7" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533138 } } } } } ; 2025-05-29T15:38:58.785671Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:38:59.891101Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509892649797007672:6569], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:38:59.892148Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=Njg0MDEwZTktZmQ5NWQ4MDEtMWU3NDQ1MDYtZDExZDU2ZDE=, ActorId: [1:7509892649797007642:6511], ActorState: ExecuteState, TraceId: 01jweb4pcv80wcpzre61p4vp2q, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb4pcr8xw983d8dxfdd13t 2025-05-29T15:38:59.893400Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=1&id=Njg0MDEwZTktZmQ5NWQ4MDEtMWU3NDQ1MDYtZDExZDU2ZDE=" tx_control { tx_id: "01jweb4pcr8xw983d8dxfdd13t" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533139 } } } } } ; 2025-05-29T15:38:59.893466Z node 1 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/0011f2/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk8/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/0011f2/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk8/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> test_s3_1.py::TestS3::test_top_level_listing[v1-true-client0] [FAIL] >> test_s3_1.py::TestS3::test_top_level_listing[v2-false-client0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] >> test_recovery_mz.py::TestRecovery::test_recovery[v1] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fe1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_visibility_change_disables_receive_attempt_id.tables_format_v0-with_change_visibility/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fe1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_visibility_change_disables_receive_attempt_id.tables_format_v0-with_change_visibility/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fe1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_visibility_change_disables_receive_attempt_id.tables_format_v0-with_change_visibility/cluster/node_1/logfile_6xu0ltp9.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback 
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fe1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_visibility_change_disables_receive_attempt_id.tables_format_v0-with_change_visibility/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fe1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_visibility_change_disables_receive_attempt_id.tables_format_v0-with_change_visibility/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fe1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_visibility_change_disables_receive_attempt_id.tables_format_v0-with_change_visibility/cluster/slot_1/logfile_y5_rz8p3.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 611570 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 614295 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client6-year Uint64-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client7-year Uint64 NOT NULL-False] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message [FAIL] |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_4_float-pk_types0-all_types0-index0-Float] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo >> test_mem_alloc.py::TestMemAlloc::test_hop_alloc[v1] >> test_stop.py::TestStop::test_stop_query[v1-streaming] |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_1-pk_types15-all_types15-index15-Int8] [FAIL] >> test_insert.py::TestS3::test_error[v1-client0-tsv_with_names] [FAIL] >> test_insert.py::TestS3::test_error[v1-client0-parquet] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.parquet-parquet] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.csv-csv_with_names] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client7-year Uint64 NOT NULL-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client8-year String NOT NULL-True] |81.3%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_s3_1.py::TestS3::test_top_level_listing[v2-false-client0] [FAIL] >> test_s3_1.py::TestS3::test_top_level_listing[v2-true-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case1[v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client4-year String NOT NULL-True] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client5-year String-False] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [FAIL] >> test_insert.py::TestS3::test_error[v1-client0-parquet] [FAIL] >> test_insert.py::TestS3::test_insert_empty_object[v1] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_4-pk_types12-all_types12-index12-Int8] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client8-year String NOT NULL-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client9-year String-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestore::RestoreReplicationWithoutSecret 2025-05-29 15:38:50,004 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-29 15:38:50,165 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 4087162 46.2M 45.8M 23.4M test_tool run_ut @/home/runner/.ya/build/build_root/ciyv/001cfb/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args 4089350 549M 516M 269M └─ ydb-services-ydb-backup_ut --trace-path-append /home/runner/.ya/build/build_root/ciyv/001cfb/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/chunk1/y Test command err: 2025-05-29T15:28:51.108772Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890037053532557:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:28:51.108853Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001cfb/r3tmp/tmpMZ4hLC/pdisk_1.dat 2025-05-29T15:28:51.190860Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29907, node 1 2025-05-29T15:28:51.209686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:28:51.209725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:28:51.211621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:28:51.213870Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:28:51.213885Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:28:51.213887Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:28:51.213933Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7897 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:28:51.256091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-29T15:28:51.532927Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890037053533481:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:51.532962Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:51.533136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890037053533508:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:28:51.534665Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:315: actor# [1:7509890037053532788:2140] Handle TEvProposeTransaction 2025-05-29T15:28:51.534678Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:238: actor# [1:7509890037053532788:2140] TxId# 281474976715658 ProcessProposeTransaction 2025-05-29T15:28:51.534696Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:257: actor# [1:7509890037053532788:2140] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7509890037053533511:2597] 2025-05-29T15:28:51.550035Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1562: Actor# [1:7509890037053533511:2597] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-05-29T15:28:51.550100Z node 1 :TX_PROXY DEBUG: schemereq.cpp:569: Actor# [1:7509890037053533511:2597] txid# 281474976715658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-29T15:28:51.550104Z node 1 :TX_PROXY DEBUG: schemereq.cpp:578: Actor# [1:7509890037053533511:2597] txid# 281474976715658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-05-29T15:28:51.550756Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [1:7509890037053533511:2597] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-29T15:28:51.550777Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [1:7509890037053533511:2597] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-29T15:28:51.550810Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1450: Actor# [1:7509890037053533511:2597] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-29T15:28:51.550867Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1497: Actor# [1:7509890037053533511:2597] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-29T15:28:51.550881Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7509890037053533511:2597] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:28:51.550931Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1352: Actor# [1:7509890037053533511:2597] txid# 281474976715658 HANDLE EvClientConnected 2025-05-29T15:28:51.550968Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:4877: StateWork, received event# 269877761, Sender [1:7509890037053533536:2603], Recipient [1:7509890037053533004:2279]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-29T15:28:51.550973Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4974: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-29T15:28:51.550975Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5753: Pipe server connected, at tablet: 72057594046644480 2025-05-29T15:28:51.550981Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4877: StateWork, received event# 271122432, Sender [1:7509890037053533511:2597], Recipient [1:7509890037053533004:2279]: {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-29T15:28:51.550983Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:4888: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-29T15:28:51.551729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: ".metadata/workload_manager/pools/default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } TxId: 281474976715658 TabletId: 72057594046644480 Owner: "metadata@system" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-05-29T15:28:51.551842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:28:51.551874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: .metadata, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-05-29T15:28:51.551888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-05-29T15:28:51.551904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:28:51.551910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2025-05-29T15:28:51.551919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: .metadata, child name: workload_manager, child id: [OwnerId: 72057594046644480, 
LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-29T15:28:51.551924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 0 2025-05-29T15:28:51.551933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-29T15:28:51.551936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2025-05-29T15:28:51.551944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:319: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 3], parent name: worklo ... annot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:38:46.034514Z node 40 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [40:7509892596248188205:4188], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:38:46.034657Z node 40 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=40&id=OTA2ZmY5MzYtODA3YTU0MjAtYWI4MzAyMjEtM2RiYjc2YTQ=, ActorId: [40:7509892591953220877:4152], ActorState: ExecuteState, TraceId: 01jweb48w44agp9kah1aamrnj3, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb48w01wbbgr07txqdbjws 2025-05-29T15:38:46.035768Z node 40 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=40&id=OTA2ZmY5MzYtODA3YTU0MjAtYWI4MzAyMjEtM2RiYjc2YTQ=" tx_control { tx_id: "01jweb48w01wbbgr07txqdbjws" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533125 } } } } } ; 2025-05-29T15:38:46.035893Z node 40 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:38:47.205284Z node 40 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [40:7509892600543155615:4252], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:38:47.206315Z node 40 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=40&id=ZmJiYzBkNmItYzhkOTdlNmUtZTUzNGQ5NS1lMzUwMThlNg==, ActorId: [40:7509892600543155581:4219], ActorState: ExecuteState, TraceId: 01jweb4a0m5jg1yqqd8c4qmmfc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb4a0e8p7bn0ntm0ej6er1 2025-05-29T15:38:47.207180Z node 40 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=40&id=ZmJiYzBkNmItYzhkOTdlNmUtZTUzNGQ5NS1lMzUwMThlNg==" tx_control { tx_id: "01jweb4a0e8p7bn0ntm0ej6er1" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533127 } } } } } ; 2025-05-29T15:38:47.207311Z node 40 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:38:48.344384Z node 40 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [40:7509892604838123033:4323], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:38:48.345143Z node 40 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=40&id=YjZiZWM5MjEtMmRjZjNmMjAtYzVjOTAyMWMtYjJhMzIwNWM=, ActorId: [40:7509892604838122992:4289], ActorState: ExecuteState, TraceId: 01jweb4b454ka2z804q5weq43v, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb4b3y12jat4c0tkqtts8m 2025-05-29T15:38:48.347303Z node 40 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=40&id=YjZiZWM5MjEtMmRjZjNmMjAtYzVjOTAyMWMtYjJhMzIwNWM=" tx_control { tx_id: "01jweb4b3y12jat4c0tkqtts8m" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533128 } } } } } ; 2025-05-29T15:38:48.347379Z node 40 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:38:49.494607Z node 40 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [40:7509892609133090451:4387], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:38:49.495770Z node 40 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=40&id=ZDIzYjAyZjctNWE4MWQxOTctYjgzODM0NjQtNDI2ZDk3M2U=, ActorId: [40:7509892609133090415:4363], ActorState: ExecuteState, TraceId: 01jweb4c833mqshjd7w2vbzjsc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb4c7x7cnkjcmatm7g70sy 2025-05-29T15:38:49.496933Z node 40 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=40&id=ZDIzYjAyZjctNWE4MWQxOTctYjgzODM0NjQtNDI2ZDk3M2U=" tx_control { tx_id: "01jweb4c7x7cnkjcmatm7g70sy" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533129 } } } } } ; 2025-05-29T15:38:49.497119Z node 40 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/001cfb/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/001cfb/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_2_float-pk_types2-all_types2-index2-Float] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.csv-csv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.tsv-tsv_with_names] >> test_mem_alloc.py::TestMemAlloc::test_hop_alloc[v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test_mem_alloc.py::TestMemAlloc::test_join_alloc[v1] >> test_s3_1.py::TestS3::test_top_level_listing[v2-true-client0] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_recovery.py::TestRecovery::test_ic_disconnection [SKIPPED] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [FAIL] Test command 
err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fdd/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_delete_message_works.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fdd/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_delete_message_works.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fdd/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_delete_message_works.tables_format_v0/cluster/node_1/logfile_x039rejc.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fdd/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_delete_message_works.tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fdd/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_delete_message_works.tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fdd/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_delete_message_works.tables_format_v0/cluster/slot_1/logfile_502q1ns1.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 614021 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 616015 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_stop.py::TestStop::test_stop_query[v1-streaming] [FAIL] >> test_watermarks.py::TestWatermarks::test_idle_watermarks[v1-mvp_external_ydb_endpoint0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client9-year String-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client10-year Utf8-False] |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_recovery_mz.py::TestRecovery::test_recovery[v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_row_dispatcher.py::TestPqRowDispatcher::test_restart_compute_node >> test_insert.py::TestS3::test_insert_empty_object[v1] [FAIL] >> test_insert.py::TestS3::test_insert_empty_object[v2] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.tsv-tsv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.json-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client10-year Utf8-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client11-year Utf8 NOT NULL-True] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> test_mem_alloc.py::TestMemAlloc::test_join_alloc[v1] [FAIL] |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.json-json_each_row] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client5-year String-False] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client6-year Utf8 NOT NULL-True] >> test_watermarks.py::TestWatermarks::test_idle_watermarks[v1-mvp_external_ydb_endpoint0] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.parquet-parquet] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client11-year Utf8 NOT NULL-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client12-year Date-False] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_insert.py::TestS3::test_insert_empty_object[v2] [FAIL] >> test_insert.py::TestS3::test_insert_without_format_error[v1-client0] |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_3-pk_types13-all_types13-index13-Int8] [FAIL] >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_2-pk_types14-all_types14-index14-Int8] [FAIL] >> test_row_dispatcher.py::TestPqRowDispatcher::test_restart_compute_node [FAIL] >> test_row_dispatcher.py::TestPqRowDispatcher::test_scheme_error >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] >> test_delete_read_rules_after_abort_by_system.py::TestDeleteReadRulesAfterAbortBySystem::test_delete_read_rules_after_abort_by_system |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_0-pk_types10-all_types10-index10-Uint8] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001fda/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk57/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_visibility_timeout_works.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fda/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk57/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_visibility_timeout_works.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fda/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk57/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_visibility_timeout_works.tables_format_v1/cluster/node_1/logfile_z2hhe6gu.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fda/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk57/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_visibility_timeout_works.tables_format_v1/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fda/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk57/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_visibility_timeout_works.tables_format_v1/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fda/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk57/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_visibility_timeout_works.tables_format_v1/cluster/slot_1/logfile_xsyzd78r.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 615282 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 617430 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client6-year Utf8 NOT NULL-True] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client7-year Utf8-False] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] [FAIL] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client12-year Date-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client13-year Date NOT NULL-True] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] >> test_insert.py::TestS3::test_insert_without_format_error[v1-client0] [FAIL] >> test_insert.py::TestS3::test_insert_without_format_error[v2-client0] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-finished] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.parquet-parquet] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.csv-csv_with_names] |81.4%| [TA] $(B)/ydb/core/kqp/ut/scheme/test-results/unittest/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [FAIL] |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fd7/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk56/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_validates_message_body.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [FAIL]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client7-year Utf8-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client8-year Int32-False]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_s3_1.py::TestS3::test_top_level_listing[v2-true-client0] [FAIL]
Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00204f/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00204f/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [FAIL]
>> test_insert.py::TestS3::test_insert_without_format_error[v2-client0] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client13-year Date NOT NULL-True] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client14-year Datetime-False]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_scheme_error [FAIL]
>> test_insert.py::TestS3::test_raw_format_validation[v1-client0]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_mem_alloc.py::TestMemAlloc::test_join_alloc[v1] [FAIL]
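The library/recipes warnings above show log files handed to subprocess.Popen and never closed, and other blocks in this run add "subprocess NNN is still running" at interpreter shutdown. A minimal sketch of the ownership pattern that avoids both warnings (command and paths are hypothetical):

    # Minimal sketch, hypothetical command and paths: context managers
    # close both log files and reap the child process, so neither the
    # "unclosed file" nor the "subprocess ... is still running"
    # ResourceWarning is emitted at shutdown.
    import subprocess

    def run_server(cmd=("sleep", "1"),
                   out_path="/tmp/server.out.log",
                   err_path="/tmp/server.err.log"):
        with open(out_path, "wb") as out, open(err_path, "wb") as err:
            # Popen is a context manager since Python 3.2; __exit__
            # closes its streams and waits for the child to exit.
            with subprocess.Popen(cmd, stdout=out, stderr=err) as proc:
                return proc.wait(timeout=60)

    run_server()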
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1]
|81.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/scheme/test-results/unittest/{meta.json ... results_accumulator.log}
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [FAIL]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1]
>> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.csv-csv_with_names] [FAIL]
>> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.tsv-tsv_with_names]
>> test_select_1.py::TestSelect1::test_compile_error[v1]
|81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client8-year Int32-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client9-year Uint32-False]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_watermarks.py::TestWatermarks::test_idle_watermarks[v1-mvp_external_ydb_endpoint0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [FAIL]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [FAIL]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client14-year Datetime-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client15-year Datetime NOT NULL-True]
|81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index[table_index_2-pk_types14-all_types14-index14-Int8] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD]
Test command err: contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead.
contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC).
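The second DeprecationWarning above spells out its own fix. A minimal sketch of the migration (the timestamp value is illustrative):

    # Minimal sketch of the replacement the warning asks for.
    import datetime

    ts = 1_700_000_000  # illustrative epoch seconds

    # Deprecated: datetime.datetime.utcfromtimestamp(ts) returns a naive
    # value and is scheduled for removal.

    # Replacement from the warning text: a timezone-aware UTC datetime.
    aware = datetime.datetime.fromtimestamp(ts, datetime.UTC)  # Python 3.11+
    print(aware.isoformat())  # 2023-11-14T22:13:20+00:00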
>> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-finished] [FAIL]
>> test_stop.py::TestStop::test_stop_query[v1-analytics]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [FAIL]
>> test_delete_read_rules_after_abort_by_system.py::TestDeleteReadRulesAfterAbortBySystem::test_delete_read_rules_after_abort_by_system [FAIL]
>> test_disposition.py::TestContinueMode::test_disposition_fresh[v1-mvp_external_ydb_endpoint0]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [FAIL]
>> test_insert.py::TestS3::test_raw_format_validation[v1-client0] [FAIL]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std]
>> test_insert.py::TestS3::test_raw_format_validation[v2-client0]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client9-year Uint32-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client10-year Int64 NOT NULL-True]
|81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client15-year Datetime NOT NULL-True] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [FAIL]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_with_mr
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [FAIL]
>> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.tsv-tsv_with_names] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client0-year Int32-False]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0]
>> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.json-json_each_row]
|81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/py3test >> test_vector_index.py::TestVectorIndex::test_vector_index_prefix[table_index_4-pk_types12-all_types12-index12-Int8] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client10-year Int64 NOT NULL-True] [FAIL]
>> test_insert.py::TestS3::test_raw_format_validation[v2-client0] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_scheme_error [FAIL]
>> test_insert.py::TestS3::test_block_insert_enable[v1-client0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_metadatafields
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client11-year Int64-False]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [FAIL]
>> test_stop.py::TestStop::test_stop_query[v1-analytics] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client0-year Int32-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client1-year Int32 NOT NULL-False]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1]
>> test_select_1.py::TestSelect1::test_compile_error[v1] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [FAIL]
Test command err:
2025-05-29 15:39:22,332 WARNING libarchive: File (test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_visibility_change_disables_receive_attempt_id.tables_format_v1-with_change_visibility/cluster/node_1/logfile__g54onej.log) size has changed. Can't write more data than was declared in the tar header (160892).
(probably file was changed during archiving)
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std]
>> test_select_1.py::TestSelect1::test_compile_error[v2]
>> test_disposition.py::TestContinueMode::test_disposition_fresh[v1-mvp_external_ydb_endpoint0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1]
>> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.json-json_each_row] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo]
>> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.parquet-parquet]
>> test_insert.py::TestS3::test_block_insert_enable[v1-client0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std]
>> test_insert.py::TestS3::test_block_insert_enable[v2-client0]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client11-year Int64-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client12-year Uint64-False]
>> test_auditlog.py::test_cloud_ids_are_logged[attrs0]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client1-year Int32 NOT NULL-False] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client2-year Uint32-False]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_with_mr [FAIL]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_non_optional_field
|81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0]
>> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.parquet-parquet] [FAIL]
>> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.csv-csv_with_names]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] [FAIL]
>> test_insert.py::TestS3::test_block_insert_enable[v2-client0] [FAIL]
>> test_insert.py::TestS3::test_block_insert_value[v1-client0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_metadatafields [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client12-year Uint64-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client13-year Date-False]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [FAIL]
>> test_metrics_cleanup.py::TestCleanup::test_cleanup[v1]
>> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-streaming]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] [FAIL]
>> test_select_1.py::TestSelect1::test_compile_error[v2] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [FAIL]
>> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client2-year Uint32-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client3-year Uint32 NOT NULL-True]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [FAIL]
|81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std]
>> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.csv-csv_with_names] [FAIL]
>> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.tsv-tsv_with_names]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client13-year Date-False] [FAIL]
>> test_insert.py::TestS3::test_block_insert_value[v1-client0] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_stop.py::TestStop::test_stop_query[v1-analytics] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client0-year Int32 NOT NULL-True]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1]
>> test_insert.py::TestS3::test_block_insert_value[v2-client0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client3-year Uint32 NOT NULL-True] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0]
>> test_continue_mode.py::TestContinueMode::test_deny_disposition_from_checkpoint_in_create_query[v1-mvp_external_ydb_endpoint0]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_non_optional_field [FAIL]
>> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-analytics]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client4-year Int64-False]
>> test_select_1.py::TestSelect1::test_select_pg[v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo]
|81.5%| [TA] $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log}
|81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_disposition.py::TestContinueMode::test_disposition_fresh[v1-mvp_external_ydb_endpoint0] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [FAIL]
|81.6%| [TA] {RESULT} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_metrics_cleanup.py::TestCleanup::test_cleanup[v1] [FAIL]
>> test_metrics_cleanup.py::TestCleanup::test_keep[v1]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client0-year Int32 NOT NULL-True] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client1-year Uint32 NOT NULL-True]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [FAIL]
|81.6%| [TA] $(B)/ydb/tests/datashard/vector_index/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] [FAIL]
>> test_insert.py::TestS3::test_block_insert_value[v2-client0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client4-year Int64-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client5-year Int64 NOT NULL-False]
>> test_insert.py::TestS3::test_insert_deadlock[v1-false-client0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_optional_field
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std]
>> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.tsv-tsv_with_names] [FAIL]
>> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.json-json_each_row]
>> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-streaming] [FAIL]
>> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-analytics]
|81.6%| [TA] {RESULT} $(B)/ydb/tests/datashard/vector_index/test-results/py3test/{meta.json ... results_accumulator.log}
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fba/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk50/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_queue_attributes.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fba/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk50/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_queue_attributes.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fba/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk50/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_queue_attributes.tables_format_v1-fifo/cluster/node_1/logfile_6xo4yz_4.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fba/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk50/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_queue_attributes.tables_format_v1-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fba/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk50/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_queue_attributes.tables_format_v1-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fba/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk50/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_queue_attributes.tables_format_v1-fifo/cluster/slot_1/logfile_qj82wan4.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 622208 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 625598 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> 
test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-analytics] [FAIL]
>> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-streaming]
|81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client1-year Uint32 NOT NULL-True] [FAIL]
>> test_continue_mode.py::TestContinueMode::test_deny_disposition_from_checkpoint_in_create_query[v1-mvp_external_ydb_endpoint0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client2-year Uint64 NOT NULL-True]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client5-year Int64 NOT NULL-False] [FAIL]
>> test_continue_mode.py::TestContinueMode::test_deny_state_load_mode_from_checkpoint_in_modify_query[v1-mvp_external_ydb_endpoint0]
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client6-year Uint64-False]
>> test_metrics_cleanup.py::TestCleanup::test_keep[v1] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_compile_error[v2] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.json-json_each_row] [FAIL]
>> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.parquet-parquet]
>> test_insert.py::TestS3::test_insert_deadlock[v1-false-client0] [FAIL]
>> test_insert.py::TestS3::test_insert_deadlock[v1-true-client0]
>> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [FAIL]
>> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-analytics]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] [FAIL]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed file <_io.BufferedWriter
name='/home/runner/.ya/build/build_root/ciyv/001fae/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_delete_message_works.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fae/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_delete_message_works.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fae/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_delete_message_works.tables_format_v0/cluster/node_1/logfile_hvyriq3z.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 626171 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_select_1.py::TestSelect1::test_select_pg[v1] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] >> test_select_1.py::TestSelect1::test_select_pg[v2] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_non_optional_field [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [FAIL] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_optional_field [FAIL] >> test_row_dispatcher.py::TestPqRowDispatcher::test_group_by_hop_restart_node >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] [FAIL] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-analytics] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fb1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_can_read_new_written_data_on_visibility_timeout.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fb1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_can_read_new_written_data_on_visibility_timeout.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fb1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_can_read_new_written_data_on_visibility_timeout.tables_format_v1/cluster/node_1/logfile_54n_ils9.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fb1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_can_read_new_written_data_on_visibility_timeout.tables_format_v1/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fb1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_can_read_new_written_data_on_visibility_timeout.tables_format_v1/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fb1/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_can_read_new_written_data_on_visibility_timeout.tables_format_v1/cluster/slot_1/logfile_c8rpsrgj.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 624773 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 627039 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client2-year Uint64 NOT NULL-True] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client3-year Date NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client6-year Uint64-False] [FAIL] >> 
test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client7-year Uint64 NOT NULL-False] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming] >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-streaming] [FAIL] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding[v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] [FAIL] >> test_insert.py::TestS3::test_insert_deadlock[v1-true-client0] [FAIL] >> test_insert.py::TestS3::test_insert_deadlock[v2-false-client0] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.parquet-parquet] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.csv-csv_with_names] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test_continue_mode.py::TestContinueMode::test_deny_state_load_mode_from_checkpoint_in_modify_query[v1-mvp_external_ydb_endpoint0] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [FAIL] Test command err: ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fbf/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk58/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_zero_visibility_timeout_works.tables_format_v0-fifo/cluster/node_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fbf/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk58/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_zero_visibility_timeout_works.tables_format_v0-fifo/cluster/node_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fbf/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk58/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_zero_visibility_timeout_works.tables_format_v0-fifo/cluster/node_1/logfile_57vs0zen.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback 
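The ResourceWarning records that recur throughout these test blocks all carry the same hint: without tracemalloc active, the interpreter cannot report where a leaked file handle or still-running subprocess was created. A minimal sketch of acting on that hint follows, assuming stock CPython behavior; the script, temp-file path, and function names are illustrative and not part of the YDB test suite.

    # Reproduces the two ResourceWarning patterns seen in this log and shows
    # how tracemalloc attaches an allocation traceback to each warning.
    # Equivalent to running with: PYTHONTRACEMALLOC=1 python -W always::ResourceWarning demo.py
    import gc
    import subprocess
    import tempfile
    import tracemalloc
    import warnings

    tracemalloc.start()                    # same effect as PYTHONTRACEMALLOC=1
    warnings.simplefilter("always", ResourceWarning)

    def leak_file(path):
        f = open(path, "w")                # never closed: when the object is
        f.write("leaked handle\n")         # garbage-collected, CPython emits
                                           # "ResourceWarning: unclosed file <...>"
                                           # followed by "Object allocated at ..."
                                           # because tracemalloc is tracing.

    def leak_subprocess():
        subprocess.Popen(["sleep", "1"])   # Popen finalized while the child runs:
                                           # "ResourceWarning: subprocess N is still running"

    with tempfile.NamedTemporaryFile(suffix=".log", delete=False) as tmp:
        target = tmp.name                  # hypothetical stand-in for the node stderr/logfile paths above

    leak_file(target)
    leak_subprocess()
    gc.collect()                           # force finalization so both warnings fire here

The deterministic fix in each case is explicit cleanup: open files via a with-block, and wait() for or terminate child processes before the Popen object goes out of scope, which would silence the long runs of warnings in these blocks without changing test behavior.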
ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fbf/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk58/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_zero_visibility_timeout_works.tables_format_v0-fifo/cluster/slot_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fbf/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk58/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_zero_visibility_timeout_works.tables_format_v0-fifo/cluster/slot_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fbf/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk58/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_zero_visibility_timeout_works.tables_format_v0-fifo/cluster/slot_1/logfile_me90yskt.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 622097 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 624777 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fbf/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk58/testing_out_stuff/test_generic_messaging.py.TestYandexAttributesPrefix.test_allows_yandex_message_attribute_prefix.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fbf/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk58/testing_out_stuff/test_generic_messaging.py.TestYandexAttributesPrefix.test_allows_yandex_message_attribute_prefix.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fbf/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk58/testing_out_stuff/test_generic_messaging.py.TestYandexAttributesPrefix.test_allows_yandex_message_attribute_prefix.tables_format_v0/cluster/node_1/logfile_eds3mt42.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 628897 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_cpu_quota.py::TestCpuQuota::test_cpu_quota[v1-mvp_external_ydb_endpoint0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fa8/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk33/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_set_very_big_visibility_timeout.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fa8/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk33/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_set_very_big_visibility_timeout.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fa8/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk33/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_set_very_big_visibility_timeout.tables_format_v1/cluster/node_1/logfile_4temkp2w.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 626724 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client3-year Date NOT NULL-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client4-year String NOT NULL-True] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] >> test_dispatch.py::TestMapping::test_mapping >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client7-year Uint64 NOT NULL-False] [FAIL] >> test_disposition.py::TestContinueMode::test_disposition_from_time[v1-mvp_external_ydb_endpoint0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client8-year String NOT NULL-True] >> test_insert.py::TestS3::test_insert_deadlock[v2-false-client0] [FAIL] >> test_insert.py::TestS3::test_insert_deadlock[v2-true-client0] >> test_bad_syntax.py::TestBadSyntax::test_type_as_column[v1] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.csv-csv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.tsv-tsv_with_names] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-analytics] [FAIL] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-streaming] >> 
test_recovery.py::TestRecovery::test_program_state_recovery >> test_row_dispatcher.py::TestPqRowDispatcher::test_group_by_hop_restart_node [FAIL] >> test_cpu_quota.py::TestCpuQuota::test_cpu_quota[v1-mvp_external_ydb_endpoint0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client4-year String NOT NULL-True] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_metrics_cleanup.py::TestCleanup::test_keep[v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client5-year String-False] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client8-year String NOT NULL-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client9-year String-False] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] >> test_yq_streaming.py::TestYqStreaming::test_match_recognize_sink[v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] [FAIL] >> test_select_1.py::TestSelect1::test_select_pg[v2] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding[v1] [FAIL] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding_date_time[v1] >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.tsv-tsv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.json-json_each_row] |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f9f/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_create_queue_by_nonexistent_user_fails.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001f9f/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_create_queue_by_nonexistent_user_fails.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f9f/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_create_queue_by_nonexistent_user_fails.tables_format_v1/cluster/node_1/logfile_h4x24dsz.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 628782 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client5-year String-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client6-year Utf8 NOT NULL-True] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_insert.py::TestS3::test_insert_deadlock[v2-true-client0] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-streaming] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client9-year String-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client10-year Utf8-False] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] >> test_disposition.py::TestContinueMode::test_disposition_from_time[v1-mvp_external_ydb_endpoint0] [FAIL] >> test_dispatch.py::TestMapping::test_mapping [FAIL] >> test_dispatch.py::TestMapping::test_idle >> test_disposition.py::TestContinueMode::test_disposition_oldest[v1-mvp_external_ydb_endpoint0] >> test_bad_syntax.py::TestBadSyntax::test_type_as_column[v1] [FAIL] |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_big_state.py::TestBigState::test_gt_8mb[v1] >> 
test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-streaming] [FAIL] >> test_bad_syntax.py::TestBadSyntax::test_require_as[v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client6-year Utf8 NOT NULL-True] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client7-year Utf8-False] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding_date_time[v1] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.json-json_each_row] [FAIL] >> test_select_1.py::TestSelect1::test_unwrap_null[v1] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.parquet-parquet] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [FAIL] >> test_yq_streaming.py::TestYqStreaming::test_match_recognize_sink[v1] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client10-year Utf8-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client11-year Utf8 NOT NULL-True] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] >> test_yq_streaming.py::TestYqStreaming::test_state_load_mode[v1] >> test_big_state.py::TestBigState::test_gt_8mb[v1] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client7-year Utf8-False] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] [FAIL] >> test_continue_mode.py::TestContinueMode::test_continue_from_offsets[v1-mvp_external_ydb_endpoint0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object 
allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client8-year Int32-False] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [FAIL] >> test_dispatch.py::TestMapping::test_idle [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_cpu_quota.py::TestCpuQuota::test_cpu_quota[v1-mvp_external_ydb_endpoint0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_disposition.py::TestContinueMode::test_disposition_oldest[v1-mvp_external_ydb_endpoint0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_bad_syntax.py::TestBadSyntax::test_require_as[v1] [FAIL] >> test_recovery.py::TestRecovery::test_program_state_recovery [FAIL] >> test_recovery.py::TestRecovery::test_program_state_recovery_error_if_no_states >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.parquet-parquet] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_group_by_hop_restart_node [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.csv-csv_with_names] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file 
<_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f99/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_can_read_from_different_groups.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f99/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_can_read_from_different_groups.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f99/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_can_read_from_different_groups.tables_format_v0/cluster/node_1/logfile_5969zwil.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 630463 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] >> test_yq_streaming.py::TestYqStreaming::test_state_load_mode[v1] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fa2/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk55/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_set_very_big_visibility_timeout.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fa2/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk55/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_set_very_big_visibility_timeout.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/001fa2/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk55/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_set_very_big_visibility_timeout.tables_format_v1/cluster/node_1/logfile_vh6yyaum.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fa2/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk55/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_set_very_big_visibility_timeout.tables_format_v1/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fa2/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk55/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_set_very_big_visibility_timeout.tables_format_v1/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fa2/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk55/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_set_very_big_visibility_timeout.tables_format_v1/cluster/slot_1/logfile_zxi2ziol.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 629114 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 631976 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_retry.py::TestRetry::test_fail_first[kikimr0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_select_pg[v2] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] >> test_continue_mode.py::TestContinueMode::test_continue_from_offsets[v1-mvp_external_ydb_endpoint0] [FAIL] >> test_recovery.py::TestRecovery::test_program_state_recovery_error_if_no_states [FAIL] >> test_select_1.py::TestSelect1::test_unwrap_null[v1] [FAIL] >> test_select_limit.py::TestSelectLimit::test_select_limit[v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_insert.py::TestS3::test_insert_deadlock[v2-true-client0] [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00202e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_insert/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: 
ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/00202e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_insert/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 587853 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.csv-csv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.tsv-tsv_with_names] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-analytics] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-with_checkpoints-mvp_external_ydb_endpoint0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f95/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_queue_attributes.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f95/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_queue_attributes.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f95/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_queue_attributes.tables_format_v0/cluster/node_1/logfile_1gdoppv8.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 632539 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [FAIL] Test command err: ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fab/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_zero_visibility_timeout_works.tables_format_v0-fifo/cluster/node_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fab/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_zero_visibility_timeout_works.tables_format_v0-fifo/cluster/node_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fab/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_zero_visibility_timeout_works.tables_format_v0-fifo/cluster/node_1/logfile_adwlu38e.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 626981 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fab/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_can_read_new_written_data_on_visibility_timeout.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fab/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_can_read_new_written_data_on_visibility_timeout.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fab/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_can_read_new_written_data_on_visibility_timeout.tables_format_v0/cluster/node_1/logfile_egiik0fo.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 633250 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file 
<_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fab/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_can_read_new_written_data_on_visibility_timeout.tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fab/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_can_read_new_written_data_on_visibility_timeout.tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fab/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_can_read_new_written_data_on_visibility_timeout.tables_format_v0/cluster/slot_1/logfile_gjgiska6.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 635874 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding_date_time[v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client8-year Int32-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client9-year Uint32-False] >> test_recovery.py::TestRecovery::test_recovery >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.tsv-tsv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.json-json_each_row] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] |81.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_select_limit.py::TestSelectLimit::test_select_limit[v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [FAIL] >> test_retry.py::TestRetry::test_fail_first[kikimr0] [FAIL] >> test_retry.py::TestRetry::test_low_rate[kikimr0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication 2025-05-29 15:39:36,217 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-29 15:39:36,443 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 4152999 46.0M 45.5M 23.2M test_tool run_ut @/home/runner/.ya/build/build_root/ciyv/001ca5/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args 4153675 493M 473M 213M └─ ydb-services-ydb-backup_ut --trace-path-append /home/runner/.ya/build/build_root/ciyv/001ca5/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/chunk4/y Test command err: 2025-05-29T15:29:37.507858Z node 1 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7509890236731006912:2221];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:37.507880Z node 1 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ca5/r3tmp/tmpFR7HUu/pdisk_1.dat 2025-05-29T15:29:37.591963Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61870, node 1 2025-05-29T15:29:37.607638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:37.607652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:37.607655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:37.607713Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-05-29T15:29:37.607811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:37.607839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:37.609550Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14028 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-29T15:29:37.662285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:37.907577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890236731007698:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:37.907600Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:37.949049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-29T15:29:38.016049Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890241025975163:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:38.016079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7509890241025975168:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:38.016092Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:38.016799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-29T15:29:38.020257Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7509890241025975170:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-29T15:29:38.120573Z node 1 :TX_PROXY ERROR: schemereq.cpp:546: Actor# [1:7509890241025975243:2778] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-29T15:29:38.138271Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [1:7509890241025975261:2352], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:29:38.138383Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=1&id=NGZhNzA3MjAtNTVkYzcyYzgtOTUwY2I1NTgtMzZjYTUxMTA=, ActorId: [1:7509890236731007680:2332], ActorState: ExecuteState, TraceId: 01jweakhpz3dpp64mvah4xsz8e, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/services/ydb/backup_ut/ydb_backup_ut.cpp:167, TDataQueryResult (anonymous namespace)::ExecuteDataModificationQuery(TSession &, const TString &, const TExecDataQuerySettings &): (result.IsSuccess()) script: UPSERT INTO `/Root/table` ( Key, Value ) VALUES (1, "one"), (2, "two"), (3, "three"), (4, "four"), (5, "five"); issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0x139A39CC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x13B57539) ??+0 (0x13831F7D) ??+0 (0x1383F3FE) NTestSuiteBackupRestore::TestTableBackupRestore()+2306 (0x1383EC92) NTestSuiteBackupRestore::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x13875F87) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x13B593EE) NTestSuiteBackupRestore::TCurrentTest::Execute()+436 (0x13875944) NUnitTest::TTestFactory::Execute()+803 (0x13B59B63) NUnitTest::RunMain(int, char**)+3021 (0x13B6B70D) ??+0 (0x7FAC61F13D90) __libc_start_main+128 (0x7FAC61F13E40) _start+41 (0x12914029) 2025-05-29T15:29:38.948927Z node 4 :METADATA_PROVIDER WARN: log.cpp:784: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7509890239912089815:2272];send_to=[0:7307199536658146131:7762515]; 2025-05-29T15:29:38.948971Z node 4 :METADATA_PROVIDER ERROR: log.cpp:784: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ciyv/001ca5/r3tmp/tmpN1jrLu/pdisk_1.dat 2025-05-29T15:29:38.970985Z node 4 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12190, node 4 2025-05-29T15:29:38.994937Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-29T15:29:38.994950Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-05-29T15:29:38.994952Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-05-29T15:29:38.995001Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19714 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-29T15:29:39.050144Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-29T15:29:39.050204Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-29T15:29:39.051895Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-29T15:29:39.054990Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-29T15:29:39.312863Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509890244207057862:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:39.312886Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7509890244207057854:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-05-29T15:29:39.312906Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:256: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status ... annot UPSERT objects:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:39:32.542226Z node 16 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [16:7509892792880077597:9599], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:39:32.543357Z node 16 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=16&id=ZDdhYzI1MTAtODdjNDc4YWEtNDdiMDQ0ZjYtMWJiYzBkMTA=, ActorId: [16:7509892792880077533:9672], ActorState: ExecuteState, TraceId: 01jweb5p9c3rb59wvr5c67sycn, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb5p95bfwmx76ppbf3rrg4 2025-05-29T15:39:32.545120Z node 16 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=16&id=ZDdhYzI1MTAtODdjNDc4YWEtNDdiMDQ0ZjYtMWJiYzBkMTA=" tx_control { tx_id: "01jweb5p95bfwmx76ppbf3rrg4" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533172 } } } } } ; 2025-05-29T15:39:32.545270Z node 16 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:39:33.715282Z node 16 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [16:7509892797175045013:9512], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:39:33.716501Z node 16 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=16&id=YmFhNWQxNTItNzM5YjVlNDUtNmE0ZjNiNjMtZjZiNmU5ZGE=, ActorId: [16:7509892797175044966:9555], ActorState: ExecuteState, TraceId: 01jweb5qdmbr3jkj4n62jknpxs, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb5qdbdnrzzvtfef48q1nk 2025-05-29T15:39:33.718504Z node 16 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=16&id=YmFhNWQxNTItNzM5YjVlNDUtNmE0ZjNiNjMtZjZiNmU5ZGE=" tx_control { tx_id: "01jweb5qdbdnrzzvtfef48q1nk" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533173 } } } } } ; 2025-05-29T15:39:33.718678Z node 16 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:39:34.861997Z node 16 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [16:7509892801470012438:9402], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:39:34.862222Z node 16 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=16&id=MzEzNGYyNWEtYmQ1ODIwMmItNDE5ODg2YjQtYTYwOGEyNg==, ActorId: [16:7509892801470012404:9445], ActorState: ExecuteState, TraceId: 01jweb5rht8h19pqb8cp32a4hc, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb5rhf18sfd1tvwqxmzgx8 2025-05-29T15:39:34.863253Z node 16 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=16&id=MzEzNGYyNWEtYmQ1ODIwMmItNDE5ODg2YjQtYTYwOGEyNg==" tx_control { tx_id: "01jweb5rhf18sfd1tvwqxmzgx8" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533174 } } } } } ; 2025-05-29T15:39:34.863419Z node 16 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
<main>: Fatal: Execution, code: 1060
<main>: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; 2025-05-29T15:39:36.022449Z node 16 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:564: Compilation failed, self: [16:7509892805764979850:9319], status: INTERNAL_ERROR, issues:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 2025-05-29T15:39:36.023742Z node 16 :KQP_SESSION WARN: kqp_session_actor.cpp:2152: SessionId: ydb://session/3?node_id=16&id=NGRhYTc3ODYtYzg5ZjBmNzgtOWFlZWQwY2UtMjMwZjM1OWU=, ActorId: [16:7509892805764979817:9350], ActorState: ExecuteState, TraceId: 01jweb5snh30s5ncas5ca3pf11, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: 01jweb5sn24g8w60408dm1zsrm 2025-05-29T15:39:36.026553Z node 16 :METADATA_PROVIDER ERROR: log.cpp:784: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: INTERNAL_ERROR issues { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/ast/yql_expr.h:1874: index out of range" issue_code: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ;request=session_id: "ydb://session/3?node_id=16&id=NGRhYTc3ODYtYzg5ZjBmNzgtOWFlZWQwY2UtMjMwZjM1OWU=" tx_control { tx_id: "01jweb5sn24g8w60408dm1zsrm" commit_tx: true } query { yql_text: "DECLARE $objects AS List>;\nUPSERT INTO `//Root/.metadata/initialization/migrations`\nSELECT componentId,modificationId,instant FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "componentId" type { type_id: UTF8 } } members { name: "modificationId" type { type_id: UTF8 } } members { name: "instant" type { type_id: UINT32 } } } } } } value { items { items { text_value: "INITIALIZATION" } items { text_value: "create" } items { uint32_value: 1748533175 } } } } } ; 2025-05-29T15:39:36.026757Z node 16 :METADATA_INITIALIZER ERROR: log.cpp:784: fline=accessor_init.cpp:81;event=OnAlteringProblem;error=cannot UPSERT objects:
<main>: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 ; Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/001ca5/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8689590287/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ciyv/001ca5/ydb/services/ydb/backup_ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-analytics] [FAIL] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-streaming] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_disposition.py::TestContinueMode::test_disposition_oldest[v1-mvp_external_ydb_endpoint0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_bad_syntax.py::TestBadSyntax::test_require_as[v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/py3test >> 
test_dispatch.py::TestMapping::test_idle [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] [FAIL] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] >> test_2_selects_limit.py::TestSelectLimit::test_select_same[v1] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client9-year Uint32-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client10-year Int64 NOT NULL-True] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-with_checkpoints-mvp_external_ydb_endpoint0] [FAIL] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-without_checkpoints-mvp_external_ydb_endpoint0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f8f/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_validates_deduplication_id.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f8f/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_validates_deduplication_id.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f8f/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_validates_deduplication_id.tables_format_v1/cluster/node_1/logfile_8fb_mtx2.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001f8f/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_validates_deduplication_id.tables_format_v1/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f8f/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_validates_deduplication_id.tables_format_v1/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f8f/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_validates_deduplication_id.tables_format_v1/cluster/slot_1/logfile_pm8i1ugp.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 633791 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 636728 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |81.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client11-year Utf8 NOT NULL-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client12-year Date-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [FAIL] Test command err: ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fa5/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_write_read_delete_many_groups.tables_format_v0/cluster/node_1/stdout'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fa5/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_write_read_delete_many_groups.tables_format_v0/cluster/node_1/stderr'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/001fa5/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithPath.test_write_read_delete_many_groups.tables_format_v0/cluster/node_1/logfile_fp8m503h.log'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 627934 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fa5/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_can_read_from_different_groups.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fa5/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_can_read_from_different_groups.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fa5/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_can_read_from_different_groups.tables_format_v0/cluster/node_1/logfile_gbgigi6e.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fa5/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_can_read_from_different_groups.tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001fa5/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_can_read_from_different_groups.tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001fa5/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_can_read_from_different_groups.tables_format_v0/cluster/slot_1/logfile_9w3o02uy.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 632567 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 635606 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.json-json_each_row] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.parquet-parquet] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_recovery.py::TestRecovery::test_program_state_recovery_error_if_no_states [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client10-year Int64 NOT NULL-True] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client11-year Int64-False] >> test_recovery.py::TestRecovery::test_recovery [FAIL] >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_match_recognize[v1-kikimr0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f97/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_change_visibility_batch_works.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f97/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_change_visibility_batch_works.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f97/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_change_visibility_batch_works.tables_format_v1-fifo/cluster/node_1/logfile_0me0yqdd.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 632496 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [FAIL] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_state_load_mode[v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_retry.py::TestRetry::test_low_rate[kikimr0] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f8a/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_change_visibility_to_zero_works.tables_format_v1-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f8a/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_change_visibility_to_zero_works.tables_format_v1-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f8a/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_change_visibility_to_zero_works.tables_format_v1-std/cluster/node_1/logfile__sva32o0.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 635642 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f91/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk32/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_send_message_batch.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f91/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk32/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_send_message_batch.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/001f91/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk32/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_send_message_batch.tables_format_v1-fifo/cluster/node_1/logfile_vpyrmzns.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 633697 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test_quota_exhaustion.py::TestYdbWorkload::test |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.parquet-parquet] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.csv-csv_with_names] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-streaming] [FAIL] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-analytics] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client11-year Int64-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client12-year Uint64-False] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client12-year Date-False] [FAIL] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-without_checkpoints-mvp_external_ydb_endpoint0] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client13-year Date NOT NULL-True] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [FAIL] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-with_checkpoints] >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_limit.py::TestSelectLimit::test_select_limit[v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/olap/data_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_match_recognize[v1-kikimr0] [FAIL] >> test_2_selects_limit.py::TestSelectLimit::test_select_same[v1] [FAIL] >> test_2_selects_limit.py::TestSelectLimit::test_select_sequence[v1] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client13-year Date NOT NULL-True] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f84/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_queue_attributes.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f84/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_queue_attributes.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f84/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_queue_attributes.tables_format_v1/cluster/node_1/logfile_cm8wyfqs.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 637004 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f84/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_queue_attributes.tables_format_v1/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f84/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_queue_attributes.tables_format_v1/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f84/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_queue_attributes.tables_format_v1/cluster/slot_1/logfile_kgl8nooc.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback 
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 638724 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client14-year Datetime-False] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-analytics] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f9c/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk46/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_empty_queue_url.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f9c/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk46/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_empty_queue_url.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f9c/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk46/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_empty_queue_url.tables_format_v0/cluster/node_1/logfile_paqvmqz1.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f9c/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk46/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_empty_queue_url.tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f9c/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk46/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_empty_queue_url.tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f9c/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk46/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_empty_queue_url.tables_format_v0/cluster/slot_1/logfile_nx13y0hk.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 631101 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 635172 is still 
running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] [FAIL] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_continue_mode.py::TestContinueMode::test_continue_from_offsets[v1-mvp_external_ydb_endpoint0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_quota_exhaustion.py::TestYdbWorkload::test_delete >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client14-year Datetime-False] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.csv-csv_with_names] [FAIL] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] [FAIL] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client15-year Datetime NOT NULL-True] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.tsv-tsv_with_names] |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [FAIL] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-with_checkpoints] [FAIL] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-without_checkpoints] |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> test_2_selects_limit.py::TestSelectLimit::test_select_sequence[v1] [FAIL] >> test_3_selects.py::TestSelects::test_3_selects[v1-mvp_external_ydb_endpoint0] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] [FAIL] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/py3test >> 
test_retry.py::TestRetry::test_low_rate[kikimr0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_match_recognize[v1-kikimr0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f8d/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_can_read_new_written_data_on_visibility_timeout.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f8d/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_can_read_new_written_data_on_visibility_timeout.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f8d/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithPath.test_can_read_new_written_data_on_visibility_timeout.tables_format_v1/cluster/node_1/logfile_nt4ia51r.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 635648 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_select_1.py::TestSelect1::test_select_1[v1] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.tsv-tsv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.json-json_each_row] >> test_select_limit_db_id.py::TestSelectLimitWithDbId::test_select_same_with_id[v1-mvp_external_ydb_endpoint0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client15-year Datetime NOT NULL-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client0-column_type0-True] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> 
test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-without_checkpoints-mvp_external_ydb_endpoint0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-without_checkpoints] [FAIL] >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] [FAIL] >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [FAIL] Test command err: ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f87/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk59/testing_out_stuff/test_generic_messaging.py.TestYandexAttributesPrefix.test_allows_yandex_message_attribute_prefix.tables_format_v1/cluster/node_1/stdout'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f87/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk59/testing_out_stuff/test_generic_messaging.py.TestYandexAttributesPrefix.test_allows_yandex_message_attribute_prefix.tables_format_v1/cluster/node_1/stderr'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f87/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk59/testing_out_stuff/test_generic_messaging.py.TestYandexAttributesPrefix.test_allows_yandex_message_attribute_prefix.tables_format_v1/cluster/node_1/logfile_fadkvl29.log'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 637236 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f87/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk59/testing_out_stuff/test_polling.py.TestSqsPolling.test_receive_message_with_polling.tables_format_v0-long_polling-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001f87/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk59/testing_out_stuff/test_polling.py.TestSqsPolling.test_receive_message_with_polling.tables_format_v0-long_polling-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f87/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk59/testing_out_stuff/test_polling.py.TestSqsPolling.test_receive_message_with_polling.tables_format_v0-long_polling-fifo/cluster/node_1/logfile_cobzp2vo.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 639913 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] >> test_3_selects.py::TestSelects::test_3_selects[v1-mvp_external_ydb_endpoint0] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-analytics] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [FAIL] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming[v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] >> test_quota_exhaustion.py::TestYdbWorkload::test [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/py3test >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client12-year Uint64-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client13-year Date-False] |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] >> test_select_limit_db_id.py::TestSelectLimitWithDbId::test_select_same_with_id[v1-mvp_external_ydb_endpoint0] [FAIL] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-aborted] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [FAIL] >> test_disposition.py::TestContinueMode::test_disposition_time_ago[v1-mvp_external_ydb_endpoint0] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.json-json_each_row] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.parquet-parquet] |81.9%| [TA] $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} |81.9%| [TA] {RESULT} $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v1] [FAIL] >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v2] |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f7e/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_change_visibility_to_zero_works.tables_format_v1-std/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f7e/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_change_visibility_to_zero_works.tables_format_v1-std/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f7e/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_change_visibility_to_zero_works.tables_format_v1-std/cluster/node_1/logfile_o2yaoqna.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f7e/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_change_visibility_to_zero_works.tables_format_v1-std/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f7e/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_change_visibility_to_zero_works.tables_format_v1-std/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/001f7e/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_generic_messaging.py.TestSqsGenericMessagingWithTenant.test_change_visibility_to_zero_works.tables_format_v1-std/cluster/slot_1/logfile_sv7aflg6.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 639449 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 641668 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.parquet-parquet] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.csv-csv_with_names] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_optional >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client0-column_type0-True] [FAIL] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming[v1] [FAIL] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-aborted] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client1-column_type1-True] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_date_time_format[v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] >> test_select_1.py::TestSelect1::test_select_1[v1] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-without_checkpoints] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_select_1.py::TestSelect1::test_select_1[v2] >> test_disposition.py::TestContinueMode::test_disposition_time_ago[v1-mvp_external_ydb_endpoint0] [FAIL] >> test_eval.py::TestEval::test_eval_2_2[v1] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_delete [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] [FAIL] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.csv-csv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.tsv-tsv_with_names] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-simple] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client1-column_type1-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client2-column_type2-True] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [FAIL] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client13-year Date-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client0-year Int32 NOT NULL-True] >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v2] [FAIL] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_watermarks.py::TestWatermarks::test_pq_watermarks[v1-mvp_external_ydb_endpoint0] >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_optional [FAIL] >> test_row_dispatcher.py::TestPqRowDispatcher::test_start_new_query >> test_eval.py::TestEval::test_eval_2_2[v1] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_date_time_format[v1] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f81/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_crutch_groups_selection_algorithm_selects_second_group_batch.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f81/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_crutch_groups_selection_algorithm_selects_second_group_batch.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/001f81/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_crutch_groups_selection_algorithm_selects_second_group_batch.tables_format_v1/cluster/node_1/logfile__7sqsqjn.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f81/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_crutch_groups_selection_algorithm_selects_second_group_batch.tables_format_v1/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f81/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_crutch_groups_selection_algorithm_selects_second_group_batch.tables_format_v1/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f81/ydb/tests/functional/sqs/messaging/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_fifo_messaging.py.TestSqsFifoMessagingWithTenant.test_crutch_groups_selection_algorithm_selects_second_group_batch.tables_format_v1/cluster/slot_1/logfile_rdj0xvyp.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 641280 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 643009 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.tsv-tsv_with_names] [FAIL] >> test_row_dispatcher.py::TestPqRowDispatcher::test_2_connection >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.json-json_each_row] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case4[v1] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client2-column_type2-True] [FAIL] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client3-column_type3-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client0-year Int32 NOT NULL-True] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client1-year Uint32 NOT NULL-True] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_3_selects.py::TestSelects::test_3_selects[v1-mvp_external_ydb_endpoint0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the 
object allocation traceback
>> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [FAIL]
|82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_select_1.py::TestSelect1::test_select_1[v2] [FAIL]
>> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-simple] [FAIL]
>> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-with_recovery]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0]
>> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.json-json_each_row] [FAIL]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_group_by_hop_restart_query [SKIPPED]
|82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
|82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.parquet-parquet]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_many_partitions
|82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-aborted] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
|82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_watermarks.py::TestWatermarks::test_pq_watermarks[v1-mvp_external_ydb_endpoint0] [FAIL]
>> test_yds_bindings.py::TestBindings::test_raw_empty_schema_binding[v1]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client1-year Uint32 NOT NULL-True] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client2-year Uint64 NOT NULL-True]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_start_new_query [FAIL]
|82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo]
|82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client3-column_type3-False] [FAIL]
>> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client4-column_type4-True]
|82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_yq_streaming.py::TestYqStreaming::test_early_finish_case4[v1] [FAIL]
>> test_yq_streaming.py::TestYqStreaming::test_early_finish_case5[v1]
>> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.parquet-parquet] [FAIL]
>> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.csv-csv_with_names]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v2] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-with_recovery] [FAIL]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_2_connection [FAIL]
>> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [FAIL]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_3_sessions
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_eval.py::TestEval::test_eval_2_2[v1] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client2-year Uint64 NOT NULL-True] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client3-year Date NOT NULL-False]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_many_partitions [FAIL]
|82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
|82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0]
|82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client4-column_type4-True] [FAIL]
>> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.csv-csv_with_names] [FAIL]
>> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.tsv-tsv_with_names]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_select_1[v2] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client5-column_type5-True]
>> test_yq_streaming.py::TestYqStreaming::test_early_finish_case5[v1] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_date_time_format[v1] [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_auditlog.py::test_single_dml_query_logged[delete]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client3-year Date NOT NULL-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client4-year String NOT NULL-True]
|82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test
>> test_quota_exhaustion.py::TestYdbWorkload::test [FAIL]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] [FAIL]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1]
>> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig]
|82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_yds_bindings.py::TestBindings::test_raw_empty_schema_binding[v1] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client5-column_type5-True] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client6-column_type6-True]
>> test_row_dispatcher.py::TestPqRowDispatcher::test_3_sessions [FAIL]
>> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.tsv-tsv_with_names] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_start_new_query [FAIL]
Test command err:
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
|82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.json-json_each_row]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] [FAIL]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [FAIL]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] [FAIL]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client4-year String NOT NULL-True] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client5-year String-False]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] [FAIL]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo]
>> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] [FAIL]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std]
|82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
|82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] [FAIL]
>> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [FAIL]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >>
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f3c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk31/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_queue_counters_are_in_folder.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f3c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk31/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_queue_counters_are_in_folder.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f3c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk31/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_queue_counters_are_in_folder.tables_format_v0/cluster/node_1/logfile_58juikan.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f3c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk31/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_queue_counters_are_in_folder.tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f3c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk31/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_queue_counters_are_in_folder.tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f3c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk31/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_queue_counters_are_in_folder.tables_format_v0/cluster/slot_1/logfile_qo7wwo5j.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 645727 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 648060 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback 2025-05-29 15:40:16,063 WARNING libarchive: File (test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_queue_counters_are_in_folder.tables_format_v0/cluster/node_1/logfile_58juikan.log) size has changed. Can't write more data than was declared in the tar header (181652). 
(probably file was changed during archiving) >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.json-json_each_row] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_delete [FAIL] Test command err: Database name /Root/test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client6-column_type6-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client7-column_type7-False] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.parquet-parquet] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [FAIL] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-with_recovery] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_many_partitions [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client5-year String-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client6-year Utf8 NOT NULL-True] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f2c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_queue_recreation.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f2c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_queue_recreation.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/001f2c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_queue_recreation.tables_format_v1-fifo/cluster/node_1/logfile_488z0x8z.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f2c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_queue_recreation.tables_format_v1-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f2c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_queue_recreation.tables_format_v1-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f2c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_queue_recreation.tables_format_v1-fifo/cluster/slot_1/logfile_jscakzjh.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 646253 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 648672 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f30/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_empty_access_key_id.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f30/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_empty_access_key_id.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/001f30/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_empty_access_key_id.tables_format_v0/cluster/node_1/logfile_741equak.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f30/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_empty_access_key_id.tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f30/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_empty_access_key_id.tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f30/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_empty_access_key_id.tables_format_v0/cluster/slot_1/logfile_rfrasf3w.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 645769 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 647396 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback 2025-05-29 15:40:18,519 WARNING libarchive: File (test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_empty_access_key_id.tables_format_v0/cluster/slot_1/logfile_rfrasf3w.log) size has changed. Can't write more data than was declared in the tar header (156088). 
(probably file was changed during archiving) |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] [FAIL] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.parquet-parquet] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client7-column_type7-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client8-column_type8-False] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_SECONDS] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f33/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_queue_recreation.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f33/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_queue_recreation.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f33/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_queue_recreation.tables_format_v1-fifo/cluster/node_1/logfile_5ozkpncx.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 646556 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f33/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_queue_recreation.tables_format_v1-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f33/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_queue_recreation.tables_format_v1-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file 
<_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f33/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_queue_recreation.tables_format_v1-fifo/cluster/slot_1/logfile_pftr2gpk.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 649615 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f28/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_queues_with_iam_token.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f28/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_queues_with_iam_token.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f28/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_queues_with_iam_token.tables_format_v0-fifo/cluster/node_1/logfile_szurlvnq.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f28/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_queues_with_iam_token.tables_format_v0-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f28/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_queues_with_iam_token.tables_format_v0-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f28/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_queues_with_iam_token.tables_format_v0-fifo/cluster/slot_1/logfile_6ioe31eb.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 647128 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 649517 is still running ResourceWarning: Enable 
tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f38/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_list_queues_for_unknown_cloud.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f38/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_list_queues_for_unknown_cloud.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f38/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_list_queues_for_unknown_cloud.tables_format_v0/cluster/node_1/logfile_0o5lsjcb.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f38/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_list_queues_for_unknown_cloud.tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f38/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_list_queues_for_unknown_cloud.tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f38/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_list_queues_for_unknown_cloud.tables_format_v0/cluster/slot_1/logfile_63_cr94o.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 645984 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 648514 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yds_bindings.py::TestBindings::test_raw_empty_schema_binding[v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client6-year Utf8 NOT NULL-True] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client7-year Utf8-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case5[v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [FAIL] Test command err: Database name /Root/test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] [FAIL] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_3_sessions [FAIL] Test command err: sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f35/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk34/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_yc_events_processor.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f35/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk34/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_yc_events_processor.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/001f35/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk34/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_yc_events_processor.tables_format_v0/cluster/node_1/logfile_v60_8d1_.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f35/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk34/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_yc_events_processor.tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f35/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk34/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_yc_events_processor.tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f35/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk34/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_yc_events_processor.tables_format_v0/cluster/slot_1/logfile_an2do6am.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 647314 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 650586 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [FAIL] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f1f/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_create_queue.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f1f/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_create_queue.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/001f1f/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_create_queue.tables_format_v1-fifo/cluster/node_1/logfile_8dzmytwk.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f1f/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_create_queue.tables_format_v1-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f1f/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_create_queue.tables_format_v1-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f1f/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_create_queue.tables_format_v1-fifo/cluster/slot_1/logfile_a9ase9y8.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 648688 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 651148 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client7-year Utf8-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client8-year Int32-False] |82.2%| [TA] $(B)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |82.3%| [TA] $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_SECONDS] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MICROSECONDS] |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [FAIL] |82.3%| [TA] {RESULT} $(B)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f22/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_common.py.TestCommonYandexWithPath.test_private_queue_recreation.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f22/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_common.py.TestCommonYandexWithPath.test_private_queue_recreation.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f22/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_common.py.TestCommonYandexWithPath.test_private_queue_recreation.tables_format_v0-fifo/cluster/node_1/logfile_uvkir7rn.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 648168 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f25/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_not_throttling_with_custom_queue_name.fifo-tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f25/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_not_throttling_with_custom_queue_name.fifo-tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f25/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_not_throttling_with_custom_queue_name.fifo-tables_format_v0/cluster/node_1/logfile_equn2k8p.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001f25/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_not_throttling_with_custom_queue_name.fifo-tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f25/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_not_throttling_with_custom_queue_name.fifo-tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f25/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_not_throttling_with_custom_queue_name.fifo-tables_format_v0/cluster/slot_1/logfile__udto37t.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 647808 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 650576 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client8-year Int32-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client9-year Uint32-False] >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.3%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... 
results_accumulator.log} |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f1c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v0-tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f1c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v0-tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f1c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v0-tables_format_v1-fifo/cluster/node_1/logfile_01icc5h5.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 648590 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f1c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v0-tables_format_v1-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f1c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v0-tables_format_v1-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f1c/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v0-tables_format_v1-fifo/cluster/slot_1/logfile_nm5kmolr.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 650537 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] [FAIL] |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MICROSECONDS] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MILLISECONDS] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] >> test_auditlog.py::test_dml_begin_commit_logged |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f19/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk30/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_not_throttling_with_custom_queue_name.std-tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f19/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk30/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_not_throttling_with_custom_queue_name.std-tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f19/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk30/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_not_throttling_with_custom_queue_name.std-tables_format_v0/cluster/node_1/logfile_lgr11073.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 650180 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001f19/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk30/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_not_throttling_with_custom_queue_name.std-tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f19/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk30/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_not_throttling_with_custom_queue_name.std-tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f19/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk30/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_not_throttling_with_custom_queue_name.std-tables_format_v0/cluster/slot_1/logfile_v5l_8sh7.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 653384 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.3%| [TA] $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client8-column_type8-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client9-column_type9-False] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MILLISECONDS] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_SECONDS] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] >> test_auditlog.py::test_single_dml_query_logged[delete] [FAIL] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [FAIL] >> test_auditlog.py::test_dml_requests_logged_when_unauthorized >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] [FAIL] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_SECONDS] [FAIL] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] >> 
test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MICROSECONDS] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [FAIL] |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f10/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_queues_with_iam_token.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f10/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_queues_with_iam_token.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f10/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_queues_with_iam_token.tables_format_v1-fifo/cluster/node_1/logfile_y7k3ainq.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f10/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_queues_with_iam_token.tables_format_v1-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f10/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_queues_with_iam_token.tables_format_v1-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f10/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_queues_with_iam_token.tables_format_v1-fifo/cluster/slot_1/logfile_dn3h5ave.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 651692 is still running ResourceWarning: Enable tracemalloc to get the object allocation 
traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 654040 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback 2025-05-29 15:40:29,551 WARNING libarchive: File (test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_queues_with_iam_token.tables_format_v1-fifo/cluster/node_1/logfile_y7k3ainq.log) size has changed. Can't write more data than was declared in the tar header (184082). (probably file was changed during archiving) |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f16/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_counters_when_sending_duplicates/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f16/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_counters_when_sending_duplicates/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f16/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_counters_when_sending_duplicates/cluster/node_1/logfile_pl4taty7.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 651734 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f16/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_counters_when_sending_duplicates/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f16/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_counters_when_sending_duplicates/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f16/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_counters_when_sending_duplicates/cluster/slot_1/logfile_vgisr2f7.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 654488 is still running ResourceWarning: Enable tracemalloc to get the 
object allocation traceback |82.4%| [TA] $(B)/ydb/tests/fq/yds/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f0a/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_ymq_send_read_delete/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f0a/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_ymq_send_read_delete/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f0a/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_ymq_send_read_delete/cluster/node_1/logfile_g1sgbw2u.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 652650 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f0a/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_ymq_send_read_delete/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f0a/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_ymq_send_read_delete/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f0a/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_ymq_send_read_delete/cluster/slot_1/logfile_kfcfcuj8.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 654557 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters [FAIL] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] >> 
test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MICROSECONDS] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client9-year Uint32-False] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MILLISECONDS] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client10-year Int64 NOT NULL-True] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client10-year Int64 NOT NULL-True] [FAIL] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client11-year Int64-False] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MILLISECONDS] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_SECONDS] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ef2/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_common.py.TestCommonYandexWithPath.test_private_create_queue.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ef2/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_common.py.TestCommonYandexWithPath.test_private_create_queue.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ef2/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_common.py.TestCommonYandexWithPath.test_private_create_queue.tables_format_v1-fifo/cluster/node_1/logfile_c67isu6j.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 655435 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f04/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v0-tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f04/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v0-tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f04/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v0-tables_format_v0-fifo/cluster/node_1/logfile_kka0allu.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f04/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v0-tables_format_v0-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f04/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v0-tables_format_v0-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f04/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v0-tables_format_v0-fifo/cluster/slot_1/logfile_ej02iaq6.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 654580 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 656995 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.5%| [TA] {RESULT} $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client9-column_type9-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client10-column_type10-False] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] |82.5%| [TA] {RESULT} $(B)/ydb/tests/fq/yds/test-results/py3test/{meta.json ... results_accumulator.log} >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_SECONDS] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MICROSECONDS] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [FAIL] |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MICROSECONDS] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MILLISECONDS] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [FAIL] |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f0d/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_setup_in_cloud.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f0d/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_setup_in_cloud.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f0d/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_setup_in_cloud.tables_format_v1-fifo/cluster/node_1/logfile_r1206xeu.log'> 
ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f0d/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_setup_in_cloud.tables_format_v1-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f0d/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_setup_in_cloud.tables_format_v1-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f0d/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_setup_in_cloud.tables_format_v1-fifo/cluster/slot_1/logfile_j_x5t_w1.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 653924 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 656990 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client11-year Int64-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client12-year Uint64-False] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001eef/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_count_queues.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001eef/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_count_queues.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001eef/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_count_queues.tables_format_v0/cluster/node_1/logfile_eqc6101k.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001eef/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_count_queues.tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001eef/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_count_queues.tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001eef/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_count_queues.tables_format_v0/cluster/slot_1/logfile_fk0mu_8r.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 655797 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 659917 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [FAIL] |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MILLISECONDS] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_SECONDS] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client10-column_type10-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client11-column_type11-False] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header >> 
test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] [FAIL] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ef9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk21/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_message_counters_in_cloud.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ef9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk21/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_message_counters_in_cloud.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ef9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk21/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_message_counters_in_cloud.tables_format_v0-fifo/cluster/node_1/logfile_zzs5tt96.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 655342 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ef9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk21/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_message_counters_in_cloud.tables_format_v0-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ef9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk21/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_message_counters_in_cloud.tables_format_v0-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ef9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk21/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_message_counters_in_cloud.tables_format_v0-fifo/cluster/slot_1/logfile_90t0hp2d.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 657154 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [FAIL] |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] [FAIL] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] [FAIL] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [FAIL] >> overlapping_portions.py::TestOverlappingPortions::test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [FAIL] |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2141] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:111:2141] 2025-05-29T15:40:46.679280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7488: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-29T15:40:46.679315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7516: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:40:46.679321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7402: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-29T15:40:46.679326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7418: OperationsProcessing config: using default configuration 2025-05-29T15:40:46.679341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-29T15:40:46.679346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7424: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-29T15:40:46.679357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7548: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-29T15:40:46.679371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:39: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-29T15:40:46.679499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7619: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-29T15:40:46.679595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-05-29T15:40:46.747202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7309: Cannot subscribe to console configs 2025-05-29T15:40:46.747230Z node 1 :IMPORT WARN: schemeshard_import.cpp:303: Table profiles were not loaded 2025-05-29T15:40:46.757100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-05-29T15:40:46.757320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-05-29T15:40:46.757378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-29T15:40:46.771812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-05-29T15:40:46.774530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:445: Clear TempDirsState with owners number: 0 2025-05-29T15:40:46.774662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1348: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-29T15:40:46.774793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:32: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-29T15:40:46.780946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:157: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:40:46.781031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_data_erasure_manager.cpp:84: [RootDataErasureManager] Stop 2025-05-29T15:40:46.781369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:40:46.781380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29T15:40:46.781401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-05-29T15:40:46.781410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:40:46.781416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling.Complete 2025-05-29T15:40:46.781457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6671: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-29T15:40:46.798424Z node 1 :HIVE INFO: tablet_helpers.cpp:1130: [72057594037968897] started, primary subdomain 0:0 Leader for 
TabletID 72057594046678944 is [1:128:2152] sender: [1:242:2058] recipient: [1:15:2062] 2025-05-29T15:40:46.929555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:372: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-29T15:40:46.929648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:40:46.929726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-29T15:40:46.929774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:130: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-29T15:40:46.929785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:178: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:40:46.937811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:451: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-29T15:40:46.937860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-29T15:40:46.937943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:40:46.937957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:313: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-29T15:40:46.937963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:367: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-29T15:40:46.937970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 2 -> 3 2025-05-29T15:40:46.944311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:40:46.944347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-29T15:40:46.944358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 3 -> 128 2025-05-29T15:40:46.946557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:40:46.946584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-29T15:40:46.946592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-05-29T15:40:46.946602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1650: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-29T15:40:46.947474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1719: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-29T15:40:46.949353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:652: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-29T15:40:46.949414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1751: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-29T15:40:46.949626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:40:46.949672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:40:46.949681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:40:46.949794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 1:0 128 -> 240 2025-05-29T15:40:46.949805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-29T15:40:46.949839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:476: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-29T15:40:46.949852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:402: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-29T15:40:46.962868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-29T15:40:46.962905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-29T15:40:46.962970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-29 ... 
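A reading aid for the trace above: the numeric pairs in the "Change state for txid" lines are the schemeshard suboperation state codes for ESchemeOpAlterSubDomain. The sketch below is illustrative only, not YDB source; the state names are taken from the adjacent ProgressState log lines, and the code merely replays the transitions visible in this log (2 -> 3 -> 128 -> 240).

# Suboperation states observed for txId 1 (ESchemeOpAlterSubDomain) in the
# trace above; numeric codes come from the "Change state for txid" lines.
ALTER_SUBDOMAIN_PROGRESSION = [
    (2,   "TCreateParts"),     # "no shards to create, do next state"
    (3,   "TConfigureParts"),  # NSubDomainState::TConfigureParts
    (128, "TPropose"),         # waits for the coordinator plan step (5000001 here)
    (240, "Done"),             # after TEvOperationPlan; path is then published
]

for (code, _), (next_code, name) in zip(ALTER_SUBDOMAIN_PROGRESSION,
                                        ALTER_SUBDOMAIN_PROGRESSION[1:]):
    # Mirrors the log lines "Change state for txid 1:0 <code> -> <next_code>"
    print(f"Change state {code} -> {next_code}  # enter {name}")
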
D DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-05-29T15:40:48.893280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-05-29T15:40:48.893286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-05-29T15:40:48.893378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:676: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-29T15:40:48.893416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:680: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969445 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-29T15:40:48.893426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-05-29T15:40:48.893434Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2495: Change state for txid 281474976710760:0 128 -> 240 2025-05-29T15:40:48.893977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:485: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-05-29T15:40:48.893990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:482: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-05-29T15:40:48.894008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710760:0 progress is 1/1 2025-05-29T15:40:48.894012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-05-29T15:40:48.894020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:906: Part operation is done id#281474976710760:0 progress is 1/1 2025-05-29T15:40:48.894024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-05-29T15:40:48.894029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1606: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-05-29T15:40:48.894042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1629: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:128:2152] message: TxId: 281474976710760 2025-05-29T15:40:48.894050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1641: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-05-29T15:40:48.894056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:973: Operation and all the parts is done, operation id: 281474976710760:0 2025-05-29T15:40:48.894061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5174: RemoveTx for txid 281474976710760:0 2025-05-29T15:40:48.894079Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:487: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-05-29T15:40:48.894445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6746: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-05-29T15:40:48.894459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6748: Message: TxId: 281474976710760 2025-05-29T15:40:48.894472Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2338: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-05-29T15:40:48.894489Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2341: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:387:2358], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:40:48.899715Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2025-05-29T15:40:48.899750Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:387:2358], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:40:48.994467Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:25: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-05-29T15:40:48.996357Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1121: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: 
Execute: 102 Done 2025-05-29T15:40:48.996410Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1122: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:387:2358], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-05-29T15:40:48.996418Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-05-29T15:40:48.996465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:227: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-29T15:40:48.996474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:236: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:478:2438] TestWaitNotification: OK eventTxId 102 2025-05-29T15:40:48.996641Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-29T15:40:48.996721Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 102us result status StatusSuccess 2025-05-29T15:40:48.996932Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } 
TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ecd/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_common.py.TestCommonYandexWithPath.test_private_queue_recreation.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ecd/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_common.py.TestCommonYandexWithPath.test_private_queue_recreation.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ecd/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_common.py.TestCommonYandexWithPath.test_private_queue_recreation.tables_format_v1-fifo/cluster/node_1/logfile_sd6l64_3.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 661437 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001f07/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_message_counters_in_cloud.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f07/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_message_counters_in_cloud.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f07/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_message_counters_in_cloud.tables_format_v1-fifo/cluster/node_1/logfile_o0ms80_7.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f07/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_message_counters_in_cloud.tables_format_v1-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f07/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_message_counters_in_cloud.tables_format_v1-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f07/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_message_counters_in_cloud.tables_format_v1-fifo/cluster/slot_1/logfile_fnrw6zrj.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 653462 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 655964 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client12-year Uint64-False] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client13-year Date-False] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_SECONDS] [FAIL] >> 
test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MICROSECONDS] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ee0/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_create_queue.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ee0/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_create_queue.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ee0/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_create_queue.tables_format_v0-fifo/cluster/node_1/logfile_mx4qz03r.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ee0/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_create_queue.tables_format_v0-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ee0/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_create_queue.tables_format_v0-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ee0/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_create_queue.tables_format_v0-fifo/cluster/slot_1/logfile_zijxggjm.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 660086 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 662583 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [FAIL] ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ee4/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_common.py.TestCommonYandexWithPath.test_private_create_queue.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ee4/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_common.py.TestCommonYandexWithPath.test_private_create_queue.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ee4/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_common.py.TestCommonYandexWithPath.test_private_create_queue.tables_format_v0-fifo/cluster/node_1/logfile_52jjm8ty.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 658948 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] [FAIL] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client11-column_type11-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client12-column_type12-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001efc/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk20/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v1-tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001efc/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk20/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v1-tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001efc/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk20/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v1-tables_format_v1-fifo/cluster/node_1/logfile_j_1h160d.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file 
<_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001efc/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk20/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v1-tables_format_v1-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001efc/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk20/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v1-tables_format_v1-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001efc/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk20/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v1-tables_format_v1-fifo/cluster/slot_1/logfile_o5zqd796.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 656046 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 660626 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MICROSECONDS] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MILLISECONDS] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client13-year Date-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client0-year Int32 NOT NULL-False] >> test_auditlog.py::test_dml_begin_commit_logged [FAIL] |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [FAIL] Test command err: ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f13/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_ymq_expiring_counters/cluster/node_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001f13/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_ymq_expiring_counters/cluster/node_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f13/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_ymq_expiring_counters/cluster/node_1/logfile_ngadelen.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 652601 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f13/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_ymq_expiring_counters/cluster/slot_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f13/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_ymq_expiring_counters/cluster/slot_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f13/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_ymq_expiring_counters/cluster/slot_1/logfile_hm3w__rx.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 655444 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f13/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_counters_when_reading_from_empty_queue/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f13/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_counters_when_reading_from_empty_queue/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/001f13/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_counters_when_reading_from_empty_queue/cluster/node_1/logfile_dnby6w3n.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 658949 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f13/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_counters_when_reading_from_empty_queue/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f13/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_counters_when_reading_from_empty_queue/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f13/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_counters_when_reading_from_empty_queue/cluster/slot_1/logfile_e9dvvhh_.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 662615 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] [FAIL] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client0-year Int32 NOT NULL-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client1-year Uint32 NOT NULL-False] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001eec/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk33/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_retryable_iam_error.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001eec/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk33/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_retryable_iam_error.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: 
unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001eec/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk33/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_retryable_iam_error.tables_format_v0/cluster/node_1/logfile_6e4j79l4.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001eec/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk33/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_retryable_iam_error.tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001eec/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk33/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_retryable_iam_error.tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001eec/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk33/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_retryable_iam_error.tables_format_v0/cluster/slot_1/logfile_u5s_gny5.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 657693 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 663414 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client12-column_type12-False] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MILLISECONDS] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f01/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_double_create_queue.fifo-tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f01/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_double_create_queue.fifo-tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/001f01/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_double_create_queue.fifo-tables_format_v0/cluster/node_1/logfile_9yxmcd5o.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f01/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_double_create_queue.fifo-tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001f01/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_double_create_queue.fifo-tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001f01/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_double_create_queue.fifo-tables_format_v0/cluster/slot_1/logfile_elekzpys.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 655794 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 660658 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001edd/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v1-tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001edd/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v1-tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001edd/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v1-tables_format_v0-fifo/cluster/node_1/logfile_cjpcrn1d.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001edd/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v1-tables_format_v0-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001edd/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v1-tables_format_v0-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001edd/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v1-tables_format_v0-fifo/cluster/slot_1/logfile_nu7ezni2.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 659502 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 664137 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_SECONDS] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client13-column_type13-False] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client1-year Uint32 NOT NULL-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client2-year Uint64 NOT NULL-False] |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ee8/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_queue_recreation.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001ee8/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_queue_recreation.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ee8/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_queue_recreation.tables_format_v0-fifo/cluster/node_1/logfile_qfscae_s.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ee8/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_queue_recreation.tables_format_v0-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ee8/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_queue_recreation.tables_format_v0-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ee8/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_queue_recreation.tables_format_v0-fifo/cluster/slot_1/logfile_uri22kgv.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 658606 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 660618 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_SECONDS] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MICROSECONDS] >> InMemoryControlPlaneStorage::ExecuteSimpleStreamQuery >> TYdbControlPlaneStorageListBindings::ShouldSuccess >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] [FAIL] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001ef6/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_create_queue.tables_format_v1-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ef6/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_create_queue.tables_format_v1-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ef6/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_create_queue.tables_format_v1-fifo/cluster/node_1/logfile_czz_hqde.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 657077 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ef6/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_create_queue.tables_format_v1-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ef6/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_create_queue.tables_format_v1-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ef6/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_create_queue.tables_format_v1-fifo/cluster/slot_1/logfile_6bco33o_.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 660400 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> TYdbControlPlaneStorageModifyConnection::ShouldSuccess ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ed0/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_double_create_queue.std-tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001ed0/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_double_create_queue.std-tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ed0/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_double_create_queue.std-tables_format_v0/cluster/node_1/logfile_4__i5ryp.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ed0/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_double_create_queue.std-tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ed0/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_double_create_queue.std-tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ed0/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_cloud_double_create_queue.std-tables_format_v0/cluster/slot_1/logfile_blwu75qz.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 662077 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 666681 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client2-year Uint64 NOT NULL-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client3-year Date NOT NULL-False] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [FAIL] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/ciyv/001ec3/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_queue_recreation.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ec3/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_queue_recreation.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ec3/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_queue_recreation.tables_format_v0-fifo/cluster/node_1/logfile_eyb1o51t.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ec3/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_queue_recreation.tables_format_v0-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ec3/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_queue_recreation.tables_format_v0-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ec3/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_common.py.TestCommonYandexWithTenant.test_private_queue_recreation.tables_format_v0-fifo/cluster/slot_1/logfile_p3s760mr.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 666673 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 669829 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int32-pk_types20-all_types20-index20---] >> TYdbControlPlaneStorageCreateBinding::ShouldSucceed >> TYdbControlPlaneStorageListConnections::ShouldSuccess >> InMemoryControlPlaneStorage::ExecuteSimpleStreamQuery [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date-pk_types32-all_types32-index32---] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client13-column_type13-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client0-column_type0-True] >> InMemoryControlPlaneStorage::ExecuteSimpleAnalyticsQuery >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Utf8-pk_types30-all_types30-index30---] ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ed4/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_sqs_action_counters/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ed4/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_sqs_action_counters/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ed4/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_sqs_action_counters/cluster/node_1/logfile_fafp9abd.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ed4/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_sqs_action_counters/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ed4/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_sqs_action_counters/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ed4/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_sqs_action_counters/cluster/slot_1/logfile_8sirn_it.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 663594 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 669443 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> TYdbControlPlaneStorageCreateConnection::ShouldSucccess |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [FAIL] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPublic >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MICROSECONDS] [FAIL] >> 
test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MILLISECONDS] >> TYdbControlPlaneStorageModifyBinding::ShouldSuccess |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [FAIL] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [FAIL] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp64-pk_types38-all_types38-index38---] >> TYdbControlPlaneStorageModifyBinding::ShouldValidate >> TYdbControlPlaneStoragePipeline::ShouldSkipBindingIfDisabledConnection >> TYdbControlPlaneStorageDeleteQuery::ShouldSuccess >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint32-pk_types23-all_types23-index23---] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> TYdbControlPlaneStorageCreateQuery::ShouldSucccess >> TYdbControlPlaneStorageDescribeQuery::ShouldSuccess >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MILLISECONDS] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_SECONDS] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] >> TYdbControlPlaneStorageModifyQuery::ShouldSuccess ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ea2/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_purge_queue_counters/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ea2/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_purge_queue_counters/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ea2/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_purge_queue_counters/cluster/node_1/logfile_lksm42xf.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 663914 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ea2/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_purge_queue_counters/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file 
<_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ea2/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_purge_queue_counters/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ea2/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_yandex_cloud_queue_counters.py.TestYmqQueueCounters.test_purge_queue_counters/cluster/slot_1/logfile_k8lzeclx.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 667409 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ed9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_setup_in_cloud.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ed9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_setup_in_cloud.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ed9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_setup_in_cloud.tables_format_v0-fifo/cluster/node_1/logfile_rnx26beb.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ed9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_setup_in_cloud.tables_format_v0-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ed9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_setup_in_cloud.tables_format_v0-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/ciyv/001ed9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_setup_in_cloud.tables_format_v0-fifo/cluster/slot_1/logfile_oik7vai9.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 661810 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 665078 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback 2025-05-29 15:41:10,806 WARNING libarchive: File (test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_setup_in_cloud.tables_format_v0-fifo/cluster/slot_1/logfile_oik7vai9.log) size has changed. Can't write more data than was declared in the tar header (219642). (probably file was changed during archiving) >> TYdbControlPlaneStoragePipeline::ShouldCheckSimplePipeline >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPublic >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client3-year Date NOT NULL-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client4-year Utf8 NOT NULL-False] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] [FAIL] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_SECONDS] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MICROSECONDS] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client0-column_type0-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client1-column_type1-True] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ec9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_empty_auth_header/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ec9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_empty_auth_header/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get 
the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ec9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_empty_auth_header/cluster/node_1/logfile_yo57w6ye.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 664846 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ec9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_empty_auth_header/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ec9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_empty_auth_header/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ec9/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_empty_auth_header/cluster/slot_1/logfile_gtw692u2.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 667320 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int64-pk_types19-all_types19-index19---] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [FAIL] >> TYdbControlPlaneStorageListQueries::ShouldSuccess |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date32-pk_types36-all_types36-index36---] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client1-column_type1-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client2-column_type2-True] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client4-year Utf8 NOT NULL-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client5-year Int64 NOT NULL-False] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds |82.7%| [TA] $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [FAIL] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal3510-pk_types27-all_types27-index27---] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ebb/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk32/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_queues_count_over_limit.tables_format_v0/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ebb/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk32/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_queues_count_over_limit.tables_format_v0/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ebb/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk32/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_queues_count_over_limit.tables_format_v0/cluster/node_1/logfile_8128yfnu.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ebb/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk32/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_queues_count_over_limit.tables_format_v0/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001ebb/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk32/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_queues_count_over_limit.tables_format_v0/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001ebb/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk32/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_queues_count_over_limit.tables_format_v0/cluster/slot_1/logfile_f3uy6jyl.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 665289 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 669175 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |82.7%| [TA] {RESULT} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MICROSECONDS] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MILLISECONDS] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [FAIL] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPublic >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Utf8-pk_types30-all_types30-index30---] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001eb1/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_create_queue.tables_format_v0-fifo/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001eb1/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_create_queue.tables_format_v0-fifo/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001eb1/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_create_queue.tables_format_v0-fifo/cluster/node_1/logfile_o48wtl5j.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001eb1/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_create_queue.tables_format_v0-fifo/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001eb1/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_create_queue.tables_format_v0-fifo/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001eb1/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_common.py.TestCommonSqsYandexCloudMode.test_private_create_queue.tables_format_v0-fifo/cluster/slot_1/logfile_gf3if2ta.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 664101 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 671567 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [FAIL] Test command err: sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001eaa/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_fifo_groups_with_dlq_in_cloud.tables_format_v1/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001eaa/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_fifo_groups_with_dlq_in_cloud.tables_format_v1/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001eaa/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_fifo_groups_with_dlq_in_cloud.tables_format_v1/cluster/node_1/logfile_v747cc9d.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001eaa/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_fifo_groups_with_dlq_in_cloud.tables_format_v1/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/ciyv/001eaa/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_fifo_groups_with_dlq_in_cloud.tables_format_v1/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/ciyv/001eaa/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_fifo_groups_with_dlq_in_cloud.tables_format_v1/cluster/slot_1/logfile_z3vkrnzu.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 664956 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 669532 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [FAIL] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int32-pk_types20-all_types20-index20---] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MILLISECONDS] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_SECONDS] >> 
test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp64-pk_types38-all_types38-index38---] [FAIL]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_all_types-pk_types12-all_types12-index12---]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client5-year Int64 NOT NULL-False] [FAIL]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date-pk_types32-all_types32-index32---] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client6-year Int32-False]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint8-pk_types24-all_types24-index24---]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal150-pk_types25-all_types25-index25---]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client2-column_type2-True] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client3-column_type3-False]
>> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_SECONDS] [FAIL]
>> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MICROSECONDS]
|82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [FAIL]
|82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [FAIL]
|82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Utf8-pk_types30-all_types30-index30---] [FAIL]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval-pk_types35-all_types35-index35---]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [FAIL]
|82.7%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log}
|82.7%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log}
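The failing sqs/cloud chunks above all repeat the same CPython hint: with tracemalloc enabled, the interpreter attaches the allocation traceback to each "unclosed file" ResourceWarning, pointing at the exact open() call that leaked the handle. A minimal sketch of that diagnostic, assuming plain CPython 3; the path and helper below are hypothetical, not one of the test artefacts above:

    import gc
    import tracemalloc
    import warnings

    tracemalloc.start(10)                            # keep up to 10 frames per allocation
    warnings.simplefilter("always", ResourceWarning)

    def leak_handle(path):
        f = open(path, "w")                          # opened but never closed
        # returning drops the last reference without f.close()

    leak_handle("/tmp/example.log")                  # hypothetical path
    gc.collect()                                     # the ResourceWarning now carries an
                                                     # "Object allocated at ..." traceback

The same effect is available without code changes by running the suite under python -X tracemalloc or with PYTHONTRACEMALLOC=1 in the environment.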
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_UUID-pk_types31-all_types31-index31---]
>> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MICROSECONDS] [FAIL]
>> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MILLISECONDS]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint32-pk_types23-all_types23-index23---] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client6-year Int32-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client7-year Uint32-False]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC]
>> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MILLISECONDS] [FAIL]
>> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_SECONDS]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [FAIL]
|82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint32-pk_types23-all_types23-index23---] [FAIL]
>> TYdbControlPlaneStorageCreateQuery::ShouldSucccess [FAIL]
>> TYdbControlPlaneStorageCreateQuery::ShouldValidate
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client3-column_type3-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client4-column_type4-True]
>> TYdbControlPlaneStorageListBindings::ShouldSuccess [FAIL]
>> TYdbControlPlaneStorageListBindings::ShouldFilterByName
>> TYdbControlPlaneStorageModifyQuery::ShouldSuccess [FAIL]
>> TYdbControlPlaneStorageModifyQuery::ShouldModifyRunningQuery
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int64-pk_types19-all_types19-index19---] [FAIL]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [FAIL]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime-pk_types33-all_types33-index33---]
>> TYdbControlPlaneStorageListConnections::ShouldSuccess [FAIL]
>> TYdbControlPlaneStorageListConnections::ShouldPageToken
>> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_SECONDS] [FAIL]
>> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MICROSECONDS]
>> TYdbControlPlaneStoragePipeline::ShouldSkipBindingIfDisabledConnection [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldSaveTopicConsumers
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp64-pk_types38-all_types38-index38---] [FAIL]
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int32-pk_types20-all_types20-index20---] [FAIL]
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [FAIL]
>> TYdbControlPlaneStorageModifyBinding::ShouldValidate [FAIL]
>> TYdbControlPlaneStorageModifyBinding::ShouleCheckObjectStorageProjectionByColumns
>> TYdbControlPlaneStorageDeleteQuery::ShouldSuccess [FAIL]
>> TYdbControlPlaneStorageDeleteQuery::ShouldValidate
>> TYdbControlPlaneStorageCreateBinding::ShouldSucceed [FAIL]
>> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxLengthName
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal3510-pk_types27-all_types27-index27---] [FAIL]
>> TYdbControlPlaneStorageModifyConnection::ShouldSuccess [FAIL]
>> TYdbControlPlaneStorageModifyConnection::ShouldValidate
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> TYdbControlPlaneStorageCreateQuery::ShouldValidate [GOOD]
>> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicSuccess
>> TYdbControlPlaneStorageCreateConnection::ShouldSucccess [FAIL]
>> TYdbControlPlaneStorageCreateConnection::ShouldDisableCurrentIam
>> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPublic [FAIL]
>> TYdbControlPlaneStorageGetResult::ShouldSuccess
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [FAIL]
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int64-pk_types19-all_types19-index19---] [FAIL]
>> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPublic [FAIL]
>> TYdbControlPlaneStorageDeleteConnection::ShouldSuccess
>> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MICROSECONDS] [FAIL]
>> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MILLISECONDS]
>> TYdbControlPlaneStorageListQueries::ShouldSuccess [FAIL]
>> TYdbControlPlaneStorageListQueries::ShouldPageToken
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint8-pk_types24-all_types24-index24---] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client7-year Uint32-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client8-year Int64-False]
>> InMemoryControlPlaneStorage::ExecuteSimpleAnalyticsQuery [FAIL]
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date-pk_types32-all_types32-index32---] [FAIL]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal229-pk_types26-all_types26-index26---]
>> TYdbControlPlaneStorageDescribeQuery::ShouldSuccess [FAIL]
>> TYdbControlPlaneStorageDescribeQuery::ShouldValidate
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date32-pk_types36-all_types36-index36---] [FAIL]
>> ShouldNotShowPassword::ShouldNotShowPasswordClickHouse
>> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MILLISECONDS] [FAIL]
>> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.csv-csv_with_names]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client4-column_type4-True] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client5-column_type5-True]
>> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicSuccess [FAIL]
>> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicFailed
>> TYdbControlPlaneStorageModifyBinding::ShouldSuccess [FAIL]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckLowerCaseName
>> TYdbControlPlaneStorageCreateConnection::ShouldDisableCurrentIam [GOOD]
>> TYdbControlPlaneStorageCreateConnection::ShouldValidate
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp-pk_types34-all_types34-index34---]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client5-column_type5-True] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client6-column_type6-True]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_UUID-pk_types31-all_types31-index31---] [FAIL]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC]
>> TYdbControlPlaneStorageGetResult::ShouldSuccess [FAIL]
>> TYdbControlPlaneStorageGetResult::ShouldEmpty
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Date-pk_types18-all_types18-index18-Date--]
>> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.csv-csv_with_names] [FAIL]
>> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.tsv-tsv_with_names]
>> TYdbControlPlaneStorageListBindings::ShouldFilterByName [FAIL]
>> TYdbControlPlaneStorageListBindings::ShouldFilterByMe
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--]
>> TYdbControlPlaneStorageDescribeQuery::ShouldValidate [FAIL]
>> TYdbControlPlaneStorageDescribeQuery::ShouldCheckSuperUser
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_all_types-pk_types12-all_types12-index12---] [FAIL]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--]
>> TYdbControlPlaneStorageDeleteQuery::ShouldValidate [FAIL]
>> TYdbControlPlaneStorageDeleteQuery::ShouldCheckSuperUser
>> TYdbControlPlaneStoragePipeline::ShouldCheckSimplePipeline [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldIncrementGeneration
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [FAIL]
>> TYdbControlPlaneStorageModifyQuery::ShouldModifyRunningQuery [FAIL]
>> TYdbControlPlaneStorageModifyQuery::ShouldValidate
>> TYdbControlPlaneStorageListConnections::ShouldPageToken [FAIL]
>> TYdbControlPlaneStorageListConnections::ShouldEmptyPageToken
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime-pk_types33-all_types33-index33---] [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldSaveTopicConsumers [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldSaveDqGraphs
>> TYdbControlPlaneStorageListQueries::ShouldPageToken [FAIL]
>> TYdbControlPlaneStorageListQueries::ShouldEmptyPageToken
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint8-pk_types24-all_types24-index24---] [FAIL]
>> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicFailed [GOOD]
>> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess
>> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.tsv-tsv_with_names] [FAIL]
>> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.json-json_each_row]
>> TYdbControlPlaneStorageModifyBinding::ShouleCheckObjectStorageProjectionByColumns [FAIL]
>> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionEmpty
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date32-pk_types36-all_types36-index36---] [FAIL]
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal3510-pk_types27-all_types27-index27---] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client8-year Int64-False] [FAIL]
>> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxLengthName [FAIL]
>> TYdbControlPlaneStorageCreateBinding::ShouldCheckMultipleDotsName
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client9-year Uint64-False]
>> TYdbControlPlaneStorageCreateConnection::ShouldValidate [GOOD]
>> TYdbControlPlaneStorageCreateConnection::ShouldCheckUniqueName
>> TYdbControlPlaneStorageModifyQuery::ShouldValidate [FAIL]
>> TYdbControlPlaneStorageModifyQuery::ShouldCheckSuperUser
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval64-pk_types39-all_types39-index39---]
>> TYdbControlPlaneStorageDescribeQuery::ShouldCheckSuperUser [FAIL]
>> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionEmpty
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_String-pk_types29-all_types29-index29---]
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_UUID-pk_types31-all_types31-index31---] [FAIL]
>> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.json-json_each_row] [FAIL]
>> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.parquet-parquet]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [FAIL]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal229-pk_types26-all_types26-index26---] [FAIL]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal150-pk_types25-all_types25-index25---] [FAIL]
>> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPublic [FAIL]
>> TYdbControlPlaneStorageDescribeConnection::ShouldSuccess
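The bracketed suffixes in the py3test names above (for example table_Uint8-pk_types24-all_types24-index24---) are pytest parametrization ids: the ydb functional suites run under pytest, and pytest.mark.parametrize appends one id per parameter set to the test function's name. A short sketch of the mechanism, with hypothetical table names rather than the actual ydb fixtures:

    import pytest

    @pytest.mark.parametrize(
        "table, pk_type",
        [("table_Int32", "pk_types20"), ("table_Uint64", "pk_types22")],
        ids=["table_Int32-pk_types20", "table_Uint64-pk_types22"],
    )
    def test_merge_split(table, pk_type):
        # each parameter set is reported as, e.g.,
        # test_merge_split[table_Int32-pk_types20]
        assert table.startswith("table_")

Run under pytest, every parameter set is reported separately, which is why a single test function accounts for many of the [FAIL] lines in this run.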
>> TYdbControlPlaneStorageGetResult::ShouldEmpty [FAIL]
>> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionEmpty
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC]
>> TYdbControlPlaneStoragePipeline::ShouldIncrementGeneration [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldCheckStopModifyRun
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client6-column_type6-True] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client7-column_type7-False]
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_all_types-pk_types12-all_types12-index12---] [FAIL]
>> TYdbControlPlaneStorageModifyQuery::ShouldCheckSuperUser [FAIL]
>> TYdbControlPlaneStorageModifyQuery::ShouldCheckWithoutIdempotencyKey
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckLowerCaseName [FAIL]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckMaxLengthName
>> ShouldNotShowPassword::ShouldNotShowPasswordClickHouse [FAIL]
>> ShouldNotShowPassword::ShouldNotShowPasswordPostgreSQL
>> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.parquet-parquet] [FAIL]
>> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.csv-csv_with_names]
>> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionEmpty [FAIL]
>> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPublic
>> TYdbControlPlaneStoragePipeline::ShouldSaveDqGraphs [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldSaveResultSetMetas
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int8-pk_types21-all_types21-index21---]
>> TYdbControlPlaneStorageModifyConnection::ShouldValidate [FAIL]
>> TYdbControlPlaneStorageModifyConnection::ShouldCheckSuperUser
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime-pk_types33-all_types33-index33---] [FAIL]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [FAIL]
>> TYdbControlPlaneStorageListBindings::ShouldFilterByMe [FAIL]
>> TYdbControlPlaneStorageListBindings::ShouldPageToken
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval-pk_types35-all_types35-index35---] [FAIL]
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal150-pk_types25-all_types25-index25---] [FAIL]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint64-pk_types22-all_types22-index22---]
>> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.csv-csv_with_names] [FAIL]
>> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.tsv-tsv_with_names]
>> TYdbControlPlaneStorageDeleteQuery::ShouldCheckSuperUser [FAIL]
>> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionSuccess
>> TYdbControlPlaneStorageModifyQuery::ShouldCheckWithoutIdempotencyKey [FAIL]
>> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [FAIL]
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [FAIL]
>> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess [FAIL]
>> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeFailed
>> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPublic [FAIL]
>> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivate
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [FAIL]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_DyNumber-pk_types28-all_types28-index28---]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC]
>> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionEmpty [FAIL]
>> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPublic
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal229-pk_types26-all_types26-index26---] [FAIL]
>> TYdbControlPlaneStorageListConnections::ShouldEmptyPageToken [FAIL]
>> TYdbControlPlaneStorageListConnections::ShouldValidate
>> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionEmpty [FAIL]
>> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPublic
>> TYdbControlPlaneStorageListQueries::ShouldEmptyPageToken [FAIL]
>> TYdbControlPlaneStorageListQueries::ShouldValidate
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client9-year Uint64-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client10-year String NOT NULL-True]
>> TYdbControlPlaneStoragePipeline::ShouldCheckStopModifyRun [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldCheckJobMeta
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [FAIL]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_String-pk_types29-all_types29-index29---] [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldSaveResultSetMetas [FAIL]
>> TYdbControlPlaneStorageQuotas::GetDefaultQuotas
|82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval-pk_types35-all_types35-index35---] [FAIL]
>> TYdbControlPlaneStorageCreateConnection::ShouldCheckUniqueName [FAIL]
>> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicSuccess
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client10-year String NOT NULL-True] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client11-year String-False]
>> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.tsv-tsv_with_names] [FAIL]
>> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.json-json_each_row]
>> TYdbControlPlaneStorageCreateBinding::ShouldCheckMultipleDotsName [FAIL]
>> TYdbControlPlaneStorageCreateBinding::ShouldCheckNotAvailable
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Date-pk_types18-all_types18-index18-Date--] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client7-column_type7-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client8-column_type8-False]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [FAIL]
>> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeFailed [GOOD]
>> TYdbControlPlaneStorageDeleteBinding::ShouldSuccess
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC]
|82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [FAIL]
>> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivate [FAIL]
>> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivatePublic
>> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess [FAIL]
>> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeFailed
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client11-year String-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client12-year Utf8-False]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime64-pk_types37-all_types37-index37---]
>> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.json-json_each_row] [FAIL]
>> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.parquet-parquet]
|82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [FAIL]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval64-pk_types39-all_types39-index39---] [FAIL]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckMaxLengthName [FAIL]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckMultipleDotsName
>> TYdbControlPlaneStorageQuotas::GetDefaultQuotas [GOOD]
>> TYdbControlPlaneStorageQuotas::OverrideQuotas
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client12-year Utf8-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client13-year Date-False]
|82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionSuccess [FAIL]
>> TYdbControlPlaneStorageDeleteQuery::ShouldProhibitDeletionOfRunningQuery
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC]
>> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False]
>> TYdbControlPlaneStorageDescribeConnection::ShouldSuccess [FAIL]
>> TYdbControlPlaneStorageDescribeConnection::ShouldCheckPermission
>> TYdbControlPlaneStoragePipeline::ShouldCheckJobMeta [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldCheckClearFields
>> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivatePublic [FAIL]
>> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewAst
>> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPublic [FAIL]
>> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivate
>> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.parquet-parquet] [FAIL]
>> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.csv-csv_with_names]
|82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test
>> TYdbControlPlaneStorageListQueries::ShouldValidate [FAIL]
>> TYdbControlPlaneStorageListQueries::ShouldFilterName
>> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeFailed [FAIL]
>> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionEmpty
|82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [FAIL]
>> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True]
|82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval64-pk_types39-all_types39-index39---] [FAIL]
|82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Date-pk_types18-all_types18-index18-Date--] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client8-column_type8-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client9-column_type9-False]
>> TYdbControlPlaneStorageQuotas::OverrideQuotas [GOOD]
>> TYdbControlPlaneStorageQuotas::GetStaleUsage
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client13-year Date-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client0-year Int32 NOT NULL-False]
|82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_String-pk_types29-all_types29-index29---] [FAIL]
>> TYdbControlPlaneStorageListBindings::ShouldPageToken [FAIL]
>> TYdbControlPlaneStorageListBindings::ShouldValidate
>> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionEmpty [FAIL]
>> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePublic
>> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.csv-csv_with_names] [FAIL]
>> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.tsv-tsv_with_names]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [FAIL]
>> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPublic [FAIL]
>>
TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivate >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [FAIL] >> TYdbControlPlaneStorageListConnections::ShouldValidate [FAIL] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionEmpty >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int8-pk_types21-all_types21-index21---] [FAIL] >> TYdbControlPlaneStoragePipeline::ShouldCheckClearFields [FAIL] >> TYdbControlPlaneStoragePipeline::ShouldCheckNodesHealthCheck >> TYdbControlPlaneStorageDeleteQuery::ShouldProhibitDeletionOfRunningQuery [FAIL] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionEmpty >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.tsv-tsv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.json-json_each_row] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_DyNumber-pk_types28-all_types28-index28---] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client0-year Int32 NOT NULL-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client1-year Uint32 NOT NULL-False] >> ShouldNotShowPassword::ShouldNotShowPasswordPostgreSQL [FAIL] >> TYdbControlPlaneStorageControlQuery::ShouldSucccess >> TYdbControlPlaneStorageCreateBinding::ShouldCheckNotAvailable [FAIL] >> TYdbControlPlaneStorageCreateBinding::ShouldValidate >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicSuccess [FAIL] >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicFailed >> TYdbControlPlaneStorageListQueries::ShouldFilterName [FAIL] >> TYdbControlPlaneStorageListQueries::ShouldFilterByMe >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivate [FAIL] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivatePublic >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client9-column_type9-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client10-column_type10-False] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [FAIL] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime64-pk_types37-all_types37-index37---] [FAIL] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMultipleDotsName [FAIL] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPermission >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePublic [FAIL] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivate >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.json-json_each_row] [FAIL] |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [FAIL] >> 
test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.parquet-parquet] |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client10-column_type10-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client11-column_type11-False] |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] >> TYdbControlPlaneStorageDeleteBinding::ShouldSuccess [FAIL] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPermission >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint64-pk_types22-all_types22-index22---] [FAIL] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp-pk_types34-all_types34-index34---] [FAIL] |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [FAIL] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionEmpty [FAIL] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePublic >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicFailed [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckIdempotencyKey >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.parquet-parquet] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.csv-csv_with_names] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivate [FAIL] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivatePublic >> TYdbControlPlaneStoragePipeline::ShouldCheckNodesHealthCheck [FAIL] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetMeta >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client11-column_type11-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client12-column_type12-False] >> TYdbControlPlaneStorageListQueries::ShouldFilterByMe [FAIL] >> TYdbControlPlaneStorageListQueries::ShouldFilterType >> TYdbControlPlaneStorageControlQuery::ShouldSucccess [FAIL] >> TYdbControlPlaneStorageControlQuery::ShouldValidate >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_DyNumber-pk_types28-all_types28-index28---] [FAIL] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivatePublic [FAIL] >> TYdbControlPlaneStorageGetTask::ShouldValidate >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePublic [FAIL] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivate >> 
test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client1-year Uint32 NOT NULL-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client2-year Uint64 NOT NULL-False] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewAst [FAIL] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldNotApplyPermissionViewAstAndViewQueryText >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckPermission [FAIL] >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckExist |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime64-pk_types37-all_types37-index37---] [FAIL] >> TYdbControlPlaneStorageQuotas::GetStaleUsage [GOOD] >> TYdbControlPlaneStorageQuotas::PushUsageUpdate >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivatePublic [FAIL] >> TYdbControlPlaneStorageNodesHealthCheck::ShouldValidate |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [FAIL] |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int8-pk_types21-all_types21-index21---] [FAIL] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp-pk_types34-all_types34-index34---] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.csv-csv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.tsv-tsv_with_names] >> TYdbControlPlaneStorageListBindings::ShouldValidate [FAIL] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionEmpty [FAIL] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPublic >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_all_types-pk_types7-all_types7-index7---] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivate [FAIL] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivatePublic >> TYdbControlPlaneStorageGetTask::ShouldValidate [GOOD] >> TYdbControlPlaneStorageGetTask::ShouldWorkWithEmptyPending >> TYdbControlPlaneStorageCreateQuery::ShouldCheckIdempotencyKey [FAIL] >> TYdbControlPlaneStorageCreateQuery::ShouldCreateJob >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivate [FAIL] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivateAfterModify |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [FAIL] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetMeta [FAIL] >> TYdbControlPlaneStoragePipeline::ShouldCheckRemovingOldResultSet |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> 
test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint64-pk_types22-all_types22-index22---] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client12-column_type12-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client13-column_type13-False] >> TYdbControlPlaneStorageNodesHealthCheck::ShouldValidate [GOOD] >> TYdbControlPlaneStoragePingTask::ShouldValidate >> TYdbControlPlaneStorageCreateBinding::ShouldValidate [FAIL] >> TYdbControlPlaneStorageCreateBinding::ShouldValidateFormatSetting >> TYdbControlPlaneStorageQuotas::PushUsageUpdate [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldValidateCreate >> TYdbControlPlaneStorageListQueries::ShouldFilterType [FAIL] >> TYdbControlPlaneStorageListQueries::ShouldFilterMode >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client2-year Uint64 NOT NULL-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client3-year Date NOT NULL-False] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.tsv-tsv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.json-json_each_row] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivatePublic [FAIL] >> TYdbControlPlaneStorageDescribeBinding::ShouldSuccess >> TYdbControlPlaneStorageGetTask::ShouldWorkWithEmptyPending [GOOD] >> TYdbControlPlaneStorageGetTask::ShouldBatchingGetTasks >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPermission [FAIL] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckExist >> TYdbControlPlaneStoragePingTask::ShouldValidate [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAbortInTerminatedState >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client3-year Date NOT NULL-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client4-year Utf8 NOT NULL-False] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> TYdbControlPlaneStorageRateLimiter::ShouldValidateCreate [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldValidateDelete >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.json-json_each_row] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.parquet-parquet] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] >> TYdbControlPlaneStorageListQueries::ShouldFilterMode [FAIL] >> TYdbControlPlaneStorageListQueries::ShouldFilterVisibility >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] >> 
test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client13-column_type13-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client0-column_type0-False] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> TYdbControlPlaneStorageCreateQuery::ShouldCreateJob [FAIL] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckListJobs >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client4-year Utf8 NOT NULL-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client5-year Int64 NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client0-column_type0-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client1-column_type1-False] >> TYdbControlPlaneStorageDeleteConnection::ShouldSuccess [FAIL] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPermission >> TYdbControlPlaneStorageRateLimiter::ShouldValidateDelete [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldCreateRateLimiterResource >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.parquet-parquet] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-common/simple_posix/big.csv-csv_with_names-POSIX] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_all_types-pk_types7-all_types7-index7---] [FAIL] >> TYdbControlPlaneStorageGetTask::ShouldBatchingGetTasks [FAIL] >> TYdbControlPlaneStorageListBindings::ShouldEmptyPageToken >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [FAIL] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client1-column_type1-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client2-column_type2-False] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPublic [FAIL] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckExist [FAIL] >> TYdbControlPlaneStorageDescribeConnection::ShouldValidate >> TYdbControlPlaneStoragePipeline::ShouldCheckAbortInTerminatedState [FAIL] >> TYdbControlPlaneStoragePipeline::ShouldCheckAst >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client2-column_type2-False] [FAIL] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_all_types-pk_types7-all_types7-index7---] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client3-column_type3-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client5-year Int64 NOT NULL-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client6-year Int32-False] >> 
test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [FAIL] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckListJobs [FAIL] >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsByQuery >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPermission [FAIL] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckNotExistOldName |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> TYdbControlPlaneStorageListQueries::ShouldFilterVisibility [FAIL] >> TYdbControlPlaneStorageListQueries::ShouldFilterAutomatic >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-common/simple_posix/big.csv-csv_with_names-POSIX] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-common/simple_format/big.csv-csv_with_names-%Y-%m-%d] >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionEmpty [FAIL] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivateAfterModify [FAIL] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivatePublic >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [FAIL] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [FAIL] >> TYdbControlPlaneStorageDescribeBinding::ShouldSuccess [FAIL] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckPermission >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckExist [FAIL] >> TYdbControlPlaneStorageDeleteBinding::ShouldValidate >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [FAIL] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [FAIL] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [FAIL] >> TYdbControlPlaneStorageRateLimiter::ShouldCreateRateLimiterResource [FAIL] >> TYdbControlPlaneStorageRateLimiter::ShouldDeleteRateLimiterResource >> TYdbControlPlaneStorageCreateBinding::ShouldValidateFormatSetting [FAIL] >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicSuccess >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [FAIL] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-common/simple_format/big.csv-csv_with_names-%Y-%m-%d] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-date_time/simple_iso/big.csv-csv_with_names-ISO] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] 
[FAIL] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [FAIL] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client3-column_type3-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client4-column_type4-False] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-date_time/simple_iso/big.csv-csv_with_names-ISO] [FAIL] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-common/simple_posix/big.csv-csv_with_names-POSIX] >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsByQuery [FAIL] >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsCreatedByMe |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [FAIL] >> TYdbControlPlaneStorageListBindings::ShouldEmptyPageToken [FAIL] >> TYdbControlPlaneStorageListBindings::ShouldCheckLimit >> TYdbControlPlaneStorageListQueries::ShouldFilterAutomatic [FAIL] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionEmpty |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [FAIL] >> TYdbControlPlaneStoragePipeline::ShouldCheckRemovingOldResultSet [FAIL] >> TYdbControlPlaneStoragePipeline::ShouldCheckPrioritySelectionEntities >> TYdbControlPlaneStoragePipeline::ShouldCheckAst [FAIL] >> TYdbControlPlaneStoragePipeline::ShouldCheckAstClear >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivate [FAIL] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivatePublic >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client6-year Int32-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client7-year Uint32-False] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [FAIL] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [FAIL] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [FAIL] >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-common/simple_posix/big.csv-csv_with_names-POSIX] [FAIL] >> 
test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-common/simple_format/big.csv-csv_with_names-%Y-%m-%d] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client4-column_type4-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client5-column_type5-True] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckPermission [FAIL] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckExist >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsCreatedByMe [FAIL] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJob >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionEmpty [FAIL] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPublic >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-common/simple_format/big.csv-csv_with_names-%Y-%m-%d] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-date_time/simple_iso/big.csv-csv_with_names-ISO] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckSuperUser [FAIL] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckWithoutIdempotencyKey >> TYdbControlPlaneStorageDescribeConnection::ShouldValidate [FAIL] >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckSuperUser >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test
2025-05-29 15:43:11,720 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-05-29 15:43:12,593 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
       pid     rss     ref   pdirt
    390524    155M    152M    100M  ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/ciyv/0027c3/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu
    392327    513M    508M    262M  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/ciyv/0027c3/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1
    395362   91.4M   89.5M   64.1M  └─ moto_server s3 --port 20427
Test command err:
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/0027c3/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/0027c3/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 320, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 273, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 327, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 352, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 115, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 134, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 223, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 342, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 263, in <lambda>
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 170, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1844, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 47, in test
    self.ydb_client.query(
  File "ydb/tests/olap/common/ydb_client.py", line 24, in query
    return self.session_pool.execute_with_retries(statement)
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
  File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync
    for next_opt in opt_generator:
  File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee
    return [result_set for result_set in it]
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__
    return self._next()
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next
    res = self.wrapper(next(self.it))
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__
    return self._next()
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next
    _common.wait(self._state.condition.wait, _response_ready)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait
    _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
    wait_fn(timeout=timeout)
  File "contrib/tools/python3/Lib/threading.py", line 359, in wait
    gotit = waiter.acquire(True, timeout)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/ciyv/0027c3/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/ciyv/0027c3/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ciyv/0027c3', '--source-root', '/home/runner/.ya/build/build_root/ciyv/0027c3/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ciyv/0027c3/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8689590287/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '1', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/ciyv/0027c3/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/ciyv/0027c3/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ciyv/0027c3', '--source-root', '/home/runner/.ya/build/build_root/ciyv/0027c3/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ciyv/0027c3/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8689590287/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '1', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {})
|83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test
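Editor's note: the report above shows where the hang sits. The test body has called through to the ydb Python SDK's execute_with_retries(), which drains a gRPC result stream with no client-side deadline of its own, so the process survives until the 600-second wrapper timeout kills it. Below is a minimal sketch of that call path, assuming the ydb Python SDK v3 query API visible in the frames; the endpoint, database, and function name are hypothetical placeholders, not the test's real values.

import ydb

def run_query(statement: str):
    # Hypothetical connection parameters; the real test wires these up in
    # ydb/tests/olap/common/ydb_client.py.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)  # fail fast if the server never comes up
    pool = ydb.QuerySessionPool(driver)
    try:
        # Same call as ydb_client.py:24 in the traceback; it retries
        # transient errors via retry_operation_sync() and blocks while
        # iterating result sets -- the grpc _common.wait() frame above.
        return pool.execute_with_retries(statement)
    finally:
        pool.stop()
        driver.stop()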
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change
2025-05-29 15:43:13,074 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-05-29 15:43:13,332 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
       pid     rss     ref   pdirt
    391208    155M    136M   99.9M  ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/ciyv/0027b3/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu
    393752    512M    503M    261M  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/ciyv/0027b3/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3
    396889   91.8M   91.1M   64.1M  └─ moto_server s3 --port 9449
Test command err:
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/0027b3/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/0027b3/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 320, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 273, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 327, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 352, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 115, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 134, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 223, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 342, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 263, in <lambda>
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 170, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1844, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 173, in test_data_unchanged_after_ttl_change
    self.teset_generator(test_name, buckets, ttl, single_upsert_row_count, upsert_number)
  File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 135, in teset_generator
    self.ydb_client.query(f"CREATE OBJECT {access_key_id_secret_name} (TYPE SECRET) WITH value='{self.s3_client.key_id}'")
  File "ydb/tests/olap/common/ydb_client.py", line 24, in query
    return self.session_pool.execute_with_retries(statement)
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
  File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync
    for next_opt in opt_generator:
  File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee
    return [result_set for result_set in it]
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__
    return self._next()
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next
    res = self.wrapper(next(self.it))
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__
    return self._next()
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next
    _common.wait(self._state.condition.wait, _response_ready)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait
    _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
    wait_fn(timeout=timeout)
  File "contrib/tools/python3/Lib/threading.py", line 359, in wait
    gotit = waiter.acquire(True, timeout)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/ciyv/0027b3/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/ciyv/0027b3/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ciyv/0027b3', '--source-root', '/home/runner/.ya/build/build_root/ciyv/0027b3/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ciyv/0027b3/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8689590287/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '3', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/ciyv/0027b3/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/ciyv/0027b3/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ciyv/0027b3', '--source-root', '/home/runner/.ya/build/build_root/ciyv/0027b3/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ciyv/0027b3/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8689590287/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '3', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {})
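Editor's note: for ttl_delete_s3 above (and unstable_connection below) the very first query of the test is the one that never returns: registering the S3 access key as a YDB SECRET object before any tiering work starts. A sketch of that setup step, reconstructed from the f-string in the frame at ttl_delete_s3.py:135; the secret name and key id are the test's runtime values, shown here as hypothetical literals.

# The secret name and key id come from the test fixture at runtime; the
# literals below are illustrative only.
access_key_id_secret_name = "s3_access_key_id"  # assumed example name
key_id = "AKIAEXAMPLE"                          # assumed example key id
statement = f"CREATE OBJECT {access_key_id_secret_name} (TYPE SECRET) WITH value='{key_id}'"
# self.ydb_client.query(statement) then follows the same
# execute_with_retries() path sketched after the first report.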
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> unstable_connection.py::TestUnstableConnection::test
2025-05-29 15:43:13,244 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-05-29 15:43:13,609 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
       pid     rss     ref   pdirt
    391356    155M    132M   99.6M  ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/ciyv/0027b1/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu
    394030    520M    512M    267M  ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/ciyv/0027b1/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6
    397003   91.9M   89.3M   64.1M  └─ moto_server s3 --port 8973
Test command err:
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/0027b1/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/0027b1/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 320, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 273, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 327, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 352, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 115, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 134, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 223, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 342, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 263, in <lambda>
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 170, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1844, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/olap/ttl_tiering/unstable_connection.py", line 116, in test
    self.ydb_client.query(f"CREATE OBJECT {access_key_id_secret_name} (TYPE SECRET) WITH value='{self.s3_client.key_id}'")
  File "ydb/tests/olap/common/ydb_client.py", line 24, in query
    return self.session_pool.execute_with_retries(statement)
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
  File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync
    for next_opt in opt_generator:
  File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee
    return [result_set for result_set in it]
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__
    return self._next()
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next
    res = self.wrapper(next(self.it))
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__
    return self._next()
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next
    _common.wait(self._state.condition.wait, _response_ready)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait
    _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
    wait_fn(timeout=timeout)
  File "contrib/tools/python3/Lib/threading.py", line 359, in wait
    gotit = waiter.acquire(True, timeout)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/ciyv/0027b1/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/ciyv/0027b1/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ciyv/0027b1', '--source-root', '/home/runner/.ya/build/build_root/ciyv/0027b1/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ciyv/0027b1/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8689590287/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '6', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/ciyv/0027b1/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/ciyv/0027b1/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ciyv/0027b1', '--source-root', '/home/runner/.ya/build/build_root/ciyv/0027b1/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ciyv/0027b1/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff', '--durations', '0', '--project-path',
'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8689590287/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '6', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivatePublic [FAIL] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPermission >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client7-year Uint32-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client8-year Int64-False] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPublic [FAIL] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivate >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-date_time/simple_iso/big.csv-csv_with_names-ISO] [FAIL] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-yql_types-yql_syntax-client0] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] |83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_unavailable_s3.py::TestUnavailableS3::test 2025-05-29 15:43:12,984 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-29 15:43:13,409 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
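The ResourceWarning entries in the failure above point at library/recipes/common/__init__.py:29, where the recipe opens moto_server.out.log and moto_server.err.log, hands them to subprocess.Popen, and never closes them, leaving cleanup to the garbage collector. A minimal sketch of the leak-free variant of that pattern follows; it assumes a recipe that opens the log files itself, and the function and attribute names are illustrative, not the actual recipe API:

import subprocess

def start_with_logs(cmd, out_path, err_path):
    # Open the log files and attach them to the child process.
    out = open(out_path, "w", encoding="utf-8")
    err = open(err_path, "w", encoding="utf-8")
    try:
        process = subprocess.Popen(cmd, stdout=out, stderr=err)
    except Exception:
        out.close()
        err.close()
        raise
    # Keep the handles so they can be closed deterministically later,
    # instead of relying on GC (which is what emits the ResourceWarning).
    process.log_handles = (out, err)
    return process

def stop_with_logs(process):
    process.terminate()
    process.wait()
    for handle in process.log_handles:
        handle.close()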
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_unavailable_s3.py::TestUnavailableS3::test
2025-05-29 15:43:12,984 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-05-29 15:43:13,409 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination:
pid rss ref pdirt
391169 155M 135M 100.0M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/ciyv/0027b6/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mod
393590 518M 509M 265M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/ciyv/0027b6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5
396551 91.8M 88.7M 64.1M └─ moto_server s3 --port 1572
Test command err:
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/0027b6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/0027b6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
File "library/python/pytest/main.py", line 101, in main
  rc = pytest.main(
File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175, in main
  ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
  return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
  return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
  res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 320, in pytest_cmdline_main
  return wrap_session(config, _main)
File "contrib/python/pytest/py3/_pytest/main.py", line 273, in wrap_session
  session.exitstatus = doit(config, session) or 0
File "contrib/python/pytest/py3/_pytest/main.py", line 327, in _main
  config.hook.pytest_runtestloop(session=session)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
  return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
  return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
  res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 352, in pytest_runtestloop
  item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
  return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
  return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
  res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 115, in pytest_runtest_protocol
  runtestprotocol(item, nextitem=nextitem)
File "contrib/python/pytest/py3/_pytest/runner.py", line 134, in runtestprotocol
  reports.append(call_and_report(item, "call", log))
File "contrib/python/pytest/py3/_pytest/runner.py", line 223, in call_and_report
  call = call_runtest_hook(item, when, **kwds)
File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in call_runtest_hook
  return CallInfo.from_call(
File "contrib/python/pytest/py3/_pytest/runner.py", line 342, in from_call
  result: Optional[TResult] = func()
File "contrib/python/pytest/py3/_pytest/runner.py", line 263, in <lambda>
  lambda: ihook(item=item, **kwds), when=when, reraise=reraise
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
  return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
  return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
  res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 170, in pytest_runtest_call
  item.runtest()
File "contrib/python/pytest/py3/_pytest/python.py", line 1844, in runtest
  self.ihook.pytest_pyfunc_call(pyfuncitem=self)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
  return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
  return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
  res = hook_impl.function(*args)
File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
  pyfuncitem.retval = testfunction(**testargs)
File "ydb/tests/olap/ttl_tiering/ttl_unavailable_s3.py", line 40, in test
  self.ydb_client.query(f"CREATE OBJECT s3_id (TYPE SECRET) WITH value = '{self.s3_client.key_id}'")
File "ydb/tests/olap/common/ydb_client.py", line 24, in query
  return self.session_pool.execute_with_retries(statement)
File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries
  return retry_operation_sync(wrapped_callee, retry_settings)
File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync
  for next_opt in opt_generator:
File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl
  result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee
  return [result_set for result_set in it]
File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__
  return self._next()
File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next
  res = self.wrapper(next(self.it))
File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__
  return self._next()
File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next
  _common.wait(self._state.condition.wait, _response_ready)
File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait
  _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
  wait_fn(timeout=timeout)
File "contrib/tools/python3/Lib/threading.py", line 359, in wait
  gotit = waiter.acquire(True, timeout)
File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
  traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/ciyv/0027b6/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/ciyv/0027b6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ciyv/0027b6', '--source-root', '/home/runner/.ya/build/build_root/ciyv/0027b6/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ciyv/0027b6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8689590287/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '5', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/ciyv/0027b6/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/ciyv/0027b6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ciyv/0027b6', '--source-root', '/home/runner/.ya/build/build_root/ciyv/0027b6/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ciyv/0027b6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8689590287/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '5', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {})
|83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test
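Each of these failures has the same shape: the test calls ydb_client.query(), execute_with_retries blocks indefinitely inside the gRPC stream iterator (the grpc/_common.py wait loop has no overall deadline here), and it is the 600-second test wrapper, not the test itself, that finally kills the run. A client-side guard can turn that into a fast, clearly attributed failure. This is a sketch only: ydb_client mirrors the wrapper from ydb/tests/olap/common/ydb_client.py seen in the tracebacks, and the helper name and deadline value are assumptions, not part of the test suite.

from concurrent.futures import ThreadPoolExecutor, TimeoutError as FutureTimeout

def query_with_deadline(ydb_client, statement, deadline_seconds=120):
    # Run the blocking call in a worker thread and give up after a
    # deadline well below the 600 s wrapper timeout.
    pool = ThreadPoolExecutor(max_workers=1)
    future = pool.submit(ydb_client.query, statement)
    try:
        return future.result(timeout=deadline_seconds)
    except FutureTimeout:
        raise TimeoutError(
            "query did not finish within %ss: %.80s" % (deadline_seconds, statement)
        )
    finally:
        # Don't wait for the worker: if the gRPC stream is stuck the
        # thread stays blocked, but the test can fail immediately.
        pool.shutdown(wait=False)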
>> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client5-column_type5-True] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client6-column_type6-False]
>> TYdbControlPlaneStoragePipeline::ShouldCheckPrioritySelectionEntities [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetLimit
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete
2025-05-29 15:43:11,714 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-05-29 15:43:13,078 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination:
pid rss ref pdirt
390438 155M 153M 100M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/ciyv/0027c6/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu
392318 567M 524M 314M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/ciyv/0027c6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4
395365 91.8M 89.9M 64.1M └─ moto_server s3 --port 19063
Test command err:
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/0027c6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/0027c6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
File "library/python/pytest/main.py", line 101, in main
  rc = pytest.main(
File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175, in main
  ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
  return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
  return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
  res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 320, in pytest_cmdline_main
  return wrap_session(config, _main)
File "contrib/python/pytest/py3/_pytest/main.py", line 273, in wrap_session
  session.exitstatus = doit(config, session) or 0
File "contrib/python/pytest/py3/_pytest/main.py", line 327, in _main
  config.hook.pytest_runtestloop(session=session)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
  return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
  return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
  res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 352, in pytest_runtestloop
  item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
  return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
  return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
  res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 115, in pytest_runtest_protocol
  runtestprotocol(item, nextitem=nextitem)
File "contrib/python/pytest/py3/_pytest/runner.py", line 134, in runtestprotocol
  reports.append(call_and_report(item, "call", log))
File "contrib/python/pytest/py3/_pytest/runner.py", line 223, in call_and_report
  call = call_runtest_hook(item, when, **kwds)
File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in call_runtest_hook
  return CallInfo.from_call(
File "contrib/python/pytest/py3/_pytest/runner.py", line 342, in from_call
  result: Optional[TResult] = func()
File "contrib/python/pytest/py3/_pytest/runner.py", line 263, in <lambda>
  lambda: ihook(item=item, **kwds), when=when, reraise=reraise
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
  return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
  return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
  res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 170, in pytest_runtest_call
  item.runtest()
File "contrib/python/pytest/py3/_pytest/python.py", line 1844, in runtest
  self.ihook.pytest_pyfunc_call(pyfuncitem=self)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
  return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
  return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
  res = hook_impl.function(*args)
File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
  pyfuncitem.retval = testfunction(**testargs)
File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 324, in test_ttl_delete
  self.ydb_client.query(f"CREATE OBJECT {access_key_id_secret_name} (TYPE SECRET) WITH value='{self.s3_client.key_id}'")
File "ydb/tests/olap/common/ydb_client.py", line 24, in query
  return self.session_pool.execute_with_retries(statement)
File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries
  return retry_operation_sync(wrapped_callee, retry_settings)
File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync
  for next_opt in opt_generator:
File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl
  result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee
  return [result_set for result_set in it]
File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__
  return self._next()
File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next
  res = self.wrapper(next(self.it))
File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__
  return self._next()
File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next
  _common.wait(self._state.condition.wait, _response_ready)
File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait
  _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
  wait_fn(timeout=timeout)
File "contrib/tools/python3/Lib/threading.py", line 359, in wait
  gotit = waiter.acquire(True, timeout)
File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
  traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/ciyv/0027c6/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/ciyv/0027c6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ciyv/0027c6', '--source-root', '/home/runner/.ya/build/build_root/ciyv/0027c6/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ciyv/0027c6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8689590287/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '4', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/ciyv/0027c6/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/ciyv/0027c6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ciyv/0027c6', '--source-root', '/home/runner/.ya/build/build_root/ciyv/0027c6/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ciyv/0027c6/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8689590287/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '4', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {})
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [FAIL]
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client8-year Int64-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client9-year Uint64-False]
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_correctness.py::TestDataCorrectness::test
2025-05-29 15:43:11,855 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-05-29 15:43:12,551 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination:
pid rss ref pdirt
390593 155M 153M 100M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/ciyv/0027c1/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu
392360 520M 514M 267M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/ciyv/0027c1/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0
395364 91.3M 89.3M 64.1M └─ moto_server s3 --port 7031
Test command err:
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/0027c1/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/0027c1/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
File "library/python/pytest/main.py", line 101, in main
  rc = pytest.main(
File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175, in main
  ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
  return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
  return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
  res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 320, in pytest_cmdline_main
  return wrap_session(config, _main)
File "contrib/python/pytest/py3/_pytest/main.py", line 273, in wrap_session
  session.exitstatus = doit(config, session) or 0
File "contrib/python/pytest/py3/_pytest/main.py", line 327, in _main
  config.hook.pytest_runtestloop(session=session)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
  return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
  return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
  res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 352, in pytest_runtestloop
  item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
  return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
  return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
  res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 115, in pytest_runtest_protocol
  runtestprotocol(item, nextitem=nextitem)
File "contrib/python/pytest/py3/_pytest/runner.py", line 134, in runtestprotocol
  reports.append(call_and_report(item, "call", log))
File "contrib/python/pytest/py3/_pytest/runner.py", line 223, in call_and_report
  call = call_runtest_hook(item, when, **kwds)
File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in call_runtest_hook
  return CallInfo.from_call(
File "contrib/python/pytest/py3/_pytest/runner.py", line 342, in from_call
  result: Optional[TResult] = func()
File "contrib/python/pytest/py3/_pytest/runner.py", line 263, in <lambda>
  lambda: ihook(item=item, **kwds), when=when, reraise=reraise
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
  return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
  return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
  res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 170, in pytest_runtest_call
  item.runtest()
File "contrib/python/pytest/py3/_pytest/python.py", line 1844, in runtest
  self.ihook.pytest_pyfunc_call(pyfuncitem=self)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
  return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
  return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
  res = hook_impl.function(*args)
File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
  pyfuncitem.retval = testfunction(**testargs)
File "ydb/tests/olap/ttl_tiering/data_correctness.py", line 116, in test
  self.ydb_client.query(
File "ydb/tests/olap/common/ydb_client.py", line 24, in query
  return self.session_pool.execute_with_retries(statement)
File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries
  return retry_operation_sync(wrapped_callee, retry_settings)
File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync
  for next_opt in opt_generator:
File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl
  result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee
  return [result_set for result_set in it]
File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__
  return self._next()
File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next
  res = self.wrapper(next(self.it))
File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__
  return self._next()
File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next
  _common.wait(self._state.condition.wait, _response_ready)
File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait
  _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
  wait_fn(timeout=timeout)
File "contrib/tools/python3/Lib/threading.py", line 359, in wait
  gotit = waiter.acquire(True, timeout)
File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
  traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/ciyv/0027c1/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/ciyv/0027c1/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ciyv/0027c1', '--source-root', '/home/runner/.ya/build/build_root/ciyv/0027c1/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ciyv/0027c1/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8689590287/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '0', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/ciyv/0027c1/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/ciyv/0027c1/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ciyv/0027c1', '--source-root', '/home/runner/.ya/build/build_root/ciyv/0027c1/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ciyv/0027c1/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8689590287/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '0', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {})
|83.1%| [TA] $(B)/ydb/tests/datashard/split_merge/test-results/py3test/{meta.json ... results_accumulator.log}
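The outer frames of every traceback above show the wrapper-side pattern from run_test.py: wait on the child process with a timeout and fire a callback (which prints the process trees seen above) before converting the error into ExecutionTimeoutError. A generic stdlib sketch of that pattern, as an illustration only and not the actual devtools implementation:

import subprocess

def wait_with_timeout(process, run_timeout, on_timeout):
    # Wait up to run_timeout seconds; on expiry, let the callback dump
    # diagnostics (e.g. a process tree) before the error propagates,
    # mirroring res.wait(..., on_timeout=...) in the traceback.
    try:
        return process.wait(timeout=run_timeout)
    except subprocess.TimeoutExpired:
        on_timeout(process)
        raise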
results_accumulator.log} >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJob [FAIL] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckNotExistOldName [FAIL] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeIncorrectJob >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScope ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> tier_delete.py::TestTierDelete::test_delete_s3_ttl 2025-05-29 15:43:12,623 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-29 15:43:13,438 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 391089 155M 153M 99.2M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/ciyv/0027ba/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu 392591 512M 478M 260M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/ciyv/0027ba/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2 395989 91.6M 86.4M 64.1M └─ moto_server s3 --port 62546 Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/0027ba/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/ciyv/0027ba/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 320, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 273, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 327, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 352, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File 
"contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 115, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 134, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 223, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 342, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 263, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 170, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1844, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/tier_delete.py", line 69, in test_delete_s3_ttl self.ydb_client.query(s) File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, 
spin_cb)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
    wait_fn(timeout=timeout)
  File "contrib/tools/python3/Lib/threading.py", line 359, in wait
    gotit = waiter.acquire(True, timeout)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Thread 0x00007fe4a57ff640 (most recent call first):
  File "contrib/tools/python3/Lib/threading.py", line 359 in wait
  File "contrib/python/grpcio/py3/grpc/_common.py", line 112 in _wait_once
  File "contrib/python/grpcio/py3/grpc/_common.py", line 150 in wait
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 872 in _next
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 475 in __next__
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 164 in _next
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 173 in __next__
  File "contrib/python/ydb/py3/ydb/query/session.py", line 267 in _check_session_status_loop
  File "contrib/tools/python3/Lib/threading.py", line 1012 in run
  File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner
  File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap
Thread 0x00007fe4a49ff640 (most recent call first):
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 1392 in channel_spin
  File "contrib/tools/python3/Lib/threading.py", line 1012 in run
  File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner
  File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap
Thread 0x00007fe4a65ff640 (most recent call first):
  File "contrib/tools/python3/Lib/threading.py", line 359 in wait
  File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run
  File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner
  File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap
Current thread 0x00007fe4c0876440 (most recent call first):
  File "contrib/tools/python3/Lib/threading.py", line 359 in wait
  File "contrib/python/grpcio/py3/grpc/_common.py", line 112 in _wait_once
  File "contrib/python/grpcio/py3/grpc/_common.py", line 150 in wait
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 872 in _next
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 475 in __next__
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 164 in _next
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 173 in __next__
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 202 in wrapped_callee
  File "contrib/python/ydb/py3/ydb/retries.py", line 94 in retry_operation_impl
  File "contrib/python/ydb/py3/ydb/retries.py", line 133 in retry_operation_sync
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 204 in execute_with_retries
  File "ydb/tests/olap/common/ydb_client.py", line 24 in query
  File "ydb/tests/olap/ttl_tiering/tier_delete.py", line 84 in test_delete_s3_ttl
  File "library/python/pytest/plugins/ya.py", line 563 in pytest_pyfunc_call
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__
  File "contrib/python/pytest/py3/_pytest/python.py", line 1844 in runtest
  File "contrib/python/pytest/py3/_pytest/runner.py", line 170 in pytest_runtest_call
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__
  File "contrib/python/pytest/py3/_pytest/runner.py", line 263 in
  File "contrib/python/pytest/py3/_pytest/runner.py", line 342 in from_call
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262 in call_runtest_hook
  File "contrib/python/pytest/py3/_pytest/runner.py", line 223 in call_and_report
  File "contrib/python/pytest/py3/_pytest/runner.py", line 134 in runtestprotocol
  File "contrib/python/pytest/py3/_pytest/runner.py", line 115 in pytest_runtest_protocol
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__
  File "contrib/python/pytest/py3/_pytest/main.py", line 352 in pytest_runtestloop
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__
  File "contrib/python/pytest/py3/_pytest/main.py", line 327 in _main
  File "contrib/python/pytest/py3/_pytest/main.py", line 273 in wrap_session
  File "contrib/python/pytest/py3/_pytest/main.py", line 320 in pytest_cmdline_main
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175 in main
  File "library/python/pytest/main.py", line 101 in main
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/ciyv/0027ba/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/ciyv/0027ba/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ciyv/0027ba', '--source-root', '/home/runner/.ya/build/build_root/ciyv/0027ba/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ciyv/0027ba/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8689590287/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '2', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/ciyv/0027ba/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/ciyv/0027ba/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ciyv/0027ba', '--source-root', '/home/runner/.ya/build/build_root/ciyv/0027ba/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ciyv/0027ba/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8689590287/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '2', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {})
2025-05-29 15:43:44,684 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps
2025-05-29 15:43:44,684 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores
2025-05-29 15:43:45,215 WARNING libarchive: File (tier_delete.py.TestTierDelete.test_delete_s3_ttl/cluster/node_1/logfile_pfcvwosq.log) size has changed. Can't write more data than was declared in the tar header (4304759). (probably file was changed during archiving)
>> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client6-column_type6-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client7-column_type7-False]
|83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client9-year Uint64-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client10-year String NOT NULL-True]
|83.1%| [TA] {RESULT} $(B)/ydb/tests/datashard/split_merge/test-results/py3test/{meta.json ... results_accumulator.log}
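Note on the dump above: the "(most recent call first)" blocks are per-thread stack dumps emitted when the wrapper exhausts its 600-second budget and _graceful_shutdown in library/python/pytest/plugins/ya.py starts unwinding. A minimal, stdlib-only sketch of the same mechanism follows; it is an illustration under that assumption, not the wrapper's actual code.

# Sketch: dump a stack for every live thread, the way the log above does.
# Assumption: only the standard library is available; this is NOT ya.py.
import faulthandler
import sys
import threading
import traceback

def dump_all_thread_stacks(out=sys.stderr):
    """Print a stack for every live thread (current thread labeled last)."""
    current = threading.get_ident()
    for ident, frame in sys._current_frames().items():
        label = "Current thread" if ident == current else "Thread"
        # traceback prints frames "most recent call last", unlike faulthandler.
        print("%s 0x%016x (most recent call last):" % (label, ident), file=out)
        traceback.print_stack(frame, file=out)

# faulthandler produces the exact "(most recent call first)" layout seen
# above, and can be armed to fire automatically if the process hangs:
faulthandler.dump_traceback_later(600, exit=False)  # 600 s, as in this log

The difference in frame order between traceback and faulthandler is why the log shows both styles: the wrapper prints its own stack via traceback.print_stack and the other threads in faulthandler's format.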
>> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivatePublic [FAIL]
>> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionEmpty
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetLimit [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldCheckGetResultDataRequest
>> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeIncorrectJob [GOOD]
>> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJobIncorrectVisibility
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client7-column_type7-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client8-column_type8-False]
|83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [FAIL]
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-yql_types-yql_syntax-client0] [FAIL]
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-yql_types-pg_syntax-client0]
|83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test
>> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok
|83.1%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log}
>> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicSuccess [FAIL]
>> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicFailed
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--]
|83.1%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client8-column_type8-False] [FAIL]
>> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client9-column_type9-False]
|83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client10-year String NOT NULL-True] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client11-year String-False]
>> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPublic [FAIL]
>> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivate
>> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes
>> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivate [FAIL]
>> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivatePublic
|83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test
>> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldNotApplyPermissionViewAstAndViewQueryText [FAIL]
>> TYdbControlPlaneStorageGetQueryStatus::ShouldSuccess
>> TYdbControlPlaneStorageControlQuery::ShouldValidate [FAIL]
>> TYdbControlPlaneStorageControlQuery::ShouldCheckIdempotencyKey
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [FAIL]
|83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client11-year String-False] [FAIL]
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD]
>> TYdbControlPlaneStoragePipeline::ShouldCheckGetResultDataRequest [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldRetryQuery
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client12-year Utf8-False]
>> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJobIncorrectVisibility [FAIL]
>> TYdbControlPlaneStorageCreateQuery::ShouldSaveQuery
>> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client9-column_type9-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client10-column_type10-False]
|83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client12-year Utf8-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client13-year Date-False]
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-yql_types-pg_syntax-client0] [FAIL]
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-pg_types-yql_syntax-client0]
>> TYdbControlPlaneStorageCreateQuery::ShouldSaveQuery [FAIL]
>> TYdbControlPlaneStorageCreateQuery::ShouldCheckQueryName
|83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client10-column_type10-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client11-column_type11-False]
|83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test
|83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [FAIL]
>> TRegisterCheckTest::ShouldRegisterCheckNextGeneration
>> TYdbControlPlaneStoragePipeline::ShouldRetryQuery [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldCheckNotAutomaticTtl
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD]
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [FAIL]
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC]
>> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client13-year Date-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client0-year Int32 NOT NULL-False]
>> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes
>> TYdbControlPlaneStorageModifyConnection::ShouldCheckPermission [FAIL]
>> TYdbControlPlaneStorageModifyConnection::ShouldCheckExist
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client11-column_type11-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client12-column_type12-False]
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client0-year Int32 NOT NULL-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client1-year Uint32 NOT NULL-False]
>> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivatePublic [FAIL]
>> TYdbControlPlaneStorageListConnections::ShouldCheckLimit
>> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-pg_types-yql_syntax-client0] [FAIL]
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-pg_types-pg_syntax-client0]
>> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD]
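Note on the bracketed suffixes in names like test_precompute_with_pg_binding[v1-yql_types-pg_syntax-client0]: these are pytest parametrize IDs, one test function expanded over a grid of parameters. A small, hypothetical illustration (the parameter values below are invented for the example, not taken from these suites):

# Sketch of how pytest builds the bracketed IDs seen in this log.
import pytest

@pytest.mark.parametrize(
    "version,types,syntax",
    [
        ("v1", "yql_types", "yql_syntax"),
        ("v1", "pg_types", "pg_syntax"),
        ("v2", "yql_types", "pg_syntax"),
    ],
)
def test_precompute_with_pg_binding(version, types, syntax):
    # pytest reports each case as, e.g.,
    # test_precompute_with_pg_binding[v1-yql_types-yql_syntax]
    assert version in ("v1", "v2")

The trailing tokens such as client0 or column_type6 in the real IDs come from additional fixtures/parameters joined with the same dash convention.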
>> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldCheckNotAutomaticTtl [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldCheckChangeAutomaticTtl
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client1-year Uint32 NOT NULL-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client2-year Uint64 NOT NULL-False]
|83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [FAIL]
>> TYdbControlPlaneStorageDeleteBinding::ShouldValidate [FAIL]
>> TYdbControlPlaneStorageDeleteBinding::ShouldCheckSuperUser
>> TYdbControlPlaneStorageCreateQuery::ShouldCheckQueryName [FAIL]
>> TYdbControlPlaneStorageCreateQuery::ShouldCheckAvailableConnections
|83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client12-column_type12-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client13-column_type13-False]
>> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivate [FAIL]
>> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivatePublic
>> test_ydb_recursive_remove.py::TestRecursiveRemove::test_various_scheme_objects
|83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [FAIL]
>> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view
------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [FAIL]
Test command err:
assertion failed at ydb/core/fq/libs/ydb/ut/ydb_ut.cpp:359, virtual void NFq::NTestSuiteTRegisterCheckTest::TTestCaseShouldRegisterCheckSameGenerationAndTransact::Execute_(NUnitTest::TTestContext &): (status.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
TBackTrace::Capture()+28 (0xB3C28EC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0xB575C79)
NFq::NTestSuiteTRegisterCheckTest::TTestCaseShouldRegisterCheckSameGenerationAndTransact::Execute_(NUnitTest::TTestContext&)+1995 (0xB2B37EB)
NFq::NTestSuiteTRegisterCheckTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xB2B8BE7)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0xB577B2E)
NFq::NTestSuiteTRegisterCheckTest::TCurrentTest::Execute()+425 (0xB2B8449)
NUnitTest::TTestFactory::Execute()+803 (0xB5782A3)
NUnitTest::RunMain(int, char**)+3021 (0xB5865BD)
??+0 (0x7F242DDE1D90)
__libc_start_main+128 (0x7F242DDE1E40)
_start+41 (0xA40B029)
|83.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test >> test_alter_tiering.py::TestAlterTiering::test[many_tables]
2025-05-29 15:44:05,123 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-05-29 15:44:05,981 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination:
pid rss ref pdirt
450630 175M 173M 112M ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/ciyv/0026ac/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules
454609 509M 505M 259M └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/ciyv/0026ac/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chunk1/te
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 320, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 273, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 327, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 352, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 115, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 134, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 223, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 342, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 263, in
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 170, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1844, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "/home/runner/.ya/build/build_root/ciyv/0026ac/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 100, in test
    self._test_suffix(ctx, get_external_param("table_suffix", ""), exit_codes, 0)
  File "/home/runner/.ya/build/build_root/ciyv/0026ac/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 110, in _test_suffix
    ctx.executable(self, ctx)
  File "ydb/tests/olap/scenario/test_alter_tiering.py", line 300, in scenario_many_tables
    self._setup_tiering_test(ctx)
  File "ydb/tests/olap/scenario/test_alter_tiering.py", line 142, in _setup_tiering_test
    sth.execute_scheme_query(UpsertSecret(self.access_key_secret, self.s3_access_key))
  File "ydb/tests/olap/scenario/helpers/scenario_tests_helper.py", line 430, in execute_scheme_query
    self._run_with_expected_status(
  File "ydb/tests/olap/scenario/helpers/scenario_tests_helper.py", line 343, in _run_with_expected_status
    result = operation()
  File "ydb/tests/olap/scenario/helpers/scenario_tests_helper.py", line 431, in
    lambda: YdbCluster.get_ydb_driver().table_client.session().create().execute_scheme(yql), expected_status, retriable_status, retries
  File "contrib/python/ydb/py3/ydb/table.py", line 1756, in execute_scheme
    return self._driver(
  File "contrib/python/ydb/py3/ydb/tracing.py", line 70, in wrapper
    return f(self, *args, **kwargs)
  File "contrib/python/ydb/py3/ydb/pool.py", line 464, in __call__
    res = connection(
  File "contrib/python/ydb/py3/ydb/connection.py", line 458, in __call__
    response = rpc_state(
  File "contrib/python/ydb/py3/ydb/connection.py", line 242, in __call__
    response, rendezvous = self.rpc.with_call(*args, **kwargs)
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 1041, in with_call
    state, call, = self._blocking(request, timeout, metadata, credentials,
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 1017, in _blocking
    event = call.next_event()
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Thread 0x00007f78fcdff640 (most recent call first):
  File "ydb/tests/library/common/wait_for.py", line 19 in wait_for
  File "ydb/tests/library/harness/daemon.py", line 198 in stop
  File "ydb/tests/library/harness/kikimr_runner.py", line 257 in stop
  File "ydb/tests/library/harness/kikimr_runner.py", line 552 in __stop_node
  File "ydb/tests/library/harness/kikimr_runner.py", line 566 in stop_node
  File "contrib/tools/python3/Lib/threading.py", line 1012 in run
  File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner
  File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap
Thread 0x00007f78fbfff640 (most recent call first):
  File "contrib/tools/python3/Lib/selectors.py", line 415 in select
  File "contrib/tools/python3/Lib/socketserver.py", line 235 in serve_forever
  File "contrib/python/Werkzeug/py3/werkzeug/serving.py", line 766 in serve_forever
  File "contrib/python/moto/py3/moto/moto_server/threaded_moto_server.py", line 24 in _server_entry
  File "contrib/tools/python3/Lib/threading.py", line 1012 in run
  File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner
  File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap
Thread 0x00007f78fdbff640 (most recent call first):
  File "contrib/tools/python3/Lib/threading.py", line 359 in wait
  File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run
  File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner
  File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap
Current thread 0x00007f79191e1440 (most recent call first):
  File "contrib/tools/python3/Lib/threading.py", line 1169 in _wait_for_tstate_lock
  File "contrib/tools/python3/Lib/threading.py", line 1149 in join
  File "ydb/tests/library/harness/kikimr_runner.py", line 577 in stop
  File "/home/runner/.ya/build/build_root/ciyv/0026ac/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 61 in stop
  File "/home/runner/.ya/build/build_root/ciyv/0026ac/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 79 in teardown_class
  File "contrib/python/pytest/py3/_pytest/python.py", line 764 in _call_with_optional_argument
  File "contrib/python/pytest/py3/_pytest/python.py", line 847 in xunit_setup_class_fixture
  File "contrib/python/pytest/py3/_pytest/fixtures.py", line 926 in _teardown_yield_fixture
  File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1042 in finish
  File "contrib/python/pytest/py3/_pytest/runner.py", line 543 in teardown_exact
  File "contrib/python/pytest/py3/_pytest/runner.py", line 109 in pytest_sessionfinish
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__
  File "contrib/python/pytest/py3/_pytest/main.py", line 308 in wrap_session
  File "contrib/python/pytest/py3/_pytest/main.py", line 320 in pytest_cmdline_main
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175 in main
  File "library/python/pytest/main.py", line 101 in main
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...ac/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/ciyv/0026ac/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/ciyv/0026ac/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chunk1/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ciyv/0026ac', '--source-root', '/home/runner/.ya/build/build_root/ciyv/0026ac/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ciyv/0026ac/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8689590287/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '1', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...ac/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/ciyv/0026ac/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/ciyv/0026ac/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chunk1/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ciyv/0026ac', '--source-root', '/home/runner/.ya/build/build_root/ciyv/0026ac/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ciyv/0026ac/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8689590287/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '1', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {})
2025-05-29 15:44:37,533 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps
2025-05-29 15:44:37,533 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores
>> TYdbControlPlaneStoragePipeline::ShouldCheckChangeAutomaticTtl [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldCheckResultsTTL
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-pg_types-pg_syntax-client0] [FAIL]
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-yql_types-yql_syntax-client0]
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [FAIL]
|83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD]
>> TYdbControlPlaneStorageListBindings::ShouldCheckLimit [FAIL]
>> TYdbControlPlaneStorageListBindings::ShouldCheckScopeVisibility
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-yql_types-yql_syntax-client0] [FAIL]
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-yql_types-pg_syntax-client0]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client2-year Uint64 NOT NULL-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client13-column_type13-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client0-column_type0-False]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client3-year Date NOT NULL-False]
>> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2
|83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test
>> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldCheckAstClear [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldCheckAutomaticTtl
>> TYdbControlPlaneStorageCreateQuery::ShouldCheckAvailableConnections [FAIL]
|83.2%| [TA] $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log}
|83.2%| [TA] {RESULT} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log}
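Note: both timeout failures above funnel through yatest.common.process.wait, which delegates to wait_for and then converts its TimeoutError into ExecutionTimeoutError. A rough sketch of that poll-and-raise pattern follows; the names mirror the frames in the log, but the bodies are assumptions, not the real devtools source.

# Sketch of the wait/wait_for timeout chain seen in the tracebacks above.
# Assumption: the real yatest code differs in detail; this only shows the shape.
import time

class TimeoutError(Exception):
    """Mirrors yatest's own TimeoutError (intentionally shadows the builtin here)."""

def wait_for(check_function, timeout, fail_message="", sleep_time=1.0):
    """Poll check_function until it returns truthy or the budget is spent."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        if check_function():
            return True
        time.sleep(sleep_time)
    raise TimeoutError(fail_message or "stopped by %s seconds timeout" % timeout)

def wait(process, timeout):
    """A process wait expressed in terms of the poller, as in process.py."""
    wait_for(
        check_function=lambda: process.poll() is not None,  # has it exited?
        timeout=timeout,
        fail_message="command stopped by %s seconds timeout" % timeout,
    )

The second traceback in each block is simply the caller (run_test.py) re-wrapping the TimeoutError so that the report carries the full command line of the timed-out test binary.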
>> TYdbControlPlaneStorageModifyConnection::ShouldCheckExist [FAIL]
>> TYdbControlPlaneStorageModifyConnection::ShouldCheckNotExistOldName
|83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldCheckResultsTTL [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldCheckDisableCurrentIamGetTask
>> TYdbControlPlaneStorageListConnections::ShouldCheckLimit [FAIL]
>> TYdbControlPlaneStorageListConnections::ShouldCheckScopeVisibility
>> test_ydb_recursive_remove.py::TestRecursiveRemove::test_various_scheme_objects [GOOD]
------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [FAIL]
Test command err:
assertion failed at ydb/core/fq/libs/ydb/ut/ydb_ut.cpp:235, virtual void NFq::NTestSuiteTRegisterCheckTest::TTestCaseShouldNotRegisterCheckPrevGeneration::Execute_(NUnitTest::TTestContext &): (status.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
TBackTrace::Capture()+28 (0xB3C28EC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0xB575C79)
NFq::NTestSuiteTRegisterCheckTest::TTestCaseShouldNotRegisterCheckPrevGeneration::Execute_(NUnitTest::TTestContext&)+769 (0xB2B0DA1)
NFq::NTestSuiteTRegisterCheckTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xB2B8BE7)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0xB577B2E)
NFq::NTestSuiteTRegisterCheckTest::TCurrentTest::Execute()+425 (0xB2B8449)
NUnitTest::TTestFactory::Execute()+803 (0xB5782A3)
NUnitTest::RunMain(int, char**)+3021 (0xB5865BD)
??+0 (0x7F29B2D98D90)
__libc_start_main+128 (0x7F29B2D98E40)
_start+41 (0xA40B029)
>> TYdbControlPlaneStorageDescribeBinding::ShouldCheckExist [FAIL]
>> TYdbControlPlaneStorageDescribeBinding::ShouldValidate
>> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view [GOOD]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client0-column_type0-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client1-column_type1-False]
>> test_ydb_table.py::TestExecuteQueryWithParams::test_uint32
>> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint32
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client3-year Date NOT NULL-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client4-year Utf8 NOT NULL-False]
------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [FAIL]
Test command err:
assertion failed at ydb/core/fq/libs/ydb/ut/ydb_ut.cpp:194, virtual void NFq::NTestSuiteTRegisterCheckTest::TTestCaseShouldRegisterCheckNextGeneration::Execute_(NUnitTest::TTestContext &): (status.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
TBackTrace::Capture()+28 (0xB3C28EC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0xB575C79)
NFq::NTestSuiteTRegisterCheckTest::TTestCaseShouldRegisterCheckNextGeneration::Execute_(NUnitTest::TTestContext&)+763 (0xB2B01BB)
NFq::NTestSuiteTRegisterCheckTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xB2B8BE7)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0xB577B2E)
NFq::NTestSuiteTRegisterCheckTest::TCurrentTest::Execute()+425 (0xB2B8449)
NUnitTest::TTestFactory::Execute()+803 (0xB5782A3)
NUnitTest::RunMain(int, char**)+3021 (0xB5865BD)
??+0 (0x7F1C1B963D90)
__libc_start_main+128 (0x7F1C1B963E40)
_start+41 (0xA40B029)
>> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view_json
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Date-pk_types13-all_types13-index13-Date--]
>> TYdbControlPlaneStoragePipeline::ShouldCheckAutomaticTtl [FAIL]
>> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact
>> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_full_stats
>> TYdbControlPlaneStorageDescribeConnection::ShouldCheckSuperUser [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client1-column_type1-False] [FAIL]
|83.2%| [TA] $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... results_accumulator.log}
>> TYdbControlPlaneStorageDescribeConnection::ShouldNotShowClickHousePassword
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client2-column_type2-False]
>> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [FAIL]
|83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_recursive_remove.py::TestRecursiveRemove::test_various_scheme_objects [GOOD]
>> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-row]
>> TYdbControlPlaneStoragePipeline::ShouldCheckDisableCurrentIamGetTask [FAIL]
>> TYdbControlPlaneStoragePipeline::ShouldReturnPartialBatchForGetTask
>> TRegisterCheckTest::ShouldRegisterCheckSameGeneration
|83.3%| [TA] {RESULT} $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... results_accumulator.log}
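Note: the ttl_tiering stack at the top of this chunk shows the test driving YQL through the SDK's query pool (execute_with_retries -> retry_operation_sync). A hedged sketch of how a test helper can use that public API is below; the endpoint and database strings are placeholders, not values from this log.

# Sketch: running a query with the ydb Python SDK's built-in retries,
# matching the execute_with_retries / retry_operation_sync frames above.
import ydb

def main():
    driver = ydb.Driver(
        endpoint="grpc://localhost:2136",  # hypothetical endpoint
        database="/Root",                  # hypothetical database
    )
    driver.wait(timeout=5)
    pool = ydb.QuerySessionPool(driver)
    try:
        # Transient statuses (including BAD_SESSION) are retried under the
        # hood; each attempt takes a fresh session from the pool.
        result_sets = pool.execute_with_retries("SELECT 1 AS one;")
        print(result_sets[0].rows[0]["one"])
    finally:
        pool.stop()
        driver.stop()

if __name__ == "__main__":
    main()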
>> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client2-column_type2-False] [FAIL]
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client3-column_type3-False]
>> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view_json [GOOD]
>> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint32 [FAIL]
>> test_ydb_table.py::TestExecuteQueryWithParams::test_uint32 [FAIL]
>> test_ydb_table.py::TestExecuteQueryWithParams::test_uint64_and_string
>> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint64_and_string
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client4-year Utf8 NOT NULL-False] [FAIL]
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client5-year Int64 NOT NULL-False]
>> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint32
>> TYdbControlPlaneStorageModifyConnection::ShouldCheckNotExistOldName [FAIL]
>> TYdbControlPlaneStorageModifyConnection::ShouldCheckLowerCaseName
>> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD]
>> test_ydb_scheme.py::TestSchemeDescribe::test_describe_external_table_references_json
>> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD]
>> TYdbControlPlaneStorageRateLimiter::ShouldDeleteRateLimiterResource [FAIL]
>> TYdbControlPlaneStorageTest::ShouldCreateTable
>> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_full_stats [FAIL]
>> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_profile_stats
>> TRegisterCheckTest::ShouldRegisterCheckNewGeneration
>> TYdbControlPlaneStoragePipeline::ShouldReturnPartialBatchForGetTask [FAIL]
------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [FAIL]
Test command err:
assertion failed at ydb/core/fq/libs/ydb/ut/ydb_ut.cpp:277, virtual void NFq::NTestSuiteTRegisterCheckTest::TTestCaseShouldNotRegisterCheckPrevGeneration2::Execute_(NUnitTest::TTestContext &): (status.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1
TBackTrace::Capture()+28 (0xB3C28EC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0xB575C79)
NFq::NTestSuiteTRegisterCheckTest::TTestCaseShouldNotRegisterCheckPrevGeneration2::Execute_(NUnitTest::TTestContext&)+769 (0xB2B19A1)
NFq::NTestSuiteTRegisterCheckTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xB2B8BE7)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0xB577B2E)
NFq::NTestSuiteTRegisterCheckTest::TCurrentTest::Execute()+425 (0xB2B8449)
NUnitTest::TTestFactory::Execute()+803 (0xB5782A3)
NUnitTest::RunMain(int, char**)+3021 (0xB5865BD)
??+0 (0x7FAD9AE3AD90)
__libc_start_main+128 (0x7FAD9AE3AE40)
_start+41 (0xA40B029)
>> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-row] [GOOD]
>> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-column]
>> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint64_and_string [FAIL]
>> test_ydb_sql.py::TestExecuteSqlWithParams::test_list
>> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_profile_stats [FAIL]
>> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_basic_stats
>> test_ydb_sql.py::TestExecuteSqlWithParams::test_list [FAIL]
>> test_ydb_sql.py::TestExecuteSqlWithParams::test_struct
>> TYdbControlPlaneStorageDescribeBinding::ShouldValidate [FAIL]
>> TYdbControlPlaneStorageDescribeBinding::ShouldCheckSuperUser
>> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_basic_stats [FAIL]
>> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir
>> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-column] [GOOD]
>> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-row]
>> test_ydb_sql.py::TestExecuteSqlWithParams::test_struct [FAIL]
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Date-pk_types13-all_types13-index13-Date--] [FAIL]
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-yql_types-pg_syntax-client0] [FAIL]
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-pg_types-yql_syntax-client0]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageCreateQuery::ShouldCheckAvailableConnections [FAIL]
Test command err:
Netstat: sh: 1: netstat: not found
Process stat:
USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND
root 1 0.0 0.0 167292 9760 ? Ss 12:35 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 12:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 12:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 12:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 12:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 12:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/0:0H-events_highpri] root 11 0.0 0.0 0 0 ? I< 12:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 12:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 12:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I 12:35 0:08 [rcu_sched] root 16 0.0 0.0 0 0 ? S 12:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 12:35 0:02 [migration/1] root 23 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ?
S 12:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 12:35 0:02 [migration/2] root 29 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 12:35 0:02 [migration/3] root 35 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 12:35 0:02 [migration/4] root 41 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 12:35 0:02 [migration/5] root 47 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 12:35 0:02 [migration/6] root 53 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 12:35 0:02 [migration/7] root 59 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 12:35 0:02 [migration/8] root 65 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 12:35 0:02 [migration/9] root 71 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 12:35 0:02 [migration/10] root 77 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 12:35 0:02 [migration/11] root 83 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 12:35 0:02 [migration/12] root 89 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/12] root 90 0.0 0.0 0 0 ? I 12:35 0:00 [kworker/12:0-rcu_par_gp] root 91 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 12:35 0:02 [migration/13] root 95 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 12:35 0:02 [migration/14] root 101 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? 
S 12:35 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 12:35 0:02 [migration/15] root 107 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 12:35 0:02 [migration/16] root 113 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 12:35 0:02 [migration/17] root 119 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 12:35 0:02 [migration/18] root 125 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 12:35 0:02 [migration/19] root 131 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/19:0H-events_highpri] root 134 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 12:35 0:02 [migration/20] root 137 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 12:35 0:02 [migration/21] root 143 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 12:35 0:02 [migration/22] root 149 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 12:35 0:02 [migration/23] root 155 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 12:35 0:02 [migration/24] root 161 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 12:35 0:02 [migration/25] root 167 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/25:0H-events_highpri] root 170 0.0 ...
Int64;
DECLARE $internal as String;
INSERT INTO `connections` (`scope`, `connection_id`, `user`, `visibility`, `name`, `connection_type`, `connection`, `revision`, `internal`) VALUES ($scope, $connection_id, $user, $visibility, $name, $connection_type, $connection, $revision, $internal);
2025-05-29T15:44:40.346329Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:470: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=NTRlNzMzMWMtNDUyMGI0MDgtMjczMjQ5ZTEtMTg3Mjk0MzM= }, Query: --!syntax_v1
-- Query name: CreateConnection
PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)");
DECLARE $scope as String;
DECLARE $connection_id as String;
DECLARE $user as String;
DECLARE $visibility as Int64;
DECLARE $name as String;
DECLARE $connection_type as Int64;
DECLARE $connection as String;
DECLARE $revision as Int64;
DECLARE $internal as String;
INSERT INTO `connections` (`scope`, `connection_id`, `user`, `visibility`, `name`, `connection_type`, `connection`, `revision`, `internal`) VALUES ($scope, $connection_id, $user, $visibility, $name, $connection_type, $connection, $revision, $internal);
2025-05-29T15:44:42.298942Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:470: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=YTFhZjE4MDYtNTg4Y2RmOWMtMjU4YTM5ZGEtNDA5M2E2Nzk= }, Query: --!syntax_v1
-- Query name: CreateConnection
PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)");
DECLARE $scope as String;
DECLARE $connection_id as String;
DECLARE $user as String;
DECLARE $visibility as Int64;
DECLARE $name as String;
DECLARE $connection_type as Int64;
DECLARE $connection as String;
DECLARE $revision as Int64;
DECLARE $internal as String;
INSERT INTO `connections` (`scope`, `connection_id`, `user`, `visibility`, `name`, `connection_type`, `connection`, `revision`, `internal`) VALUES ($scope, $connection_id, $user, $visibility, $name, $connection_type, $connection, $revision, $internal);
2025-05-29T15:44:43.348809Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:470: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=YjY0ZjU5ZjUtNmFhYTVlZDUtNGQxZjYwNjctMWY5NDI4Zjc= }, Query: --!syntax_v1
-- Query name: CreateConnection
PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)");
DECLARE $scope as String;
DECLARE $connection_id as String;
DECLARE $user as String;
DECLARE $visibility as Int64;
DECLARE $name as String;
DECLARE $connection_type as Int64;
DECLARE $connection as String;
DECLARE $revision as Int64;
DECLARE $internal as String;
INSERT INTO `connections` (`scope`, `connection_id`, `user`, `visibility`, `name`, `connection_type`, `connection`, `revision`, `internal`) VALUES ($scope, $connection_id, $user, $visibility, $name, $connection_type, $connection, $revision, $internal);
2025-05-29T15:44:44.374761Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:470: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=NDVlMjA5NmMtNWI4OWNmMzAtZGJkM2ZkZWItZjRhY2E3ZQ== }, Query: --!syntax_v1
-- Query name: CreateConnection
PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)");
DECLARE $scope as String;
DECLARE $connection_id as String;
DECLARE $user as String;
DECLARE $visibility as Int64;
DECLARE $name as String;
DECLARE $connection_type as Int64;
DECLARE $connection as String;
DECLARE $revision as Int64;
DECLARE $internal as String;
INSERT INTO `connections` (`scope`, `connection_id`, `user`, `visibility`, `name`, `connection_type`, `connection`, `revision`, `internal`) VALUES ($scope, $connection_id, $user, $visibility, $name, $connection_type, $connection, $revision, $internal);
2025-05-29T15:44:45.039412Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:470: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=NzEzYjVkYWQtNzE0OWQ5YmEtZDNmZThhYTAtMTIxNjFjNTg= }, Query: --!syntax_v1
-- Query name: CreateConnection
PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)");
DECLARE $scope as String;
DECLARE $connection_id as String;
DECLARE $user as String;
DECLARE $visibility as Int64;
DECLARE $name as String;
DECLARE $connection_type as Int64;
DECLARE $connection as String;
DECLARE $revision as Int64;
DECLARE $internal as String;
INSERT INTO `connections` (`scope`, `connection_id`, `user`, `visibility`, `name`, `connection_type`, `connection`, `revision`, `internal`) VALUES ($scope, $connection_id, $user, $visibility, $name, $connection_type, $connection, $revision, $internal);
2025-05-29T15:44:45.832592Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:470: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=OWFkMDJmMjUtN2EwZjJmYjYtYzYxYWU3OTAtY2NlYTZhNDE= }, Query: --!syntax_v1
-- Query name: CreateConnection
PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)");
DECLARE $scope as String;
DECLARE $connection_id as String;
DECLARE $user as String;
DECLARE $visibility as Int64;
DECLARE $name as String;
DECLARE $connection_type as Int64;
DECLARE $connection as String;
DECLARE $revision as Int64;
DECLARE $internal as String;
INSERT INTO `connections` (`scope`, `connection_id`, `user`, `visibility`, `name`, `connection_type`, `connection`, `revision`, `internal`) VALUES ($scope, $connection_id, $user, $visibility, $name, $connection_type, $connection, $revision, $internal);
2025-05-29T15:44:46.270251Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:470: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=ODcxYmJiMjMtYTBiMTUyOGYtODc5ZjFiYmItODBiMmY0ZTI= }, Query: --!syntax_v1
-- Query name: CreateConnection
PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)");
DECLARE $scope as String;
DECLARE $connection_id as String;
DECLARE $user as String;
DECLARE $visibility as Int64;
DECLARE $name as String;
DECLARE $connection_type as Int64;
DECLARE $connection as String;
DECLARE $revision as Int64;
DECLARE $internal as String;
INSERT INTO `connections` (`scope`, `connection_id`, `user`, `visibility`, `name`, `connection_type`, `connection`, `revision`, `internal`) VALUES ($scope, $connection_id, $user, $visibility, $name, $connection_type, $connection, $revision, $internal);
2025-05-29T15:44:46.698698Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:470: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=M2FlN2E1NjYtMmZhOGU0NjAtNWQzNTFjYmYtOGM4ZjA1ZGM= }, Query: --!syntax_v1
-- Query name: CreateConnection
PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)");
DECLARE $scope as String;
DECLARE $connection_id as String;
DECLARE $user as String;
DECLARE $visibility as Int64;
DECLARE $name as String;
DECLARE $connection_type as Int64;
DECLARE $connection as String;
DECLARE $revision as Int64;
DECLARE $internal as String;
INSERT INTO `connections` (`scope`, `connection_id`, `user`, `visibility`, `name`, `connection_type`, `connection`, `revision`, `internal`) VALUES ($scope, $connection_id, $user, $visibility, $name, $connection_type, $connection, $revision, $internal);
2025-05-29T15:44:47.702653Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:470: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=ZDlhM2RkYTYtN2EzZWU3NjgtZjI0ZDM0ZmUtNTc3NDllMzE= }, Query: --!syntax_v1
-- Query name: CreateConnection
PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)");
DECLARE $scope as String;
DECLARE $connection_id as String;
DECLARE $user as String;
DECLARE $visibility as Int64;
DECLARE $name as String;
DECLARE $connection_type as Int64;
DECLARE $connection as String;
DECLARE $revision as Int64;
DECLARE $internal as String;
INSERT INTO `connections` (`scope`, `connection_id`, `user`, `visibility`, `name`, `connection_type`, `connection`, `revision`, `internal`) VALUES ($scope, $connection_id, $user, $visibility, $name, $connection_type, $connection, $revision, $internal);
2025-05-29T15:44:49.194639Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:470: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=NWE4ZjhiOC0yOGZmNDI1OC1hZmZjODBmMi04Y2YwMjMwMw== }, Query: --!syntax_v1
-- Query name: CreateConnection
PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)");
DECLARE $scope as String;
DECLARE $connection_id as String;
DECLARE $user as String;
DECLARE $visibility as Int64;
DECLARE $name as String;
DECLARE $connection_type as Int64;
DECLARE $connection as String;
DECLARE $revision as Int64;
DECLARE $internal as String;
INSERT INTO `connections` (`scope`, `connection_id`, `user`, `visibility`, `name`, `connection_type`, `connection`, `revision`, `internal`) VALUES ($scope, $connection_id, $user, $visibility, $name, $connection_type, $connection, $revision, $internal);
2025-05-29T15:44:49.194911Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage_impl.h:770: [yandexcloud://test_folder_id_1, test_user@staff, utcue9mv9je4cv9c1nfm] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } } ERROR: {
: Error: Session not found: ydb://session/3?node_id=1&id=NWE4ZjhiOC0yOGZmNDI1OC1hZmZjODBmMi04Y2YwMjMwMw== } assertion failed at ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp:246, virtual void NFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext &): (!issues)
: Error: Session not found: ydb://session/3?node_id=1&id=NWE4ZjhiOC0yOGZmNDI1OC1hZmZjODBmMi04Y2YwMjMwMw== 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13CB116B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13E64D18 2. /tmp//-S/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp:246: Execute_ @ 0x13B2ADEC 3. /tmp//-S/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp:17: operator() @ 0x13B99076 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13E66BCD 5. /tmp//-S/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp:17: Execute @ 0x13B98A16 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13E67342 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13E7565C 8. ??:0: ?? @ 0x7F611797FD8F 9. ??:0: ?? @ 0x7F611797FE3F 10. ??:0: ?? @ 0x12969028 >> test_ydb_backup.py::TestBackupSingle::test_single_table_backup >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_external_table_references_json [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client3-column_type3-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client4-column_type4-False] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-row] [GOOD] |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-column] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir [FAIL] >> TYdbControlPlaneStorageListConnections::ShouldCheckScopeVisibility [FAIL] >> TYdbControlPlaneStorageListConnections::ShouldCheckPrivateVisibility >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-row] >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicFailed [FAIL] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckNotAvailable >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivatePublic [FAIL] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckAllowedSymbolsName |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir [FAIL] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-column] |83.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScope [FAIL] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckModifyTheSame >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint32 [FAIL] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint64_and_string |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-pg_types-yql_syntax-client0] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client5-year Int64 NOT NULL-False] [FAIL] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-column] [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-pg_types-pg_syntax-client0] >> 
test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client6-year Int32-False] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-row] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckLowerCaseName [FAIL] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMaxLengthName >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint64_and_string [FAIL] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_list |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Date-pk_types13-all_types13-index13-Date--] [FAIL] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-column] >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint64_and_string [FAIL] >> test_ydb_table.py::TestExecuteQueryWithParams::test_list >> test_ydb_table.py::TestExecuteQueryWithParams::test_list [FAIL] >> test_ydb_table.py::TestExecuteQueryWithParams::test_struct >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_list [FAIL] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_struct >> test_ydb_table.py::TestExecuteQueryWithParams::test_struct [FAIL] >> test_ydb_table.py::TestExecuteQueryWithParams::test_scan_query_with_parameters |83.3%| [TA] $(B)/ydb/tests/datashard/dump_restore/test-results/py3test/{meta.json ... results_accumulator.log} >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_struct [FAIL] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails |83.3%| [TA] {RESULT} $(B)/ydb/tests/datashard/dump_restore/test-results/py3test/{meta.json ... results_accumulator.log} >> test_ydb_table.py::TestExecuteQueryWithParams::test_scan_query_with_parameters [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client4-column_type4-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client5-column_type5-True] >> test_ydb_backup.py::TestBackupSingle::test_single_table_backup [FAIL] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails [FAIL] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStoragePipeline::ShouldCheckAutomaticTtl [FAIL] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167292 9728 ? Ss 12:35 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 12:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 12:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 12:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 12:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 12:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/0:0H-events_highpri] root 11 0.0 0.0 0 0 ? I< 12:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 12:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 12:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I 12:35 0:08 [rcu_sched] root 16 0.0 0.0 0 0 ? S 12:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 12:35 0:02 [migration/1] root 23 0.0 0.0 0 0 ? 
S 12:35 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 12:35 0:02 [migration/2] root 29 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 12:35 0:02 [migration/3] root 35 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 12:35 0:02 [migration/4] root 41 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 12:35 0:02 [migration/5] root 47 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 12:35 0:02 [migration/6] root 53 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 12:35 0:02 [migration/7] root 59 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 12:35 0:02 [migration/8] root 65 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 12:35 0:02 [migration/9] root 71 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 12:35 0:02 [migration/10] root 77 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 12:35 0:02 [migration/11] root 83 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 12:35 0:02 [migration/12] root 89 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/12] root 90 0.0 0.0 0 0 ? I 12:35 0:00 [kworker/12:0-rcu_par_gp] root 91 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 12:35 0:02 [migration/13] root 95 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 12:35 0:02 [migration/14] root 101 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? 
I< 12:35 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 12:35 0:02 [migration/15] root 107 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 12:35 0:02 [migration/16] root 113 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 12:35 0:02 [migration/17] root 119 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 12:35 0:02 [migration/18] root 125 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 12:35 0:02 [migration/19] root 131 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/19:0H-events_highpri] root 134 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 12:35 0:02 [migration/20] root 137 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 12:35 0:02 [migration/21] root 143 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 12:35 0:02 [migration/22] root 149 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 12:35 0:02 [migration/23] root 155 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 12:35 0:02 [migration/24] root 161 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 12:35 0:02 [migration/25] root 167 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/25:0H-events_highpri] root 170 0.0 ... 
jYTZiMGMtZDMxYzM1YzUtZjZkMTZjYmItYzYwNDFhNDQ= }, Query: --!syntax_v1 -- Query name: CreateQuery(write) PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)"); DECLARE $tenant as String; DECLARE $scope as String; DECLARE $query_id as String; DECLARE $name as String; DECLARE $status as Int64; DECLARE $query_type as Int64; DECLARE $execute_mode as Int64; DECLARE $user as String; DECLARE $visibility as Int64; DECLARE $automatic as Bool; DECLARE $revision as Int64; DECLARE $query as String; DECLARE $internal as String; DECLARE $job_id as String; DECLARE $job as String; DECLARE $zero_timestamp as Timestamp; DECLARE $now as Timestamp; INSERT INTO `jobs` (`scope`, `query_id`, `job_id`, `job`, `user`, `visibility`) VALUES ($scope, $query_id, $job_id, $job, $user, $visibility);INSERT INTO `pending_small` (`tenant`, `scope`, `query_id`, `query_type`, `last_seen_at`, `assigned_until`, `retry_rate`, `retry_counter`, `retry_counter_updated_at`, `hostname`, `owner`) VALUES ($tenant, $scope, $query_id, $query_type, $zero_timestamp, $zero_timestamp, 0, 0, $now, "", "");INSERT INTO `queries` (`scope`, `query_id`, `name`, `status`, `query_type`, `execute_mode`, `user`, `visibility`, `automatic`, `revision`, `query`, `internal`, `last_job_id`, `generation`, `meta_revision`, `tenant`) VALUES ($scope, $query_id, $name, $status, $query_type, $execute_mode, $user, $visibility, $automatic, $revision, $query, $internal, $job_id, 0, 0, $tenant); 2025-05-29T15:44:59.159464Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:599: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=YTM2ZjY2YWYtZGFiYjI0ZDgtM2QxMmYxN2EtYmFlZmM1OWE= }, Query: --!syntax_v1 -- Query name: CreateQuery(write) PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)"); DECLARE $tenant as String; DECLARE $scope as String; DECLARE $query_id as String; DECLARE $name as String; DECLARE $status as Int64; DECLARE $query_type as Int64; DECLARE $execute_mode as Int64; DECLARE $user as String; DECLARE $visibility as Int64; DECLARE $automatic as Bool; DECLARE $revision as Int64; DECLARE $query as String; DECLARE $internal as String; DECLARE $job_id as String; DECLARE $job as String; DECLARE $zero_timestamp as Timestamp; DECLARE $now as Timestamp; INSERT INTO `jobs` (`scope`, `query_id`, `job_id`, `job`, `user`, `visibility`) VALUES ($scope, $query_id, $job_id, $job, $user, $visibility);INSERT INTO `pending_small` (`tenant`, `scope`, `query_id`, `query_type`, `last_seen_at`, `assigned_until`, `retry_rate`, `retry_counter`, `retry_counter_updated_at`, `hostname`, `owner`) VALUES ($tenant, $scope, $query_id, $query_type, $zero_timestamp, $zero_timestamp, 0, 0, $now, "", "");INSERT INTO `queries` (`scope`, `query_id`, `name`, `status`, `query_type`, `execute_mode`, `user`, `visibility`, `automatic`, `revision`, `query`, `internal`, `last_job_id`, `generation`, `meta_revision`, `tenant`) VALUES ($scope, $query_id, $name, $status, $query_type, $execute_mode, $user, $visibility, $automatic, $revision, $query, $internal, $job_id, 0, 0, $tenant); 2025-05-29T15:44:59.621188Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:599: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=OGY3Y2UxNjktMmM3YTdiYjYtNzEyYzQ4NjgtNGZhMTlkNTk= }, Query: --!syntax_v1 -- Query name: CreateQuery(write) PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)"); DECLARE $tenant as String; DECLARE $scope as String; DECLARE $query_id as String; DECLARE $name as String; DECLARE $status as Int64; DECLARE $query_type as Int64; DECLARE $execute_mode as Int64; DECLARE $user as String; DECLARE $visibility as Int64; DECLARE $automatic as Bool; DECLARE $revision as Int64; DECLARE $query as String; DECLARE $internal as String; DECLARE $job_id as String; DECLARE $job as String; DECLARE $zero_timestamp as Timestamp; DECLARE $now as Timestamp; INSERT INTO `jobs` (`scope`, `query_id`, `job_id`, `job`, `user`, `visibility`) VALUES ($scope, $query_id, $job_id, $job, $user, $visibility);INSERT INTO `pending_small` (`tenant`, `scope`, `query_id`, `query_type`, `last_seen_at`, `assigned_until`, `retry_rate`, `retry_counter`, `retry_counter_updated_at`, `hostname`, `owner`) VALUES ($tenant, $scope, $query_id, $query_type, $zero_timestamp, $zero_timestamp, 0, 0, $now, "", "");INSERT INTO `queries` (`scope`, `query_id`, `name`, `status`, `query_type`, `execute_mode`, `user`, `visibility`, `automatic`, `revision`, `query`, `internal`, `last_job_id`, `generation`, `meta_revision`, `tenant`) VALUES ($scope, $query_id, $name, $status, $query_type, $execute_mode, $user, $visibility, $automatic, $revision, $query, $internal, $job_id, 0, 0, $tenant); 2025-05-29T15:45:00.097919Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:599: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=ZTQ4ZTg0NTktNDIwYjYzOWEtOWQxODllYjctMjA2YWQ1Nzk= }, Query: --!syntax_v1 -- Query name: CreateQuery(write) PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)"); DECLARE $tenant as String; DECLARE $scope as String; DECLARE $query_id as String; DECLARE $name as String; DECLARE $status as Int64; DECLARE $query_type as Int64; DECLARE $execute_mode as Int64; DECLARE $user as String; DECLARE $visibility as Int64; DECLARE $automatic as Bool; DECLARE $revision as Int64; DECLARE $query as String; DECLARE $internal as String; DECLARE $job_id as String; DECLARE $job as String; DECLARE $zero_timestamp as Timestamp; DECLARE $now as Timestamp; INSERT INTO `jobs` (`scope`, `query_id`, `job_id`, `job`, `user`, `visibility`) VALUES ($scope, $query_id, $job_id, $job, $user, $visibility);INSERT INTO `pending_small` (`tenant`, `scope`, `query_id`, `query_type`, `last_seen_at`, `assigned_until`, `retry_rate`, `retry_counter`, `retry_counter_updated_at`, `hostname`, `owner`) VALUES ($tenant, $scope, $query_id, $query_type, $zero_timestamp, $zero_timestamp, 0, 0, $now, "", "");INSERT INTO `queries` (`scope`, `query_id`, `name`, `status`, `query_type`, `execute_mode`, `user`, `visibility`, `automatic`, `revision`, `query`, `internal`, `last_job_id`, `generation`, `meta_revision`, `tenant`) VALUES ($scope, $query_id, $name, $status, $query_type, $execute_mode, $user, $visibility, $automatic, $revision, $query, $internal, $job_id, 0, 0, $tenant); 2025-05-29T15:45:00.757605Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:599: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=ZjY2ZDM5NzMtMTMwOTJmMDQtMjQ2YTBlZjgtZWNkYWM2YWU= }, Query: --!syntax_v1 -- Query name: CreateQuery(write) PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)"); DECLARE $tenant as String; DECLARE $scope as String; DECLARE $query_id as String; DECLARE $name as String; DECLARE $status as Int64; DECLARE $query_type as Int64; DECLARE $execute_mode as Int64; DECLARE $user as String; DECLARE $visibility as Int64; DECLARE $automatic as Bool; DECLARE $revision as Int64; DECLARE $query as String; DECLARE $internal as String; DECLARE $job_id as String; DECLARE $job as String; DECLARE $zero_timestamp as Timestamp; DECLARE $now as Timestamp; INSERT INTO `jobs` (`scope`, `query_id`, `job_id`, `job`, `user`, `visibility`) VALUES ($scope, $query_id, $job_id, $job, $user, $visibility);INSERT INTO `pending_small` (`tenant`, `scope`, `query_id`, `query_type`, `last_seen_at`, `assigned_until`, `retry_rate`, `retry_counter`, `retry_counter_updated_at`, `hostname`, `owner`) VALUES ($tenant, $scope, $query_id, $query_type, $zero_timestamp, $zero_timestamp, 0, 0, $now, "", "");INSERT INTO `queries` (`scope`, `query_id`, `name`, `status`, `query_type`, `execute_mode`, `user`, `visibility`, `automatic`, `revision`, `query`, `internal`, `last_job_id`, `generation`, `meta_revision`, `tenant`) VALUES ($scope, $query_id, $name, $status, $query_type, $execute_mode, $user, $visibility, $automatic, $revision, $query, $internal, $job_id, 0, 0, $tenant); 2025-05-29T15:45:01.206206Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:599: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=MTk5ZmNlODgtODY1YTQ2MjItMjgxNzZlZTMtYWI5ZmJhNGQ= }, Query: --!syntax_v1 -- Query name: CreateQuery(write) PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)"); DECLARE $tenant as String; DECLARE $scope as String; DECLARE $query_id as String; DECLARE $name as String; DECLARE $status as Int64; DECLARE $query_type as Int64; DECLARE $execute_mode as Int64; DECLARE $user as String; DECLARE $visibility as Int64; DECLARE $automatic as Bool; DECLARE $revision as Int64; DECLARE $query as String; DECLARE $internal as String; DECLARE $job_id as String; DECLARE $job as String; DECLARE $zero_timestamp as Timestamp; DECLARE $now as Timestamp; INSERT INTO `jobs` (`scope`, `query_id`, `job_id`, `job`, `user`, `visibility`) VALUES ($scope, $query_id, $job_id, $job, $user, $visibility);INSERT INTO `pending_small` (`tenant`, `scope`, `query_id`, `query_type`, `last_seen_at`, `assigned_until`, `retry_rate`, `retry_counter`, `retry_counter_updated_at`, `hostname`, `owner`) VALUES ($tenant, $scope, $query_id, $query_type, $zero_timestamp, $zero_timestamp, 0, 0, $now, "", "");INSERT INTO `queries` (`scope`, `query_id`, `name`, `status`, `query_type`, `execute_mode`, `user`, `visibility`, `automatic`, `revision`, `query`, `internal`, `last_job_id`, `generation`, `meta_revision`, `tenant`) VALUES ($scope, $query_id, $name, $status, $query_type, $execute_mode, $user, $visibility, $automatic, $revision, $query, $internal, $job_id, 0, 0, $tenant); 2025-05-29T15:45:01.206522Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage_impl.h:770: CreateQueryRequest - CreateQueryResult: {content { type: ANALYTICS name: "test_query_name_1" acl { visibility: SCOPE } text: "SELECT 1;" automatic: true } execute_mode: RUN } ERROR: {
: Error: Session not found: ydb://session/3?node_id=1&id=MTk5ZmNlODgtODY1YTQ2MjItMjgxNzZlZTMtYWI5ZmJhNGQ= } assertion failed at ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp:1460, virtual void NFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext &): (!issues)
: Error: Session not found: ydb://session/3?node_id=1&id=MTk5ZmNlODgtODY1YTQ2MjItMjgxNzZlZTMtYWI5ZmJhNGQ= 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13CB116B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13E64D18 2. /tmp//-S/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp:1460: Execute_ @ 0x13A0DD9F 3. /tmp//-S/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp:238: operator() @ 0x13A64FE6 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13E66BCD 5. /tmp//-S/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp:238: Execute @ 0x13A64980 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13E67342 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13E7565C 8. ??:0: ?? @ 0x7F32471F2D8F 9. ??:0: ?? @ 0x7F32471F2E3F 10. ??:0: ?? @ 0x12969028 >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [FAIL] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionEmpty [FAIL] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPublic >> test_ydb_sql.py::TestExecuteSqlWithParamsFromJson::test_script_from_file >> test_distconf.py::TestKiKiMRDistConfReassignStateStorageReuseSameNodes::test_cluster_change_state_storage >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_external_table_references_json [GOOD] |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckSuperUser [FAIL] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPermission [FAIL] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckExist >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-pg_types-pg_syntax-client0] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_completeness_iso[v1-timestamp/completeness_iso/test.csv-csv_with_names] >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client5-column_type5-True] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client6-column_type6-False] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMaxLengthName [FAIL] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMultipleDotsName >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_pretty >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_pretty >> test_ydb_backup.py::TestBackupSingleNotNull::test_single_table_backup >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client6-year Int32-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client7-year Uint32-False] >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-column] [GOOD] >> 
test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-row] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStoragePipeline::ShouldReturnPartialBatchForGetTask [FAIL] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167292 9716 ? Ss 12:35 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 12:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 12:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 12:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 12:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 12:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/0:0H-events_highpri] root 11 0.0 0.0 0 0 ? I< 12:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 12:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 12:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I 12:35 0:08 [rcu_sched] root 16 0.0 0.0 0 0 ? S 12:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 12:35 0:02 [migration/1] root 23 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 12:35 0:02 [migration/2] root 29 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 12:35 0:02 [migration/3] root 35 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 12:35 0:02 [migration/4] root 41 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 12:35 0:02 [migration/5] root 47 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 12:35 0:02 [migration/6] root 53 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 12:35 0:02 [migration/7] root 59 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 12:35 0:02 [migration/8] root 65 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 12:35 0:02 [migration/9] root 71 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? 
S 12:35 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 12:35 0:02 [migration/10] root 77 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 12:35 0:02 [migration/11] root 83 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 12:35 0:02 [migration/12] root 89 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/12] root 90 0.0 0.0 0 0 ? I 12:35 0:00 [kworker/12:0-rcu_par_gp] root 91 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 12:35 0:02 [migration/13] root 95 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 12:35 0:02 [migration/14] root 101 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 12:35 0:02 [migration/15] root 107 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 12:35 0:02 [migration/16] root 113 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 12:35 0:02 [migration/17] root 119 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 12:35 0:02 [migration/18] root 125 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 12:35 0:02 [migration/19] root 131 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/19:0H-events_highpri] root 134 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 12:35 0:02 [migration/20] root 137 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 12:35 0:02 [migration/21] root 143 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 12:35 0:02 [migration/22] root 149 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? 
S 12:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 12:35 0:02 [migration/23] root 155 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 12:35 0:02 [migration/24] root 161 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 12:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 12:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 12:35 0:02 [migration/25] root 167 0.0 0.0 0 0 ? S 12:35 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< 12:35 0:00 [kworker/25:0H-events_highpri] root 170 0.0 ... ntax_v1 -- Query name: CreateQuery(write) PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)"); DECLARE $tenant as String; DECLARE $scope as String; DECLARE $query_id as String; DECLARE $name as String; DECLARE $status as Int64; DECLARE $query_type as Int64; DECLARE $execute_mode as Int64; DECLARE $user as String; DECLARE $visibility as Int64; DECLARE $automatic as Bool; DECLARE $revision as Int64; DECLARE $query as String; DECLARE $internal as String; DECLARE $job_id as String; DECLARE $job as String; DECLARE $zero_timestamp as Timestamp; DECLARE $now as Timestamp; INSERT INTO `jobs` (`scope`, `query_id`, `job_id`, `job`, `user`, `visibility`) VALUES ($scope, $query_id, $job_id, $job, $user, $visibility);INSERT INTO `pending_small` (`tenant`, `scope`, `query_id`, `query_type`, `last_seen_at`, `assigned_until`, `retry_rate`, `retry_counter`, `retry_counter_updated_at`, `hostname`, `owner`) VALUES ($tenant, $scope, $query_id, $query_type, $zero_timestamp, $zero_timestamp, 0, 0, $now, "", "");INSERT INTO `queries` (`scope`, `query_id`, `name`, `status`, `query_type`, `execute_mode`, `user`, `visibility`, `automatic`, `revision`, `query`, `internal`, `last_job_id`, `generation`, `meta_revision`, `tenant`) VALUES ($scope, $query_id, $name, $status, $query_type, $execute_mode, $user, $visibility, $automatic, $revision, $query, $internal, $job_id, 0, 0, $tenant); 2025-05-29T15:45:13.305681Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:599: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=MWFmZDgyMTYtMzM1ZTM2NzQtOTBjMGMzNDQtMmQ2MDJjNWU= }, Query: --!syntax_v1 -- Query name: CreateQuery(write) PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)"); DECLARE $tenant as String; DECLARE $scope as String; DECLARE $query_id as String; DECLARE $name as String; DECLARE $status as Int64; DECLARE $query_type as Int64; DECLARE $execute_mode as Int64; DECLARE $user as String; DECLARE $visibility as Int64; DECLARE $automatic as Bool; DECLARE $revision as Int64; DECLARE $query as String; DECLARE $internal as String; DECLARE $job_id as String; DECLARE $job as String; DECLARE $zero_timestamp as Timestamp; DECLARE $now as Timestamp; INSERT INTO `jobs` (`scope`, `query_id`, `job_id`, `job`, `user`, `visibility`) VALUES ($scope, $query_id, $job_id, $job, $user, $visibility);INSERT INTO `pending_small` (`tenant`, `scope`, `query_id`, `query_type`, `last_seen_at`, `assigned_until`, `retry_rate`, `retry_counter`, `retry_counter_updated_at`, `hostname`, `owner`) VALUES ($tenant, $scope, $query_id, $query_type, $zero_timestamp, $zero_timestamp, 0, 0, $now, "", "");INSERT INTO `queries` (`scope`, `query_id`, `name`, `status`, `query_type`, `execute_mode`, `user`, `visibility`, `automatic`, `revision`, `query`, `internal`, `last_job_id`, `generation`, `meta_revision`, `tenant`) VALUES ($scope, $query_id, $name, $status, $query_type, $execute_mode, $user, $visibility, $automatic, $revision, $query, $internal, $job_id, 0, 0, $tenant); 2025-05-29T15:45:13.648797Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:599: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=Yzk3MDk3NDgtZjBlNWU4OGItNThjMDBkZWYtYzdiMWZkYzM= }, Query: --!syntax_v1 -- Query name: CreateQuery(write) PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)"); DECLARE $tenant as String; DECLARE $scope as String; DECLARE $query_id as String; DECLARE $name as String; DECLARE $status as Int64; DECLARE $query_type as Int64; DECLARE $execute_mode as Int64; DECLARE $user as String; DECLARE $visibility as Int64; DECLARE $automatic as Bool; DECLARE $revision as Int64; DECLARE $query as String; DECLARE $internal as String; DECLARE $job_id as String; DECLARE $job as String; DECLARE $zero_timestamp as Timestamp; DECLARE $now as Timestamp; INSERT INTO `jobs` (`scope`, `query_id`, `job_id`, `job`, `user`, `visibility`) VALUES ($scope, $query_id, $job_id, $job, $user, $visibility);INSERT INTO `pending_small` (`tenant`, `scope`, `query_id`, `query_type`, `last_seen_at`, `assigned_until`, `retry_rate`, `retry_counter`, `retry_counter_updated_at`, `hostname`, `owner`) VALUES ($tenant, $scope, $query_id, $query_type, $zero_timestamp, $zero_timestamp, 0, 0, $now, "", "");INSERT INTO `queries` (`scope`, `query_id`, `name`, `status`, `query_type`, `execute_mode`, `user`, `visibility`, `automatic`, `revision`, `query`, `internal`, `last_job_id`, `generation`, `meta_revision`, `tenant`) VALUES ($scope, $query_id, $name, $status, $query_type, $execute_mode, $user, $visibility, $automatic, $revision, $query, $internal, $job_id, 0, 0, $tenant); 2025-05-29T15:45:13.977053Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:599: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=MWRjNjk1ZTQtMjgwYTczNTEtYTg0MWQxZGEtNmIyMWJjNDg= }, Query: --!syntax_v1 -- Query name: CreateQuery(write) PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)"); DECLARE $tenant as String; DECLARE $scope as String; DECLARE $query_id as String; DECLARE $name as String; DECLARE $status as Int64; DECLARE $query_type as Int64; DECLARE $execute_mode as Int64; DECLARE $user as String; DECLARE $visibility as Int64; DECLARE $automatic as Bool; DECLARE $revision as Int64; DECLARE $query as String; DECLARE $internal as String; DECLARE $job_id as String; DECLARE $job as String; DECLARE $zero_timestamp as Timestamp; DECLARE $now as Timestamp; INSERT INTO `jobs` (`scope`, `query_id`, `job_id`, `job`, `user`, `visibility`) VALUES ($scope, $query_id, $job_id, $job, $user, $visibility);INSERT INTO `pending_small` (`tenant`, `scope`, `query_id`, `query_type`, `last_seen_at`, `assigned_until`, `retry_rate`, `retry_counter`, `retry_counter_updated_at`, `hostname`, `owner`) VALUES ($tenant, $scope, $query_id, $query_type, $zero_timestamp, $zero_timestamp, 0, 0, $now, "", "");INSERT INTO `queries` (`scope`, `query_id`, `name`, `status`, `query_type`, `execute_mode`, `user`, `visibility`, `automatic`, `revision`, `query`, `internal`, `last_job_id`, `generation`, `meta_revision`, `tenant`) VALUES ($scope, $query_id, $name, $status, $query_type, $execute_mode, $user, $visibility, $automatic, $revision, $query, $internal, $job_id, 0, 0, $tenant); 2025-05-29T15:45:14.399743Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:599: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=NjY3ZDlmNTEtMjg5ZTAzYmItM2RhYjQyOGItZjMwNGQ1MGE= }, Query: --!syntax_v1 -- Query name: CreateQuery(write) PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)"); DECLARE $tenant as String; DECLARE $scope as String; DECLARE $query_id as String; DECLARE $name as String; DECLARE $status as Int64; DECLARE $query_type as Int64; DECLARE $execute_mode as Int64; DECLARE $user as String; DECLARE $visibility as Int64; DECLARE $automatic as Bool; DECLARE $revision as Int64; DECLARE $query as String; DECLARE $internal as String; DECLARE $job_id as String; DECLARE $job as String; DECLARE $zero_timestamp as Timestamp; DECLARE $now as Timestamp; INSERT INTO `jobs` (`scope`, `query_id`, `job_id`, `job`, `user`, `visibility`) VALUES ($scope, $query_id, $job_id, $job, $user, $visibility);INSERT INTO `pending_small` (`tenant`, `scope`, `query_id`, `query_type`, `last_seen_at`, `assigned_until`, `retry_rate`, `retry_counter`, `retry_counter_updated_at`, `hostname`, `owner`) VALUES ($tenant, $scope, $query_id, $query_type, $zero_timestamp, $zero_timestamp, 0, 0, $now, "", "");INSERT INTO `queries` (`scope`, `query_id`, `name`, `status`, `query_type`, `execute_mode`, `user`, `visibility`, `automatic`, `revision`, `query`, `internal`, `last_job_id`, `generation`, `meta_revision`, `tenant`) VALUES ($scope, $query_id, $name, $status, $query_type, $execute_mode, $user, $visibility, $automatic, $revision, $query, $internal, $job_id, 0, 0, $tenant); 2025-05-29T15:45:14.587365Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:599: DB Error, Status: BAD_SESSION, Issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=YjI3YThmNTAtNzJmMDJjZTgtNGI5NjY3ODEtYjI4OWVjNGM= }, Query: --!syntax_v1 -- Query name: CreateQuery(write) PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)"); DECLARE $tenant as String; DECLARE $scope as String; DECLARE $query_id as String; DECLARE $name as String; DECLARE $status as Int64; DECLARE $query_type as Int64; DECLARE $execute_mode as Int64; DECLARE $user as String; DECLARE $visibility as Int64; DECLARE $automatic as Bool; DECLARE $revision as Int64; DECLARE $query as String; DECLARE $internal as String; DECLARE $job_id as String; DECLARE $job as String; DECLARE $zero_timestamp as Timestamp; DECLARE $now as Timestamp; INSERT INTO `jobs` (`scope`, `query_id`, `job_id`, `job`, `user`, `visibility`) VALUES ($scope, $query_id, $job_id, $job, $user, $visibility);INSERT INTO `pending_small` (`tenant`, `scope`, `query_id`, `query_type`, `last_seen_at`, `assigned_until`, `retry_rate`, `retry_counter`, `retry_counter_updated_at`, `hostname`, `owner`) VALUES ($tenant, $scope, $query_id, $query_type, $zero_timestamp, $zero_timestamp, 0, 0, $now, "", "");INSERT INTO `queries` (`scope`, `query_id`, `name`, `status`, `query_type`, `execute_mode`, `user`, `visibility`, `automatic`, `revision`, `query`, `internal`, `last_job_id`, `generation`, `meta_revision`, `tenant`) VALUES ($scope, $query_id, $name, $status, $query_type, $execute_mode, $user, $visibility, $automatic, $revision, $query, $internal, $job_id, 0, 0, $tenant); 2025-05-29T15:45:14.587681Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage_impl.h:770: CreateQueryRequest - CreateQueryResult: {content { type: ANALYTICS name: "test_query_name_1" acl { visibility: SCOPE } text: "SELECT 1;" } execute_mode: RUN } ERROR: {
: Error: Session not found: ydb://session/3?node_id=1&id=YjI3YThmNTAtNzJmMDJjZTgtNGI5NjY3ODEtYjI4OWVjNGM= } assertion failed at ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp:2261, virtual void NFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext &): (!issues)
: Error: Session not found: ydb://session/3?node_id=1&id=YjI3YThmNTAtNzJmMDJjZTgtNGI5NjY3ODEtYjI4OWVjNGM= 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x13CB116B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x13E64D18 2. /tmp//-S/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp:2261: Execute_ @ 0x13A4F19D 3. /tmp//-S/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp:238: operator() @ 0x13A64FE6 4. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x13E66BCD 5. /tmp//-S/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp:238: Execute @ 0x13A64980 6. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x13E67342 7. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x13E7565C 8. ??:0: ?? @ 0x7FD0EF4ABD8F 9. ??:0: ?? @ 0x7FD0EF4ABE3F 10. ??:0: ?? @ 0x12969028 >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromJson::test_script_from_file [FAIL] >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [FAIL] Test command err: assertion failed at ydb/core/fq/libs/ydb/ut/ydb_ut.cpp:152, virtual void NFq::NTestSuiteTRegisterCheckTest::TTestCaseShouldRegisterCheckSameGeneration::Execute_(NUnitTest::TTestContext &): (status.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0xB3C28EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0xB575C79) NFq::NTestSuiteTRegisterCheckTest::TTestCaseShouldRegisterCheckSameGeneration::Execute_(NUnitTest::TTestContext&)+763 (0xB2AF5DB) NFq::NTestSuiteTRegisterCheckTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xB2B8BE7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0xB577B2E) NFq::NTestSuiteTRegisterCheckTest::TCurrentTest::Execute()+425 (0xB2B8449) NUnitTest::TTestFactory::Execute()+803 (0xB5782A3) NUnitTest::RunMain(int, char**)+3021 (0xB5865BD) ??+0 (0x7F3ED7F36D90) __libc_start_main+128 (0x7F3ED7F36E40) _start+41 (0xA40B029) >> test_config_migration.py::TestConfigMigrationToV2::test_migration_to_v2 >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client6-column_type6-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client7-column_type7-False] |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir >> TYdbControlPlaneStorageListConnections::ShouldCheckPrivateVisibility [FAIL] >> TYdbControlPlaneStorageListConnections::ShouldCheckSuperUser >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata >> test_format_setting.py::TestS3::test_timestamp_completeness_iso[v1-timestamp/completeness_iso/test.csv-csv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_timestamp_completeness_iso[v2-timestamp/completeness_iso/test.csv-csv_with_names] |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client7-year Uint32-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client8-year Int64-False] >> test_log_scenario.py::TestLogScenario::test_log_deviation[1051200] [FAIL] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [FAIL] Test command err: assertion failed at ydb/core/fq/libs/ydb/ut/ydb_ut.cpp:328, virtual void NFq::NTestSuiteTRegisterCheckTest::TTestCaseShouldRegisterCheckNewGenerationAndTransact::Execute_(NUnitTest::TTestContext &): (status.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0xB3C28EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0xB575C79) NFq::NTestSuiteTRegisterCheckTest::TTestCaseShouldRegisterCheckNewGenerationAndTransact::Execute_(NUnitTest::TTestContext&)+1991 (0xB2B2A67) NFq::NTestSuiteTRegisterCheckTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xB2B8BE7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0xB577B2E) NFq::NTestSuiteTRegisterCheckTest::TCurrentTest::Execute()+425 (0xB2B8449) NUnitTest::TTestFactory::Execute()+803 (0xB5782A3) NUnitTest::RunMain(int, char**)+3021 (0xB5865BD) ??+0 (0x7F8933DA0D90) __libc_start_main+128 (0x7F8933DA0E40) _start+41 (0xA40B029) ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [FAIL] Test command err: assertion failed at ydb/core/fq/libs/ydb/ut/ydb_ut.cpp:406, virtual void NFq::NTestSuiteTCheckGenerationTest::TTestCaseShouldRollbackTransactionWhenCheckFails::Execute_(NUnitTest::TTestContext &): (status.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0xB3C28EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0xB575C79) NFq::NTestSuiteTCheckGenerationTest::TTestCaseShouldRollbackTransactionWhenCheckFails::Execute_(NUnitTest::TTestContext&)+769 (0xB2B4571) NFq::NTestSuiteTCheckGenerationTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xB2BA547) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0xB577B2E) NFq::NTestSuiteTCheckGenerationTest::TCurrentTest::Execute()+426 (0xB2B9F0A) NUnitTest::TTestFactory::Execute()+803 (0xB5782A3) NUnitTest::RunMain(int, char**)+3021 (0xB5865BD) ??+0 (0x7F6BDE265D90) __libc_start_main+128 (0x7F6BDE265E40) _start+41 (0xA40B029) assertion failed at ydb/core/fq/libs/ydb/ut/ydb_ut.cpp:448, virtual void NFq::NTestSuiteTCheckGenerationTest::TTestCaseShouldRollbackTransactionWhenCheckFails2::Execute_(NUnitTest::TTestContext &): (status.IsSuccess())
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/ast/yql_expr.h:1874: index out of range, code: 1 TBackTrace::Capture()+28 (0xB3C28EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0xB575C79) NFq::NTestSuiteTCheckGenerationTest::TTestCaseShouldRollbackTransactionWhenCheckFails2::Execute_(NUnitTest::TTestContext&)+769 (0xB2B5171) NFq::NTestSuiteTCheckGenerationTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xB2BA547) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0xB577B2E) NFq::NTestSuiteTCheckGenerationTest::TCurrentTest::Execute()+426 (0xB2B9F0A) NUnitTest::TTestFactory::Execute()+803 (0xB5782A3) NUnitTest::RunMain(int, char**)+3021 (0xB5865BD) ??+0 (0x7F6BDE265D90) __libc_start_main+128 (0x7F6BDE265E40) _start+41 (0xA40B029) >> test_distconf.py::TestKiKiMRDistConfReassignStateStorageToTheSameConfig::test_cluster_change_state_storage >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_pretty [FAIL] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64 >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMultipleDotsName [FAIL] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckAllowedSymbolsName >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_json[sql] >> test_format_setting.py::TestS3::test_timestamp_completeness_iso[v2-timestamp/completeness_iso/test.csv-csv_with_names] [FAIL] >> test_format_setting.py::TestS3::test_date_time_completeness_iso[v1-date_time/completeness_iso/test.csv-csv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client7-column_type7-False] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client8-column_type8-False] >> test_log_scenario.py::TestLogScenario::test_log_deviation[180] [FAIL] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64 [FAIL] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64_array |83.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64_array [FAIL] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client8-year Int64-False] [FAIL] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client9-year Uint64-False]